1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f8e235f593cd/
Changeset: f8e235f593cd
User: greg
Date: 2013-04-17 23:08:01
Summary: Add support for setting the order in which repositories will be installed from the tool shed. When installing repository dependencies, this will ensure that repository dependencies that are required to be installed prior to the dependent repository are installed as required. This ability is supported by the recently introduced "prior_installation_required" attribute for the <repository> tag in repository_dependencies.xml definitions.
Some caveats:
- this feature is not yet working for reinstalling repositories, but it should be tomorrow.
- this feature has not yet been tested for complex repository dependencies, but it should work (let me know if it doesn't)
- repositories that are about to be installed that have repository dependencies that are not being installed along with it are not considered. Of course, if the required repositories were installed at a previous time, all will be well.
Affected #: 6 files
diff -r daa4930ca870cba6faa4e5e1c3c54f42f1445e9e -r f8e235f593cd24958e52fcb436d5ab47fb9d1cee lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3293,16 +3293,16 @@
be installed in order for this repository to function correctly. However, those repository dependencies that are defined for this
repository with prior_installation_required set to True place them in a special category in that the required repositories must be
installed before this repository is installed. Among other things, this enables these "special" repository dependencies to include
- information that enables the successful intallation of this repository.
+ information that enables the successful intallation of this repository. This method is not used during the initial installation of
+ this repository, but only after it has been installed (metadata must be set for this repository in order for this method to be useful).
"""
required_rd_tups_that_must_be_installed = []
if self.has_repository_dependencies:
rd_tups = self.metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
for rd_tup in rd_tups:
if len( rd_tup ) == 4:
- # Metadata should have been reset on this installed tool_shed_repository, but it wasn't.
+ # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required to False.
tool_shed, name, owner, changeset_revision = rd_tup
- # Default prior_installation_required to False.
prior_installation_required = False
elif len( rd_tup ) == 5:
tool_shed, name, owner, changeset_revision, prior_installation_required = rd_tup
diff -r daa4930ca870cba6faa4e5e1c3c54f42f1445e9e -r f8e235f593cd24958e52fcb436d5ab47fb9d1cee lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -634,8 +634,9 @@
action='browse_repository',
**kwd ) )
elif operation == 'uninstall':
+ # TODO: I believe this block should be removed, but make sure..
repositories_for_uninstallation = []
- for repository_id in tool_shed_repository_id:
+ for repository_id in tsridslist:
repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( repository_id ) )
if repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED,
trans.model.ToolShedRepository.installation_status.ERROR ]:
@@ -657,13 +658,21 @@
filtered_repo_info_dicts = []
filtered_tool_panel_section_keys = []
repositories_for_installation = []
- for index, tsr_id in enumerate( tsr_ids ):
+ # Some repositories may have repository dependencies that are required to be installed before the dependent repository, so we'll
+ # order the list of tsr_ids to ensure all repositories install in the required order.
+ ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \
+ repository_util.order_components_for_installation( trans, tsr_ids, repo_info_dicts, tool_panel_section_keys )
+ for tsr_id in ordered_tsr_ids:
repository = trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
if repository.status in [ trans.model.ToolShedRepository.installation_status.NEW,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
- filtered_repo_info_dicts.append( repo_info_dicts[ index ] )
- filtered_tool_panel_section_keys.append( tool_panel_section_keys[ index ] )
+ repo_info_dict, tool_panel_section_key = repository_util.get_repository_components_for_installation( tsr_id,
+ ordered_tsr_ids,
+ ordered_repo_info_dicts,
+ ordered_tool_panel_section_keys )
+ filtered_repo_info_dicts.append( repo_info_dict )
+ filtered_tool_panel_section_keys.append( tool_panel_section_key )
if repositories_for_installation:
decoded_kwd[ 'repo_info_dicts' ] = filtered_repo_info_dicts
decoded_kwd[ 'tool_panel_section_keys' ] = filtered_tool_panel_section_keys
@@ -840,6 +849,7 @@
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
repository_util.handle_tool_shed_repositories( trans, installation_dict, using_api=False )
if message and len( repo_info_dicts ) == 1:
+ # We're undoubtedly attempting to install a repository that has been previously installed.
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
diff -r daa4930ca870cba6faa4e5e1c3c54f42f1445e9e -r f8e235f593cd24958e52fcb436d5ab47fb9d1cee lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2440,15 +2440,20 @@
@web.expose
def view_or_manage_repository( self, trans, **kwd ):
- repository = suc.get_repository_in_tool_shed( trans, kwd[ 'id' ] )
- if trans.user_is_admin() or repository.user == trans.user:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='manage_repository',
- **kwd ) )
- else:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='view_repository',
- **kwd ) )
+ repository_id = kwd.get( 'id', None )
+ if repository_id:
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ if repository:
+ if trans.user_is_admin() or repository.user == trans.user:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ **kwd ) )
+ else:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_repository',
+ **kwd ) )
+ return trans.show_error_message( "Invalid repository id '%s' received." % repository_id )
+ return trans.show_error_message( "The repository id was not received." )
@web.expose
def view_repository( self, trans, id, **kwd ):
diff -r daa4930ca870cba6faa4e5e1c3c54f42f1445e9e -r f8e235f593cd24958e52fcb436d5ab47fb9d1cee lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -95,6 +95,27 @@
tool_dependencies )
return repo_info_dict
+def get_prior_install_required_dict( trans, tsr_ids, repo_info_dicts ):
+ """
+ Return a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained in the received list of tsr_ids
+ and whose associated repository must be installed prior to the repository associated with the tsr_id key.
+ """
+ # Initialize the dictionary.
+ prior_install_required_dict = {}
+ for tsr_id in tsr_ids:
+ prior_install_required_dict[ tsr_id ] = []
+ # inspect the repository dependencies for each repository about to be installed and populate the dictionary.
+ for repo_info_dict in repo_info_dicts:
+ repository, repository_dependencies = get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict )
+ if repository:
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ if encoded_repository_id in tsr_ids:
+ # We've located the database table record for one of the repositories we're about to install, so find out if it has any repository
+ # dependencies that require prior installation.
+ prior_install_ids = get_repository_ids_requiring_prior_install( trans, tsr_ids, repository_dependencies )
+ prior_install_required_dict[ encoded_repository_id ] = prior_install_ids
+ return prior_install_required_dict
+
def get_repo_info_dict( trans, repository_id, changeset_revision ):
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
@@ -128,6 +149,51 @@
repository_dependencies=None )
return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, has_repository_dependencies
+def get_repository_components_for_installation( encoded_tsr_id, encoded_tsr_ids, repo_info_dicts, tool_panel_section_keys ):
+ """
+ The received encoded_tsr_ids, repo_info_dicts, and tool_panel_section_keys are 3 lists that contain associated elements at each location in
+ the list. This method will return the elements from repo_info_dicts and tool_panel_section_keys associated with the received encoded_tsr_id
+ by determining it's location in the received encoded_tsr_ids list.
+ """
+ for index, tsr_id in enumerate( encoded_tsr_ids ):
+ if tsr_id == encoded_tsr_id:
+ repo_info_dict = repo_info_dicts[ index ]
+ tool_panel_section_key = tool_panel_section_keys[ index ]
+ return repo_info_dict, tool_panel_section_key
+ return None, None
+
+def get_repository_and_repository_dependencies_from_repo_info_dict( trans, repo_info_dict ):
+ """Return a tool_shed_repository record defined by the information in the received repo_info_dict."""
+ repository_name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ repository_name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
+ repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, repository_name, repository_owner, changeset_revision )
+ return repository, repository_dependencies
+
+def get_repository_ids_requiring_prior_install( trans, tsr_ids, repository_dependencies ):
+ """
+ Inspect the received repository_dependencies and determine if the encoded id of each required repository is in the received tsr_ids. If so,
+ then determine whether that required repository should be installed prior to it's dependent repository. Return a list of encoded repository
+ ids, each of which is contained in the received list of tsr_ids, and whose associated repositories must be installed prior to the dependent
+ repository associated with the received repository_dependencies.
+ """
+ prior_install_ids = []
+ if repository_dependencies:
+ for key, rd_tups in repository_dependencies.items():
+ if key in [ 'description', 'root_key' ]:
+ continue
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup )
+ if prior_installation_required:
+ repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
+ if repository:
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ if encoded_repository_id in tsr_ids:
+ prior_install_ids.append( encoded_repository_id )
+ return prior_install_ids
+
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be updated."""
changeset_revision_dict = {}
@@ -319,10 +385,6 @@
tool_panel_section_key = None
tool_section = None
encoded_repository_ids = [ trans.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ]
- # Create a one-to-one mapping of tool shed repository id and tool panel section key. All tools contained in the repositories being installed will be loaded
- # into the same section in the tool panel.
- for tsr in created_or_updated_tool_shed_repositories:
- tool_panel_section_keys.append( tool_panel_section_key )
new_kwd = dict( includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
has_repository_dependencies=has_repository_dependencies,
@@ -516,6 +578,39 @@
lock.release()
return new_containers_dict
+def order_components_for_installation( trans, tsr_ids, repo_info_dicts, tool_panel_section_keys ):
+ """
+ Some repositories may have repository dependencies that are required to be installed before the dependent repository. This method will inspect the list of
+ repositories about to be installed and make sure to order them appropriately. For each repository about to be installed, if required repositories are not
+ contained in the list of repositories about to be installed, then they are not considered. Repository dependency definitions that contain circular dependencies
+ should not result in an infinite loop, but obviously prior installation will not be handled for one or more of the repositories that require prior installation.
+ """
+ ordered_tsr_ids = []
+ ordered_repo_info_dicts = []
+ ordered_tool_panel_section_keys = []
+ # Create a dictionary whose keys are the received tsr_ids and whose values are a list of tsr_ids, each of which is contained in the received list of tsr_ids
+ # and whose associated repository must be installed prior to the repository associated with the tsr_id key.
+ prior_install_required_dict = get_prior_install_required_dict( trans, tsr_ids, repo_info_dicts )
+ # Create the ordered_tsr_ids, the ordered_repo_info_dicts and the ordered_tool_panel_section_keys lists.
+ for tsr_id in tsr_ids:
+ if tsr_id not in ordered_tsr_ids:
+ prior_install_required_ids = prior_install_required_dict.get( tsr_id, [] )
+ for prior_install_required_id in prior_install_required_ids:
+ if prior_install_required_id not in ordered_tsr_ids:
+ # Install the associated repository dependency first.
+ prior_repo_info_dict, prior_tool_panel_section_key = get_repository_components_for_installation( prior_install_required_id,
+ tsr_ids,
+ repo_info_dicts,
+ tool_panel_section_keys )
+ ordered_tsr_ids.append( prior_install_required_id )
+ ordered_repo_info_dicts.append( prior_repo_info_dict )
+ ordered_tool_panel_section_keys.append( prior_tool_panel_section_key )
+ repo_info_dict, tool_panel_section_key = get_repository_components_for_installation( tsr_id, tsr_ids, repo_info_dicts, tool_panel_section_keys )
+ ordered_tsr_ids.append( tsr_id )
+ ordered_repo_info_dicts.append( repo_info_dict )
+ ordered_tool_panel_section_keys.append( tool_panel_section_key )
+ return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys
+
def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, missing_repository_dependencies,
installed_tool_dependencies, missing_tool_dependencies ):
"""Return the populated containers for a repository being installed for the first time."""
diff -r daa4930ca870cba6faa4e5e1c3c54f42f1445e9e -r f8e235f593cd24958e52fcb436d5ab47fb9d1cee lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -79,7 +79,7 @@
suc.get_repo_info_tuple_contents( repo_info_tuple )
if repository_dependencies:
missing_td = {}
- # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the reinstallation process.
+ # Handle the scenario where a repository was installed, then uninstalled and an error occurred during the re-installation process.
# In this case, a record for the repository will exist in the database with the status of 'New'.
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed_url, name, repository_owner, changeset_revision )
if repository and repository.metadata:
diff -r daa4930ca870cba6faa4e5e1c3c54f42f1445e9e -r f8e235f593cd24958e52fcb436d5ab47fb9d1cee lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -640,11 +640,11 @@
def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
"""Return a tool shed repository database record that is defined by either the current changeset revision or the installed_changeset_revision."""
# This method is used only in Galaxy, not the tool shed.
- repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
- tool_shed=tool_shed,
- name=name,
- owner=owner,
- installed_changeset_revision=changeset_revision )
+ repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
+ tool_shed=tool_shed,
+ name=name,
+ owner=owner,
+ installed_changeset_revision=changeset_revision )
if not repository:
repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( app=app,
tool_shed=tool_shed,
@@ -1056,7 +1056,8 @@
in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one
that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled.
"""
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = get_repo_info_tuple_contents( repo_info_tuple )
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ get_repo_info_tuple_contents( repo_info_tuple )
tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/392b82ef5b56/
Changeset: 392b82ef5b56
User: inithello
Date: 2013-04-17 22:11:20
Summary: Account for condition where output filename is not defined.
Affected #: 1 file
diff -r 6e7ad33e263d821e82dce33054b9e10c1357f0e3 -r 392b82ef5b56ebb98b8802f94bd00b5761e5924d lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -382,6 +382,9 @@
required_test_files.append( input_file )
for output in test_dict[ 'outputs' ]:
fieldname, filename = output
+ # In rare cases, the filename may be None. If that is the case, skip that output definition.
+ if filename is None:
+ continue
if filename not in required_test_files:
required_test_files.append( filename )
# Make sure each specified file actually does exist in the test data path of the cloned repository.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9e49d3b25e1b/
Changeset: 9e49d3b25e1b
User: inithello
Date: 2013-04-17 20:31:27
Summary: Improve recording of invalid tests. Check for the existence of each file required by functional tests.
Affected #: 1 file
diff -r da5a5077ef1ebffa9e0ad9039ca4463298815de1 -r 9e49d3b25e1b7cc2172f09ae59d93bd173f56d6c lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -167,21 +167,22 @@
no_tools = 0
valid_revisions = 0
invalid_revisions = 0
- # Get the list of metadata records to check for functional tests and test data. Limit this to records that have not been flagged do_not_test
- # or tools_functionally_correct. Also filter out changeset revisions that are not downloadable, because it's redundant to test a revision that
- # a user can't install.
- # Initialize the repository_status dict with the test environment, but leave the test_errors empty.
- repository_status = {}
- repository_status[ 'invalid_tests' ] = []
+ # Get the list of metadata records to check for functional tests and test data. Limit this to records that have not been flagged do_not_test,
+ # since there's no need to check them again if they won't be tested anyway. Also filter out changeset revisions that are not downloadable,
+ # because it's redundant to test a revision that a user can't install.
metadata_records_to_check = app.sa_session.query( app.model.RepositoryMetadata ) \
.filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
app.model.RepositoryMetadata.table.c.includes_tools == True,
- app.model.RepositoryMetadata.table.c.do_not_test == False,
- app.model.RepositoryMetadata.table.c.tools_functionally_correct == False ) ) \
+ app.model.RepositoryMetadata.table.c.do_not_test == False ) ) \
.all()
for metadata_record in metadata_records_to_check:
+ # Initialize the repository_status dict with the test environment, but leave the test_errors empty.
+ repository_status = {}
if metadata_record.tool_test_errors:
repository_status = metadata_record.tool_test_errors
+ # Clear any old invalid tests for this metadata revision, since this could lead to duplication of invalid test rows,
+ # or tests incorrectly labeled as invalid.
+ repository_status[ 'invalid_tests' ] = []
if 'test_environment' in repository_status:
repository_status[ 'test_environment' ] = get_test_environment( repository_status[ 'test_environment' ] )
else:
@@ -192,7 +193,6 @@
name = metadata_record.repository.name
owner = metadata_record.repository.user.username
changeset_revision = str( metadata_record.changeset_revision )
- repository_status[ 'invalid_tests' ] = []
if metadata_record.repository.id not in checked_repository_ids:
checked_repository_ids.append( metadata_record.repository.id )
if verbosity >= 1:
@@ -218,15 +218,13 @@
dirs.remove( '.hg' )
if 'test-data' in dirs:
has_test_data = True
+ test_data_path = os.path.join( root, dirs[ dirs.index( 'test-data' ) ] )
break
- # Remove the cloned repository path.
- if os.path.exists( work_dir ):
- shutil.rmtree( work_dir )
if verbosity >= 1:
if not has_test_data:
- print '# Test data missing in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
+ print '# Test data directory missing in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
else:
- print '# Test data found in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
+ print '# Test data directory found in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
print '# Checking for functional tests in changeset revision %s of %s, owned by %s.' % \
( changeset_revision, name, owner )
# Loop through all the tools in this metadata record, checking each one for defined functional tests.
@@ -255,12 +253,22 @@
has_tests += 1
failure_reason = ''
problem_found = False
+ missing_test_files = []
+ if tool_has_tests and has_test_data:
+ missing_test_files = check_for_missing_test_files( tool_metadata[ 'tests' ], test_data_path )
+ if missing_test_files:
+ if verbosity >= 2:
+ print "# Tool ID '%s' in changeset revision %s of %s is missing one or more required test files: %s" % \
+ ( tool_id, changeset_revision, name, ', '.join( missing_test_files ) )
if not has_test_data:
failure_reason += 'Repository does not have a test-data directory. '
problem_found = True
if not tool_has_tests:
failure_reason += 'Functional test definitions missing for %s. ' % tool_id
problem_found = True
+ if missing_test_files:
+ failure_reason += 'One or more test files are missing for tool %s: %s' % ( tool_id, ', '.join( missing_test_files ) )
+ problem_found = True
test_errors = dict( tool_id=tool_id, tool_version=tool_version, tool_guid=tool_guid,
reason_test_is_invalid=failure_reason )
# The repository_metadata.tool_test_errors attribute should always have the following structure:
@@ -312,6 +320,9 @@
if problem_found:
if test_errors not in repository_status[ 'invalid_tests' ]:
repository_status[ 'invalid_tests' ].append( test_errors )
+ # Remove the cloned repository path. This has to be done after the check for required test files, for obvious reasons.
+ if os.path.exists( work_dir ):
+ shutil.rmtree( work_dir )
if not repository_status[ 'invalid_tests' ]:
valid_revisions += 1
if verbosity >= 1:
@@ -319,17 +330,21 @@
else:
invalid_revisions += 1
if verbosity >= 1:
- print '# Some tools missing functional tests in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
+ print '# Some tools have problematic functional tests in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
+ if verbosity >= 2:
+ for invalid_test in repository_status[ 'invalid_tests' ]:
+ if 'reason_test_is_invalid' in invalid_test:
+ print '# %s' % invalid_test[ 'reason_test_is_invalid' ]
if not info_only:
# If repository_status[ 'test_errors' ] is empty, no issues were found, and we can just update time_last_tested with the platform
# on which this script was run.
if repository_status[ 'invalid_tests' ]:
# If functional test definitions or test data are missing, set do_not_test = True if and only if:
- # a) There are multiple downloadable revisions, and the revision being tested is not the most recent downloadable revision. In this case,
- # the revision will never be updated with correct data, and re-testing it would be redundant.
- # b) There are one or more downloadable revisions, and the revision being tested is the most recent downloadable revision. In this case, if
- # the repository is updated with test data or functional tests, the downloadable changeset revision that was tested will be replaced
- # with the new changeset revision, which will be automatically tested.
+ # a) There are multiple downloadable revisions, and the revision being tested is not the most recent downloadable revision.
+ # In this case, the revision will never be updated with correct data, and re-testing it would be redundant.
+ # b) There are one or more downloadable revisions, and the revision being tested is the most recent downloadable revision.
+ # In this case, if the repository is updated with test data or functional tests, the downloadable changeset revision
+ # that was tested will be replaced with the new changeset revision, which will be automatically tested.
if should_set_do_not_test_flag( app, metadata_record.repository, changeset_revision ):
metadata_record.do_not_test = True
metadata_record.tools_functionally_correct = False
@@ -357,6 +372,25 @@
changelog_tuples.append( ( ctx.rev(), str( ctx ) ) )
return changelog_tuples
+def check_for_missing_test_files( test_definition, test_data_path ):
+ '''Process the tool's functional test definitions and check for each file specified as an input or output.'''
+ missing_test_files = []
+ required_test_files = []
+ for test_dict in test_definition:
+ for input_file in test_dict[ 'required_files' ]:
+ if input_file not in required_test_files:
+ required_test_files.append( input_file )
+ for output in test_dict[ 'outputs' ]:
+ fieldname, filename = output
+ if filename not in required_test_files:
+ required_test_files.append( filename )
+ # Make sure each specified file actually does exist in the test data path of the cloned repository.
+ for required_file in required_test_files:
+ required_file_full_path = os.path.join( test_data_path, required_file )
+ if not os.path.exists( required_file_full_path ):
+ missing_test_files.append( required_file )
+ return missing_test_files
+
def is_most_recent_downloadable_revision( app, repository, changeset_revision, downloadable_revisions ):
# Get a list of ( numeric revision, changeset hash ) tuples from the changelog.
changelog = get_repo_changelog_tuples( repository.repo_path( app ) )
@@ -376,8 +410,10 @@
a) There are multiple downloadable revisions, and the provided changeset revision is not the most recent downloadable revision. In this case,
the revision will never be updated with correct data, and re-testing it would be redundant.
b) There are one or more downloadable revisions, and the provided changeset revision is the most recent downloadable revision. In this case, if
- the repository is updated with test data or functional tests, the downloadable changeset revision that was tested will be replaced
- with the new changeset revision, which will be automatically tested.
+ the repository is updated with test data or functional tests, the downloadable changeset revision that was tested will either be replaced
+ with the new changeset revision, or a new downloadable changeset revision will be created, either of which will be automatically checked and
+ flagged as appropriate. In the install and test script, this behavior is slightly different, since we do want to always run functional tests
+ on the most recent downloadable changeset revision.
'''
metadata_records = app.sa_session.query( app.model.RepositoryMetadata ) \
.filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/da5a5077ef1e/
Changeset: da5a5077ef1e
User: dannon
Date: 2013-04-17 16:55:50
Summary: Strip whitespace from version_string_cmd, contributed by Peter Cock
Affected #: 1 file
diff -r 28edc7bec4048b5ef1564396ab3e6f91b03f62d6 -r da5a5077ef1ebffa9e0ad9039ca4463298815de1 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1094,7 +1094,7 @@
self.version_string_cmd = None
version_cmd = root.find("version_command")
if version_cmd is not None:
- self.version_string_cmd = version_cmd.text
+ self.version_string_cmd = version_cmd.text.strip()
version_cmd_interpreter = version_cmd.get( "interpreter", None )
if version_cmd_interpreter:
executable = self.version_string_cmd.split()[0]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.