galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
February 2013
- 2 participants
- 189 discussions
commit/galaxy-central: greg: Fix tool shed functional test that was broken in commit 8852:d04aa3ad6132.
by Bitbucket 17 Feb '13
by Bitbucket 17 Feb '13
17 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/40a7ef4efb22/
changeset: 40a7ef4efb22
user: greg
date: 2013-02-17 18:46:13
summary: Fix tool shed functional test that was broken in commit 8852:d04aa3ad6132.
affected #: 2 files
diff -r 64b4ef5e2cc8e18a2872eaeb64fa46f50ae7f814 -r 40a7ef4efb22afdc6bde4bce4e75d691e4ac7a7f test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -903,28 +903,58 @@
self.reset_installed_repository_metadata( installed_repository )
new_metadata = installed_repository.metadata
assert metadata == new_metadata, 'Metadata for installed repository %s differs after metadata reset.' % name
- def verify_installed_repository_data_table_entries( self, data_tables=[] ):
- data_table = util.parse_xml( self.shed_tool_data_table_conf )
+ def verify_installed_repository_data_table_entries( self, required_data_table_entries ):
+ # The value of the received required_data_table_entries will be something like: [ 'sam_fa_indexes' ]
+ data_tables = util.parse_xml( self.shed_tool_data_table_conf )
found = False
+ # With the tool shed, the "path" attribute that is hard-coded into the tool_data_tble_conf.xml
+ # file is ignored. This is because the tool shed requires the directory location to which this
+ # path points to be empty except when a specific tool is loaded. The default location for this
+ # directory configured for the tool shed is <Galaxy root>/shed-tool-data. When a tool is loaded
+ # in the tool shed, all contained .loc.sample files are copied to this directory and the
+ # ToolDataTableManager parses and loads the files in the same way that Galaxy does with a very
+ # important exception. When the tool shed loads a tool and parses and loads the copied ,loc.sample
+ # files, the ToolDataTableManager is already instantiated, and so it's add_new_entries_from_config_file()
+ # method is called and the tool_data_path parameter is used to over-ride the hard-coded "tool-data"
+ # directory that Galaxy always uses.
+ #
# Tool data table xml structure:
# <tables>
- # <!-- Locations of all fasta files under genome directory -->
- # <table name="all_fasta" comment_char="#">
- # <columns>value, dbkey, name, path</columns>
- # <file path="tool-data/all_fasta.loc" />
+ # <table comment_char="#" name="sam_fa_indexes">
+ # <columns>line_type, value, path</columns>
+ # <file path="tool-data/sam_fa_indices.loc" />
# </table>
# </tables>
- for table_elem in data_table.findall( 'table' ):
- for data_table in data_tables:
- if 'name' in table_elem.attrib and table_elem.attrib[ 'name' ] == data_table:
+ for table_elem in data_tables.findall( 'table' ):
+ # The value of table_elem will be something like: <table comment_char="#" name="sam_fa_indexes">
+ for required_data_table_entry in required_data_table_entries:
+ # The value of required_data_table_entry will be something like: 'sam_fa_indexes'
+ if 'name' in table_elem.attrib and table_elem.attrib[ 'name' ] == required_data_table_entry:
+ found = True
+ # We're processing something like: sam_fa_indexes
file_elem = table_elem.find( 'file' )
+ # We have something like: <file path="tool-data/sam_fa_indices.loc" />
+ # The "path" attribute of the "file" tag is the location that Galaxy always uses because the
+ # Galaxy ToolDataTableManager was implemented in such a way that the hard-coded path is used
+ # rather than allowing the location to be a configurable setting like the tool shed requires.
file_path = file_elem.get( 'path', None )
- full_path = os.path.join( self.tool_data_path, file_path )
- assert file_path is not None, 'No file path configured for this data table.'
- assert os.path.exists( full_path ), 'Tool data table file %s not found.' % full_path
- found = True
+ # The value of file_path will be something like: "tool-data/all_fasta.loc"
+ assert file_path is not None, 'The "path" attribute is missing for the %s entry.' % name
+ # The following test is probably not necesary, but the tool-data directory should exist!
+ galaxy_tool_data_dir, loc_file_name = os.path.split( file_path )
+ assert galaxy_tool_data_dir is not None, 'The hard-coded Galaxy tool-data directory is missing for the %s entry.' % name
+ assert os.path.exists( galaxy_tool_data_dir ), 'The Galaxy tool-data directory does not exist.'
+ # Make sure the loc_file_name was correctly copied into the configured directory location.
+ configured_file_location = os.path.join( self.tool_data_path, loc_file_name )
+ assert os.path.isfile( configured_file_location ), 'The expected copied file "%s" is missing.' % configured_file_location
+ # We've found the value of the required_data_table_entry in data_tables, which is the parsed
+ # shed_tool_data_table_conf.xml, so all is well!
break
- assert found, 'No entry for %s in %s.' % ( data_table, self.shed_tool_data_table_conf )
+ if found:
+ break
+ # We better have an entry like: <table comment_char="#" name="sam_fa_indexes"> in our parsed data_tables
+ # or we know that the repository was not correctly installed!
+ assert found, 'No entry for %s in %s.' % ( required_data_table_entry, self.shed_tool_data_table_conf )
def verify_repository_reviews( self, repository, reviewer=None, strings_displayed=[], strings_not_displayed=[] ):
changeset_revision = self.get_repository_tip( repository )
# Verify that the currently logged in user has a repository review for the specified repository, reviewer, and changeset revision.
diff -r 64b4ef5e2cc8e18a2872eaeb64fa46f50ae7f814 -r 40a7ef4efb22afdc6bde4bce4e75d691e4ac7a7f test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -127,5 +127,5 @@
self.verify_installed_repository_metadata_unchanged( repository_name, common.test_user_1_name )
def test_0025_verify_sample_files( self ):
'''Verify that the installed repository populated shed_tool_data_table.xml and the sample files.'''
- self.verify_installed_repository_data_table_entries( data_tables=[ 'sam_fa_indexes' ] )
+ self.verify_installed_repository_data_table_entries( required_data_table_entries=[ 'sam_fa_indexes' ] )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add some comments to the tool shed functional test script that tests behavior of complex repository dependencies.
by Bitbucket 16 Feb '13
by Bitbucket 16 Feb '13
16 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/64b4ef5e2cc8/
changeset: 64b4ef5e2cc8
user: greg
date: 2013-02-16 22:21:40
summary: Add some comments to the tool shed functional test script that tests behavior of complex repository dependencies.
affected #: 1 file
diff -r a41446bf66fda81772b6fe7d02f921a4955f7ce9 -r 64b4ef5e2cc8e18a2872eaeb64fa46f50ae7f814 test/tool_shed/functional/test_0100_complex_repository_dependencies.py
--- a/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
@@ -27,10 +27,11 @@
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_bwa_tool_repository( self ):
- '''Create and populate bwa_tool_0100.'''
+ '''Create and populate bwa_tool_repository_0100.'''
category = self.create_category( name=category_name, description=category_description )
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ # Create a repository named bwa_tool_repository_0100 owned by user1.
repository = self.get_or_create_repository( name=bwa_tool_repository_name,
description=bwa_tool_repository_description,
long_description=bwa_tool_repository_long_description,
@@ -46,19 +47,21 @@
commit_message='Uploaded tool_dependencies.xml.',
strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool' ],
strings_not_displayed=[] )
+ # Visit the manage repository page for bwa_tool_repository_0100.
self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
def test_0010_create_bwa_base_repository( self ):
'''Create and populate bwa_base_0100.'''
category = self.create_category( name=category_name, description=category_description )
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ # Create a repository named bwa_base_repository_0100 owned by user1.
repository = self.get_or_create_repository( name=bwa_base_repository_name,
description=bwa_base_repository_description,
long_description=bwa_base_repository_long_description,
owner=common.test_user_1_name,
category_id=self.security.encode_id( category.id ),
strings_displayed=[] )
- tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ # Populate the repository named bwa_base_repository_0100 with a bwa_base tool archive.
self.upload_file( repository,
filename='bwa/complex/bwa_base.tar',
filepath=None,
@@ -72,14 +75,17 @@
'''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ # The repository named bwa_base_repository_0100 is the dependent repository.
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # The tool_repository named bwa_tool_repository_0100 is the required repository.
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = 'http://http://this is not an url!'
- name = tool_repository.name
- owner = tool_repository.user.username
+ name = 'bwa_tool_repository_0100'
+ owner = 'user1'
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
+ # Populate the dependent repository named bwa_base_repository_0100 with an invalid tool_dependencies.xml file.
self.upload_file( repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -93,14 +99,17 @@
'''Generate and upload a complex repository definition that specifies an invalid repository name.'''
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ # The base_repository named bwa_base_repository_0100 is the dependent repository.
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # The tool_repository named bwa_tool_repository_0100 is the required repository.
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
name = 'invalid_repository!?'
- owner = tool_repository.user.username
+ owner = 'user1'
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'because the name is invalid' ]
+ strings_displayed = [ 'because the name is invalid' ]
+ # # Populate the dependent base_repository named bwa_tool_repository_0100 with an invalid tool_dependencies.xml file.
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -114,10 +123,12 @@
'''Generate and upload a complex repository definition that specifies an invalid owner.'''
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ # The base_repository named bwa_base_repository_0100 is the dependent repository.
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # The tool_repository named bwa_tool_repository_0100 is the required repository.
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
- name = tool_repository.name
+ name = 'bwa_tool_repository_0100'
owner = 'invalid_owner!?'
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
@@ -135,11 +146,13 @@
'''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ # The base_repository named bwa_base_repository_0100 is the dependent repository.
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # The tool_repository named bwa_tool_repository_0100 is the required repository.
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
- name = tool_repository.name
- owner = tool_repository.user.username
+ name = 'bwa_tool_repository_0100'
+ owner = 'user1'
changeset_revision = '1234abcd'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
strings_displayed = [ 'because the changeset revision is invalid.' ]
@@ -154,15 +167,19 @@
strings_not_displayed=[] )
def test_0035_generate_complex_repository_dependency( self ):
'''Generate and upload a valid tool_dependencies.xml file that specifies bwa_tool_repository_0100.'''
+ # The base_repository named bwa_base_repository_0100 is the dependent repository.
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # The tool_repository named bwa_tool_repository_0100 is the required repository.
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
url = self.url
- name = tool_repository.name
- owner = tool_repository.user.username
+ name = 'bwa_tool_repository_0100'
+ owner = 'user1'
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_repository_dependency_xml( [ tool_repository ], xml_filename, complex=True, package='bwa', version='0.5.9' )
+ # Upload the valid tool_dependencies.xml file to bwa_base_repository_0100 that specifies bwa_tool_repository_0100
+ # as a repository dependency via a complex repository dependency definition.
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -176,7 +193,9 @@
self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package' ] )
def test_0040_generate_tool_dependency( self ):
'''Generate and upload a new tool_dependencies.xml file that specifies an arbitrary file on the filesystem, and verify that bwa_base depends on the new changeset revision.'''
+ # The base_repository named bwa_base_repository_0100 is the dependent repository.
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ # The tool_repository named bwa_tool_repository_0100 is the required repository.
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
previous_changeset = self.get_repository_tip( tool_repository )
old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'readme', 'tool_dependencies.xml' ) )
@@ -195,6 +214,21 @@
strings_displayed=[],
strings_not_displayed=[] )
# Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file.
+ repository_tip = self.get_repository_tip( tool_repository )
+ strings_displayed = [ 'bwa', '0.5.9', 'package' ]
+ strings_displayed.append( repository_tip )
+ strings_not_displayed=[ previous_changeset ]
+ self.display_manage_repository_page( tool_repository,
+ strings_displayed=strings_displayed,
+ strings_not_displayed=strings_not_displayed )
+ # Visit the manage page of the bwa_tool_repository_0100 to confirm the valid tool dependency definition.
+ self.display_manage_repository_page( tool_repository,
+ strings_displayed=strings_displayed,
+ strings_not_displayed=strings_not_displayed )
+ # Visit the manage page of the bwa_base_repository_0100 to confirm the valid tool dependency definition
+ # and the updated changeset revision (updated tip) of the bwa_tool_repository_0100 repository is displayed
+ # as the required repository revision. The original revision defined in the previously uploaded
+ # tool_dependencies.xml file will be updated.
self.display_manage_repository_page( base_repository,
- strings_displayed=[ self.get_repository_tip( tool_repository ), 'bwa', '0.5.9', 'package' ],
- strings_not_displayed=[ previous_changeset ] )
+ strings_displayed=strings_displayed,
+ strings_not_displayed=strings_not_displayed )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for handling a complex repository dependency definition and a bit of code cleanup.
by Bitbucket 16 Feb '13
by Bitbucket 16 Feb '13
16 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a41446bf66fd/
changeset: a41446bf66fd
user: greg
date: 2013-02-16 22:20:33
summary: Fix for handling a complex repository dependency definition and a bit of code cleanup.
affected #: 1 file
diff -r 2fc2d5ef636913bdb29c4f9ecea42e440a2c326a -r a41446bf66fda81772b6fe7d02f921a4955f7ce9 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1429,8 +1429,8 @@
# We have a complex repository dependency. If the retruned value of repository_dependency_is_valid is True, the tool
# dependency definition will be set as invalid. This is currently the only case where a tool dependency definition is
# considered invalid.
- repository_dependencies_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app=app,
- repository_elem=sub_elem )
+ repository_dependency_tup, repository_dependency_is_valid, error_message = \
+ handle_repository_elem( app=app, repository_elem=sub_elem )
if requirements_dict:
dependency_key = '%s/%s' % ( package_name, package_version )
if repository_dependency_is_valid:
@@ -1490,19 +1490,6 @@
If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository,
then update the received metadata_dict with information from the parsed tool_dependencies_config.
"""
- """
- "{"orphan_tool_dependencies":
- {"bwa/0.5.9":
- {"name": "bwa",
- "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
- "type": "package", "version": "0.5.9"}},
- "tool_dependencies":
- {"bwa/0.5.9":
- {"name": "bwa",
- "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
- "type": "package",
- "version": "0.5.9"}}}"
- """
error_message = ''
if original_repository_metadata:
# Keep a copy of the original tool dependencies dictionary and the list of tool dictionaries in the metadata.
@@ -1538,7 +1525,8 @@
# We have an invalid complex repository dependency, so mark the tool dependency as invalid.
tool_dependency_is_valid = False
# Append the error message to the invalid repository dependency tuple.
- repository_dependency_tup.append( message )
+ toolshed, name, owner, changeset_revision = repository_dependency_tup
+ repository_dependency_tup = ( toolshed, name, owner, changeset_revision, message )
invalid_repository_dependency_tups.append( repository_dependency_tup )
error_message = '%s %s' % ( error_message, message )
elif elem.tag == 'set_environment':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ef882ec18f1c/
changeset: ef882ec18f1c
user: jmchilton
date: 2013-02-15 22:58:30
summary: Fix to allow multiple input tool parameters in conditionals and repeats.
affected #: 1 file
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r ef882ec18f1c6c779796cde62ca035c826197bc9 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -76,10 +76,10 @@
else:
raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, conversion_extensions )
if parent:
- parent[input.name] = input_datasets[ prefix + input.name + str( i + 1 ) ]
+ parent[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ]
for conversion_name, conversion_data in conversions:
#allow explicit conversion to be stored in job_parameter table
- parent[ conversion_name ] = conversion_data.id #a more robust way to determine JSONable value is desired
+ parent[ conversion_name ][i] = conversion_data.id #a more robust way to determine JSONable value is desired
else:
param_values[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ]
for conversion_name, conversion_data in conversions:
https://bitbucket.org/galaxy/galaxy-central/commits/2fc2d5ef6369/
changeset: 2fc2d5ef6369
user: dannon
date: 2013-02-15 23:18:46
summary: Merged in jmchilton/galaxy-central-multi-input-tool-fixes-2 (pull request #125)
Fix to allow multiple input tool parameters in conditionals and repeats.
affected #: 1 file
diff -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a -r 2fc2d5ef636913bdb29c4f9ecea42e440a2c326a lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -76,10 +76,10 @@
else:
raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, conversion_extensions )
if parent:
- parent[input.name] = input_datasets[ prefix + input.name + str( i + 1 ) ]
+ parent[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ]
for conversion_name, conversion_data in conversions:
#allow explicit conversion to be stored in job_parameter table
- parent[ conversion_name ] = conversion_data.id #a more robust way to determine JSONable value is desired
+ parent[ conversion_name ][i] = conversion_data.id #a more robust way to determine JSONable value is desired
else:
param_values[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ]
for conversion_name, conversion_data in conversions:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Tool shed functional test refactoring for clarity. Functional tests for limiting access to repository component reviews.
by Bitbucket 15 Feb '13
by Bitbucket 15 Feb '13
15 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8e93ca10b0e2/
changeset: 8e93ca10b0e2
user: inithello
date: 2013-02-15 22:55:29
summary: Tool shed functional test refactoring for clarity. Functional tests for limiting access to repository component reviews.
affected #: 22 files
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -83,20 +83,20 @@
tool_panel_section = tool_panel_section_metadata[ tool_guid ][ 0 ][ 'name' ]
assert tool_panel_section == expected_tool_panel_section, 'Expected tool panel section %s, found %s\nMetadata: %s\n' % \
( expected_tool_panel_section, tool_panel_section, metadata )
- def check_installed_repository_tool_dependencies( self, installed_repository, dependencies_installed=False ):
+ def check_installed_repository_tool_dependencies( self,
+ installed_repository,
+ strings_displayed=[],
+ strings_not_displayed=[],
+ dependencies_installed=False ):
# Tool dependencies are not being installed in these functional tests. If this is changed, the test method will also need to be updated.
- strings_not_displayed = []
if not dependencies_installed:
- strings_displayed = [ 'Missing tool dependencies' ]
+ strings_displayed.append( 'Missing tool dependencies' )
else:
- strings_displayed = [ 'Tool dependencies' ]
- for dependency in installed_repository.metadata[ 'tool_dependencies' ]:
- tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ]
- strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] )
- if dependencies_installed:
- strings_displayed.append( 'Installed' )
- else:
- strings_displayed.append( 'Never installed' )
+ strings_displayed.append( 'Tool dependencies' )
+ if dependencies_installed:
+ strings_displayed.append( 'Installed' )
+ else:
+ strings_displayed.append( 'Never installed' )
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
@@ -122,6 +122,7 @@
Loop through each tool dictionary in the repository metadata associated with the received changeset_revision.
For each of these, check for a tools attribute, and load the tool metadata page if it exists, then display that tool's page.
'''
+ test_db_util.refresh( repository )
repository_metadata = self.get_repository_metadata_by_changeset_revision( repository, changeset_revision )
metadata = repository_metadata.metadata
if 'tools' not in metadata:
@@ -297,11 +298,7 @@
def display_installed_repository_manage_page( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
- strings_displayed.extend( [ installed_repository.name,
- installed_repository.description,
- installed_repository.owner,
- installed_repository.tool_shed,
- installed_repository.installed_changeset_revision ] )
+ strings_displayed.append( installed_repository.installed_changeset_revision )
self.check_for_strings( strings_displayed, strings_not_displayed )
def display_installed_workflow_image( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/generate_workflow_image?repository_id=%s&workflow_name=%s' % \
@@ -316,19 +313,6 @@
changeset_revision = self.get_repository_tip( repository )
url = base_url
self.visit_url( url )
- metadata = self.get_repository_metadata_by_changeset_revision( repository, changeset_revision )
- if metadata:
- if 'tool_dependencies' in metadata.metadata:
- strings_displayed.append( 'Tool dependencies' )
- for dependency in metadata.metadata[ 'tool_dependencies' ]:
- if dependency == 'set_environment':
- for environment_dependency in metadata.metadata[ 'tool_dependencies' ][ dependency ]:
- strings_displayed.append( environment_dependency[ 'name' ] )
- strings_displayed.append( environment_dependency[ 'type' ] )
- else:
- strings_displayed.append( metadata.metadata[ 'tool_dependencies' ][ dependency ][ 'name' ] )
- strings_displayed.append( metadata.metadata[ 'tool_dependencies' ][ dependency ][ 'version' ] )
- strings_displayed.append( metadata.metadata[ 'tool_dependencies' ][ dependency ][ 'type' ] )
self.check_for_strings( strings_displayed, strings_not_displayed )
def display_repository_clone_page( self, owner_name, repository_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/repos/%s/%s' % ( owner_name, repository_name )
@@ -907,19 +891,12 @@
# else:
# time.sleep( 1 )
# continue
- def verify_installed_uninstalled_repositories( self, installed_repositories=[], uninstalled_repositories=[] ):
- strings_displayed = []
- strings_not_displayed = []
- for repository_name, repository_owner in uninstalled_repositories:
- repository = test_db_util.get_repository_by_name_and_owner( repository_name, repository_owner )
- strings_not_displayed.extend( [ repository_name, self.get_repository_tip( repository ) ] )
+ def verify_installed_repositories( self, installed_repositories=[], uninstalled_repositories=[] ):
for repository_name, repository_owner in installed_repositories:
- repository = test_db_util.get_repository_by_name_and_owner( repository_name, repository_owner )
galaxy_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, repository_owner )
if galaxy_repository:
- assert galaxy_repository.status == 'Installed', 'Repository %s should be installed, but is %s' % ( repository_name, galaxy_repository.status )
- strings_displayed.extend( [ repository_name, self.get_repository_tip( repository ) ] )
- self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ assert galaxy_repository.status == 'Installed', \
+ 'Repository %s should be installed, but is %s' % ( repository_name, galaxy_repository.status )
def verify_installed_repository_metadata_unchanged( self, name, owner ):
installed_repository = test_db_util.get_installed_repository_by_name_owner( name, owner )
metadata = installed_repository.metadata
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -157,7 +157,7 @@
category = test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
tip = self.get_repository_tip( repository )
self.check_for_valid_tools( repository )
- strings_displayed = self.get_repository_metadata_revisions( repository ).append( 'Select a revision' )
+ strings_displayed = [ 'Select a revision' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
self.check_repository_tools_for_changeset_revision( repository, tip )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -116,5 +116,5 @@
'''Verify that the uploaded tool_dependencies.xml specifies the correct package versions.'''
repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_manage_repository_page( repository,
- strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools' ],
+ strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools', 'package' ],
strings_not_displayed=[ 'Invalid tools' ] )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -178,7 +178,7 @@
# Iterate through all metadata revisions and check for repository dependencies.
for metadata, changeset_revision in repository_metadata:
# Add the dependency description and datatypes repository details to the strings to check.
- strings_displayed.extend( [ 'Emboss requires the Emboss', 'emboss_datatypes_0030', 'user1', datatypes_tip ] )
+ strings_displayed = [ 'Emboss requires the Emboss', 'emboss_datatypes_0030', 'user1', datatypes_tip ]
strings_displayed.extend( [ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
self.display_manage_repository_page( repository,
changeset_revision=changeset_revision,
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -127,5 +127,5 @@
'''Verify that freebayes displays tool dependencies.'''
repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
self.display_manage_repository_page( repository,
- strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools' ],
+ strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools', 'package' ],
strings_not_displayed=[ 'Invalid tools' ] )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
--- a/test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
@@ -261,7 +261,7 @@
for repository in [ emboss_datatypes_repository, emboss_repository, column_repository ]:
self.check_repository_dependency( freebayes_repository, repository )
freebayes_dependencies = [ freebayes_repository, emboss_datatypes_repository, emboss_repository, column_repository ]
- strings_displayed = [ '%s depends on %s.' % ( freebayes_repository.name, ', '.join( repo.name for repo in freebayes_dependencies ) ) ]
+ strings_displayed = [ 'freebayes_0050 depends on freebayes_0050, emboss_datatypes_0050, emboss_0050, column_maker_0050.' ]
self.display_manage_repository_page( freebayes_repository,
strings_displayed=strings_displayed )
def test_0050_verify_tool_dependencies( self ):
@@ -269,7 +269,7 @@
freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.display_manage_repository_page( freebayes_repository,
- strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Tool dependencies' ] )
+ strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Tool dependencies', 'package' ] )
self.display_manage_repository_page( emboss_repository, strings_displayed=[ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
def test_0055_verify_repository_metadata( self ):
'''Verify that resetting the metadata does not change it.'''
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_0410_repository_component_review_access_control.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0410_repository_component_review_access_control.py
@@ -0,0 +1,196 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+repository_name = 'filtering_0410'
+repository_description = 'Galaxy filtering tool for test 0410'
+repository_long_description = 'Long description of Galaxy filtering tool for test 0410'
+
+'''
+1. Create a repository in the tool shed owned by test_user_1.
+2. Have test_user_2 complete a review of the repository.
+3. Have test_user_1 browse the review.
+4. Have test_user_3 browse the repository and make sure they are not allowed to browse the review.
+5. Have test_user_1 give write permission on the repository to the test_user_3.
+6. Have test_user_3 browse the repository again and they should now have the ability to browse the review.
+7. Have test_user_3 browse the review.
+'''
+
+class TestRepositoryComponentReviews( ShedTwillTestCase ):
+ '''Test repository component review features.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ """
+        Create all the user accounts that are needed for this test script to run independently of other tests.
+ Previously created accounts will not be re-created.
+ """
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ test_user_2 = test_db_util.get_user( common.test_user_2_email )
+ assert test_user_2 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_2_email
+ test_user_2_private_role = test_db_util.get_private_role( test_user_2 )
+ self.logout()
+ self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+ test_user_3 = test_db_util.get_user( common.test_user_3_email )
+ assert test_user_3 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_3_email
+ test_user_3_private_role = test_db_util.get_private_role( test_user_3 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_grant_reviewer_role( self ):
+ '''Grant the repository reviewer role to test_user_2.'''
+ """
+ We now have an admin user (admin_user) and three non-admin users (test_user_1, test_user_2, and test_user_3). Grant the repository
+ reviewer role to test_user_2, who will not be the owner of the reviewed repositories, and do not grant any roles to test_user_3 yet.
+ """
+ reviewer_role = test_db_util.get_role_by_name( 'Repository Reviewer' )
+ test_user_2 = test_db_util.get_user( common.test_user_2_email )
+ self.grant_role_to_user( test_user_2, reviewer_role )
+ def test_0010_verify_repository_review_components( self ):
+ '''Ensure that the required review components exist.'''
+ """
+ Make sure all the components we are to review are recorded in the database.
+ """
+ self.add_repository_review_component( name='Repository dependencies',
+ description='Repository dependencies defined in a file named repository_dependencies.xml included in the repository' )
+ strings_displayed=[ 'Data types', 'Functional tests', 'README', 'Repository dependencies', 'Tool dependencies', 'Tools', 'Workflows' ]
+ self.manage_review_components( strings_displayed=strings_displayed )
+ def test_0015_create_repository( self ):
+ """Create and populate the filtering repository"""
+ """
+ We are at step 1.
+ Log in as test_user_1 and create the filtering repository, then upload a basic set of
+ components to be reviewed in subsequent tests.
+ """
+ category = self.create_category( name='Test 0400 Repository Component Reviews', description='Test 0400 Repository Component Reviews' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
+ 'Repository %s has been created' % "'%s'" % repository_name ]
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=strings_displayed )
+ self.upload_file( repository,
+ filename='filtering/filtering_1.1.0.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded filtering 1.1.0 tarball.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ self.upload_file( repository,
+ filename='filtering/filtering_test_data.tar',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded filtering test data.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ self.upload_file( repository,
+ filename='readme.txt',
+ filepath=None,
+ valid_tools_only=True,
+ uncompress_file=True,
+ remove_repo_files_not_in_tar=False,
+ commit_message='Uploaded readme.txt.',
+ strings_displayed=[],
+ strings_not_displayed=[] )
+ def test_0020_review_repository( self ):
+ '''Complete a review of the filtering repository.'''
+ '''
+ We are at step 2 - Have test_user_2 complete a review of the repository.
+ Review all components of the filtering repository, with the appropriate contents and approved/not approved/not applicable status.
+ '''
+ self.logout()
+ self.login( email=common.test_user_2_email, username=common.test_user_2_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ review_contents_dict = {
+ 'Data types': dict(),
+ 'README': dict( rating=5, comment='Clear and concise readme file, a true pleasure to read.', approved='yes', private='no' ),
+ 'Functional tests': dict( rating=5, comment='A good set of functional tests.', approved='yes', private='no' ),
+ 'Repository dependencies': dict(),
+ 'Tool dependencies': dict(),
+ 'Tools': dict( rating=5, comment='Excellent tool, easy to use.', approved='yes', private='no' ),
+ 'Workflows': dict()
+ }
+ self.create_repository_review( repository, review_contents_dict )
+ def test_0025_verify_repository_review( self ):
+ '''Verify that the review was completed and displays properly.'''
+ '''
+ We are at step 3 - Have test_user_1 browse the review.
+ Verify that all the review components were submitted, and that the repository owner can see the review.
+ '''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed = [ 'Data types', 'Functional tests', 'yes', 'A good set of functional tests.', 'README', 'yes', 'Workflows', 'Tools' ]
+ strings_displayed.extend( [ 'Clear and concise readme file, a true pleasure to read.', 'Tool dependencies', 'not_applicable' ] )
+ strings_displayed.extend( [ 'Repository dependencies', 'Excellent tool, easy to use.' ] )
+ strings_displayed = [ 'Browse reviews of this repository' ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ self.verify_repository_reviews( repository, reviewer=user, strings_displayed=strings_displayed )
+ def test_0030_browse_with_other_user( self ):
+ '''Verify that test_user_3 is blocked from browsing the review.'''
+ '''
+ We are at step 4 - Have test_user_3 browse the repository and make sure they are not allowed to browse the review.
+ '''
+ self.logout()
+ self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_not_displayed = [ 'Browse reviews of this repository' ]
+ self.display_manage_repository_page( repository, strings_not_displayed=strings_not_displayed )
+ strings_not_displayed = [ 'A good set of functional tests.', 'Clear and concise readme file, a true pleasure to read.' ]
+ strings_not_displayed.append( 'Excellent tool, easy to use.' )
+ changeset_revision = self.get_repository_tip( repository )
+ review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
+ self.browse_component_review( review, strings_not_displayed=strings_not_displayed )
+ def test_0035_grant_write_access_to_other_user( self ):
+ '''Grant write access on the filtering_0410 repository to test_user_3.'''
+ '''
+ We are at step 5 - Have test_user_1 give write permission on the repository to the test_user_3.
+ '''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.grant_write_access( repository, usernames=[ common.test_user_3_name ] )
+ def test_0040_verify_test_user_3_can_browse_reviews( self ):
+ '''Check that test_user_3 can now browse reviews.'''
+ '''
+ We are at step 6 - Have test_user_3 browse the repository again and they should now have the ability to browse the review.
+ '''
+ self.logout()
+ self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ strings_displayed = [ 'Browse reviews of this repository' ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ def test_0045_verify_browse_review_with_write_access( self ):
+ '''Check that test_user_3 can now display reviews.'''
+ '''
+ We are at step 7 - Have test_user_3 browse the review.
+ '''
+ self.logout()
+ self.login( email=common.test_user_3_email, username=common.test_user_3_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ user = test_db_util.get_user( common.test_user_2_email )
+ strings_displayed = [ 'A good set of functional tests.',
+ 'Clear and concise readme file',
+ 'a true pleasure to read.',
+ 'Excellent tool, easy to use.' ]
+ changeset_revision = self.get_repository_tip( repository )
+ review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision )
+ self.browse_component_review( review, strings_displayed=strings_displayed )
+
\ No newline at end of file
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1000_install_basic_repository.py
--- a/test/tool_shed/functional/test_1000_install_basic_repository.py
+++ b/test/tool_shed/functional/test_1000_install_basic_repository.py
@@ -88,14 +88,14 @@
'Test 0000 Basic Repository Features 1',
new_tool_panel_section='test_1000' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
- strings_displayed = [ installed_repository.name,
- installed_repository.description,
- installed_repository.owner,
- installed_repository.tool_shed,
+ strings_displayed = [ 'filtering_0000',
+ "Galaxy's filtering tool",
+ 'user1',
+ self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0030_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -117,9 +117,10 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
- self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+ strings_displayed = [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
+ self.check_installed_repository_tool_dependencies( installed_repository, strings_displayed=strings_displayed, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0020_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -109,9 +109,10 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
- self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+ strings_displayed = [ 'emboss', '5.0.0', 'package' ]
+ self.check_installed_repository_tool_dependencies( installed_repository, strings_displayed=strings_displayed, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
current_datatypes = int( self.get_datatypes_count() )
assert current_datatypes == base_datatypes_count + repository_datatypes_count, 'Installing emboss did not add new datatypes. Expected: %d. Found: %d' % \
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -175,13 +175,14 @@
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
strings_displayed = [ 'emboss_0030',
'Galaxy wrappers for Emboss version 5.0.0 tools for test 0030',
- installed_repository.owner,
- installed_repository.tool_shed,
+ 'user1',
+ self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
- self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
+ strings_displayed = [ 'emboss', '5.0.0', 'package' ]
+ self.check_installed_repository_tool_dependencies( installed_repository, strings_displayed=strings_displayed, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
current_datatypes = int( self.get_datatypes_count() )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1040_install_repository_basic_circular_dependencies.py
--- a/test/tool_shed/functional/test_1040_install_repository_basic_circular_dependencies.py
+++ b/test/tool_shed/functional/test_1040_install_repository_basic_circular_dependencies.py
@@ -129,7 +129,7 @@
self.uninstall_repository( installed_freebayes_repository, remove_from_disk=True )
test_db_util.ga_refresh( installed_freebayes_repository )
self.check_galaxy_repository_tool_panel_section( installed_freebayes_repository, '' )
- strings_displayed = [ 'Missing repository', 'freebayes' ]
+ strings_displayed = [ 'Missing repository', 'freebayes', 'filtering_0040', 'user1', "Galaxy's filtering tool for test 0040" ]
self.display_installed_repository_manage_page( installed_filtering_repository, strings_displayed=strings_displayed )
self.check_galaxy_repository_db_status( freebayes_repository_name,
common.test_user_1_name,
@@ -150,7 +150,7 @@
self.uninstall_repository( installed_filtering_repository, remove_from_disk=True )
test_db_util.ga_refresh( installed_filtering_repository )
self.check_galaxy_repository_tool_panel_section( installed_filtering_repository, '' )
- strings_displayed = [ 'Missing repository', 'filtering' ]
+ strings_displayed = [ 'Missing repository', 'filtering', 'freebayes_0040', 'user1', "Galaxy's freebayes tool for test 0040" ]
self.display_installed_repository_manage_page( installed_freebayes_repository, strings_displayed=strings_displayed )
self.check_galaxy_repository_db_status( filtering_repository_name,
common.test_user_1_name,
@@ -162,7 +162,7 @@
self.uninstall_repository( installed_freebayes_repository, remove_from_disk=True )
test_db_util.ga_refresh( installed_freebayes_repository )
self.check_galaxy_repository_tool_panel_section( installed_freebayes_repository, 'freebayes' )
- strings_displayed = [ 'Missing repository', 'freebayes' ]
+ strings_displayed = [ 'Missing repository', 'freebayes', 'filtering_0040', 'user1', "Galaxy's filtering tool for test 0040" ]
self.display_installed_repository_manage_page( installed_filtering_repository, strings_displayed=strings_displayed )
self.check_galaxy_repository_db_status( 'freebayes_0040',
'user1',
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1050_circular_dependencies_4_levels.py
--- a/test/tool_shed/functional/test_1050_circular_dependencies_4_levels.py
+++ b/test/tool_shed/functional/test_1050_circular_dependencies_4_levels.py
@@ -267,12 +267,10 @@
# This should result in column_maker and convert_chars being installed, and the rest never installed.
installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
- ( emboss_repository_name, common.test_user_1_name ),
- ( filtering_repository_name, common.test_user_1_name ),
- ( freebayes_repository_name, common.test_user_1_name ),
- ( bismark_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'column_maker_0050', 'convert_chars_0050' ]
+ strings_not_displayed = [ 'emboss_datatypes_0050', 'emboss_0050', 'filtering_0050', 'freebayes_0050', 'bismark_0050' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories=installed_repositories )
def test_0060_install_emboss_repository( self ):
'''Install emboss_5 with repository dependencies.'''
global running_standalone
@@ -291,9 +289,10 @@
( emboss_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ),
( bismark_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( filtering_repository_name, common.test_user_1_name ),
- ( freebayes_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'emboss_datatypes_0050', 'emboss_0050', 'column_maker_0050', 'convert_chars_0050', 'bismark_0050' ]
+ strings_not_displayed = [ 'filtering_0050', 'freebayes_0050' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
def test_0065_deactivate_datatypes_repository( self ):
'''Deactivate emboss_datatypes and verify that the datatypes count is reduced.'''
original_datatypes = self.get_datatypes_count()
@@ -305,12 +304,10 @@
( emboss_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ),
( bismark_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
- ( filtering_repository_name, common.test_user_1_name ),
- ( freebayes_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
- strings_not_displayed = [ repository.name, repository.installed_changeset_revision ]
- self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ strings_displayed = [ 'emboss_0050', 'column_maker_0050', 'convert_chars_0050', 'bismark_0050' ]
+ strings_not_displayed = [ 'emboss_datatypes_0050', 'filtering_0050', 'freebayes_0050' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
def test_0070_uninstall_emboss_repository( self ):
'''Uninstall the emboss_5 repository.'''
repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
@@ -324,11 +321,10 @@
installed_repositories = [ ( column_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ),
( bismark_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( emboss_datatypes_repository_name, common.test_user_1_name ),
- ( emboss_repository_name, common.test_user_1_name ),
- ( filtering_repository_name, common.test_user_1_name ),
- ( freebayes_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'column_maker_0050', 'convert_chars_0050', 'bismark_0050' ]
+ strings_not_displayed = [ 'emboss_0050', 'emboss_datatypes_0050', 'filtering_0050', 'freebayes_0050' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
def test_0075_install_freebayes_repository( self ):
'''Install freebayes with repository dependencies. This should also automatically reactivate emboss_datatypes and reinstall emboss_5.'''
original_datatypes = self.get_datatypes_count()
@@ -356,6 +352,8 @@
( freebayes_repository_name, common.test_user_1_name ),
( convert_repository_name, common.test_user_1_name ),
( bismark_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( filtering_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'emboss_0050', 'emboss_datatypes_0050', 'column_maker_0050', 'convert_chars_0050', 'bismark_0050', 'freebayes_0050' ]
+ strings_not_displayed = [ 'filtering_0050' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1070_invalid_tool.py
--- a/test/tool_shed/functional/test_1070_invalid_tool.py
+++ b/test/tool_shed/functional/test_1070_invalid_tool.py
@@ -79,8 +79,9 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ strings_displayed.extend( [ 'methylation extractor', 'Invalid tools' ] )
self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'methylation extractor', 'Invalid tools' ],
+ strings_displayed=strings_displayed,
strings_not_displayed=[ 'bisulfite mapper' ] )
self.verify_tool_metadata_for_installed_repository( installed_repository )
self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
--- a/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
+++ b/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
@@ -199,12 +199,16 @@
common.test_user_1_name )
self.reactivate_repository( installed_convert_repository )
strings_displayed = [ 'convert_chars_0080',
- 'Convert delimiters',
- self.url.replace( 'http://', '' ),
- installed_convert_repository.installed_changeset_revision,
+ 'Compute',
+ 'an expression on every row',
+ '1.1.0',
'column_maker_0080',
- installed_column_repository.installed_changeset_revision,
- 'Installed repository dependencies' ]
+ 'Installed repository dependencies',
+ self.url.replace( 'http://', '' ),
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.installed_changeset_revision ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
def test_0040_deactivate_column_repository( self ):
'''Deactivate column_maker, verify that convert_chars is installed and missing repository dependencies.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
--- a/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
@@ -108,5 +108,5 @@
url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_column_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed )
- uninstalled_repositories = [ ( column_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( uninstalled_repositories=uninstalled_repositories, installed_repositories=[] )
+ strings_not_displayed = [ 'column_maker_1087' ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1090_install_tool_from_tool_search.py
--- a/test/tool_shed/functional/test_1090_install_tool_from_tool_search.py
+++ b/test/tool_shed/functional/test_1090_install_tool_from_tool_search.py
@@ -213,12 +213,10 @@
install_repository_dependencies=False,
new_tool_panel_section='freebayes_1090' )
installed_repositories = [ ( freebayes_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( filtering_repository_name, common.test_user_1_name ),
- ( emboss_repository_name, common.test_user_1_name ),
- ( emboss_datatypes_repository_name, common.test_user_1_name ),
- ( bwa_color_repository_name, common.test_user_1_name ),
- ( bwa_base_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'freebayes_0090' ]
+ strings_not_displayed = [ 'filtering_0090', 'emboss_0090', 'emboss_datatypes_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
def test_0050_install_deactivate_filtering_repository( self ):
'''Install and deactivate filtering.'''
global running_standalone
@@ -235,13 +233,15 @@
new_tool_panel_section='filtering_1090' )
installed_repositories = [ ( filtering_repository_name, common.test_user_1_name ),
( freebayes_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = [ ( emboss_repository_name, common.test_user_1_name ),
- ( emboss_datatypes_repository_name, common.test_user_1_name ),
- ( bwa_color_repository_name, common.test_user_1_name ),
- ( bwa_base_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'filtering_0090', 'freebayes_0090' ]
+ strings_not_displayed = [ 'emboss_0090', 'emboss_datatypes_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
filtering_repository = test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
self.uninstall_repository( filtering_repository, remove_from_disk=False )
+ strings_displayed = [ 'freebayes_0090' ]
+ strings_not_displayed = [ 'filtering_0090', 'emboss_0090', 'emboss_datatypes_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
def test_0055_install_uninstall_datatypes_repository( self ):
'''Install and uninstall emboss_datatypes.'''
# After this test, the repositories should be in the following states:
@@ -259,9 +259,15 @@
( filtering_repository_name, common.test_user_1_name ),
( bwa_color_repository_name, common.test_user_1_name ),
( bwa_base_repository_name, common.test_user_1_name ) ]
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'emboss_datatypes_0090', 'freebayes_0090' ]
+ strings_not_displayed = [ 'filtering_0090', 'emboss_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ self.verify_installed_repositories( installed_repositories )
datatypes_repository = test_db_util.get_installed_repository_by_name_owner( emboss_datatypes_repository_name, common.test_user_1_name )
self.uninstall_repository( datatypes_repository, remove_from_disk=True )
+ strings_displayed = [ 'freebayes_0090' ]
+ strings_not_displayed = [ 'emboss_datatypes_0090', 'filtering_0090', 'emboss_0090', 'bwa_color_0090', 'bwa_base_0090' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
def test_0060_search_for_bwa_tools( self ):
'''Search for and install the repositories with BWA tools, and verify that this reinstalls emboss_datatypes and reactivates filtering.'''
bwa_color_repository = test_db_util.get_repository_by_name_and_owner( bwa_color_repository_name, common.test_user_1_name )
@@ -292,5 +298,6 @@
( bwa_base_repository_name, common.test_user_1_name ),
( emboss_datatypes_repository_name, common.test_user_1_name ),
( freebayes_repository_name, common.test_user_1_name ) ]
- uninstalled_repositories = []
- self.verify_installed_uninstalled_repositories( installed_repositories=installed_repositories, uninstalled_repositories=uninstalled_repositories )
+ strings_displayed = [ 'emboss_datatypes_0090', 'filtering_0090', 'emboss_0090', 'bwa_color_0090', 'bwa_base_0090', 'freebayes_0090' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.verify_installed_repositories( installed_repositories )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
--- a/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
+++ b/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
@@ -238,19 +238,24 @@
'''Verify that the installed repositories are displayed properly.'''
base_repository = test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
tool_repository = test_db_util.get_installed_repository_by_name_owner( bwa_tool_repository_name, common.test_user_1_name )
- strings_displayed = [ base_repository.name, base_repository.owner, base_repository.installed_changeset_revision ]
- strings_displayed.extend( [ tool_repository.name, tool_repository.owner, tool_repository.installed_changeset_revision ] )
+ strings_displayed = [ 'bwa_base_repository_0100', 'user1', base_repository.installed_changeset_revision ]
+ strings_displayed.extend( [ 'bwa_tool_repository_0100', 'user1', tool_repository.installed_changeset_revision ] )
strings_displayed.append( self.url.replace( 'http://', '' ) )
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=[] )
- checks = [ ( tool_repository,
- [ 'bwa_tool_repository_0100', 'user1', tool_repository.installed_changeset_revision ],
- [ 'Missing tool dependencies' ] ),
- ( base_repository,
- [ 'bwa_base_repository_0100', 'user1', base_repository.installed_changeset_revision, 'bwa_tool_repository_0100',
- tool_repository.installed_changeset_revision ],
- [ 'Missing tool dependencies' ] ) ]
- for repository, strings_displayed, strings_not_displayed in checks:
- self.display_installed_repository_manage_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ strings_displayed = [ 'bwa_tool_repository_0100', 'user1', tool_repository.installed_changeset_revision ]
+ strings_not_displayed = [ 'Missing tool dependencies' ]
+ self.display_installed_repository_manage_page( tool_repository,
+ strings_displayed=strings_displayed,
+ strings_not_displayed=strings_not_displayed )
+ strings_displayed = [ 'bwa_base_repository_0100',
+ 'user1',
+ 'bwa_tool_repository_0100',
+ base_repository.installed_changeset_revision,
+ tool_repository.installed_changeset_revision ]
+ strings_not_displayed = [ 'Missing tool dependencies' ]
+ self.display_installed_repository_manage_page( base_repository,
+ strings_displayed=strings_displayed,
+ strings_not_displayed=strings_not_displayed )
def test_0055_verify_complex_tool_dependency( self ):
'''Verify that the generated env.sh contains the right data.'''
base_repository = test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
--- a/test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
+++ b/test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
@@ -101,8 +101,8 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0025_deactivate_filtering_repository( self ):
'''Deactivate the filtering repository without removing it from disk.'''
@@ -122,6 +122,6 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -118,8 +118,8 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0025_deactivate_freebayes_repository( self ):
'''Deactivate the freebayes repository without removing it from disk.'''
@@ -137,6 +137,6 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
@@ -128,8 +128,8 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0025_deactivate_emboss_repository( self ):
'''Deactivate the emboss repository without removing it from disk.'''
@@ -148,6 +148,6 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r ec4da708e45175610a33d16fcfc68732e425f54f -r 8e93ca10b0e2523ba78a02bfb36e371db5f17e0a test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
@@ -188,8 +188,8 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0025_deactivate_emboss_repository( self ):
'''Deactivate the emboss repository without removing it from disk.'''
@@ -207,6 +207,6 @@
self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ strings_displayed.extend( [ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.verify_tool_metadata_for_installed_repository( installed_repository )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ec4da708e451/
changeset: ec4da708e451
user: greg
date: 2013-02-15 22:41:24
summary: Enhance tool shed repository metadata to include information about invalid tool dependencies, invalid repository dependencies and orphan tool dependencies. Display invalid dependencies in separate containers, and display appropriate warning messages when browsing repositories that contain orphan dependencies.
affected #: 14 files
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -129,30 +129,32 @@
requirements_dict[ 'install_dir' ] = install_dir
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
-def add_tool_shed_orphan_settings_to_tool_dependencies( tool_dependencies, tools ):
- """Inspect all received tool dependencies and label those that are orphans within the repository in the tool shed."""
+def add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies ):
+ """Inspect all received tool dependencies and label those that are orphans within the repository."""
+ orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None )
new_tool_dependencies = {}
if tool_dependencies:
for td_key, requirements_dict in tool_dependencies.items():
if td_key in [ 'set_environment' ]:
- new_set_environment_dict_list = []
- for set_environment_dict in requirements_dict:
- type = 'set_environment'
- name = set_environment_dict.get( 'name', None )
- version = None
- is_orphan_in_tool_shed = tool_dependency_is_orphan_in_tool_shed( type, name, version, tools )
- set_environment_dict[ 'is_orphan_in_tool_shed' ] = is_orphan_in_tool_shed
- new_set_environment_dict_list.append( set_environment_dict )
+ # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
+ if orphan_env_dependencies:
+ new_set_environment_dict_list = []
+ for set_environment_dict in requirements_dict:
+ if set_environment_dict in orphan_env_dependencies:
+ set_environment_dict[ 'is_orphan' ] = True
+ else:
+ set_environment_dict[ 'is_orphan' ] = False
+ new_set_environment_dict_list.append( set_environment_dict )
new_tool_dependencies[ td_key ] = new_set_environment_dict_list
- new_tool_dependencies[ td_key ] = new_set_environment_dict_list
+ else:
+ new_tool_dependencies[ td_key ] = requirements_dict
else:
- type = requirements_dict.get( 'type', 'package' )
- name = requirements_dict.get( 'name', None )
- version = requirements_dict.get( 'version', None )
- if type and name:
- is_orphan_in_tool_shed = tool_dependency_is_orphan_in_tool_shed( type, name, version, tools )
- requirements_dict[ 'is_orphan_in_tool_shed' ] = is_orphan_in_tool_shed
- new_tool_dependencies[ td_key ] = requirements_dict
+ # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
+ if td_key in orphan_tool_dependencies:
+ requirements_dict[ 'is_orphan' ] = True
+ else:
+ requirements_dict[ 'is_orphan' ] = False
+ new_tool_dependencies[ td_key ] = requirements_dict
return new_tool_dependencies
def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
# Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata.
@@ -318,6 +320,24 @@
datatypes = metadata[ 'datatypes' ]
folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
+ # Invalid repository dependencies container.
+ if metadata:
+ if 'invalid_repository_dependencies' in metadata:
+ invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
+ folder_id, invalid_repository_dependencies_root_folder = \
+ container_util.build_invalid_repository_dependencies_root_folder( trans,
+ folder_id,
+ invalid_repository_dependencies )
+ containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
+ # Invalid tool dependencies container.
+ if metadata:
+ if 'invalid_tool_dependencies' in metadata:
+ invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
+ folder_id, invalid_tool_dependencies_root_folder = \
+ container_util.build_invalid_tool_dependencies_root_folder( trans,
+ folder_id,
+ invalid_tool_dependencies )
+ containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
# Invalid tools container.
if metadata:
if 'invalid_tools' in metadata:
@@ -348,8 +368,9 @@
if 'tool_dependencies' in metadata:
tool_dependencies = metadata[ 'tool_dependencies' ]
if trans.webapp.name == 'community':
- tools = metadata.get( 'tools', None )
- tool_dependencies = add_tool_shed_orphan_settings_to_tool_dependencies( tool_dependencies, tools )
+ if 'orphan_tool_dependencies' in metadata:
+ orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
+ tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies )
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
folder_id,
tool_dependencies,
@@ -1123,20 +1144,28 @@
if datatypes:
metadata_dict[ 'datatypes' ] = datatypes
return metadata_dict
-def generate_environment_dependency_metadata( elem, tool_dependencies_dict ):
- """The value of env_var_name must match the value of the "set_environment" type in the tool config's <requirements> tag set."""
+def generate_environment_dependency_metadata( elem, valid_tool_dependencies_dict ):
+ """
+ The value of env_var_name must match the value of the "set_environment" type in the tool config's <requirements> tag set, or the tool dependency
+ will be considered an orphan. Tool dependencies of type set_environment are always defined as valid, but may be orphans.
+ """
+ # The value of the received elem looks something like this:
+ # <set_environment version="1.0">
+ # <environment_variable name="JAVA_JAR_PATH" action="set_to">$INSTALL_DIR</environment_variable>
+ # </set_environment>
requirements_dict = {}
for env_elem in elem:
+ # <environment_variable name="JAVA_JAR_PATH" action="set_to">$INSTALL_DIR</environment_variable>
env_name = env_elem.get( 'name', None )
if env_name:
requirements_dict[ 'name' ] = env_name
requirements_dict[ 'type' ] = 'set_environment'
if requirements_dict:
- if 'set_environment' in tool_dependencies_dict:
- tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
+ if 'set_environment' in valid_tool_dependencies_dict:
+ valid_tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
else:
- tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
- return tool_dependencies_dict
+ valid_tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
+ return valid_tool_dependencies_dict
def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
if as_html:
new_line = '<br/>'
@@ -1173,6 +1202,43 @@
correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
return message
+def generate_message_for_orphan_tool_dependencies( metadata_dict ):
+ """
+ The introduction of the support for orphan tool dependency definitions in tool shed repositories has resulted in the inability
+ to define an improperly configured tool dependency definition / tool config requirements tag combination as an invalid tool
+ dependency. This is certainly a weakness which cannot be correctly handled since now the only way to categorize a tool dependency
+ as invalid is if it consists of a complex repository dependency that is invalid. Any tool dependency definition other than those
+ is considered valid but perhaps an orphan due to it's actual invalidity.
+ """
+ message = ''
+ status = 'done'
+ if metadata_dict:
+ orphan_tool_dependencies = metadata_dict.get( 'orphan_tool_dependencies', None )
+ if orphan_tool_dependencies:
+ if 'tools' not in metadata_dict and 'invalid_tools' not in metadata_dict:
+ message += "This repository contains no tools, so these tool dependencies are considered orphans within this repository.<br/>"
+ for td_key, requirements_dict in orphan_tool_dependencies.items():
+ if td_key == 'set_environment':
+ # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
+ message += "The settings for <b>name</b> and <b>type</b> from a contained tool configuration file's <b>requirement</b> tag "
+ message += "does not match the information for the following tool dependency definitions in the <b>tool_dependencies.xml</b> "
+ message += "file, so these tool dependencies are considered orphans within this repository.<br/>"
+ for env_requirements_dict in requirements_dict:
+ name = env_requirements_dict[ 'name' ]
+ type = env_requirements_dict[ 'type' ]
+ message += "<b>* name:</b> %s, <b>type:</b> %s<br/>" % ( str( name ), str( type ) )
+ else:
+ # "R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
+ message += "The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration file's "
+ message += "<b>requirement</b> tag does not match the information for the following tool dependency definitions in the "
+ message += "<b>tool_dependencies.xml</b> file, so these tool dependencies are considered orphans within this repository.<br/>"
+ name = requirements_dict[ 'name' ]
+ type = requirements_dict[ 'type' ]
+ version = requirements_dict[ 'version' ]
+ message += "<b>* name:</b> %s, <b>type:</b> %s, <b>version:</b> %s<br/>" % ( str( name ), str( type ), str( version ) )
+ message += "<br/>"
+ status = 'warning'
+ return message, status
def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url, shed_config_dict=None, relative_install_dir=None,
repository_files_dir=None, resetting_all_metadata_on_repository=False, updating_installed_repository=False,
persist=False ):
@@ -1255,18 +1321,9 @@
# See if we have a repository dependencies defined.
if name == 'repository_dependencies.xml':
path_to_repository_dependencies_config = os.path.join( root, name )
- if app.name == 'community':
- metadata_dict, error_message = generate_repository_dependency_metadata_for_tool_shed( app,
- path_to_repository_dependencies_config,
- metadata_dict )
- if error_message:
- invalid_file_tups.append( ( name, error_message ) )
- elif app.name == 'galaxy':
- metadata_dict, error_message = generate_repository_dependency_metadata_for_installed_repository( app,
- path_to_repository_dependencies_config,
- metadata_dict )
- if error_message:
- invalid_file_tups.append( ( name, error_message ) )
+ metadata_dict, error_message = generate_repository_dependency_metadata( app, path_to_repository_dependencies_config, metadata_dict )
+ if error_message:
+ invalid_file_tups.append( ( name, error_message ) )
# See if we have one or more READ_ME files.
elif name.lower() in readme_file_names:
relative_path_to_readme = get_relative_path_to_repository_file( root,
@@ -1350,11 +1407,12 @@
app.config.tool_data_path = original_tool_data_path
app.config.tool_data_table_config_path = original_tool_data_table_config_path
return metadata_dict, invalid_file_tups
-def generate_package_dependency_metadata( app, elem, tool_dependencies_dict ):
+def generate_package_dependency_metadata( app, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict ):
"""
Generate the metadata for a tool dependencies package defined for a repository. The value of package_name must match the value of the "package"
type in the tool config's <requirements> tag set. This method is called from both Galaxy and the tool shed.
"""
+ repository_dependency_is_valid = True
repository_dependency_tup = []
requirements_dict = {}
error_message = ''
@@ -1368,75 +1426,55 @@
if sub_elem.tag == 'readme':
requirements_dict[ 'readme' ] = sub_elem.text
elif sub_elem.tag == 'repository':
- # We have a complex repository dependency.
- current_rd_tups, error_message = handle_repository_elem( app=app,
- repository_elem=sub_elem,
- repository_dependencies_tups=None )
- if current_rd_tups:
- repository_dependency_tup = current_rd_tups[ 0 ]
+            # We have a complex repository dependency. If the returned value of repository_dependency_is_valid is True, the tool
+ # dependency definition will be set as invalid. This is currently the only case where a tool dependency definition is
+ # considered invalid.
+ repository_dependencies_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app=app,
+ repository_elem=sub_elem )
if requirements_dict:
dependency_key = '%s/%s' % ( package_name, package_version )
- tool_dependencies_dict[ dependency_key ] = requirements_dict
- return tool_dependencies_dict, repository_dependency_tup, error_message
-def generate_repository_dependency_metadata_for_installed_repository( app, repository_dependencies_config, metadata_dict ):
+ if repository_dependency_is_valid:
+ valid_tool_dependencies_dict[ dependency_key ] = requirements_dict
+ else:
+ # Append the error message to the requirements_dict.
+ requirements_dict[ 'error' ] = error_message
+ invalid_tool_dependencies_dict[ dependency_key ] = requirements_dict
+ return valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, repository_dependency_is_valid, error_message
+def generate_repository_dependency_metadata( app, repository_dependencies_config, metadata_dict ):
"""
Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method
- is called only from Galaxy.
+ is called from the tool shed as well as from Galaxy.
"""
- repository_dependencies_tups = []
error_message = ''
try:
# Make sure we're looking at a valid repository_dependencies.xml file.
tree = util.parse_xml( repository_dependencies_config )
root = tree.getroot()
- is_valid = root.tag == 'repositories'
+ xml_is_valid = root.tag == 'repositories'
except Exception, e:
error_message = "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) )
log.debug( error_message )
- is_valid = False
- if is_valid:
- sa_session = app.model.context.current
+ xml_is_valid = False
+ if xml_is_valid:
+ invalid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
+ invalid_repository_dependencies_tups = []
+ valid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
+ valid_repository_dependencies_tups = []
for repository_elem in root.findall( 'repository' ):
- toolshed = repository_elem.attrib[ 'toolshed' ]
- name = repository_elem.attrib[ 'name' ]
- owner = repository_elem.attrib[ 'owner']
- changeset_revision = repository_elem.attrib[ 'changeset_revision' ]
- repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
- if repository_dependencies_tup not in repository_dependencies_tups:
- repository_dependencies_tups.append( repository_dependencies_tup )
- if repository_dependencies_tups:
- repository_dependencies_dict = dict( description=root.get( 'description' ),
- repository_dependencies=repository_dependencies_tups )
- metadata_dict[ 'repository_dependencies' ] = repository_dependencies_dict
- return metadata_dict, error_message
-def generate_repository_dependency_metadata_for_tool_shed( app, repository_dependencies_config, metadata_dict ):
- """
- Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method
- is called only from the tool shed.
- """
- repository_dependencies_tups = []
- error_message = ''
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = util.parse_xml( repository_dependencies_config )
- root = tree.getroot()
- is_valid = root.tag == 'repositories'
- except Exception, e:
- error_message = "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) )
- log.debug( error_message )
- is_valid = False
- if is_valid:
- for repository_elem in root.findall( 'repository' ):
- current_rd_tups, error_message = handle_repository_elem( app, repository_elem, repository_dependencies_tups )
- if error_message:
- # Log the problem, but generate metadata for the invalid repository dependencies.
- log.debug( error_message )
- for crdt in current_rd_tups:
- repository_dependencies_tups.append( crdt )
- if repository_dependencies_tups:
- repository_dependencies_dict = dict( description=root.get( 'description' ),
- repository_dependencies=repository_dependencies_tups )
- metadata_dict[ 'repository_dependencies' ] = repository_dependencies_dict
+ repository_dependencies_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app, repository_elem )
+ if repository_dependency_is_valid:
+ valid_repository_dependencies_tups.append( repository_dependencies_tup )
+ else:
+ # Append the error_message to the repository dependencies tuple.
+ toolshed, name, owner, changeset_revision = repository_dependencies_tup
+ repository_dependencies_tup = ( toolshed, name, owner, changeset_revision, error_message )
+ invalid_repository_dependencies_tups.append( repository_dependencies_tup )
+ if invalid_repository_dependencies_tups:
+ invalid_repository_dependencies_dict[ 'repository_dependencies' ] = invalid_repository_dependencies_tups
+ metadata_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_dict
+ if valid_repository_dependencies_tups:
+ valid_repository_dependencies_dict[ 'repository_dependencies' ] = valid_repository_dependencies_tups
+ metadata_dict[ 'repository_dependencies' ] = valid_repository_dependencies_dict
return metadata_dict, error_message
def generate_sharable_link_for_repository_in_tool_shed( trans, repository, changeset_revision=None ):
"""Generate the URL for sharing a repository that is in the tool shed."""
@@ -1452,12 +1490,27 @@
If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository,
then update the received metadata_dict with information from the parsed tool_dependencies_config.
"""
+ """
+ "{"orphan_tool_dependencies":
+ {"bwa/0.5.9":
+ {"name": "bwa",
+ "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
+ "type": "package", "version": "0.5.9"}},
+ "tool_dependencies":
+ {"bwa/0.5.9":
+ {"name": "bwa",
+ "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
+ "type": "package",
+ "version": "0.5.9"}}}"
+ """
error_message = ''
if original_repository_metadata:
# Keep a copy of the original tool dependencies dictionary and the list of tool dictionaries in the metadata.
- original_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None )
+ original_valid_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None )
+ original_invalid_tool_dependencies_dict = original_repository_metadata.get( 'invalid_tool_dependencies', None )
else:
- original_tool_dependencies_dict = None
+ original_valid_tool_dependencies_dict = None
+ original_invalid_tool_dependencies_dict = None
try:
tree = ElementTree.parse( tool_dependencies_config )
except Exception, e:
@@ -1466,81 +1519,53 @@
return metadata_dict, error_message
root = tree.getroot()
ElementInclude.include( root )
- tool_dependencies_dict = {}
- repository_dependency_tups = []
+ tool_dependency_is_valid = True
+ valid_tool_dependencies_dict = {}
+ invalid_tool_dependencies_dict = {}
+ valid_repository_dependency_tups = []
+ invalid_repository_dependency_tups = []
+ description = root.get( 'description' )
for elem in root:
if elem.tag == 'package':
- tool_dependencies_dict, repository_dependency_tup, message = generate_package_dependency_metadata( app, elem, tool_dependencies_dict )
- if repository_dependency_tup and repository_dependency_tup not in repository_dependency_tups:
- repository_dependency_tups.append( repository_dependency_tup )
- if message:
- log.debug( message )
- error_message = '%s %s' % ( error_message, message )
+ valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, repository_dependency_is_valid, message = \
+ generate_package_dependency_metadata( app, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict )
+ if repository_dependency_is_valid:
+ if repository_dependency_tup and repository_dependency_tup not in valid_repository_dependency_tups:
+ # We have a valid complex repository dependency.
+ valid_repository_dependency_tups.append( repository_dependency_tup )
+ else:
+ if repository_dependency_tup and repository_dependency_tup not in invalid_repository_dependency_tups:
+ # We have an invalid complex repository dependency, so mark the tool dependency as invalid.
+ tool_dependency_is_valid = False
+ # Append the error message to the invalid repository dependency tuple.
+ repository_dependency_tup.append( message )
+ invalid_repository_dependency_tups.append( repository_dependency_tup )
+ error_message = '%s %s' % ( error_message, message )
elif elem.tag == 'set_environment':
- tool_dependencies_dict = generate_environment_dependency_metadata( elem, tool_dependencies_dict )
- if tool_dependencies_dict:
- if original_tool_dependencies_dict:
+ # Tool dependencies of this type are always considered valid, but may be orphans.
+ valid_tool_dependencies_dict = generate_environment_dependency_metadata( elem, valid_tool_dependencies_dict )
+ if valid_tool_dependencies_dict:
+ if original_valid_tool_dependencies_dict:
# We're generating metadata on an update pulled to a tool shed repository installed into a Galaxy instance, so handle changes to
# tool dependencies appropriately.
- handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_tool_dependencies_dict, tool_dependencies_dict )
- metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict
- if repository_dependency_tups:
- repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', None )
- for repository_dependency_tup in repository_dependency_tups:
- rd_tool_shed, rd_name, rd_owner, rd_changeset_revision = repository_dependency_tup
- if app.name == 'community':
- if tool_shed_is_this_tool_shed( rd_tool_shed ):
- # Make sure the repository name id valid.
- valid_named_repository = get_repository_by_name( app, rd_name )
- if valid_named_repository:
- # See if the owner is valid.
- valid_owned_repository = get_repository_by_name_and_owner( app, rd_name, rd_owner )
- if valid_owned_repository:
- # See if the defined changeset revision is valid.
- if not changeset_is_valid( app, valid_owned_repository, rd_changeset_revision ):
- err_msg = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( rd_tool_shed, rd_name, rd_owner, rd_changeset_revision )
- err_msg += "because the changeset revision is invalid. "
- log.debug( err_msg )
- error_message += err_msg
- else:
- err_msg = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( rd_tool_shed, rd_name, rd_owner, rd_changeset_revision )
- err_msg += "because the owner is invalid. "
- log.debug( err_msg )
- error_message += err_msg
- else:
- err_msg = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
- ( rd_tool_shed, rd_name, rd_owner, rd_changeset_revision )
- err_msg += "because the name is invalid. "
- log.debug( err_msg )
- error_message += err_msg
- else:
- err_msg = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
- err_msg += "for tool shed %s, name %s, owner %s, changeset revision %s. " % ( rd_tool_shed, rd_name, rd_owner, rd_changeset_revision )
- log.debug( err_msg )
- error_message += err_msg
- else:
- repository_owner = repository.owner
- rd_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=rd_tool_shed,
- repository_name=rd_name,
- repository_owner=rd_owner,
- changeset_revision=rd_changeset_revision )
- if repository_dependencies_dict:
- if rd_key in repository_dependencies_dict:
- repository_dependencies = repository_dependencies_dict[ rd_key ]
- for repository_dependency_tup in repository_dependency_tups:
- if repository_dependency_tup not in repository_dependencies:
- repository_dependencies.append( repository_dependency_tup )
- repository_dependencies_dict[ rd_key ] = repository_dependencies
- else:
- repository_dependencies_dict[ rd_key ] = repository_dependency_tups
- else:
- repository_dependencies_dict = dict( root_key=rd_key,
- description=root.get( 'description' ),
- repository_dependencies=repository_dependency_tups )
- if repository_dependencies_dict:
- metadata_dict[ 'repository_dependencies' ] = repository_dependencies_dict
+ handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_valid_tool_dependencies_dict, valid_tool_dependencies_dict )
+ metadata_dict[ 'tool_dependencies' ] = valid_tool_dependencies_dict
+ if invalid_tool_dependencies_dict:
+ metadata_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_dict
+ if valid_repository_dependency_tups:
+ metadata_dict = update_repository_dependencies_metadata( metadata=metadata_dict,
+ repository_dependency_tups=valid_repository_dependency_tups,
+ is_valid=True,
+ description=description )
+ if invalid_repository_dependency_tups:
+ metadata_dict = update_repository_dependencies_metadata( metadata=metadata_dict,
+ repository_dependency_tups=invalid_repository_dependency_tups,
+ is_valid=False,
+ description=description )
+ # Determine and store orphan tool dependencies.
+ orphan_tool_dependencies = get_orphan_tool_dependencies( metadata_dict )
+ if orphan_tool_dependencies:
+ metadata_dict[ 'orphan_tool_dependencies' ] = orphan_tool_dependencies
return metadata_dict, error_message
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
@@ -1926,6 +1951,33 @@
sorted_changeset_tups = sorted( changeset_tups )
sorted_changeset_revisions = [ changeset_tup[ 1 ] for changeset_tup in sorted_changeset_tups ]
return sorted_changeset_revisions
+def get_orphan_tool_dependencies( metadata ):
+ """Inspect tool dependencies included in the received metadata and determine if any of them are orphans within the repository."""
+ orphan_tool_dependencies_dict = {}
+ if metadata:
+ tools = metadata.get( 'tools', None )
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ if tool_dependencies:
+ for td_key, requirements_dict in tool_dependencies.items():
+ if td_key in [ 'set_environment' ]:
+ for set_environment_dict in requirements_dict:
+ type = 'set_environment'
+ name = set_environment_dict.get( 'name', None )
+ version = None
+ if name:
+ if tool_dependency_is_orphan( type, name, version, tools ):
+ if td_key in orphan_tool_dependencies_dict:
+ orphan_tool_dependencies_dict[ td_key ].append( set_environment_dict )
+ else:
+ orphan_tool_dependencies_dict[ td_key ] = [ set_environment_dict ]
+ else:
+ type = requirements_dict.get( 'type', None )
+ name = requirements_dict.get( 'name', None )
+ version = requirements_dict.get( 'version', None )
+ if type and name:
+ if tool_dependency_is_orphan( type, name, version, tools ):
+ orphan_tool_dependencies_dict[ td_key ] = requirements_dict
+ return orphan_tool_dependencies_dict
def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
parent_id = None
# Compare from most recent to oldest.
@@ -2409,6 +2461,14 @@
( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) )
log.debug( message )
return updated_key_rd_dicts
+def get_updated_changeset_revisions_from_tool_shed( tool_shed_url, name, owner, changeset_revision ):
+ """Get all appropriate newer changeset revisions for the repository defined by the received tool_shed_url / name / owner combination."""
+ url = url_join( tool_shed_url,
+ 'repository/updated_changeset_revisions?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ return text
def get_url_from_repository_tool_shed( app, repository ):
"""
The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
@@ -2635,63 +2695,78 @@
all_repository_dependencies=all_repository_dependencies,
handled_key_rd_dicts=handled_key_rd_dicts,
circular_repository_dependencies=circular_repository_dependencies )
-def handle_repository_elem( app, repository_elem, repository_dependencies_tups ):
+def handle_repository_elem( app, repository_elem ):
"""
Process the received repository_elem which is a <repository> tag either from a repository_dependencies.xml file or a tool_dependencies.xml file.
If the former, we're generating repository dependencies metadata for a repository in the tool shed. If the latter, we're generating package
- dependency metadata with in Galaxy or the tool shed.
+ dependency metadata within Galaxy or the tool shed.
"""
- if repository_dependencies_tups is None:
- new_rd_tups = []
- else:
- new_rd_tups = [ rdt for rdt in repository_dependencies_tups ]
+ sa_session = app.model.context.current
+ is_valid = True
error_message = ''
- sa_session = app.model.context.current
toolshed = repository_elem.attrib[ 'toolshed' ]
name = repository_elem.attrib[ 'name' ]
owner = repository_elem.attrib[ 'owner' ]
changeset_revision = repository_elem.attrib[ 'changeset_revision' ]
+ repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
user = None
repository = None
if app.name == 'galaxy':
- # We're in Galaxy.
- try:
- repository = sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner ) ) \
- .first()
- except:
- error_message = "Invalid name <b>%s</b> or owner <b>%s</b> defined for repository. Repository dependencies will be ignored." % ( name, owner )
+ # We're in Galaxy. We reach here when we're generating the metadata for a tool dependencies package defined for a repository or when we're
+    # generating metadata for an installed repository.
+ #try:
+ # See if we can locate the installed repository via the changeset_revision defined in the repository_elem (it may be outdated). If we're
+ # successful in locating an installed repository with the attributes defined in the repository_elem, we know it is valid.
+ repository = get_repository_for_dependency_relationship( app, toolshed, name, owner, changeset_revision )
+ if repository:
+ return repository_dependencies_tup, is_valid, error_message
+ else:
+ # Send a request to the tool shed to retrieve appropriate additional changeset revisions with which the repository may have been installed.
+ try:
+ # Hopefully the tool shed is accessible.
+ text = get_updated_changeset_revisions_from_tool_shed( toolshed, name, owner, changeset_revision )
+ except:
+ text = None
+ if text:
+ updated_changeset_revisions = util.listify( text )
+ for updated_changeset_revision in updated_changeset_revisions:
+ repository = get_repository_for_dependency_relationship( app, toolshed, name, owner, updated_changeset_revision )
+ if repository:
+ return repository_dependencies_tup, is_valid, error_message
+ # We'll currently default to setting the repository dependency definition as invalid if an installed repository cannot be found.
+ # This may not be ideal because the tool shed may have simply been inaccessible when metadata was being generated for the installed
+ # tool shed repository.
+ error_message = "Ignoring invalid repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
+ ( toolshed, name, owner, changeset_revision )
log.debug( error_message )
- return new_rd_tups, error_message
- repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
- if repository_dependencies_tup not in new_rd_tups:
- new_rd_tups.append( repository_dependencies_tup )
+ is_valid = False
+ return repository_dependencies_tup, is_valid, error_message
else:
# We're in the tool shed.
if tool_shed_is_this_tool_shed( toolshed ):
- # Append the repository dependency definition regardless of whether it's valid or not, as Galaxy needs this to
- # properly display an error when the repository dependency is invalid at the time of installation.
- repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
- if repository_dependencies_tup not in new_rd_tups:
- new_rd_tups.append( repository_dependencies_tup )
try:
user = sa_session.query( app.model.User ) \
.filter( app.model.User.table.c.username == owner ) \
.one()
- except Exception, e:
- error_message = "Invalid owner <b>%s</b> defined for repository <b>%s</b>. Repository dependencies will be ignored." % ( str( owner ), str( name ) )
+ except Exception, e:
+ error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
+ ( toolshed, name, owner, changeset_revision )
+ error_message += "because the owner is invalid. "
log.debug( error_message )
- return new_rd_tups, error_message
+ is_valid = False
+ return repository_dependencies_tup, is_valid, error_message
try:
repository = sa_session.query( app.model.Repository ) \
.filter( and_( app.model.Repository.table.c.name == name,
app.model.Repository.table.c.user_id == user.id ) ) \
.one()
except:
- error_message = "Invalid repository name <b>%s</b> defined. Repository dependencies will be ignored." % str( name )
+ error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
+ ( toolshed, name, owner, changeset_revision )
+ error_message += "because the name is invalid. "
log.debug( error_message )
- return new_rd_tups, error_message
+ is_valid = False
+ return repository_dependencies_tup, is_valid, error_message
# Find the specified changeset revision in the repository's changelog to see if it's valid.
found = False
repo = hg.repository( get_configured_ui(), repository.repo_path( app ) )
@@ -2701,15 +2776,20 @@
found = True
break
if not found:
- error_message = "Invalid changeset revision <b>%s</b> defined. Repository dependencies will be ignored." % str( changeset_revision )
+ error_message = "Ignoring repository dependency definition for tool shed %s, name %s, owner %s, changeset revision %s "% \
+ ( toolshed, name, owner, changeset_revision )
+ error_message += "because the changeset revision is invalid. "
log.debug( error_message )
- return new_rd_tups, error_message
+ is_valid = False
+ return repository_dependencies_tup, is_valid, error_message
else:
- # Repository dependencies are currentlhy supported within a single tool shed.
- error_message = "Invalid tool shed <b>%s</b> defined for repository <b>%s</b>. " % ( toolshed, name )
- error_message += "Repository dependencies are currently supported within a single tool shed."
+ # Repository dependencies are currently supported within a single tool shed.
+ error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
+ error_message += "for tool shed %s, name %s, owner %s, changeset revision %s. " % ( toolshed, name, owner, changeset_revision )
log.debug( error_message )
- return new_rd_tups, error_message
+ is_valid = False
+ return repository_dependencies_tup, is_valid, error_message
+ return repository_dependencies_tup, is_valid, error_message
def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
# Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
message = ''
@@ -2761,31 +2841,6 @@
message = str( e )
error = True
return error, message
-def has_orphan_tool_dependencies_in_tool_shed( metadata ):
- """Inspect tool dependencies included in the received metadata and determine if any of them are orphans within the repository in the tool shed."""
- if metadata:
- tools = metadata.get( 'tools', None )
- tool_dependencies = metadata.get( 'tool_dependencies', None )
- if tool_dependencies:
- for td_key, requirements_dict in tool_dependencies.items():
- if td_key in [ 'set_environment' ]:
- for set_environment_dict in requirements_dict:
- type = 'set_environment'
- name = set_environment_dict.get( 'name', None )
- version = None
- if name:
- is_orphan_in_tool_shed = tool_dependency_is_orphan_in_tool_shed( type, name, version, tools )
- if is_orphan_in_tool_shed:
- return True
- else:
- type = requirements_dict.get( 'type', None )
- name = requirements_dict.get( 'name', None )
- version = requirements_dict.get( 'version', None )
- if type and name:
- is_orphan_in_tool_shed = tool_dependency_is_orphan_in_tool_shed( type, name, version, tools )
- if is_orphan_in_tool_shed:
- return True
- return False
def has_previous_repository_reviews( trans, repository, changeset_revision ):
"""Determine if a repository has a changeset revision review prior to the received changeset revision."""
repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
@@ -2839,6 +2894,8 @@
return True
return False
def is_downloadable( metadata_dict ):
+    # NOTE: although repository README files are considered Galaxy utilities, they have no effect on determining if a revision is installable.
+ # See the comments in the compare_readme_files() method.
if 'datatypes' in metadata_dict:
# We have proprietary datatypes.
return True
@@ -3640,6 +3697,7 @@
"""Set metadata on the repository tip in the tool shed - this method is not called from Galaxy."""
error_message, status = set_repository_metadata( trans, repository, content_alert_str=content_alert_str, **kwd )
if error_message:
+ # FIXME: This probably should not redirect since this method is called from the upload controller as well as the repository controller.
# If there is an error, display it.
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
@@ -3654,7 +3712,7 @@
except:
file_name = fpath
return file_name
-def tool_dependency_is_orphan_in_tool_shed( type, name, version, tools ):
+def tool_dependency_is_orphan( type, name, version, tools ):
"""
Determine if the combination of the received type, name and version is defined in the <requirement> tag for at least one tool in the received list of tools.
If not, the tool dependency defined by the combination is considered an orphan in it's repository in the tool shed.
@@ -3841,6 +3899,39 @@
# It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
# purging is not supported by the mercurial API.
commands.update( get_configured_ui(), repo, rev=ctx_rev )
+def update_repository_dependencies_metadata( metadata, repository_dependency_tups, is_valid, description ):
+ if is_valid:
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', None )
+ else:
+ repository_dependencies_dict = metadata.get( 'invalid_repository_dependencies', None )
+ for repository_dependency_tup in repository_dependency_tups:
+ if is_valid:
+ tool_shed, name, owner, changeset_revision = repository_dependency_tup
+ else:
+ tool_shed, name, owner, changeset_revision, error_message = repository_dependency_tup
+ rd_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=tool_shed,
+ repository_name=name,
+ repository_owner=owner,
+ changeset_revision=changeset_revision )
+ if repository_dependencies_dict:
+ if rd_key in repository_dependencies_dict:
+ repository_dependencies = repository_dependencies_dict[ rd_key ]
+ for repository_dependency_tup in repository_dependency_tups:
+ if repository_dependency_tup not in repository_dependencies:
+ repository_dependencies.append( repository_dependency_tup )
+ repository_dependencies_dict[ rd_key ] = repository_dependencies
+ else:
+ repository_dependencies_dict[ rd_key ] = repository_dependency_tups
+ else:
+ repository_dependencies_dict = dict( root_key=rd_key,
+ description=description,
+ repository_dependencies=repository_dependency_tups )
+ if repository_dependencies_dict:
+ if is_valid:
+ metadata[ 'repository_dependencies' ] = repository_dependencies_dict
+ else:
+ metadata[ 'invalid_repository_dependencies' ] = repository_dependencies_dict
+ return metadata
def url_join( *args ):
parts = []
for arg in args:
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -2001,11 +2001,9 @@
all_repository_dependencies=None,
handled_key_rd_dicts=None )
if metadata:
- if 'repository_dependencies' in metadata and not repository_dependencies:
- message += 'The repository dependency definitions for this repository are invalid and will be ignored. Make sure valid <b>toolshed</b>, '
- message += '<b>name</b>, <b>owner</b> and <b>changeset_revision</b> values are defined in the contained <b>repository_dependencies.xml</b> '
- message += 'file to correct this problem.'
- status = 'error'
+ if 'orphan_tool_dependencies' in metadata:
+ orphan_message, status = suc.generate_message_for_orphan_tool_dependencies( metadata )
+ message += orphan_message
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
@@ -2221,7 +2219,7 @@
message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
status = 'error'
else:
- message = "All repository metadata has been reset."
+ message = "All repository metadata has been reset. "
status = 'done'
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
@@ -2755,9 +2753,9 @@
all_repository_dependencies=None,
handled_key_rd_dicts=None )
if metadata:
- if 'repository_dependencies' in metadata and not repository_dependencies:
- message += 'The repository dependency definitions for this repository are invalid and will be ignored.'
- status = 'error'
+ if 'orphan_tool_dependencies' in metadata:
+ orphan_message, status = suc.generate_message_for_orphan_tool_dependencies( metadata )
+ message += orphan_message
else:
repository_metadata_id = None
metadata = None
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -156,7 +156,7 @@
else:
source_type = "file"
source = uploaded_file_filename
- message = "The %s '%s' has been successfully%suploaded to the repository. " % ( source_type, source, uncompress_str )
+ message = "The %s <b>%s</b> has been successfully%suploaded to the repository. " % ( source_type, source, uncompress_str )
if istar and ( undesirable_dirs_removed or undesirable_files_removed ):
items_removed = undesirable_dirs_removed + undesirable_files_removed
message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed
@@ -169,16 +169,32 @@
suc.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
# Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies weren't loaded due to a requirement tag mismatch
# or some other problem.
- if suc.get_config_from_disk( 'tool_dependencies.xml', repo_dir ):
- if repository.metadata_revisions:
- # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
- metadata_dict = repository.metadata_revisions[0].metadata
- else:
- metadata_dict = {}
- if suc.has_orphan_tool_dependencies_in_tool_shed( metadata_dict ):
- message += 'Name, version and type from a tool requirement tag does not match the information in the "tool_dependencies.xml file", '
- message += 'so one or more of the defined tool dependencies are considered orphans within this repository.'
- status = 'warning'
+ if repository.metadata_revisions:
+ # A repository's metadata revisions are order descending by update_time, so the zeroth revision will be the tip just after an upload.
+ metadata_dict = repository.metadata_revisions[0].metadata
+ else:
+ metadata_dict = {}
+ # Handle messaging for orphan tool dependencies.
+ orphan_message, status = suc.generate_message_for_orphan_tool_dependencies( metadata_dict )
+ if orphan_message:
+ message += orphan_message
+ # Display message for invalid tool dependencies.
+ invalid_tool_dependencies = metadata_dict.get( 'invalid_tool_dependencies', None )
+ if invalid_tool_dependencies:
+ for td_key, requirement_dict in invalid_tool_dependencies.items():
+ error = requirement_dict.get( 'error', None )
+ if error:
+ message = "%s %s" % ( message, str( error ) )
+ status = 'error'
+ # Display message for invalid repository dependencies.
+ invalid_repository_dependencies_dict = metadata_dict.get( 'invalid_repository_dependencies', None )
+ if invalid_repository_dependencies_dict:
+ invalid_repository_dependencies = invalid_repository_dependencies_dict[ 'invalid_repository_dependencies' ]
+ for repository_dependency_tup in invalid_repository_dependencies:
+ toolshed, name, owner, changeset_revision, error = repository_dependency_tup
+ if error:
+ message = "%s %s" % ( message, str( error ) )
+ status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
suc.reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -15,6 +15,8 @@
self.description = None
self.datatypes = []
self.folders = []
+ self.invalid_repository_dependencies = []
+ self.invalid_tool_dependencies = []
self.invalid_tools = []
self.valid_tools = []
self.tool_dependencies = []
@@ -54,6 +56,16 @@
self.mimetype = mimetype
self.subclass = subclass
+class InvalidRepositoryDependency( object ):
+ """Invalid repository dependency definition object"""
+ def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, error=None ):
+ self.id = id
+ self.toolshed = toolshed
+ self.repository_name = repository_name
+ self.repository_owner = repository_owner
+ self.changeset_revision = changeset_revision
+ self.error = error
+
class InvalidTool( object ):
"""Invalid tool object"""
def __init__( self, id=None, tool_config=None, repository_id=None, changeset_revision=None, repository_installation_status=None ):
@@ -63,6 +75,15 @@
self.changeset_revision = changeset_revision
self.repository_installation_status = repository_installation_status
+class InvalidToolDependency( object ):
+ """Invalid tool dependency definition object"""
+ def __init__( self, id=None, name=None, version=None, type=None, error=None ):
+ self.id = id
+ self.name = name
+ self.version = version
+ self.type = type
+ self.error = error
+
class ReadMe( object ):
"""Readme text object"""
def __init__( self, id=None, name=None, text=None ):
@@ -158,6 +179,79 @@
else:
datatypes_root_folder = None
return folder_id, datatypes_root_folder
+def build_invalid_repository_dependencies_root_folder( trans, folder_id, invalid_repository_dependencies_dict ):
+ """Return a folder hierarchy containing invalid repository dependencies."""
+ label = 'Invalid repository dependencies'
+ if invalid_repository_dependencies_dict:
+ invalid_repository_dependency_id = 0
+ folder_id += 1
+ invalid_repository_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+ folder_id += 1
+ invalid_repository_dependencies_folder = Folder( id=folder_id,
+ key='invalid_repository_dependencies',
+ label=label,
+ parent=invalid_repository_dependencies_root_folder )
+ invalid_repository_dependencies_root_folder.folders.append( invalid_repository_dependencies_folder )
+ invalid_repository_dependencies = invalid_repository_dependencies_dict[ 'repository_dependencies' ]
+ for invalid_repository_dependency in invalid_repository_dependencies:
+ folder_id += 1
+ invalid_repository_dependency_id += 1
+ toolshed, name, owner, changeset_revision, error = invalid_repository_dependency
+ key = generate_repository_dependencies_key_for_repository( toolshed, name, owner, changeset_revision )
+ label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( name, changeset_revision, owner )
+ folder = Folder( id=folder_id,
+ key=key,
+ label=label,
+ parent=invalid_repository_dependencies_folder )
+ ird = InvalidRepositoryDependency( id=invalid_repository_dependency_id,
+ toolshed=toolshed,
+ repository_name=name,
+ repository_owner=owner,
+ changeset_revision=changeset_revision,
+ error=error )
+ folder.invalid_repository_dependencies.append( ird )
+ invalid_repository_dependencies_folder.folders.append( folder )
+ else:
+ invalid_repository_dependencies_root_folder = None
+ return folder_id, invalid_repository_dependencies_root_folder
+def build_invalid_tool_dependencies_root_folder( trans, folder_id, invalid_tool_dependencies_dict ):
+ """Return a folder hierarchy containing invalid tool dependencies."""
+ # Invalid tool dependencies are always packages like:
+ # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1", "error": "some string" }}
+ label = 'Invalid tool dependencies'
+ if invalid_tool_dependencies_dict:
+ invalid_tool_dependency_id = 0
+ folder_id += 1
+ invalid_tool_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
+ folder_id += 1
+ invalid_tool_dependencies_folder = Folder( id=folder_id,
+ key='invalid_tool_dependencies',
+ label=label,
+ parent=invalid_tool_dependencies_root_folder )
+ invalid_tool_dependencies_root_folder.folders.append( invalid_tool_dependencies_folder )
+ for td_key, requirements_dict in invalid_tool_dependencies_dict.items():
+ folder_id += 1
+ invalid_tool_dependency_id += 1
+ name = requirements_dict[ 'name' ]
+ type = requirements_dict[ 'type' ]
+ version = requirements_dict[ 'version' ]
+ error = requirements_dict[ 'error' ]
+ key = generate_tool_dependencies_key( name, version, type )
+ label = "Version <b>%s</b> of the <b>%s</b><b>%s</b>" % ( version, name, type )
+ folder = Folder( id=folder_id,
+ key=key,
+ label=label,
+ parent=invalid_tool_dependencies_folder )
+ itd = InvalidToolDependency( id=invalid_tool_dependency_id,
+ name=name,
+ version=version,
+ type=type,
+ error=error )
+ folder.invalid_tool_dependencies.append( itd )
+ invalid_tool_dependencies_folder.folders.append( folder )
+ else:
+ invalid_tool_dependencies_root_folder = None
+ return folder_id, invalid_tool_dependencies_root_folder
def build_invalid_tools_folder( trans, folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools' ):
"""Return a folder hierarchy containing invalid tools."""
# TODO: Should we display invalid tools on the tool panel selection page when installing the repository into Galaxy?
@@ -346,7 +440,7 @@
if dependency_key in [ 'set_environment' ]:
for set_environment_dict in requirements_dict:
if trans.webapp.name == 'community':
- is_orphan = set_environment_dict.get( 'is_orphan_in_tool_shed', False )
+ is_orphan = set_environment_dict.get( 'is_orphan', False )
else:
# TODO: handle this in Galaxy
is_orphan = False
@@ -373,7 +467,7 @@
folder.tool_dependencies.append( tool_dependency )
else:
if trans.webapp.name == 'community':
- is_orphan = requirements_dict.get( 'is_orphan_in_tool_shed', False )
+ is_orphan = requirements_dict.get( 'is_orphan', False )
else:
# TODO: handle this in Galaxy
is_orphan = False
@@ -476,6 +570,8 @@
str( repository_owner ),
STRSEP,
str( changeset_revision ) )
+def generate_tool_dependencies_key( name, version, type ):
+ return '%s%s%s%s%s' % ( str( name ), STRSEP, str( version ), STRSEP, str( type ) )
def get_folder( folder, key ):
if folder.key == key:
return folder
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -224,6 +224,10 @@
folder_label = "%s<i> - installation of these additional repositories is required</i>" % folder_label
if trans.webapp.name == 'galaxy':
col_span_str = 'colspan="4"'
+ elif folder.label == 'Invalid repository dependencies':
+ folder_label = "%s<i> - click the repository dependency to see why it is invalid</i>" % folder_label
+ elif folder.label == 'Invalid tool dependencies':
+ folder_label = "%s<i> - click the tool dependency to see why it is invalid</i>" % folder_label
elif folder.label == 'Valid tools':
col_span_str = 'colspan="3"'
if folder.description:
@@ -266,10 +270,16 @@
%for readme in folder.readme_files:
${render_readme( readme, pad, my_row, row_counter )}
%endfor
+ %for invalid_repository_dependency in folder.invalid_repository_dependencies:
+ ${render_invalid_repository_dependency( invalid_repository_dependency, pad, my_row, row_counter )}
+ %endfor
%for index, repository_dependency in enumerate( folder.repository_dependencies ):
<% row_is_header = index == 0 %>
${render_repository_dependency( repository_dependency, pad, my_row, row_counter, row_is_header )}
%endfor
+ %for invalid_tool_dependency in folder.invalid_tool_dependencies:
+ ${render_invalid_tool_dependency( invalid_tool_dependency, pad, my_row, row_counter )}
+ %endfor
%for index, tool_dependency in enumerate( folder.tool_dependencies ):
<% row_is_header = index == 0 %>
${render_tool_dependency( tool_dependency, pad, my_row, row_counter, row_is_header )}
@@ -321,6 +331,25 @@
%></%def>
+<%def name="render_invalid_repository_dependency( invalid_repository_dependency, pad, parent, row_counter )">
+ <%
+ encoded_id = trans.security.encode_id( invalid_repository_dependency.id )
+ %>
+ <tr class="datasetRow"
+ %if parent is not None:
+ parent="${parent}"
+ %endif
+ id="libraryItem-${encoded_id}">
+ <td style="padding-left: ${pad+20}px;">
+ ${ invalid_repository_dependency.error | h }
+ </td>
+ </tr>
+ <%
+ my_row = row_counter.count
+ row_counter.increment()
+ %>
+</%def>
+
<%def name="render_invalid_tool( invalid_tool, pad, parent, row_counter, valid=True )"><% encoded_id = trans.security.encode_id( invalid_tool.id ) %><tr class="datasetRow"
@@ -344,6 +373,25 @@
%></%def>
+<%def name="render_invalid_tool_dependency( invalid_tool_dependency, pad, parent, row_counter )">
+ <%
+ encoded_id = trans.security.encode_id( invalid_tool_dependency.id )
+ %>
+ <tr class="datasetRow"
+ %if parent is not None:
+ parent="${parent}"
+ %endif
+ id="libraryItem-${encoded_id}">
+ <td style="padding-left: ${pad+20}px;">
+ ${ invalid_tool_dependency.error | h }
+ </td>
+ </tr>
+ <%
+ my_row = row_counter.count
+ row_counter.increment()
+ %>
+</%def>
+
<%def name="render_readme( readme, pad, parent, row_counter )"><%
from galaxy.util.shed_util_common import to_safe_string
@@ -581,15 +629,24 @@
datatypes_root_folder = containers_dict.get( 'datatypes', None )
invalid_tools_root_folder = containers_dict.get( 'invalid_tools', None )
+ invalid_repository_dependencies_root_folder = containers_dict.get( 'invalid_repository_dependencies', None )
readme_files_root_folder = containers_dict.get( 'readme_files', None )
repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
+ invalid_tool_dependencies_root_folder = containers_dict.get( 'invalid_tool_dependencies', None )
tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
valid_tools_root_folder = containers_dict.get( 'valid_tools', None )
workflows_root_folder = containers_dict.get( 'workflows', None )
has_contents = datatypes_root_folder or invalid_tools_root_folder or valid_tools_root_folder or workflows_root_folder
+ has_dependencies = \
+ invalid_repository_dependencies_root_folder or \
+ invalid_tool_dependencies_root_folder or \
+ missing_repository_dependencies_root_folder or \
+ repository_dependencies_root_folder or \
+ tool_dependencies_root_folder or \
+ missing_tool_dependencies_root_folder
class RowCounter( object ):
def __init__( self ):
@@ -611,10 +668,17 @@
</div></div>
%endif
- %if missing_repository_dependencies_root_folder or repository_dependencies_root_folder or tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
+ %if has_dependencies:
<div class="toolForm"><div class="toolFormTitle">Dependencies of this repository</div><div class="toolFormBody">
+ %if invalid_repository_dependencies_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_repository_dependencies">
+ ${render_folder( invalid_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
%if missing_repository_dependencies_root_folder:
<p/><% row_counter = RowCounter() %>
@@ -629,6 +693,13 @@
${render_folder( repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
</table>
%endif
+ %if invalid_tool_dependencies_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="invalid_tool_dependencies">
+ ${render_folder( invalid_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
%if tool_dependencies_root_folder:
<p/><% row_counter = RowCounter() %>
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f templates/webapps/community/repository/view_workflow.mako
--- a/templates/webapps/community/repository/view_workflow.mako
+++ b/templates/webapps/community/repository/view_workflow.mako
@@ -21,7 +21,6 @@
browse_label = 'Browse or delete repository tip files'
else:
browse_label = 'Browse repository tip files'
- has_readme = metadata and 'readme_files' in metadata
# <li><a class="action-button" href="${h.url_for( controller='repository', action='install_repositories_by_revision', repository_ids=trans.security.encode_id( repository.id ), changeset_revisions=changeset_revision )}">Install repository to Galaxy</a></li>
%>
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -98,7 +98,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded invalid tool dependency XML.',
- strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
+ strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
strings_not_displayed=[] )
def test_0035_upload_valid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package.'''
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -174,6 +174,7 @@
repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
datatypes_tip = self.get_repository_tip( datatypes_repository )
+ strings_displayed = []
# Iterate through all metadata revisions and check for repository dependencies.
for metadata, changeset_revision in repository_metadata:
# Add the dependency description and datatypes repository details to the strings to check.
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_0100_complex_repository_dependencies.py
--- a/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
@@ -44,7 +44,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded tool_dependencies.xml.',
- strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
+ strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool' ],
strings_not_displayed=[] )
self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
def test_0010_create_bwa_base_repository( self ):
@@ -79,7 +79,7 @@
owner = tool_repository.user.username
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid tool shed <b>%s</b> defined' % url ]
+ strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
self.upload_file( repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -100,7 +100,7 @@
owner = tool_repository.user.username
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ strings_displayed = [ 'because the name is invalid' ]
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -121,7 +121,7 @@
owner = 'invalid_owner!?'
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid owner <b>%s</b> defined' % owner ]
+ strings_displayed = [ 'because the owner is invalid.' ]
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -142,7 +142,7 @@
owner = tool_repository.user.username
changeset_revision = '1234abcd'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ strings_displayed = [ 'because the changeset revision is invalid.' ]
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
--- a/test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
@@ -82,7 +82,7 @@
owner = repository.user.username
changeset_revision = self.get_repository_tip( repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid tool shed <b>%s</b> defined for repository <b>%s</b>' % ( url, repository.name ) ]
+ strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -103,7 +103,7 @@
owner = repository.user.username
changeset_revision = self.get_repository_tip( repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ strings_displayed = [ 'because the name is invalid.' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -124,7 +124,7 @@
owner = '!?invalid?!'
changeset_revision = self.get_repository_tip( repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid owner <b>%s</b> defined for repository <b>%s</b>' % ( owner, repository.name ) ]
+ strings_displayed = [ 'because the owner is invalid.' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -145,7 +145,7 @@
owner = repository.user.username
changeset_revision = '!?invalid?!'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ strings_displayed = [ 'because the changeset revision is invalid.' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -80,7 +80,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded invalid tool dependency XML.',
- strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
+ strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
strings_not_displayed=[] )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'tool_dependencies.xml' ),
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
--- a/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
+++ b/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
@@ -53,7 +53,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded tool_dependencies.xml.',
- strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
+ strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool' ],
strings_not_displayed=[] )
self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
def test_0010_create_bwa_base_repository( self ):
@@ -92,7 +92,7 @@
owner = tool_repository.user.username
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid tool shed <b>%s</b> defined' % url ]
+ strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
self.upload_file( repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -115,7 +115,7 @@
owner = tool_repository.user.username
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ strings_displayed = [ 'because the name is invalid.' ]
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -138,7 +138,7 @@
owner = 'invalid_owner!?'
changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid owner <b>%s</b> defined' % owner ]
+ strings_displayed = [ 'because the owner is invalid.' ]
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
@@ -161,7 +161,7 @@
owner = tool_repository.user.username
changeset_revision = '1234abcd'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ strings_displayed = [ 'because the changeset revision is invalid.' ]
self.upload_file( base_repository,
filename='tool_dependencies.xml',
filepath=dependency_path,
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
--- a/test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
+++ b/test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
@@ -90,7 +90,7 @@
owner = repository.user.username
changeset_revision = self.get_repository_tip( repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid tool shed <b>%s</b> defined for repository <b>%s</b>' % ( url, repository.name ) ]
+ strings_displayed = [ 'Repository dependencies are currently supported only within the same tool shed' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -113,7 +113,7 @@
owner = repository.user.username
changeset_revision = self.get_repository_tip( repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ strings_displayed = [ 'because the name is invalid.' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -136,7 +136,7 @@
owner = '!?invalid?!'
changeset_revision = self.get_repository_tip( repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid owner <b>%s</b> defined for repository <b>%s</b>' % ( owner, repository.name ) ]
+ strings_displayed = [ 'because the owner is invalid' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -159,7 +159,7 @@
owner = repository.user.username
changeset_revision = '!?invalid?!'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
- strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ strings_displayed = [ 'because the changeset revision is invalid.' ]
self.upload_file( emboss_repository,
filename='repository_dependencies.xml',
filepath=dependency_path,
@@ -174,7 +174,7 @@
self.galaxy_logout()
self.galaxy_login( email=common.admin_email, username=common.admin_username )
repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- preview_strings_displayed = [ 'emboss_0110', self.get_repository_tip( repository ), 'will be ignored' ]
+ preview_strings_displayed = [ 'emboss_0110', self.get_repository_tip( repository ), 'Ignoring repository dependency definition' ]
self.install_repository( emboss_repository_name,
common.test_user_1_name,
category_name,
diff -r b9a5aee402b31c2a316de0f68efe66fdb141418e -r ec4da708e45175610a33d16fcfc68732e425f54f test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -74,7 +74,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded invalid tool dependency XML.',
- strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
+ strings_displayed=[ 'The settings for <b>name</b>, <b>version</b> and <b>type</b> from a contained tool configuration' ],
strings_not_displayed=[] )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'tool_dependencies.xml' ),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
15 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b9a5aee402b3/
changeset: b9a5aee402b3
user: inithello
date: 2013-02-15 20:13:33
summary: Core twill framework for automatically installing a defined list of tool shed repositories. This is the first component of a suite of scripts to install a repository, then run all functional tests defined in its tools, and record the result.
affected #: 7 files
diff -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 -r b9a5aee402b31c2a316de0f68efe66fdb141418e test/install_and_test_tool_shed_repositories/__init__.py
--- /dev/null
+++ b/test/install_and_test_tool_shed_repositories/__init__.py
@@ -0,0 +1,1 @@
+"""Install and test tool shed repositories."""
\ No newline at end of file
diff -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 -r b9a5aee402b31c2a316de0f68efe66fdb141418e test/install_and_test_tool_shed_repositories/base/test_db_util.py
--- /dev/null
+++ b/test/install_and_test_tool_shed_repositories/base/test_db_util.py
@@ -0,0 +1,47 @@
+import logging
+import galaxy.model as model
+from galaxy.model.orm import and_
+from galaxy.model.mapping import context as sa_session
+
+log = logging.getLogger(__name__)
+
+def delete_obj( obj ):
+ sa_session.delete( obj )
+ sa_session.flush()
+def delete_user_roles( user ):
+ for ura in user.roles:
+ sa_session.delete( ura )
+ sa_session.flush()
+def flush( obj ):
+ sa_session.add( obj )
+ sa_session.flush()
+def get_repository( repository_id ):
+ return sa_session.query( model.ToolShedRepository ) \
+ .filter( model.ToolShedRepository.table.c.id == repository_id ) \
+ .first()
+def get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision ):
+ return sa_session.query( model.ToolShedRepository ) \
+ .filter( and_( model.ToolShedRepository.table.c.name == name,
+ model.ToolShedRepository.table.c.owner == owner,
+ model.ToolShedRepository.table.c.installed_changeset_revision == changeset_revision ) ) \
+ .one()
+def get_private_role( user ):
+ for role in user.all_roles():
+ if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+ return role
+ raise AssertionError( "Private role not found for user '%s'" % user.email )
+def mark_obj_deleted( obj ):
+ obj.deleted = True
+ sa_session.add( obj )
+ sa_session.flush()
+def refresh( obj ):
+ sa_session.refresh( obj )
+def get_private_role( user ):
+ for role in user.all_roles():
+ if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+ return role
+ raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_user( email ):
+ return sa_session.query( model.User ) \
+ .filter( model.User.table.c.email==email ) \
+ .first()
diff -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 -r b9a5aee402b31c2a316de0f68efe66fdb141418e test/install_and_test_tool_shed_repositories/base/twilltestcase.py
--- /dev/null
+++ b/test/install_and_test_tool_shed_repositories/base/twilltestcase.py
@@ -0,0 +1,124 @@
+import galaxy.model as model
+import common, string, os, re, test_db_util, simplejson, logging, time, sys
+import galaxy.util as util
+from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
+from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+
+log = logging.getLogger( __name__ )
+
+class InstallTestRepository( TwillTestCase ):
+ def setUp( self ):
+ # Security helper
+ id_secret = os.environ.get( 'GALAXY_INSTALL_TEST_SECRET', 'changethisinproductiontoo' )
+ self.security = security.SecurityHelper( id_secret=id_secret )
+ self.history_id = None
+ self.test_tmp_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TMP_DIR', None)
+ self.host = os.environ.get( 'GALAXY_INSTALL_TEST_HOST' )
+ self.port = os.environ.get( 'GALAXY_INSTALL_TEST_PORT' )
+ self.url = "http://%s:%s" % ( self.host, self.port )
+ self.shed_tool_data_table_conf = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF' )
+ self.file_dir = os.environ.get( 'GALAXY_INSTALL_TEST_FILE_DIR', None )
+ self.tool_data_path = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_PATH' )
+ self.shed_tool_conf = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_CONF' )
+ # TODO: Figure out a way to alter these attributes during tests.
+ self.galaxy_tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' )
+ self.shed_tools_dict = {}
+ self.home()
+ def initiate_installation_process( self,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ no_changes=True,
+ new_tool_panel_section=None ):
+ html = self.last_page()
+ # Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
+ # installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
+ # group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
+ # admin_toolshed controller.
+ install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
+ if install_parameters:
+ iri_ids = install_parameters.group(1)
+ # In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
+ # This regex ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id
+ # will handle them correctly. It's safe to pass the cleaned list to manage_repositories, because it can parse
+ # comma-separated values.
+ repository_ids = str( iri_ids )
+ repository_ids = re.sub( '[^a-fA-F0-9,]+', '', repository_ids )
+ encoded_kwd = install_parameters.group(2)
+ reinstalling = install_parameters.group(3)
+ url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
+ ( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
+ self.visit_url( url )
+ return util.listify( repository_ids )
+ def install_repository( self, repository_info_dict, install_tool_dependencies=True, install_repository_dependencies=True,
+ strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[],
+ post_submit_strings_displayed=[], new_tool_panel_section=None, **kwd ):
+ name = repository_info_dict[ 'name' ]
+ owner = repository_info_dict[ 'owner' ]
+ changeset_revision = repository_info_dict[ 'changeset_revision' ]
+ encoded_repository_id = repository_info_dict[ 'encoded_repository_id' ]
+ tool_shed_url = repository_info_dict[ 'tool_shed_url' ]
+ preview_params = urllib.urlencode( dict( repository_id=encoded_repository_id, changeset_revision=changeset_revision ) )
+ self.visit_url( '%s/repository/preview_tools_in_changeset?%s' % ( tool_shed_url, preview_params ) )
+ install_params = urllib.urlencode( dict( repository_ids=encoded_repository_id,
+ changeset_revisions=changeset_revision,
+ galaxy_url=self.url ) )
+ # If the tool shed does not have the same hostname as the Galaxy server being used for these tests,
+ # twill will not carry over previously set cookies for the Galaxy server when following the
+ # install_repositories_by_revision redirect, so we have to include 403 in the allowed HTTP
+ # status codes and log in again.
+ url = '%s/repository/install_repositories_by_revision?%s' % ( tool_shed_url, install_params )
+ self.visit_url( url, allowed_codes=[ 200, 403 ] )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu', username='test' )
+ install_params = urllib.urlencode( dict( repository_ids=encoded_repository_id,
+ changeset_revisions=changeset_revision,
+ tool_shed_url=tool_shed_url ) )
+ url = '/admin_toolshed/prepare_for_install?%s' % install_params
+ self.visit_url( url )
+ # This section is tricky, due to the way twill handles form submission. The tool dependency checkbox needs to
+ # be hacked in through tc.browser, putting the form field in kwd doesn't work.
+ if 'install_tool_dependencies' in self.last_page():
+ form = tc.browser.get_form( 'select_tool_panel_section' )
+ checkbox = form.find_control( id="install_tool_dependencies" )
+ checkbox.disabled = False
+ if install_tool_dependencies:
+ checkbox.selected = True
+ kwd[ 'install_tool_dependencies' ] = 'True'
+ else:
+ checkbox.selected = False
+ kwd[ 'install_tool_dependencies' ] = 'False'
+ if 'install_repository_dependencies' in self.last_page():
+ kwd[ 'install_repository_dependencies' ] = str( install_repository_dependencies ).lower()
+ if 'shed_tool_conf' not in kwd:
+ kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
+ if new_tool_panel_section:
+ kwd[ 'new_tool_panel_section' ] = new_tool_panel_section
+ self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
+ self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
+ repository_ids = self.initiate_installation_process( new_tool_panel_section=new_tool_panel_section )
+ self.wait_for_repository_installation( repository_ids )
+ def visit_url( self, url, allowed_codes=[ 200 ] ):
+ new_url = tc.go( url )
+ return_code = tc.browser.get_code()
+ assert return_code in allowed_codes, 'Invalid HTTP return code %s, allowed codes: %s' % \
+ ( return_code, ', '.join( str( code ) for code in allowed_codes ) )
+ return new_url
+ def wait_for_repository_installation( self, repository_ids ):
+ final_states = [ model.ToolShedRepository.installation_status.ERROR,
+ model.ToolShedRepository.installation_status.INSTALLED ]
+ # Wait until all repositories are in a final state before returning. This ensures that subsequent tests
+ # are running against an installed repository, and not one that is still in the process of installing.
+ if repository_ids:
+ for repository_id in repository_ids:
+ galaxy_repository = test_db_util.get_repository( self.security.decode_id( repository_id ) )
+ timeout_counter = 0
+ while galaxy_repository.status not in final_states:
+ test_db_util.refresh( galaxy_repository )
+ timeout_counter = timeout_counter + 1
+ # This timeout currently defaults to 180 seconds, or 3 minutes.
+ if timeout_counter > common.repository_installation_timeout:
+ raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
+ ( timeout_counter, repository.status ) )
+ break
+ time.sleep( 1 )
+
diff -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 -r b9a5aee402b31c2a316de0f68efe66fdb141418e test/install_and_test_tool_shed_repositories/functional/__init__.py
--- /dev/null
+++ b/test/install_and_test_tool_shed_repositories/functional/__init__.py
@@ -0,0 +1,1 @@
+'''Tests'''
\ No newline at end of file
diff -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 -r b9a5aee402b31c2a316de0f68efe66fdb141418e test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py
--- /dev/null
+++ b/test/install_and_test_tool_shed_repositories/functional/test_install_repositories.py
@@ -0,0 +1,39 @@
+import new
+import install_and_test_tool_shed_repositories.base.test_db_util as test_db_util
+from install_and_test_tool_shed_repositories.base.twilltestcase import InstallTestRepository
+
+class TestInstallRepositories( InstallTestRepository ):
+ """Abstract test case that installs a predefined list of repositories."""
+ def do_installation( self, repository_info_dict ):
+ self.logout()
+ self.login( email='test(a)bx.psu.edu', username='test' )
+ admin_user = test_db_util.get_user( 'test(a)bx.psu.edu' )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ self.install_repository( repository_info_dict )
+
+def build_tests( repository_dict=None ):
+ """Generate abstract test cases for the defined list of repositories."""
+ if repository_dict is None:
+ return
+ # Push all the toolbox tests to module level
+ G = globals()
+ # Eliminate all previous tests from G.
+ for key, val in G.items():
+ if key.startswith( 'TestInstallRepository_' ):
+ del G[ key ]
+ # Create a new subclass with a method named install_repository_XXX that installs the repository specified by the provided dict.
+ name = "TestInstallRepository_" + repository_dict[ 'name' ]
+ baseclasses = ( TestInstallRepositories, )
+ namespace = dict()
+ def make_install_method( repository_dict ):
+ def test_install_repository( self ):
+ self.do_installation( repository_dict )
+ return test_install_repository
+ test_method = make_install_method( repository_dict )
+ test_method.__doc__ = "Install the repository %s from %s." % ( repository_dict[ 'name' ], repository_dict[ 'tool_shed_url' ] )
+ namespace[ 'install_repository_%s' % repository_dict[ 'name' ] ] = test_method
+ # The new.classobj function returns a new class object, with name name, derived
+ # from baseclasses (which should be a tuple of classes) and with namespace dict.
+ new_class_obj = new.classobj( name, baseclasses, namespace )
+ G[ name ] = new_class_obj
diff -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 -r b9a5aee402b31c2a316de0f68efe66fdb141418e test/install_and_test_tool_shed_repositories/functional_tests.py
--- /dev/null
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+
+import os, sys, shutil, tempfile, re, string
+
+# Assume we are run from the galaxy root directory, add lib to the python path
+cwd = os.getcwd()
+sys.path.append( cwd )
+
+test_home_directory = os.path.join( cwd, 'test', 'install_and_test_tool_shed_repositories' )
+default_test_file_dir = os.path.join( test_home_directory, 'test_data' )
+# Here's the directory where everything happens. Temporary directories are created within this directory to contain
+# the hgweb.config file, the database, new repositories, etc. Since the tool shed browses repository contents via HTTP,
+# the full path to the temporary directory where the repositories are located cannot contain invalid url characters.
+galaxy_test_tmp_dir = os.path.join( test_home_directory, 'tmp' )
+default_galaxy_locales = 'en'
+default_galaxy_test_file_dir = "test-data"
+os.environ[ 'GALAXY_INSTALL_TEST_TMP_DIR' ] = galaxy_test_tmp_dir
+new_path = [ os.path.join( cwd, "lib" ), os.path.join( cwd, 'test' ) ]
+new_path.extend( sys.path )
+sys.path = new_path
+
+from galaxy import eggs
+
+eggs.require( "nose" )
+eggs.require( "NoseHTML" )
+eggs.require( "NoseTestDiff" )
+eggs.require( "twill==0.9" )
+eggs.require( "Paste" )
+eggs.require( "PasteDeploy" )
+eggs.require( "Cheetah" )
+eggs.require( "simplejson" )
+
+# This should not be required, but it is under certain conditions, thanks to this bug: http://code.google.com/p/python-nose/issues/detail?id=284
+eggs.require( "pysqlite" )
+
+import atexit, logging, os, os.path, sys, tempfile, simplejson
+import twill, unittest, time
+import sys, threading, random
+import httplib, socket
+from paste import httpserver
+
+# This is for the galaxy application.
+import galaxy.app
+from galaxy.app import UniverseApplication
+from galaxy.web import buildapp
+
+import nose.core
+import nose.config
+import nose.loader
+import nose.plugins.manager
+
+log = logging.getLogger( 'install_and_test_repositories' )
+
+default_galaxy_test_port_min = 10000
+default_galaxy_test_port_max = 10999
+default_galaxy_test_host = '127.0.0.1'
+
+# Optionally, set the environment variable GALAXY_INSTALL_TEST_TOOL_SHEDS_CONF
+# to the location of a tool sheds configuration file that includes the tool shed
+# that repositories will be installed from.
+
+tool_sheds_conf_xml = '''<?xml version="1.0"?>
+<tool_sheds>
+ <tool_shed name="Galaxy main tool shed" url="http://toolshed.g2.bx.psu.edu/"/>
+ <tool_shed name="Galaxy test tool shed" url="http://testtoolshed.g2.bx.psu.edu/"/>
+</tool_sheds>
+'''
+
+shed_tool_conf_xml_template = '''<?xml version="1.0"?>
+<toolbox tool_path="${shed_tool_path}">
+</toolbox>
+'''
+
+tool_conf_xml = '''<?xml version="1.0"?>
+<toolbox>
+ <section name="Get Data" id="getext">
+ <tool file="data_source/upload.xml"/>
+ </section>
+</toolbox>
+'''
+
+tool_data_table_conf_xml_template = '''<?xml version="1.0"?>
+<tables>
+</tables>
+'''
+
+galaxy_repository_list = os.environ.get( 'GALAXY_INSTALL_TEST_REPOSITORY_FILE', 'repository_list.json' )
+
+if 'GALAXY_INSTALL_TEST_SECRET' not in os.environ:
+ galaxy_encode_secret = 'changethisinproductiontoo'
+ os.environ[ 'GALAXY_INSTALL_TEST_SECRET' ] = galaxy_encode_secret
+else:
+ galaxy_encode_secret = os.environ[ 'GALAXY_INSTALL_TEST_SECRET' ]
+
+def get_repositories_to_install():
+ '''
+ Get a list of repository info dicts to install. This method expects a json list of dicts with the following structure:
+ [
+ {
+ "changeset_revision": <revision>,
+ "encoded_repository_id": <encoded repository id from the tool shed>,
+ "name": <name>,
+ "owner": <owner>,
+ "tool_shed_url": <url>
+ },
+ ...
+ ]
+ NOTE: If the tool shed URL specified in any dict is not present in the tool_sheds_conf.xml, the installation will fail.
+ '''
+ return simplejson.loads( file( galaxy_repository_list, 'r' ).read() )
+
+def run_tests( test_config ):
+ loader = nose.loader.TestLoader( config=test_config )
+ plug_loader = test_config.plugins.prepareTestLoader( loader )
+ if plug_loader is not None:
+ loader = plug_loader
+ tests = loader.loadTestsFromNames( test_config.testNames )
+ test_runner = nose.core.TextTestRunner( stream=test_config.stream,
+ verbosity=test_config.verbosity,
+ config=test_config )
+ plug_runner = test_config.plugins.prepareTestRunner( test_runner )
+ if plug_runner is not None:
+ test_runner = plug_runner
+ return test_runner.run( tests )
+
+def main():
+ # ---- Configuration ------------------------------------------------------
+ galaxy_test_host = os.environ.get( 'GALAXY_INSTALL_TEST_HOST', default_galaxy_test_host )
+ galaxy_test_port = os.environ.get( 'GALAXY_INSTALL_TEST_PORT', None )
+
+ tool_path = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_PATH', 'tools' )
+ if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
+ os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales
+ galaxy_test_file_dir = os.environ.get( 'GALAXY_INSTALL_TEST_FILE_DIR', default_galaxy_test_file_dir )
+ if not os.path.isabs( galaxy_test_file_dir ):
+ galaxy_test_file_dir = os.path.abspath( galaxy_test_file_dir )
+ tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
+ use_distributed_object_store = os.environ.get( 'GALAXY_INSTALL_TEST_USE_DISTRIBUTED_OBJECT_STORE', False )
+ if not os.path.isdir( galaxy_test_tmp_dir ):
+ os.mkdir( galaxy_test_tmp_dir )
+ galaxy_test_proxy_port = None
+ shed_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF', os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_data_table_conf.xml' ) )
+ galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF', os.path.join( galaxy_test_tmp_dir, 'test_tool_data_table_conf.xml' ) )
+ galaxy_tool_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_CONF', os.path.join( galaxy_test_tmp_dir, 'test_tool_conf.xml' ) )
+ galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_CONF', os.path.join( galaxy_test_tmp_dir, 'test_shed_tool_conf.xml' ) )
+ galaxy_migrated_tool_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_MIGRATED_TOOL_CONF', os.path.join( galaxy_test_tmp_dir, 'test_migrated_tool_conf.xml' ) )
+ galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_SHEDS_CONF', os.path.join( galaxy_test_tmp_dir, 'test_tool_sheds_conf.xml' ) )
+ shed_tool_dict = os.environ.get( 'GALAXY_INSTALL_TEST_SHED_TOOL_DICT_FILE', os.path.join( galaxy_test_tmp_dir, 'shed_tool_dict' ) )
+ if 'GALAXY_INSTALL_TEST_TOOL_DATA_PATH' in os.environ:
+ tool_data_path = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DATA_PATH' )
+ else:
+ tool_data_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DATA_PATH' ] = tool_data_path
+ if 'GALAXY_INSTALL_TEST_DBPATH' in os.environ:
+ galaxy_db_path = os.environ[ 'GALAXY_INSTALL_TEST_DBPATH' ]
+ else:
+ tempdir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ galaxy_db_path = os.path.join( tempdir, 'database' )
+ galaxy_file_path = os.path.join( galaxy_db_path, 'files' )
+ new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ galaxy_tool_dependency_dir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' ] = galaxy_tool_dependency_dir
+ if 'GALAXY_INSTALL_TEST_DBURI' in os.environ:
+ database_connection = os.environ[ 'GALAXY_INSTALL_TEST_DBURI' ]
+ else:
+ database_connection = 'sqlite:///' + os.path.join( galaxy_db_path, 'install_and_test_repositories.sqlite' )
+ kwargs = {}
+ for dir in [ galaxy_test_tmp_dir ]:
+ try:
+ os.makedirs( dir )
+ except OSError:
+ pass
+
+ print "Database connection: ", database_connection
+
+ # Generate the tool_data_table_conf.xml file.
+ file( galaxy_tool_data_table_conf_file, 'w' ).write( tool_data_table_conf_xml_template )
+ os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DATA_TABLE_CONF' ] = galaxy_tool_data_table_conf_file
+ # Generate the shed_tool_data_table_conf.xml file.
+ file( shed_tool_data_table_conf_file, 'w' ).write( tool_data_table_conf_xml_template )
+ os.environ[ 'GALAXY_INSTALL_TEST_SHED_TOOL_DATA_TABLE_CONF' ] = shed_tool_data_table_conf_file
+ # ---- Start up a Galaxy instance ------------------------------------------------------
+ # Generate the tool_conf.xml file.
+ file( galaxy_tool_conf_file, 'w' ).write( tool_conf_xml )
+ # Generate the tool_sheds_conf.xml file, but only if the user has not specified an existing one in the environment.
+ if 'GALAXY_INSTALL_TEST_TOOL_SHEDS_CONF' not in os.environ:
+ file( galaxy_tool_sheds_conf_file, 'w' ).write( tool_sheds_conf_xml )
+ # Generate the shed_tool_conf.xml file.
+ tool_conf_template_parser = string.Template( shed_tool_conf_xml_template )
+ shed_tool_conf_xml = tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_shed_tool_path )
+ file( galaxy_shed_tool_conf_file, 'w' ).write( shed_tool_conf_xml )
+ os.environ[ 'GALAXY_INSTALL_TEST_SHED_TOOL_CONF' ] = galaxy_shed_tool_conf_file
+ # Generate the migrated_tool_conf.xml file.
+ migrated_tool_conf_xml = tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_migrated_tool_path )
+ file( galaxy_migrated_tool_conf_file, 'w' ).write( migrated_tool_conf_xml )
+
+ # ---- Build Galaxy Application --------------------------------------------------
+ global_conf = { '__file__' : 'universe_wsgi.ini.sample' }
+ if not database_connection.startswith( 'sqlite://' ):
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
+ app = UniverseApplication( admin_users = 'test(a)bx.psu.edu',
+ allow_user_creation = True,
+ allow_user_deletion = True,
+ allow_library_path_paste = True,
+ database_connection = database_connection,
+ database_engine_option_pool_size = '10',
+ datatype_converters_config_file = "datatype_converters_conf.xml.sample",
+ file_path = galaxy_file_path,
+ global_conf = global_conf,
+ id_secret = galaxy_encode_secret,
+ job_queue_workers = 5,
+ log_destination = "stdout",
+ migrated_tools_config = galaxy_migrated_tool_conf_file,
+ new_file_path = galaxy_tempfiles,
+ running_functional_tests=True,
+ shed_tool_data_table_config = shed_tool_data_table_conf_file,
+ shed_tool_path = galaxy_shed_tool_path,
+ template_path = "templates",
+ tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
+ tool_data_path = tool_data_path,
+ tool_data_table_config_path = galaxy_tool_data_table_conf_file,
+ tool_dependency_dir = galaxy_tool_dependency_dir,
+ tool_path = tool_path,
+ tool_parse_help = False,
+ tool_sheds_config_file = galaxy_tool_sheds_conf_file,
+ update_integrated_tool_panel = False,
+ use_heartbeat = False,
+ **kwargs )
+
+ log.info( "Embedded Galaxy application started" )
+
+ # ---- Run galaxy webserver ------------------------------------------------------
+ server = None
+ webapp = buildapp.app_factory( dict( database_file=database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=app )
+
+ if galaxy_test_port is not None:
+ server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
+ else:
+ random.seed()
+ for i in range( 0, 9 ):
+ try:
+ galaxy_test_port = str( random.randint( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
+ log.debug( "Attempting to serve app on randomly chosen port: %s" % galaxy_test_port )
+ server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
+ break
+ except socket.error, e:
+ if e[0] == 98:
+ continue
+ raise
+ else:
+ raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % \
+ ( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
+ if galaxy_test_proxy_port:
+ os.environ[ 'GALAXY_INSTALL_TEST_PORT' ] = galaxy_test_proxy_port
+ else:
+ os.environ[ 'GALAXY_INSTALL_TEST_PORT' ] = galaxy_test_port
+ t = threading.Thread( target=server.serve_forever )
+ t.start()
+ # Test if the server is up
+ for i in range( 10 ):
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( galaxy_test_host, galaxy_test_port )
+ conn.request( "GET", "/" )
+ if conn.getresponse().status == 200:
+ break
+ time.sleep( 0.1 )
+ else:
+ raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+ log.info( "Embedded galaxy web server started" )
+ # ---- Load the module to generate installation methods -------------------
+ import install_and_test_tool_shed_repositories.functional.test_install_repositories as test_install_repositories
+ if galaxy_test_proxy_port:
+ log.info( "Tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_proxy_port ) )
+ else:
+ log.info( "Tests will be run against %s:%s" % ( galaxy_test_host, galaxy_test_port ) )
+ success = False
+ try:
+ for repository_dict in get_repositories_to_install():
+ test_install_repositories.build_tests( repository_dict )
+ os.environ[ 'GALAXY_INSTALL_TEST_HOST' ] = galaxy_test_host
+ test_config = nose.config.Config( env=os.environ, plugins=nose.plugins.manager.DefaultPluginManager() )
+ test_config.configure( sys.argv )
+ # Run the tests.
+ result = run_tests( test_config )
+ success = result.wasSuccessful()
+ except:
+ log.exception( "Failure running tests" )
+
+ log.info( "Shutting down" )
+ # ---- Tear down -----------------------------------------------------------
+ if server:
+ log.info( "Shutting down embedded galaxy web server" )
+ server.server_close()
+ server = None
+ log.info( "Embedded galaxy server stopped" )
+ if app:
+ log.info( "Shutting down galaxy application" )
+ app.shutdown()
+ app = None
+ log.info( "Embedded galaxy application stopped" )
+ if 'GALAXY_INSTALL_TEST_NO_CLEANUP' not in os.environ:
+ try:
+ for dir in [ galaxy_test_tmp_dir ]:
+ if os.path.exists( dir ):
+ log.info( "Cleaning up temporary files in %s" % dir )
+ shutil.rmtree( dir )
+ except:
+ pass
+ else:
+ log.debug( 'GALAXY_INSTALL_TEST_NO_CLEANUP set, not cleaning up.' )
+ if success:
+ return 0
+ else:
+ return 1
+
+if __name__ == "__main__":
+ sys.exit( main() )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Only write out a new shed_data_manager_conf if the file has changed.
by Bitbucket 15 Feb '13
by Bitbucket 15 Feb '13
15 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8e430a612aee/
changeset: 8e430a612aee
user: dan
date: 2013-02-15 20:04:47
summary: Only write out a new shed_data_manager_conf if the file has changed.
affected #: 1 file
diff -r d07c457ee2692868a5ab99b4d663a220abf8ac73 -r 8e430a612aee6bb14fe373ae9e67d80ba84723d9 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1188,6 +1188,7 @@
if repo_data_manager_conf_filename is None:
log.debug( "No data_manager_conf.xml file has been defined." )
return rval
+ data_manager_config_has_changes = False
relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
tree = util.parse_xml( repo_data_manager_conf_filename )
@@ -1236,8 +1237,10 @@
else:
log.warning( "Encountered unexpected element '%s':\n%s" % ( elem.tag, util.xml_to_string( elem ) ) )
config_elems.append( elem )
- # Persist the altered shed_tool_config file.
- suc.data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
+ data_manager_config_has_changes = True
+ # Persist the altered shed_data_manager_config file.
+ if data_manager_config_has_changes:
+ suc.data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
return rval
def is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Only write out a new shed_data_manager_conf if the file has changed.
by Bitbucket 15 Feb '13
by Bitbucket 15 Feb '13
15 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d07c457ee269/
changeset: d07c457ee269
user: dan
date: 2013-02-15 19:57:08
summary: Only write out a new shed_data_manager_conf if the file has changed.
affected #: 1 file
diff -r e882cfa616ec6a51ac75d7d7779d2aa7f2e516f8 -r d07c457ee2692868a5ab99b4d663a220abf8ac73 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1527,14 +1527,18 @@
root = tree.getroot()
assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
+ data_manager_config_has_changes = False
config_elems = []
for elem in root:
if elem.tag != 'data_manager' or elem.get( 'guid', None ) not in guids:
config_elems.append( elem )
+ else:
+ data_manager_config_has_changes = True
#remove data managers from in memory
app.data_managers.remove_manager( guids )
# Persist the altered shed_data_manager_config file.
- suc.data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
+ if data_manager_config_has_changes:
+ suc.data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for reinstalling a Tool Shed Repository that contains tools, but none of them are added to the tool panel.
by Bitbucket 15 Feb '13
by Bitbucket 15 Feb '13
15 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e882cfa616ec/
changeset: e882cfa616ec
user: dan
date: 2013-02-15 19:49:47
summary: Fix for reinstalling a Tool Shed Repository that contains tools, but none of them are added to the tool panel.
affected #: 1 file
diff -r 2664d904641abdc1ed6a2f972eb5aeefa8f93703 -r e882cfa616ec6a51ac75d7d7779d2aa7f2e516f8 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1058,30 +1058,32 @@
tool_section = None
tool_panel_section_key = None
if 'tools' in metadata:
- if 'tool_panel_section' in metadata:
- tool_panel_dict = metadata[ 'tool_panel_section' ]
- if not tool_panel_dict:
+ # This forces everything to be loaded into the same section (or no section) in the tool panel.
+ if no_changes_checked:
+ if 'tool_panel_section' in metadata:
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ if not tool_panel_dict:
+ tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ else:
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
- else:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
- # This forces everything to be loaded into the same section (or no section) in the tool panel.
- tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
- tool_section_dict = tool_section_dicts[ 0 ]
- original_section_id = tool_section_dict[ 'id' ]
- original_section_name = tool_section_dict[ 'name' ]
- if no_changes_checked:
- if original_section_id:
- tool_panel_section_key = 'section_%s' % str( original_section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = original_section_name
- elem.attrib[ 'id' ] = original_section_id
- elem.attrib[ 'version' ] = ''
- tool_section = galaxy.tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
+ if tool_panel_dict:
+ #tool_panel_dict is empty when tools exist but are not installed into a tool panel
+ tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+ tool_section_dict = tool_section_dicts[ 0 ]
+ original_section_id = tool_section_dict[ 'id' ]
+ original_section_name = tool_section_dict[ 'name' ]
+ if original_section_id:
+ tool_panel_section_key = 'section_%s' % str( original_section_id )
+ if tool_panel_section_key in trans.app.toolbox.tool_panel:
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = original_section_name
+ elem.attrib[ 'id' ] = original_section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = galaxy.tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
else:
# The user elected to change the tool panel section to contain the tools.
if new_tool_panel_section:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0