galaxy-commits
December 2012
- 1 participant
- 142 discussions
commit/galaxy-central: inithello: Functional tests for n levels of circular repository dependencies.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/376a3714dc15/
changeset: 376a3714dc15
user: inithello
date: 2012-12-11 20:29:28
summary: Functional tests for n levels of circular repository dependencies.
affected #: 7 files
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/base/common.py
--- a/test/tool_shed/base/common.py
+++ b/test/tool_shed/base/common.py
@@ -10,6 +10,8 @@
new_repository_dependencies_xml = '''<?xml version="1.0"?><repositories${description}>
- <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />
+${dependency_lines}
</repositories>
'''
+
+new_repository_dependencies_line = ''' <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''
\ No newline at end of file
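As context for the new templates, here is a minimal, stand-alone sketch (not part of the commit) of how new_repository_dependencies_line and new_repository_dependencies_xml combine: each repository is rendered through the line template and the joined lines are substituted into the outer template. The tool shed URL, repository names, owners and changeset revisions below are invented placeholders.

import string

new_repository_dependencies_xml = '''<?xml version="1.0"?><repositories${description}>
${dependency_lines}
</repositories>
'''
new_repository_dependencies_line = '''    <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''

# Invented example data; the functional tests pull these values from live repositories.
repositories = [ ( 'emboss_datatypes_0050', 'user1', '0123456789ab' ),
                 ( 'emboss_0050', 'user1', 'ba9876543210' ) ]
dependency_entries = []
for name, owner, changeset_revision in repositories:
    template = string.Template( new_repository_dependencies_line )
    dependency_entries.append( template.safe_substitute( toolshed_url='http://localhost:9009',
                                                         repository_name=name,
                                                         owner=owner,
                                                         changeset_revision=changeset_revision ) )
template_parser = string.Template( new_repository_dependencies_xml )
print( template_parser.safe_substitute( description=' description="example"',
                                        dependency_lines='\n'.join( dependency_entries ) ) )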
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -22,12 +22,6 @@
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
self.shed_tools_dict = {}
- self.keepOutdir = os.environ.get( 'TOOL_SHED_TEST_SAVE', '' )
- if self.keepOutdir > '':
- try:
- os.makedirs( self.keepOutdir )
- except:
- pass
self.home()
def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
@@ -50,9 +44,11 @@
url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision ):
+ def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision, changeset_revision=None ):
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username, depends_on_changeset_revision ]
- self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ self.display_manage_repository_page( repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed )
def check_repository_metadata( self, repository, tip_only=True ):
if tip_only:
assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
@@ -216,21 +212,24 @@
else:
string = string.replace( character, replacement )
return string
- def generate_repository_dependency_xml( self, repository, xml_filename, dependency_description='' ):
+ def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='' ):
file_path = os.path.split( xml_filename )[0]
if not os.path.exists( file_path ):
os.makedirs( file_path )
- changeset_revision = self.get_repository_tip( repository )
+ dependency_entries = []
+ for repository in repositories:
+ changeset_revision = self.get_repository_tip( repository )
+ template = string.Template( common.new_repository_dependencies_line )
+ dependency_entries.append( template.safe_substitute( toolshed_url=self.url,
+ owner=repository.user.username,
+ repository_name=repository.name,
+ changeset_revision=changeset_revision ) )
if dependency_description:
description = ' description="%s"' % dependency_description
else:
description = dependency_description
template_parser = string.Template( common.new_repository_dependencies_xml )
- repository_dependency_xml = template_parser.safe_substitute( toolshed_url=self.url,
- owner=repository.user.username,
- repository_name=repository.name,
- changeset_revision=changeset_revision,
- description=description )
+ repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
def generate_temp_path( self, test_script_path, additional_paths=[] ):
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -55,7 +55,7 @@
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
- self.generate_repository_dependency_xml( datatypes_repository,
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
self.upload_file( repository,
'repository_dependencies.xml',
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -73,7 +73,7 @@
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
- self.generate_repository_dependency_xml( datatypes_repository,
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository,
@@ -93,7 +93,7 @@
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
- self.generate_repository_dependency_xml( emboss_5_repository,
+ self.generate_repository_dependency_xml( [ emboss_5_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 5 repository.' )
self.upload_file( emboss_repository,
@@ -105,7 +105,7 @@
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
- self.generate_repository_dependency_xml( emboss_6_repository,
+ self.generate_repository_dependency_xml( [ emboss_6_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Emboss requires the Emboss 6 repository.' )
self.upload_file( emboss_repository,
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -63,7 +63,7 @@
repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
- self.generate_repository_dependency_xml( repository,
+ self.generate_repository_dependency_xml( [ repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Filtering 1.1.0 depends on the freebayes repository.' )
self.upload_file( filtering_repository,
@@ -79,7 +79,7 @@
repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
- self.generate_repository_dependency_xml( repository,
+ self.generate_repository_dependency_xml( [ repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
dependency_description='Freebayes depends on the filtering repository.' )
self.upload_file( freebayes_repository,
@@ -95,8 +95,5 @@
# Freebayes revision 0 -> filtering revision 1.
# Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
# In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
- filtering_strings_displayed = [ freebayes_repository_name, common.test_user_1_name, self.get_repository_tip( freebayes_repository ) ]
- freebayes_strings_displayed = [ filtering_repository_name, common.test_user_1_name, self.get_repository_tip( filtering_repository ) ]
- self.display_manage_repository_page( filtering_repository, strings_displayed=filtering_strings_displayed )
- self.display_manage_repository_page( freebayes_repository, strings_displayed=freebayes_strings_displayed )
-
+ self.check_repository_dependency( filtering_repository, freebayes_repository, self.get_repository_tip( freebayes_repository ) )
+ self.check_repository_dependency( freebayes_repository, filtering_repository, self.get_repository_tip( filtering_repository ) )
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0050_circular_n_levels.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -0,0 +1,129 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0050'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0050'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+freebayes_repository_name = 'freebayes_0050'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+filtering_repository_name = 'filtering_0050'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+default_category = 'test_0050_repository_n_level_circular_dependencies'
+default_category_description = 'Testing handling of circular repository dependencies to n levels.'
+
+class TestRepositoryCircularDependenciesToNLevels( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies down to n levels.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( default_category, default_category_description )
+ def test_0010_create_emboss_datatypes_repository( self ):
+ '''Create and populate emboss_datatypes_0050.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( emboss_datatypes_repository_name,
+ emboss_datatypes_repository_description,
+ repository_long_description=emboss_datatypes_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'emboss/datatypes/datatypes_conf.xml',
+ strings_displayed=[],
+ commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0015_create_emboss_repository( self ):
+ '''Create and populate emboss_0050.'''
+ self.create_repository( emboss_repository_name,
+ emboss_repository_description,
+ repository_long_description=emboss_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'emboss/emboss.tar',
+ strings_displayed=[],
+ commit_message='Uploaded tool tarball.' )
+ datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'emboss' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Emboss depends on the emboss_datatypes repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on emboss_datatypes.' )
+ def test_0020_create_filtering_repository( self ):
+ '''Create and populate filtering_0050.'''
+ self.create_repository( filtering_repository_name,
+ filtering_repository_description,
+ repository_long_description=filtering_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'filtering' ] )
+ self.generate_repository_dependency_xml( [ emboss_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Filtering depends on the emboss repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on emboss.' )
+ def test_0025_create_freebayes_repository( self ):
+ '''Create and populate freebayes_0050.'''
+ self.create_repository( freebayes_repository_name,
+ freebayes_repository_description,
+ repository_long_description=freebayes_repository_long_description,
+ categories=[ default_category ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+ previous_tip = self.get_repository_tip( repository )
+ self.generate_repository_dependency_xml( [ emboss_datatypes_repository, emboss_repository, filtering_repository, repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Freebayes depends on the filtering repository.' )
+ self.upload_file( repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering.' )
+ self.display_manage_repository_page( repository, strings_not_displayed=[ previous_tip ] )
+ def test_0030_verify_repository_dependencies( self ):
+ '''Verify that the generated dependency circle does not cause an infinite loop.'''
+ emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ for repository in [ emboss_datatypes_repository, emboss_repository, filtering_repository ]:
+ self.check_repository_dependency( freebayes_repository, repository, self.get_repository_tip( repository ) )
+ self.display_manage_repository_page( freebayes_repository, strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
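The new test wires freebayes_0050 to depend on emboss_datatypes_0050, emboss_0050, filtering_0050 and on itself, so the resulting dependency graph is circular. The commit does not show Galaxy's traversal code; the sketch below (invented names, simplified logic) only illustrates the general technique such a test exercises, i.e. walking a dependency graph with a visited set so that a cycle cannot recurse forever.

def walk_dependencies( graph, start ):
    '''Yield every repository reachable from start, visiting each one once.

    graph maps a repository name to the names it depends on; the visited set
    is what keeps a circular graph from looping forever.
    '''
    visited = set()
    stack = [ start ]
    while stack:
        name = stack.pop()
        if name in visited:
            continue
        visited.add( name )
        yield name
        stack.extend( graph.get( name, [] ) )

# Graph with the same shape as the test_0050 repositories (purely illustrative).
graph = { 'freebayes_0050': [ 'emboss_datatypes_0050', 'emboss_0050', 'filtering_0050', 'freebayes_0050' ],
          'filtering_0050': [ 'emboss_0050' ],
          'emboss_0050': [ 'emboss_datatypes_0050' ],
          'emboss_datatypes_0050': [] }
print( sorted( walk_dependencies( graph, 'freebayes_0050' ) ) )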
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -67,9 +67,7 @@
# ---- Configuration ------------------------------------------------------
tool_shed_test_host = os.environ.get( 'TOOL_SHED_TEST_HOST', default_tool_shed_test_host )
tool_shed_test_port = os.environ.get( 'TOOL_SHED_TEST_PORT', None )
- tool_shed_test_save = os.environ.get( 'TOOL_SHED_TEST_SAVE', None )
tool_path = os.environ.get( 'TOOL_SHED_TEST_TOOL_PATH', 'tools' )
- start_server = 'TOOL_SHED_TEST_EXTERNAL' not in os.environ
if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_tool_shed_locales
tool_shed_test_file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', default_tool_shed_test_file_dir )
@@ -84,57 +82,27 @@
tool_dependency_dir = os.environ.get( 'TOOL_SHED_TOOL_DEPENDENCY_DIR', None )
use_distributed_object_store = os.environ.get( 'TOOL_SHED_USE_DISTRIBUTED_OBJECT_STORE', False )
- if start_server:
- if not os.path.isdir( tool_shed_test_tmp_dir ):
- os.mkdir( tool_shed_test_tmp_dir )
- psu_production = False
- tool_shed_test_proxy_port = None
- if 'TOOL_SHED_TEST_PSU_PRODUCTION' in os.environ:
- if not tool_shed_test_port:
- raise Exception( 'Set TOOL_SHED_TEST_PORT to the port to which the proxy server will proxy' )
- tool_shed_test_proxy_port = os.environ.get( 'TOOL_SHED_TEST_PROXY_PORT', None )
- if not tool_shed_test_proxy_port:
- raise Exception( 'Set TOOL_SHED_TEST_PROXY_PORT to the port on which the proxy server is listening' )
- base_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_FILE_PATH', None )
- if not base_file_path:
- raise Exception( 'Set TOOL_SHED_TEST_BASE_FILE_PATH to the directory which will contain the dataset files directory' )
- base_new_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_NEW_FILE_PATH', None )
- if not base_new_file_path:
- raise Exception( 'Set TOOL_SHED_TEST_BASE_NEW_FILE_PATH to the directory which will contain the temporary directory' )
- database_connection = os.environ.get( 'TOOL_SHED_TEST_DBURI', None )
- if not database_connection:
- raise Exception( 'Set TOOL_SHED_TEST_DBURI to the URI of the database to be used for tests' )
- nginx_upload_store = os.environ.get( 'TOOL_SHED_TEST_NGINX_UPLOAD_STORE', None )
- if not nginx_upload_store:
- raise Exception( 'Set TOOL_SHED_TEST_NGINX_UPLOAD_STORE to the path where the nginx upload module places uploaded files' )
- file_path = tempfile.mkdtemp( dir=base_file_path )
- new_repos_path = tempfile.mkdtemp( dir=base_new_file_path )
- hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- kwargs = dict( database_engine_option_pool_size = '10',
- database_engine_option_max_overflow = '20',
- database_engine_option_strategy = 'threadlocal',
- static_enabled = 'False',
- debug = 'False' )
- psu_production = True
- else:
- if 'TOOL_SHED_TEST_DBPATH' in os.environ:
- db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
- else:
- tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- db_path = os.path.join( tempdir, 'database' )
- file_path = os.path.join( db_path, 'files' )
- hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
- if 'TOOL_SHED_TEST_DBURI' in os.environ:
- database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
- else:
- database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
- kwargs = {}
- for dir in [ tool_shed_test_tmp_dir ]:
- try:
- os.makedirs( dir )
- except OSError:
- pass
+ if not os.path.isdir( tool_shed_test_tmp_dir ):
+ os.mkdir( tool_shed_test_tmp_dir )
+ tool_shed_test_proxy_port = None
+ if 'TOOL_SHED_TEST_DBPATH' in os.environ:
+ db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
+ else:
+ tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ db_path = os.path.join( tempdir, 'database' )
+ file_path = os.path.join( db_path, 'files' )
+ hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ if 'TOOL_SHED_TEST_DBURI' in os.environ:
+ database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
+ else:
+ database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
+ kwargs = {}
+ for dir in [ tool_shed_test_tmp_dir ]:
+ try:
+ os.makedirs( dir )
+ except OSError:
+ pass
print "Database connection:", database_connection
@@ -145,89 +113,78 @@
# ---- Build Application --------------------------------------------------
app = None
- if start_server:
- global_conf = { '__file__' : 'community_wsgi.ini.sample' }
- if psu_production:
- global_conf = None
- if not database_connection.startswith( 'sqlite://' ):
- kwargs[ 'database_engine_option_max_overflow' ] = '20'
- if tool_dependency_dir is not None:
- kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
- if use_distributed_object_store:
- kwargs[ 'object_store' ] = 'distributed'
- kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
+ global_conf = { '__file__' : 'community_wsgi.ini.sample' }
+ if not database_connection.startswith( 'sqlite://' ):
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
+ if tool_dependency_dir is not None:
+ kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
+ if use_distributed_object_store:
+ kwargs[ 'object_store' ] = 'distributed'
+ kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
- app = UniverseApplication( job_queue_workers = 5,
- id_secret = 'changethisinproductiontoo',
- template_path = 'templates',
- database_connection = database_connection,
- database_engine_option_pool_size = '10',
- file_path = file_path,
- new_file_path = new_repos_path,
- tool_path=tool_path,
- datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
- tool_parse_help = False,
- tool_data_table_config_path = tool_data_table_config_path,
- shed_tool_data_table_config = shed_tool_data_table_config,
- log_destination = "stdout",
- use_heartbeat = False,
- allow_user_creation = True,
- allow_user_deletion = True,
- admin_users = 'test(a)bx.psu.edu',
- global_conf = global_conf,
- running_functional_tests = True,
- hgweb_config_dir = hgweb_config_dir,
- **kwargs )
+ app = UniverseApplication( job_queue_workers = 5,
+ id_secret = 'changethisinproductiontoo',
+ template_path = 'templates',
+ database_connection = database_connection,
+ database_engine_option_pool_size = '10',
+ file_path = file_path,
+ new_file_path = new_repos_path,
+ tool_path=tool_path,
+ datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
+ tool_parse_help = False,
+ tool_data_table_config_path = tool_data_table_config_path,
+ shed_tool_data_table_config = shed_tool_data_table_config,
+ log_destination = "stdout",
+ use_heartbeat = False,
+ allow_user_creation = True,
+ allow_user_deletion = True,
+ admin_users = 'test(a)bx.psu.edu',
+ global_conf = global_conf,
+ running_functional_tests = True,
+ hgweb_config_dir = hgweb_config_dir,
+ **kwargs )
- log.info( "Embedded Universe application started" )
+ log.info( "Embedded Universe application started" )
# ---- Run webserver ------------------------------------------------------
server = None
- if start_server:
- webapp = buildapp.app_factory( dict( database_file=database_connection ),
- use_translogger=False,
- static_enabled=False,
- app=app )
- if tool_shed_test_port is not None:
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ webapp = buildapp.app_factory( dict( database_file=database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=app )
+ if tool_shed_test_port is not None:
+ server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ else:
+ random.seed()
+ for i in range( 0, 9 ):
+ try:
+ tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+ log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
+ server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ break
+ except socket.error, e:
+ if e[0] == 98:
+ continue
+ raise
else:
- random.seed()
- for i in range( 0, 9 ):
- try:
- tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
- log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
- server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
- break
- except socket.error, e:
- if e[0] == 98:
- continue
- raise
- else:
- raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
- if tool_shed_test_proxy_port:
- os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
- else:
- os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
- t = threading.Thread( target=server.serve_forever )
- t.start()
- # Test if the server is up
- for i in range( 10 ):
- # Directly test the app, not the proxy.
- conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
- conn.request( "GET", "/" )
- if conn.getresponse().status == 200:
- break
- time.sleep( 0.1 )
- else:
- raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
- # Test if the proxy server is up.
- if psu_production:
- # Directly test the app, not the proxy.
- conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_proxy_port )
- conn.request( "GET", "/" )
- if not conn.getresponse().status == 200:
- raise Exception( "Test HTTP proxy server did not return '200 OK'" )
- log.info( "Embedded web server started" )
+ raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+ if tool_shed_test_proxy_port:
+ os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
+ else:
+ os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
+ t = threading.Thread( target=server.serve_forever )
+ t.start()
+ # Test if the server is up
+ for i in range( 10 ):
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
+ conn.request( "GET", "/" )
+ if conn.getresponse().status == 200:
+ break
+ time.sleep( 0.1 )
+ else:
+ raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+ log.info( "Embedded web server started" )
# We don't add the tests to the path until everything is up and running
new_path = [ os.path.join( cwd, 'test' ) ]
new_path.extend( sys.path[1:] )
@@ -239,9 +196,6 @@
log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
success = False
try:
- # What requires these? Handy for (eg) functional tests to save outputs?
- if tool_shed_test_save:
- os.environ[ 'TOOL_SHED_TEST_SAVE' ] = tool_shed_test_save
# Pass in through script set env, will leave a copy of ALL test validate files.
os.environ[ 'TOOL_SHED_TEST_HOST' ] = tool_shed_test_host
if tool_shed_test_file_dir:
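One note on the port-selection loop retained above: the comparison e[0] == 98 tests for EADDRINUSE, which is errno 98 on Linux. Below is a small, stand-alone sketch of the same retry pattern using the errno constant instead of the magic number; serve is a placeholder for paste's httpserver.serve, and its signature here is an assumption for the example.

import errno
import random
import socket

def serve_on_random_port( serve, host, port_min, port_max, attempts=10 ):
    # Try up to `attempts` randomly chosen ports, skipping any that are
    # already in use and re-raising every other socket error.
    random.seed()
    for _ in range( attempts ):
        port = str( random.randint( port_min, port_max ) )
        try:
            return port, serve( host=host, port=port )
        except socket.error as e:
            if e.errno == errno.EADDRINUSE:
                continue
            raise
    raise Exception( "Unable to open a port between %s and %s" % ( port_min, port_max ) )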
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Add 'checkers' namespace before calls to .check_*.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a7ea7728829d/
changeset: a7ea7728829d
user: dan
date: 2012-12-11 20:14:14
summary: Add 'checkers' namespace before calls to .check_*.
affected #: 1 file
diff -r eed6dd67514b5e5ab0174f181af9514dea7a8d33 -r a7ea7728829dcc5395606bd402805d2ac972d6c6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -846,17 +846,17 @@
if is_column_based( file_path ):
return True
# If the file is any of the following, don't copy it.
- if check_html( file_path ):
+ if checkers.check_html( file_path ):
return False
- if check_image( file_path ):
+ if checkers.check_image( file_path ):
return False
- if check_binary( name=file_path ):
+ if checkers.check_binary( name=file_path ):
return False
- if is_bz2( file_path ):
+ if checkers.is_bz2( file_path ):
return False
- if is_gzip( file_path ):
+ if checkers.is_gzip( file_path ):
return False
- if check_zip( file_path ):
+ if checkers.check_zip( file_path ):
return False
# Default to copying the file if none of the above are true.
return True
commit/galaxy-central: greg: Fix for building tool dependencies container.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/eed6dd67514b/
changeset: eed6dd67514b
user: greg
date: 2012-12-11 17:54:09
summary: Fix for building tool dependencies container.
affected #: 1 file
diff -r 71c3b867efd1d8074a59659ed7320c1e6a3d9ed3 -r eed6dd67514b5e5ab0174f181af9514dea7a8d33 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -99,7 +99,7 @@
repository_dependencies=repository_dependencies )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
if tool_dependencies:
- folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
commit/galaxy-central: greg: Fix for resetting all metadata on a tool shed repository.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/71c3b867efd1/
changeset: 71c3b867efd1
user: greg
date: 2012-12-11 17:37:53
summary: Fix for resetting all metadata on a tool shed repository.
affected #: 1 file
diff -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a -r 71c3b867efd1d8074a59659ed7320c1e6a3d9ed3 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -726,7 +726,7 @@
relative_path, filename = os.path.split( sample_file )
if filename == 'tool_data_table_conf.xml.sample':
new_table_elems, error_message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
- tool_data_path=original_tool_data_path,
+ tool_data_path=app.config.tool_data_path,
shed_tool_data_table_config=app.config.shed_tool_data_table_config,
persist=persist )
if error_message:
@@ -771,7 +771,7 @@
if not valid:
invalid_file_tups.append( ( name, error_message ) )
else:
- invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_metadata_paths )
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -1329,32 +1329,32 @@
relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
return relative_path_to_file
-def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
+def get_sample_files_from_disk( repository_files_dir, tool_path=None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
if resetting_all_metadata_on_repository:
# Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
work_dir = repository_files_dir
sample_file_metadata_paths = []
sample_file_copy_paths = []
for root, dirs, files in os.walk( repository_files_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ):
- if resetting_all_metadata_on_repository:
- full_path_to_sample_file = os.path.join( root, name )
- stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
- if stripped_path_to_sample_file.startswith( '/' ):
- stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
- relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
- if os.path.exists( relative_path_to_sample_file ):
- sample_file_copy_paths.append( relative_path_to_sample_file )
- else:
- sample_file_copy_paths.append( full_path_to_sample_file )
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name.endswith( '.sample' ):
+ if resetting_all_metadata_on_repository:
+ full_path_to_sample_file = os.path.join( root, name )
+ stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
+ if stripped_path_to_sample_file.startswith( '/' ):
+ stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
+ relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
+ if os.path.exists( relative_path_to_sample_file ):
+ sample_file_copy_paths.append( relative_path_to_sample_file )
else:
- relative_path_to_sample_file = os.path.join( root, name )
- sample_file_copy_paths.append( relative_path_to_sample_file )
- if tool_path and relative_install_dir:
- if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
- relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
+ sample_file_copy_paths.append( full_path_to_sample_file )
+ else:
+ relative_path_to_sample_file = os.path.join( root, name )
+ sample_file_copy_paths.append( relative_path_to_sample_file )
+ if tool_path and relative_install_dir:
+ if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
+ relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
def get_updated_changeset_revisions_for_repository_dependencies( trans, key_rd_dicts ):
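The re-indented block in get_sample_files_from_disk is hard to follow as a diff; stripped of the metadata-resetting and tool_path handling, the walk it performs amounts to roughly the following simplified sketch (not the function's full behaviour).

import os

def find_sample_files( repository_files_dir ):
    # Collect every *.sample file in a cloned repository, skipping
    # anything that lives under the .hg directory.
    sample_files = []
    for root, dirs, files in os.walk( repository_files_dir ):
        if root.find( '.hg' ) >= 0:
            continue
        for name in files:
            if name.endswith( '.sample' ):
                sample_files.append( os.path.join( root, name ) )
    return sample_files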
commit/galaxy-central: inithello: Functional tests for repositories with circular repository dependencies.
by Bitbucket 11 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1cdb5f5eb6d0/
changeset: 1cdb5f5eb6d0
user: inithello
date: 2012-12-11 16:47:31
summary: Functional tests for repositories with circular repository dependencies.
affected #: 4 files
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -50,6 +50,9 @@
url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision ):
+ strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username, depends_on_changeset_revision ]
+ self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
def check_repository_metadata( self, repository, tip_only=True ):
if tip_only:
assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
@@ -353,7 +356,7 @@
**kwd ):
self.visit_url( '/upload/upload?repository_id=%s' % self.security.encode_id( repository.id ) )
if valid_tools_only:
- strings_displayed.append( "has been successfully uploaded to the repository." )
+ strings_displayed.extend( [ 'has been successfully', 'uploaded to the repository.' ] )
for key in kwd:
tc.fv( "1", key, kwd[ key ] )
tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -18,7 +18,7 @@
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
test_user_1 = get_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % regular_email
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
test_user_1_private_role = get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
@@ -28,7 +28,7 @@
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
- def test_0005_create_repositories( self ):
+ def test_0010_create_repositories( self ):
'''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes, and emboss repositories and populate the emboss_datatypes repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
@@ -69,7 +69,7 @@
strings_displayed=[] )
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
- def test_0010_generate_repository_dependencies_for_emboss_5( self ):
+ def test_0015_generate_repository_dependencies_for_emboss_5( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
@@ -80,7 +80,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded repository_depepndencies.xml.' )
- def test_0015_generate_repository_dependencies_for_emboss_6( self ):
+ def test_0020_generate_repository_dependencies_for_emboss_6( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_6 repository.'''
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
@@ -88,7 +88,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded repository_depepndencies.xml.' )
- def test_0020_generate_repository_dependency_on_emboss_5( self ):
+ def test_0025_generate_repository_dependency_on_emboss_5( self ):
'''Create and upload repository_dependencies.xml for the emboss_5_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
@@ -100,7 +100,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_5' )
- def test_0025_generate_repository_dependency_on_emboss_6( self ):
+ def test_0030_generate_repository_dependency_on_emboss_6( self ):
'''Create and upload repository_dependencies.xml for the emboss_6_0030 repository.'''
emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
@@ -112,7 +112,7 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency configuration specifying emboss_6' )
- def test_0030_verify_repository_dependency_revisions( self ):
+ def test_0035_verify_repository_dependency_revisions( self ):
'''Verify that different metadata revisions of the emboss repository have different repository dependencies.'''
repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -0,0 +1,102 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+
+freebayes_repository_name = 'freebayes_0040'
+freebayes_repository_name_description = "Galaxy's freebayes tool"
+freebayes_repository_name_long_description = "Long description of Galaxy's freebayes tool"
+
+filtering_repository_name = 'filtering_0040'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+class TestRepositoryCircularDependencies( ShedTwillTestCase ):
+ '''Verify that the code correctly handles circular dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( 'test_0040_repository_circular_dependencies', 'Testing handling of circular repository dependencies.' )
+ def test_0010_create_freebayes_repository_name( self ):
+ '''Create and populate freebayes_0040.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( freebayes_repository_name,
+ freebayes_repository_name_description,
+ repository_long_description=freebayes_repository_name_long_description,
+ categories=[ 'test_0040_repository_circular_dependencies' ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ def test_0015_create_filtering_repository( self ):
+ '''Create and populate filtering_0040.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( filtering_repository_name,
+ filtering_repository_description,
+ repository_long_description=filtering_repository_long_description,
+ categories=[ 'test_0040_repository_circular_dependencies' ],
+ strings_displayed=[] )
+ repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ def test_0020_create_dependency_on_freebayes( self ):
+ '''Upload a repository_dependencies.xml file that specifies the current revision of freebayes to the filtering_0040 repository.'''
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
+ self.generate_repository_dependency_xml( repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Filtering 1.1.0 depends on the freebayes repository.' )
+ self.upload_file( filtering_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on freebayes' )
+ def test_0025_create_dependency_on_filtering( self ):
+ '''Upload a repository_dependencies.xml file that specifies the current revision of filtering to the freebayes_0040 repository.'''
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
+ self.generate_repository_dependency_xml( repository,
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Freebayes depends on the filtering repository.' )
+ self.upload_file( freebayes_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering' )
+ def test_0030_verify_repository_dependencies( self ):
+ '''Verify that each repository can depend on the other without causing an infinite loop.'''
+ filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ # In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
+ filtering_strings_displayed = [ freebayes_repository_name, common.test_user_1_name, self.get_repository_tip( freebayes_repository ) ]
+ freebayes_strings_displayed = [ filtering_repository_name, common.test_user_1_name, self.get_repository_tip( filtering_repository ) ]
+ self.display_manage_repository_page( filtering_repository, strings_displayed=filtering_strings_displayed )
+ self.display_manage_repository_page( freebayes_repository, strings_displayed=freebayes_strings_displayed )
+
diff -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 -r 1cdb5f5eb6d061b3232c4f7725839a9fe287355a test/tool_shed/test_data/freebayes/freebayes.tar
Binary file test/tool_shed/test_data/freebayes/freebayes.tar has changed
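For reference, the repository_dependencies.xml generated in test_0020_create_dependency_on_freebayes above would look roughly like the snippet below, given the template in test/tool_shed/base/common.py. The tool shed URL, owner and changeset revision are placeholders; the real values come from the running test tool shed.

<?xml version="1.0"?><repositories description="Filtering 1.1.0 depends on the freebayes repository.">
    <repository toolshed="http://localhost:9009" name="freebayes_0040" owner="user1" changeset_revision="0123456789ab" />
</repositories>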
commit/galaxy-central: carlfeberhard: Fixes to history functional tests; Twilltestcase: added function to parse and check json, exact string matcher
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/795a1799e7dc/
changeset: 795a1799e7dc
user: carlfeberhard
date: 2012-12-10 23:44:11
summary: Fixes to history functional tests; Twilltestcase: added function to parse and check json, exact string matcher
affected #: 5 files
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 lib/galaxy/webapps/galaxy/controllers/root.py
--- a/lib/galaxy/webapps/galaxy/controllers/root.py
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -130,6 +130,7 @@
history_panel_template = "root/history.mako"
else:
+ # get all datasets server-side, client-side will get flags and render appropriately
datasets = self.get_history_datasets( trans, history,
show_deleted=True, show_hidden=True, show_purged=True )
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -241,8 +241,9 @@
try:
os.makedirs( dir )
except OSError:
- pass
- print "Database connection:", database_connection
+ pass
+ log.info( "Database connection:", database_connection )
+
# ---- Build Application --------------------------------------------------
app = None
if start_server:
@@ -412,6 +413,8 @@
if os.path.exists( tempdir ) and 'GALAXY_TEST_NO_CLEANUP' not in os.environ:
log.info( "Cleaning up temporary files in %s" % tempdir )
shutil.rmtree( tempdir )
+ else:
+ log.info( "GALAXY_TEST_NO_CLEANUP is on. Temporary files in %s" % tempdir )
except:
pass
if psu_production and 'GALAXY_TEST_NO_CLEANUP' not in os.environ:
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -310,11 +310,12 @@
// ostensibly, this is the App
// LOAD INITIAL DATA IN THIS PAGE - since we're already sending it...
// ...use mako to 'bootstrap' the models
- var user = ${ get_current_user() },
+ var page_show_deleted = ${ 'true' if show_deleted == True else ( 'null' if show_deleted == None else 'false' ) },
+ page_show_hidden = ${ 'true' if show_hidden == True else ( 'null' if show_hidden == None else 'false' ) },
+
+ user = ${ get_current_user() },
history = ${ get_history( history.id ) },
hdas = ${ get_hdas( history.id, datasets ) };
- var currUser = new User( user );
- if( !Galaxy.currUser ){ Galaxy.currUser = currUser; }
// add user data to history
// i don't like this history+user relationship, but user authentication changes views/behaviour
@@ -326,8 +327,8 @@
urlTemplates : galaxy_paths.attributes,
logger : ( debugging )?( console ):( null ),
// is page sending in show settings? if so override history's
- show_deleted : ${ 'true' if show_deleted == True else ( 'null' if show_deleted == None else 'false' ) },
- show_hidden : ${ 'true' if show_hidden == True else ( 'null' if show_hidden == None else 'false' ) }
+ show_deleted : page_show_deleted,
+ show_hidden : page_show_hidden
});
historyPanel.render();
@@ -337,15 +338,16 @@
// urlTemplates : galaxy_paths.attributes,
// logger : ( debugging )?( console ):( null ),
// // is page sending in show settings? if so override history's
- // show_deleted : ${ 'true' if show_deleted == True else ( 'null' if show_deleted == None else 'false' ) },
- // show_hidden : ${ 'true' if show_hidden == True else ( 'null' if show_hidden == None else 'false' ) }
+ // show_deleted : page_show_deleted,
+ // show_hidden : page_show_hidden
//});
//historyPanel.model.loadFromApi( history.id );
// set it up to be accessible across iframes
//TODO:?? mem leak
top.Galaxy.currHistoryPanel = historyPanel;
-
+ var currUser = new User( user );
+ if( !Galaxy.currUser ){ Galaxy.currUser = currUser; }
// QUOTA METER is a cross-frame ui element (meter in masthead, over quota message in history)
// create it and join them here for now (via events)
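For context: after this template change the rendered page contains plain assignments such as "page_show_deleted = true," and "hdas = [...];", which the updated tests grep out of the HTML and parse as JSON. A standalone illustration of that extraction, using Python's re and json modules directly rather than twill (the page text below is invented):

    import json, re

    page = 'var page_show_deleted = true,\n    page_show_hidden = null,\n    hdas = [{"deleted": true}];'

    match = re.search( r'\bpage_show_deleted\s*=\s*(.*),', page )
    assert json.loads( match.group( 1 ) ) is True

    match = re.search( r'\bhdas\s*=\s*(.*);', page )
    assert json.loads( match.group( 1 ) )[ 0 ][ 'deleted' ] is True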
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -249,8 +249,9 @@
page = self.last_page()
if page.find( 'error' ) > -1:
raise AssertionError('Errors in the history for user %s' % self.user )
+
def check_history_for_string( self, patt, show_deleted=False ):
- """Looks for 'string' in history page"""
+ """Breaks patt on whitespace and searches for each element seperately in the history"""
self.home()
if show_deleted:
self.visit_page( "history?show_deleted=True" )
@@ -264,11 +265,57 @@
errmsg = "no match to '%s'\npage content written to '%s'" % ( subpatt, fname )
raise AssertionError( errmsg )
self.home()
+
+ def check_history_for_exact_string( self, string, show_deleted=False ):
+ """Looks for exact match to 'string' in history page"""
+ self.home()
+ if show_deleted:
+ self.visit_page( "history?show_deleted=True" )
+ else:
+ self.visit_page( "history" )
+ try:
+ tc.find( string )
+ except:
+ fname = self.write_temp_file( tc.browser.get_html() )
+ errmsg = "no match to '%s'\npage content written to '%s'" % ( string, fname )
+ raise AssertionError( errmsg )
+ self.home()
+
+ def check_history_json( self, pattern, check_fn, show_deleted=None, multiline=True ):
+ """
+ Tries to find a JSON string in the history page using the regex pattern,
+ parse it, and assert check_fn returns True when called on that parsed
+ data.
+ """
+ self.home()
+ if show_deleted:
+ self.visit_page( "history?show_deleted=True" )
+ elif show_deleted == False:
+ self.visit_page( "history?show_deleted=False" )
+ else:
+ self.visit_page( "history" )
+ try:
+ tc.find( pattern, flags=( 'm' if multiline else '' ) )
+ # twill stores the regex match in a special stack variable
+ match = twill.namespaces.get_twill_glocals()[1][ '__match__' ]
+ json_data = from_json_string( match )
+ assert check_fn( json_data ), 'failed check_fn'
+
+ except Exception, exc:
+ log.error( exc, exc_info=True )
+ fname = self.write_temp_file( tc.browser.get_html() )
+ errmsg = ( "json '%s' could not be found or failed check_fn" % ( pattern ) +
+ "\npage content written to '%s'" % ( fname ) )
+ raise AssertionError( errmsg )
+
+ self.home()
+
def clear_history( self ):
"""Empties a history of all datasets"""
self.visit_page( "clear_history" )
self.check_history_for_string( 'Your history is empty' )
self.home()
+
def delete_history( self, id ):
"""Deletes one or more histories"""
history_list = self.get_histories_as_data_list()
@@ -279,6 +326,7 @@
check_str = 'Deleted %d %s' % ( num_deleted, iff( num_deleted != 1, "histories", "history" ) )
self.check_page_for_string( check_str )
self.home()
+
def delete_current_history( self, strings_displayed=[] ):
"""Deletes the current history"""
self.home()
@@ -286,16 +334,19 @@
for check_str in strings_displayed:
self.check_page_for_string( check_str )
self.home()
+
def get_histories_as_data_list( self ):
"""Returns the data elements of all histories"""
tree = self.histories_as_xml_tree()
data_list = [ elem for elem in tree.findall("data") ]
return data_list
+
def get_history_as_data_list( self, show_deleted=False ):
"""Returns the data elements of a history"""
tree = self.history_as_xml_tree( show_deleted=show_deleted )
data_list = [ elem for elem in tree.findall("data") ]
return data_list
+
def history_as_xml_tree( self, show_deleted=False ):
"""Returns a parsed xml object of a history"""
self.home()
@@ -303,6 +354,7 @@
xml = self.last_page()
tree = ElementTree.fromstring(xml)
return tree
+
def histories_as_xml_tree( self ):
"""Returns a parsed xml object of all histories"""
self.home()
@@ -310,6 +362,7 @@
xml = self.last_page()
tree = ElementTree.fromstring(xml)
return tree
+
def history_options( self, user=False, active_datasets=False, activatable_datasets=False, histories_shared_by_others=False ):
"""Mimics user clicking on history options link"""
self.home()
@@ -329,6 +382,7 @@
self.check_page_for_string( 'Rename</a> current history' )
self.check_page_for_string( 'Delete</a> current history' )
self.home()
+
def new_history( self, name=None ):
"""Creates a new, empty history"""
self.home()
@@ -338,6 +392,7 @@
self.visit_url( "%s/history_new" % self.url )
self.check_history_for_string('Your history is empty')
self.home()
+
def rename_history( self, id, old_name, new_name ):
"""Rename an existing history"""
self.home()
@@ -345,6 +400,7 @@
check_str = 'History: %s renamed to: %s' % ( old_name, urllib.unquote( new_name ) )
self.check_page_for_string( check_str )
self.home()
+
def set_history( self ):
"""Sets the history (stores the cookies for this run)"""
if self.history_id:
@@ -353,6 +409,7 @@
else:
self.new_history()
self.home()
+
def share_current_history( self, email, strings_displayed=[], strings_displayed_after_submit=[],
action='', action_strings_displayed=[], action_strings_displayed_after_submit=[] ):
"""Share the current history with different users"""
@@ -372,6 +429,7 @@
for check_str in action_strings_displayed_after_submit:
self.check_page_for_string( check_str )
self.home()
+
def share_histories_with_users( self, ids, emails, strings_displayed=[], strings_displayed_after_submit=[],
action=None, action_strings_displayed=[] ):
"""Share one or more histories with one or more different users"""
@@ -389,6 +447,7 @@
for check_str in action_strings_displayed:
self.check_page_for_string( check_str )
self.home()
+
def unshare_history( self, history_id, user_id, strings_displayed=[] ):
"""Unshare a history that has been shared with another user"""
self.visit_url( "%s/history/list?id=%s&operation=share+or+publish" % ( self.url, history_id ) )
@@ -396,12 +455,14 @@
self.check_page_for_string( check_str )
self.visit_url( "%s/history/sharing?unshare_user=%s&id=%s" % ( self.url, user_id, history_id ) )
self.home()
+
def switch_history( self, id='', name='' ):
"""Switches to a history in the current list of histories"""
self.visit_url( "%s/history/list?operation=switch&id=%s" % ( self.url, id ) )
if name:
- self.check_history_for_string( escape( name ) )
+ self.check_history_for_exact_string( name )
self.home()
+
def view_stored_active_histories( self, strings_displayed=[] ):
self.home()
self.visit_page( "history/list" )
@@ -698,11 +759,13 @@
# if the server's env has GALAXY_TEST_SAVE, save the output file to that dir
if self.keepOutdir:
ofn = os.path.join( self.keepOutdir, os.path.basename( local_name ) )
+ log.debug( 'keepoutdir: %s, ofn: %s', self.keepOutdir, ofn )
try:
shutil.copy( temp_name, ofn )
except Exception, exc:
error_log_msg = ( 'TwillTestCase could not save output file %s to %s: ' % ( temp_name, ofn ) )
error_log_msg += str( exc )
+ log.error( error_log_msg, exc_info=True )
else:
log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % ( self.keepOutdir, ofn ) )
diff -r e19bf2b117638221414239698f840730a2cd0569 -r 795a1799e7dcf092b14f7e2a2dba3fb389403531 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py
+++ b/test/functional/test_history_functions.py
@@ -250,8 +250,7 @@
sa_session.query( galaxy.model.HistoryDatasetAssociation )
.filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
- .first()
- )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
self.delete_history_item( str( hda_2_bed.id ) )
@@ -260,8 +259,7 @@
sa_session.query( galaxy.model.HistoryDatasetAssociation )
.filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='3.bed' ) )
- .first()
- )
+ .first() )
assert hda_3_bed is not None, "Problem retrieving hda_3_bed from database"
self.delete_history_item( str( hda_3_bed.id ) )
@@ -281,39 +279,52 @@
self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history3.name ] )
# Switch to the cloned history to make sure activatable datasets were cloned
self.switch_history( id=self.security.encode_id( history3_clone2.id ), name=history3_clone2.name )
- hda_2_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
- .first()
+ hda_2_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
- hda_3_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='3.bed' ) ) \
- .first()
+ hda_3_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history3_clone2.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='3.bed' ) )
+ .first() )
assert hda_3_bed is not None, "Problem retrieving hda_3_bed from database"
+
# Make sure the deleted datasets are included in the cloned history
- check_str = 'This dataset has been deleted. Click undelete id=%d' % hda_2_bed.id
- self.check_history_for_string( check_str, show_deleted=True )
- check_str = 'This dataset has been deleted. Click undelete id=%d' % hda_3_bed.id
- self.check_history_for_string( check_str, show_deleted=True )
+ # check for encoded ids
+ # - these will be available bc the refreshed page will have bootstrapped json for the hdas
+ #NOTE: that these WON'T be available when refreshes become less common
+ # (when the backbone.js is fully integrated and refreshes aren't used after every history function)
+ self.check_history_for_exact_string( self.security.encode_id( hda_2_bed.id ), show_deleted=True )
+ self.check_history_for_exact_string( self.security.encode_id( hda_3_bed.id ), show_deleted=True )
+
# Test cloning only active datasets
- self.clone_history( self.security.encode_id( history3.id ),
- 'active',
- strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
+ self.clone_history(
+ self.security.encode_id( history3.id ),
+ 'active',
+ strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
global history3_clone3
- history3_clone3 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history3_clone3 = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first()
+ )
assert history3_clone3 is not None, "Problem retrieving history3_clone3 from database"
+
# Check list of histories to make sure shared history3 was cloned
self.view_stored_active_histories( strings_displayed = ["Clone of '%s'" % history3.name ] )
- # Switch to the cloned history to make sure activatable datasets were cloned
+
+ # Switch to the cloned history to make sure ONLY activatable datasets were cloned
self.switch_history( id=self.security.encode_id( history3_clone3.id ) )
# Make sure the deleted datasets are NOT included in the cloned history
+ # - again using the bootstrapped json for the hdas
try:
- self.check_history_for_string( 'This dataset has been deleted.', show_deleted=True )
+ self.check_history_for_exact_string( '"deleted": true', show_deleted=True )
+ #self.check_history_for_string( 'This dataset has been deleted.', show_deleted=True )
raise AssertionError, "Deleted datasets incorrectly included in cloned history history3_clone3"
except:
pass
@@ -349,6 +360,7 @@
# Shared history3 should be in regular_user3's list of shared histories
self.view_shared_histories( strings_displayed=[ history3.name, admin_user.email ] )
"""
+
def test_045_change_permissions_on_current_history( self ):
"""Testing changing permissions on the current history"""
# Logged in as regular_user3
@@ -402,6 +414,7 @@
current_history_permissions.sort()
if current_history_permissions != history5_default_permissions:
raise AssertionError, "With logout and login, the history default permissions are not preserved"
+
def test_050_sharing_restricted_history_by_making_datasets_public( self ):
"""Testing sharing a restricted history by making the datasets public"""
# Logged in as admin_user
@@ -432,6 +445,7 @@
self.check_history_for_string( 'chr1' )
self.logout()
self.login( email=admin_user.email )
+
def test_055_sharing_restricted_history_by_making_new_sharing_role( self ):
"""Testing sharing a restricted history by associating a new sharing role with protected datasets"""
# At this point, history5 should have 1 item, 1.bed, which is public. We'll add another
@@ -506,6 +520,7 @@
self.display_history_item( str( hda_2_bed.id ), strings_displayed=[ 'chr1' ] )
# Delete the clone so the next test will be valid
self.delete_history( id=self.security.encode_id( history5_clone2.id ) )
+
def test_060_sharing_restricted_history_with_multiple_users_by_changing_no_permissions( self ):
"""Testing sharing a restricted history with multiple users, making no permission changes"""
# Logged in as regular_user2
@@ -515,10 +530,12 @@
# regular_user2 should be able to access history5's 2.bed dataset since it is associated with a
# sharing role, and regular_user3 should be able to access history5's 1.bed, but not 2.bed even
# though they can see it in their shared history.
+
# We first need to unshare history5 from regular_user2 so that we can re-share it.
self.unshare_history( self.security.encode_id( history5.id ),
self.security.encode_id( regular_user2.id ),
strings_displayed=[ regular_user1.email, regular_user2.email ] )
+
# Make sure the history was unshared correctly
self.logout()
self.login( email=regular_user2.email )
@@ -528,11 +545,14 @@
raise AssertionError, "history5 still shared with regular_user2 after unsharing it with that user."
except:
pass
+
self.logout()
self.login( admin_user.email )
email = '%s,%s' % ( regular_user2.email, regular_user3.email )
- strings_displayed_after_submit = [ 'The following datasets can be shared with %s with no changes' % email,
- 'The following datasets can be shared with %s by updating their permissions' % email ]
+ strings_displayed_after_submit = [
+ 'The following datasets can be shared with %s with no changes' % email,
+ 'The following datasets can be shared with %s by updating their permissions' % email ]
+
# history5 will be shared with regular_user1, regular_user2 and regular_user3
self.share_current_history( email,
strings_displayed_after_submit=strings_displayed_after_submit,
@@ -547,30 +567,35 @@
'activatable',
strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
global history5_clone3
- history5_clone3 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user2.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history5_clone3 = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==regular_user2.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert history5_clone3 is not None, "Problem retrieving history5_clone3 from database"
+
# Check list of histories to make sure shared history3 was cloned
self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone3.id ), name=history5_clone3.name )
- # Make sure both datasets are in the history
+ # Make sure both datasets are in the history
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
- hda_1_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
- .first()
+ hda_1_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) )
+ .first() )
assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
- hda_2_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
- .first()
+ hda_2_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone3.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
+
# Make sure 1.bed is accessible since it is public
self.display_history_item( str( hda_1_bed.id ), strings_displayed=[ 'chr1' ] )
# Make sure 2.bed is accessible since it is associated with a sharing role
@@ -582,34 +607,39 @@
self.login( email=regular_user3.email )
# Shared history5 should be in regular_user2's list of shared histories
self.view_shared_histories( strings_displayed=[ history5.name, admin_user.email ] )
+
# Clone restricted history5
self.clone_history( self.security.encode_id( history5.id ),
'activatable',
strings_displayed_after_submit=[ 'is now included in your previously stored histories.' ] )
global history5_clone4
- history5_clone4 = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==regular_user3.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ history5_clone4 = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==regular_user3.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert history5_clone4 is not None, "Problem retrieving history5_clone4 from database"
+
# Check list of histories to make sure shared history3 was cloned
self.view_stored_active_histories( strings_displayed=[ "Clone of '%s'" % history5.name ] )
# Make sure the dataset is accessible
self.switch_history( id=self.security.encode_id( history5_clone4.id ), name=history5_clone4.name )
- # Make sure both datasets are in the history
+ # Make sure both datasets are in the history
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
- hda_1_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
- .first()
+ hda_1_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) )
+ .first() )
assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
- hda_2_bed = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
- galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
- .first()
+ hda_2_bed = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_clone4.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) )
+ .first() )
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
# Make sure 1.bed is accessible since it is public
self.display_history_item( str( hda_1_bed.id ), strings_displayed=[ 'chr1' ] )
@@ -619,20 +649,31 @@
raise AssertionError, "History item 2.bed is accessible by user %s when is should not be" % regular_user3.email
except:
pass
- self.check_history_for_string( 'You do not have permission to view this dataset' )
+
+ # check the history page json for hda_2_bed and if it's accessible
+ def hda_2_bed_is_inaccessible( hda_list ):
+ for hda in hda_list:
+ if hda[ 'id' ] == self.security.encode_id( hda_2_bed.id ):
+ return ( not hda[ 'accessible' ] )
+ return False
+ self.check_history_json( r'\bhdas\s*=\s*(.*);', hda_2_bed_is_inaccessible )
+
# Admin users can view all datasets ( using the history/view feature ), so make sure 2.bed is accessible to the admin
self.logout()
self.login( email=admin_user.email )
self.view_history( str( hda_2_bed.history_id ), strings_displayed=[ '<td>NM_005997_cds_0_0_chr1_147962193_r</td>' ] )
self.logout()
self.login( email=regular_user3.email )
+
# Delete the clone so the next test will be valid
self.delete_history( id=self.security.encode_id( history5_clone4.id ) )
+
def test_065_sharing_private_history_by_choosing_to_not_share( self ):
"""Testing sharing a restricted history with multiple users by choosing not to share"""
- # Logged in as regular_user3
+ # Logged in as regular_user3 - login as admin
self.logout()
self.login( email=admin_user.email )
+
# Unshare history5 from regular_user2
self.unshare_history( self.security.encode_id( history5.id ),
self.security.encode_id( regular_user2.id ),
@@ -641,7 +682,8 @@
self.unshare_history( self.security.encode_id( history5.id ),
self.security.encode_id( regular_user3.id ),
strings_displayed=[ regular_user1.email, regular_user3.email ] )
- # Make sure the history was unshared correctly
+
+ # Make sure the histories were unshared correctly
self.logout()
self.login( email=regular_user2.email )
self.visit_page( "root/history_options" )
@@ -650,6 +692,7 @@
raise AssertionError, "history5 still shared with regular_user2 after unshaing it with that user."
except:
pass
+
self.logout()
self.login( email=regular_user3.email )
self.visit_page( "root/history_options" )
@@ -660,42 +703,73 @@
pass
self.logout()
self.login( email=admin_user.email )
+
def test_070_history_show_and_hide_deleted_datasets( self ):
"""Testing displaying deleted history items"""
+ #NOTE: due to the new client-side rendering of the history, this test isn't very apt
+ # (a) searching for strings in the dom doesn't work (they won't be twill's html) and
+ # (b) all datasets are included in the bootstrapped hda json regardless of the show_deleted setting
+ #CE: for now, I'm changing this to simply check whether the show_deleted flag
+ # is being properly passed to the history control
+ #TODO: this test needs to be moved to client-side testing framework (selenium or other)
+
# Logged in as admin_user
+ # create a new history and upload a new hda (1.bed) into it
self.new_history( name=urllib.quote( 'show hide deleted datasets' ) )
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ latest_history = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert latest_history is not None, "Problem retrieving latest_history from database"
self.upload_file('1.bed', dbkey='hg18')
- latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
- .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ) \
- .first()
+ latest_hda = (
+ sa_session.query( galaxy.model.HistoryDatasetAssociation )
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) )
+ .first() )
+
+ # delete that item and make sure the 'history empty' message shows
self.home()
+ log.info( 'deleting last hda' )
self.delete_history_item( str( latest_hda.id ) )
- self.check_history_for_string( 'Your history is empty' )
+ # check the historyPanel settings.show_deleted for a null json value (no show_deleted in query string)
+ self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == None )
+
+ # reload this history with the show_deleted flag set in the query string
+ # the deleted dataset should be there with the proper 'deleted' text
self.home()
- self.visit_url( "%s/history/?show_deleted=True" % self.url )
- self.check_page_for_string( 'This dataset has been deleted.' )
- self.check_page_for_string( '1.bed' )
+ log.info( 'turning show_deleted on' )
+ #self.visit_url( "%s/history/?show_deleted=True" % self.url )
+ # check the historyPanel settings.show_deleted for a true json value
+ self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == True, show_deleted=True )
+
+ # reload this history again with the show_deleted flag set TO FALSE in the query string
+ # make sure the 'history empty' message shows
self.home()
- self.visit_url( "%s/history/?show_deleted=False" % self.url )
- self.check_page_for_string( 'Your history is empty' )
+ log.info( 'turning show_deleted off' )
+ #self.visit_url( "%s/history/?show_deleted=False" % self.url )
+ # check the historyPanel settings.show_deleted for a false json value
+ self.check_history_json( r'\bpage_show_deleted\s*=\s*(.*),', lambda x: x == False, show_deleted=False )
+
+ # delete this history
self.delete_history( self.security.encode_id( latest_history.id ) )
+
def test_075_deleting_and_undeleting_history_items( self ):
"""Testing deleting and un-deleting history items"""
# logged in as admin_user
+
# Deleting the current history in the last method created a new history
- latest_history = sa_session.query( galaxy.model.History ) \
- .filter( and_( galaxy.model.History.table.c.deleted==False,
- galaxy.model.History.table.c.user_id==admin_user.id ) ) \
- .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .first()
+ latest_history = (
+ sa_session.query( galaxy.model.History )
+ .filter( and_( galaxy.model.History.table.c.deleted==False,
+ galaxy.model.History.table.c.user_id==admin_user.id ) )
+ .order_by( desc( galaxy.model.History.table.c.create_time ) )
+ .first() )
assert latest_history is not None, "Problem retrieving latest_history from database"
- self.rename_history( self.security.encode_id( latest_history.id ), latest_history.name, new_name=urllib.quote( 'delete undelete history items' ) )
+
+ self.rename_history( self.security.encode_id( latest_history.id ),
+ latest_history.name, new_name=urllib.quote( 'delete undelete history items' ) )
# Add a new history item
self.upload_file( '1.bed', dbkey='hg15' )
latest_hda = sa_session.query( galaxy.model.HistoryDatasetAssociation ) \
@@ -722,6 +796,7 @@
self.check_page_for_string( '1.bed' )
self.check_page_for_string( 'hg15' )
self.delete_history( self.security.encode_id( latest_history.id ) )
+
def test_080_copying_history_items_between_histories( self ):
"""Testing copying history items between histories"""
# logged in as admin_user
@@ -776,6 +851,7 @@
self.check_history_for_string( hda1.name )
self.delete_history( self.security.encode_id( history6.id ) )
self.delete_history( self.security.encode_id( history7.id ) )
+
def test_085_reset_data_for_later_test_runs( self ):
"""Reseting data to enable later test runs to to be valid"""
# logged in as admin_user
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Handle circular repository dependencies to "n" levels of depth.
by Bitbucket 10 Dec '12
10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e19bf2b11763/
changeset: e19bf2b11763
user: greg
date: 2012-12-10 22:27:29
summary: Handle circular repository dependencies to "n" levels of depth.
affected #: 3 files
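A note on the data structures used throughout the refactored code below: each dependency is carried around as a small "key_rd_dict" whose single key is the toolshed/name/owner/changeset_revision string for the repository declaring the dependency, and whose value is the dependency tuple itself. A rough, hypothetical illustration of a circular pair (repo_a requires repo_b and repo_b requires repo_a; the separator value, URLs and revisions are invented):

    STRSEP = '__ESEP__'  # stand-in for container_util.STRSEP; actual value assumed

    key_a = STRSEP.join( [ 'http://localhost:9009', 'repo_a', 'owner', 'rev_a' ] )
    key_b = STRSEP.join( [ 'http://localhost:9009', 'repo_b', 'owner', 'rev_b' ] )

    # repo_a's metadata declares a dependency on repo_b, and vice versa
    key_rd_dict_a = { key_a: [ 'http://localhost:9009', 'repo_b', 'owner', 'rev_b' ] }
    key_rd_dict_b = { key_b: [ 'http://localhost:9009', 'repo_a', 'owner', 'rev_a' ] }

    # rather than recursing forever, the pair is recorded once as a circular dependency
    circular_repository_dependencies = [ ( key_rd_dict_a[ key_a ], key_rd_dict_b[ key_b ] ) ]

    # the aggregate dictionary that get_repository_dependencies_for_changeset_revision returns
    all_repository_dependencies = {
        'root_key': key_a,
        'description': None,
        key_a: [ key_rd_dict_a[ key_a ] ],
        key_b: [ key_rd_dict_b[ key_b ] ],
    }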
diff -r c460f284077f0b400901e4cd791d94a311425751 -r e19bf2b117638221414239698f840730a2cd0569 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,8 +1,6 @@
import os, tempfile, shutil, logging, urllib2
from galaxy import util
-from galaxy.datatypes.checkers import *
-from galaxy.util.json import *
-from galaxy.util.shed_util_common import *
+from shed_util_common import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
from galaxy.tool_shed.encoding_util import *
diff -r c460f284077f0b400901e4cd791d94a311425751 -r e19bf2b117638221414239698f840730a2cd0569 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -194,28 +194,18 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
-def can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
- """
- Handle circular repository dependencies that could result in an infinite loop by determining if it is safe to add an entry to the
- repository dependencies container.
- """
- # First check for an exact match - if this is true, the changeset revision was not updated.
- repository_dependency_as_key = container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ],
- repository_dependency[ 1 ],
- repository_dependency[ 2 ],
- repository_dependency[ 3] )
- current_repository_key_as_repository_dependency = current_repository_key.split( container_util.STRSEP )
- if repository_dependency_as_key in all_repository_dependencies:
- val = all_repository_dependencies[ repository_dependency_as_key ]
- if current_repository_key_as_repository_dependency in val:
- return False
- # Now handle the case where an update to the changeset revision was done, so everything will match except the changeset_revision.
- repository_dependency_as_partial_key = container_util.STRSEP.join( [ repository_dependency[ 0 ], repository_dependency[ 1 ], repository_dependency[ 2 ] ] )
- for key in all_repository_dependencies:
- if key.startswith( repository_dependency_as_partial_key ):
- val = all_repository_dependencies[ key ]
- if current_repository_key_as_repository_dependency in val:
- return False
+def can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts ):
+ """Handle the case where an update to the changeset revision was done."""
+ k = key_rd_dict.keys()[ 0 ]
+ rd = key_rd_dict[ k ]
+ partial_rd = rd[ 0:3 ]
+ for kr_dict in key_rd_dicts:
+ key = kr_dict.keys()[ 0 ]
+ if key == k:
+ val = kr_dict[ key ]
+ for repository_dependency in val:
+ if repository_dependency[ 0:3 ] == partial_rd:
+ return False
return True
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
@@ -555,13 +545,15 @@
metadata = repository_metadata.metadata
if metadata:
# Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
- all_repository_dependencies=None )
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
+ all_repository_dependencies=None,
+ handled_key_rd_dicts=None,
+ circular_repository_dependencies=None )
# Cast unicode to string.
repo_info_dict[ str( repository.name ) ] = ( str( repository.description ),
str( repository_clone_url ),
@@ -1037,6 +1029,11 @@
if ctx_file_name == stripped_filename:
return manifest_ctx, ctx_file
return None, None
+def get_key_for_repository_changeset_revision( toolshed_base_url, repository, repository_metadata ):
+ return container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
+ repository_name=repository.name,
+ repository_owner=repository.user.username,
+ changeset_revision=repository_metadata.changeset_revision )
def get_file_context_from_ctx( ctx, filename ):
# We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
# within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
@@ -1197,123 +1194,68 @@
.filter( and_( trans.model.Repository.table.c.name == name,
trans.model.Repository.table.c.user_id == user.id ) ) \
.first()
-def get_repository_dependencies_for_changeset_revision( trans, repo, repository, repository_metadata, toolshed_base_url, repository_dependencies=None,
- all_repository_dependencies=None, handled=None ):
+def get_repository_dependencies_for_changeset_revision( trans, repo, repository, repository_metadata, toolshed_base_url,
+ key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
+ handled_key_rd_dicts=None, circular_repository_dependencies=None ):
"""
Return a dictionary of all repositories upon which the contents of the received repository_metadata record depend. The dictionary keys
are name-spaced values consisting of toolshed_base_url/repository_name/repository_owner/changeset_revision and the values are lists of
repository_dependency tuples consisting of ( toolshed_base_url, repository_name, repository_owner, changeset_revision ). This method
ensures that all required repositories to the nth degree are returned.
"""
- if handled is None:
- handled = []
+ if handled_key_rd_dicts is None:
+ handled_key_rd_dicts = []
if all_repository_dependencies is None:
all_repository_dependencies = {}
- if repository_dependencies is None:
- repository_dependencies = []
+ if key_rd_dicts_to_be_processed is None:
+ key_rd_dicts_to_be_processed = []
+ if circular_repository_dependencies is None:
+ circular_repository_dependencies = []
+ # Assume the current repository does not have repository dependencies defined for it.
+ current_repository_key = None
metadata = repository_metadata.metadata
if metadata and 'repository_dependencies' in metadata:
+ current_repository_key = get_key_for_repository_changeset_revision( toolshed_base_url, repository, repository_metadata )
repository_dependencies_dict = metadata[ 'repository_dependencies' ]
- # The repository_dependencies entry in the metadata is a dictionary that may have a value for a 'description' key. We want to
- # store the value of this key only once, the first time through this recursive method.
- current_repository_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=repository_metadata.changeset_revision )
if not all_repository_dependencies:
- # Initialize the all_repository_dependencies dictionary. It's safe to assume that current_repository_key in this case will have a value.
- all_repository_dependencies[ 'root_key' ] = current_repository_key
- all_repository_dependencies[ current_repository_key ] = []
- if 'description' not in all_repository_dependencies:
- description = repository_dependencies_dict.get( 'description', None )
- all_repository_dependencies[ 'description' ] = description
- # The next key of interest in repository_dependencies_dict is 'repository_dependencies', which is a list of tuples.
- repository_dependencies_tups = repository_dependencies_dict[ 'repository_dependencies' ]
- if repository_dependencies_tups and current_repository_key:
- # Remove all repository dependencies that point to a revision within its own repository.
- repository_dependencies_tups = remove_ropository_dependency_reference_to_self( repository_dependencies_tups, current_repository_key )
- for repository_dependency in repository_dependencies_tups:
- if repository_dependency not in handled and repository_dependency not in repository_dependencies:
- # The following if statement handles repositories dependencies that are circular in nature.
- if current_repository_key:
- if current_repository_key in all_repository_dependencies:
- # Add all repository dependencies for the current repository into it's entry in all_repository_dependencies.
- all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
- if repository_dependency not in all_repository_dependencies_val:
- all_repository_dependencies_val.append( repository_dependency )
- all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
- elif can_add_entry_to_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
- # We don't have a circular dependency that could result in an infinite loop.
- all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
- repository_dependencies.append( repository_dependency )
- else:
- # The current repository does not have repository dependencies defined for it.
- current_repository_key = None
- # The following if statement handles repositories dependencies that are circular in nature.
- if current_repository_key and current_repository_key in all_repository_dependencies:
- repository_dependencies_tups = [ rd for rd in all_repository_dependencies[ current_repository_key ] ]
- if repository_dependencies_tups:
- repository_dependency = repository_dependencies_tups.pop( 0 )
- if repository_dependency not in handled:
- handled.append( repository_dependency )
- if repository_dependency in repository_dependencies:
- repository_dependencies.remove( repository_dependency )
- toolshed, name, owner, changeset_revision = repository_dependency
- if tool_shed_is_this_tool_shed( toolshed ):
- required_repository = get_repository_by_name_and_owner( trans, name, owner )
- required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
- trans.security.encode_id( required_repository.id ),
- changeset_revision )
- if required_repository_metadata:
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- else:
- # The repository changeset_revision is no longer installable, so see if there's been an update.
- required_repo_dir = required_repository.repo_path( trans.app )
- required_repo = hg.repository( get_configured_ui(), required_repo_dir )
- required_changeset_revision = get_next_downloadable_changeset_revision( required_repository, required_repo, changeset_revision )
- required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
- trans.security.encode_id( required_repository.id ),
- required_changeset_revision )
- if required_repository_metadata:
- # The changeset_revision defined in a repository_dependencies.xml file is outdated, so we need to fix appropriate
- # entries in our all_repository_dependencies dictionary.
- updated_repository_dependency = [ toolshed, name, owner, required_changeset_revision ]
- for k, v in all_repository_dependencies.items():
- if k in [ 'root_key', 'description' ]:
- continue
- for i, current_repository_dependency in enumerate( v ):
- cts, cn, co, ccr = current_repository_dependency
- if toolshed == cts and name == cn and owner == co and changeset_revision == ccr:
- if updated_repository_dependency in v:
- # We've already stored the updated repository_dependency, so remove the outdated one.
- v = v.remove( repository_dependency )
- all_repository_dependencies[ k ] = v
- else:
- # Store the updated repository_dependency.
- v[ i ] = updated_repository_dependency
- all_repository_dependencies[ k ] = v
- if required_repository_metadata:
- # The required_repository_metadata changeset_revision is installable.
- required_metadata = required_repository_metadata.metadata
- if required_metadata:
- for repository_dependency in repository_dependencies_tups:
- if repository_dependency not in repository_dependencies:
- repository_dependencies.append( repository_dependency )
- return get_repository_dependencies_for_changeset_revision( trans=trans,
- repo=required_repo,
- repository=required_repository,
- repository_metadata=required_repository_metadata,
- toolshed_base_url=toolshed,
- repository_dependencies=repository_dependencies,
- all_repository_dependencies=all_repository_dependencies,
- handled=handled )
- else:
- # The repository is in a different tool shed, so build an url and send a request.
- error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
- error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
- log.debug( error_message )
+ all_repository_dependencies = initialize_all_repository_dependencies( current_repository_key,
+ repository_dependencies_dict,
+ all_repository_dependencies )
+ # Handle the repository dependencies defined in the current repository, if any, and populate the various repository dependency objects for
+ # this round of processing.
+ current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies = \
+ populate_repository_dependency_objects_for_processing( trans,
+ current_repository_key,
+ repository_dependencies_dict,
+ key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts,
+ circular_repository_dependencies,
+ all_repository_dependencies )
+ if current_repository_key:
+ if current_repository_key_rd_dicts:
+ # There should be only a single current_repository_key_rd_dict in this list.
+ current_repository_key_rd_dict = current_repository_key_rd_dicts[ 0 ]
+ # Handle circular repository dependencies.
+ if not in_circular_repository_dependencies( current_repository_key_rd_dict, circular_repository_dependencies ):
+ if current_repository_key in all_repository_dependencies:
+ handle_current_repository_dependency( trans,
+ current_repository_key,
+ key_rd_dicts_to_be_processed,
+ all_repository_dependencies,
+ handled_key_rd_dicts,
+ circular_repository_dependencies )
+ elif key_rd_dicts_to_be_processed:
+ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies )
+ elif key_rd_dicts_to_be_processed:
+ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies )
+ elif key_rd_dicts_to_be_processed:
+ handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies )
return all_repository_dependencies
+def get_repository_dependency_as_key( repository_dependency ):
+ return container_util.generate_repository_dependencies_key_for_repository( repository_dependency[ 0 ],
+ repository_dependency[ 1 ],
+ repository_dependency[ 2 ],
+ repository_dependency[ 3] )
def get_repository_file_contents( file_path ):
if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
@@ -1415,11 +1357,77 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_updated_changeset_revisions_for_repository_dependencies( trans, key_rd_dicts ):
+ updated_key_rd_dicts = []
+ for key_rd_dict in key_rd_dicts:
+ key = key_rd_dict.keys()[ 0 ]
+ repository_dependency = key_rd_dict[ key ]
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if tool_shed_is_this_tool_shed( toolshed ):
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ # The repository changeset_revision is installable, so no updates are available.
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = repository_dependency
+ updated_key_rd_dicts.append( key_rd_dict )
+ else:
+ # The repository changeset_revision is no longer installable, so see if there's been an update.
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ changeset_revision = get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+ repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = [ toolshed, name, owner, repository_metadata.changeset_revision ]
+ # We have the updated changeset revision.
+ updated_key_rd_dicts.append( new_key_rd_dict )
+ return updated_key_rd_dicts
def get_user_by_username( trans, username ):
"""Get a user from the database by username"""
return trans.sa_session.query( trans.model.User ) \
.filter( trans.model.User.table.c.username == username ) \
.one()
+def handle_circular_repository_dependency( repository_key, repository_dependency, circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies ):
+ all_repository_dependencies_root_key = all_repository_dependencies[ 'root_key' ]
+ repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ update_circular_repository_dependencies( repository_key,
+ repository_dependency,
+ all_repository_dependencies[ repository_dependency_as_key ],
+ circular_repository_dependencies )
+ if all_repository_dependencies_root_key != repository_dependency_as_key:
+ all_repository_dependencies[ repository_key ] = [ repository_dependency ]
+ return circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies
+def handle_current_repository_dependency( trans, current_repository_key, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts,
+ circular_repository_dependencies ):
+ current_repository_key_rd_dicts = []
+ for rd in all_repository_dependencies[ current_repository_key ]:
+ rd_copy = [ str( item ) for item in rd ]
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = rd_copy
+ current_repository_key_rd_dicts.append( new_key_rd_dict )
+ if current_repository_key_rd_dicts:
+ toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ handle_key_rd_dicts_for_repository( trans,
+ current_repository_key,
+ current_repository_key_rd_dicts,
+ key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts,
+ circular_repository_dependencies )
+ return get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=required_repo,
+ repository=required_repository,
+ repository_metadata=required_repository_metadata,
+ toolshed_base_url=toolshed,
+ key_rd_dicts_to_be_processed=key_rd_dicts_to_be_processed,
+ all_repository_dependencies=all_repository_dependencies,
+ handled_key_rd_dicts=handled_key_rd_dicts,
+ circular_repository_dependencies=circular_repository_dependencies )
def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ):
"""
This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an
@@ -1438,6 +1446,56 @@
else:
deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] )
return updated_tool_dependency_names, deleted_tool_dependency_names
+def handle_key_rd_dicts_for_repository( trans, current_repository_key, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, circular_repository_dependencies ):
+ key_rd_dict = repository_key_rd_dicts.pop( 0 )
+ repository_dependency = key_rd_dict[ current_repository_key ]
+ toolshed, name, owner, changeset_revision = repository_dependency
+ if tool_shed_is_this_tool_shed( toolshed ):
+ required_repository = get_repository_by_name_and_owner( trans, name, owner )
+ required_repository_metadata = get_repository_metadata_by_repository_id_changset_revision( trans,
+ trans.security.encode_id( required_repository.id ),
+ changeset_revision )
+ if required_repository_metadata:
+ required_repo_dir = required_repository.repo_path( trans.app )
+ required_repo = hg.repository( get_configured_ui(), required_repo_dir )
+ # The required_repository_metadata changeset_revision is installable.
+ required_metadata = required_repository_metadata.metadata
+ if required_metadata:
+ for current_repository_key_rd_dict in repository_key_rd_dicts:
+ if not in_key_rd_dicts( current_repository_key_rd_dict, key_rd_dicts_to_be_processed ):
+ key_rd_dicts_to_be_processed.append( current_repository_key_rd_dict )
+ # Mark the current repository_dependency as handled_key_rd_dicts.
+ if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ):
+ handled_key_rd_dicts.append( key_rd_dict )
+ # Remove the current repository from the list of repository_dependencies to be processed.
+ if in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+ key_rd_dicts_to_be_processed = remove_from_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed )
+ else:
+ # The repository is in a different tool shed, so build a url and send a request.
+ error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring repository dependency definition "
+ error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
+ log.debug( error_message )
+ return toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
+def handle_next_repository_dependency( trans, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts, circular_repository_dependencies ):
+ next_repository_key_rd_dict = key_rd_dicts_to_be_processed.pop( 0 )
+ next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
+ next_repository_key = next_repository_key_rd_dict.keys()[ 0 ]
+ toolshed, required_repo, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
+ handle_key_rd_dicts_for_repository( trans,
+ next_repository_key,
+ next_repository_key_rd_dicts,
+ key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts,
+ circular_repository_dependencies )
+ return get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=required_repo,
+ repository=required_repository,
+ repository_metadata=required_repository_metadata,
+ toolshed_base_url=toolshed,
+ key_rd_dicts_to_be_processed=key_rd_dicts_to_be_processed,
+ all_repository_dependencies=all_repository_dependencies,
+ handled_key_rd_dicts=handled_key_rd_dicts,
+ circular_repository_dependencies=circular_repository_dependencies )
def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
# Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
message = ''
@@ -1489,8 +1547,59 @@
message = str( e )
error = True
return error, message
+def in_all_repository_dependencies( repository_key, repository_dependency, all_repository_dependencies ):
+ """Return True if { repository_key :repository_dependency } is in all_repository_dependencies."""
+ for key, val in all_repository_dependencies.items():
+ if key != repository_key:
+ continue
+ if repository_dependency in val:
+ return True
+ return False
+def in_circular_repository_dependencies( repository_key_rd_dict, circular_repository_dependencies ):
+ """
+ Return True if any combination of a circular dependency tuple is the key : value pair defined in the received repository_key_rd_dict. This
+ means that each circular dependency tuple is converted into the key : value pair for comparison.
+ """
+ for tup in circular_repository_dependencies:
+ rd_0, rd_1 = tup
+ rd_0_as_key = get_repository_dependency_as_key( rd_0 )
+ rd_1_as_key = get_repository_dependency_as_key( rd_1 )
+ if rd_0_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_0_as_key ] == rd_1:
+ return True
+ if rd_1_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_1_as_key ] == rd_0:
+ return True
+ return False
+def in_key_rd_dicts( key_rd_dict, key_rd_dicts ):
+ k = key_rd_dict.keys()[ 0 ]
+ v = key_rd_dict[ k ]
+ for key_rd_dict in key_rd_dicts:
+ for key, val in key_rd_dict.items():
+ if key == k and val == v:
+ return True
+ return False
+def is_circular_repository_dependency( repository_key, repository_dependency, all_repository_dependencies ):
+ """
+ Return True if the received repository_dependency is a key in all_repository_dependencies whose list of repository dependencies
+ includes the received repository_key.
+ """
+ repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ for key, val in all_repository_dependencies.items():
+ if key != repository_dependency_as_key:
+ continue
+ if repository_key_as_repository_dependency in val:
+ return True
+ return False
def is_downloadable( metadata_dict ):
return 'datatypes' in metadata_dict or 'repository_dependencies' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
+def initialize_all_repository_dependencies( current_repository_key, repository_dependencies_dict, all_repository_dependencies ):
+ # Initialize the all_repository_dependencies dictionary. It's safe to assume that current_repository_key in this case will have a value.
+ all_repository_dependencies[ 'root_key' ] = current_repository_key
+ all_repository_dependencies[ current_repository_key ] = []
+ # Store the value of the 'description' key only once, the first time through this recursive method.
+ description = repository_dependencies_dict.get( 'description', None )
+ all_repository_dependencies[ 'description' ] = description
+ return all_repository_dependencies
def load_tool_from_config( app, full_path ):
try:
tool = app.toolbox.load_tool( full_path )
@@ -1553,24 +1662,78 @@
"key": full_path }
folder_contents.append( node )
return folder_contents
+def populate_repository_dependency_objects_for_processing( trans, current_repository_key, repository_dependencies_dict, key_rd_dicts_to_be_processed,
+ handled_key_rd_dicts, circular_repository_dependencies, all_repository_dependencies ):
+ current_repository_key_rd_dicts = []
+ for rd in repository_dependencies_dict[ 'repository_dependencies' ]:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = rd
+ current_repository_key_rd_dicts.append( new_key_rd_dict )
+ if current_repository_key_rd_dicts and current_repository_key:
+ # Remove all repository dependencies that point to a revision within its own repository.
+ current_repository_key_rd_dicts = remove_ropository_dependency_reference_to_self( current_repository_key_rd_dicts )
+ current_repository_key_rd_dicts = get_updated_changeset_revisions_for_repository_dependencies( trans, current_repository_key_rd_dicts )
+ for key_rd_dict in current_repository_key_rd_dicts:
+ is_circular = False
+ if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+ repository_dependency = key_rd_dict[ current_repository_key ]
+ if current_repository_key in all_repository_dependencies:
+                    # Add all repository dependencies for the current repository into its entry in all_repository_dependencies.
+ all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
+ if repository_dependency not in all_repository_dependencies_val:
+ all_repository_dependencies_val.append( repository_dependency )
+ all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
+ elif not in_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
+ # Handle circular repository dependencies.
+ if is_circular_repository_dependency( current_repository_key, repository_dependency, all_repository_dependencies ):
+ is_circular = True
+ circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies = \
+ handle_circular_repository_dependency( current_repository_key,
+ repository_dependency,
+ circular_repository_dependencies,
+ handled_key_rd_dicts,
+ all_repository_dependencies )
+ else:
+ all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
+ if not is_circular and can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = repository_dependency
+ key_rd_dicts_to_be_processed.append( new_key_rd_dict )
+ return current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies
def remove_dir( dir ):
if os.path.exists( dir ):
try:
shutil.rmtree( dir )
except:
pass
-def remove_ropository_dependency_reference_to_self( repository_dependencies, repository_key ):
+def remove_from_key_rd_dicts( key_rd_dict, key_rd_dicts ):
+ k = key_rd_dict.keys()[ 0 ]
+ v = key_rd_dict[ k ]
+ clean_key_rd_dicts = []
+ for krd_dict in key_rd_dicts:
+ key = krd_dict.keys()[ 0 ]
+ val = krd_dict[ key ]
+ if key == k and val == v:
+ continue
+ clean_key_rd_dicts.append( krd_dict )
+ return clean_key_rd_dicts
+def remove_ropository_dependency_reference_to_self( key_rd_dicts ):
"""Remove all repository dependencies that point to a revision within its own repository."""
- clean_repository_dependencies = []
- repository_tup = repository_key.split( container_util.STRSEP )
+ clean_key_rd_dicts = []
+ key = key_rd_dicts[ 0 ].keys()[ 0 ]
+ repository_tup = key.split( container_util.STRSEP )
rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_tup
- for repository_dependency in repository_dependencies:
+ for key_rd_dict in key_rd_dicts:
+ k = key_rd_dict.keys()[ 0 ]
+ repository_dependency = key_rd_dict[ k ]
toolshed, name, owner, changeset_revision = repository_dependency
if rd_toolshed == toolshed and rd_name == name and rd_owner == owner:
log.debug( "Removing repository dependency for repository %s owned by %s since it refers to a revision within itself." % ( name, owner ) )
else:
- clean_repository_dependencies.append( repository_dependency )
- return clean_repository_dependencies
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = repository_dependency
+ clean_key_rd_dicts.append( new_key_rd_dict )
+ return clean_key_rd_dicts
def remove_tool_dependency_installation_directory( dependency_install_dir ):
if os.path.exists( dependency_install_dir ):
try:
@@ -1841,6 +2004,19 @@
else:
translated_string = ''
return translated_string
+def update_circular_repository_dependencies( repository_key, repository_dependency, repository_dependencies, circular_repository_dependencies ):
+ repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ if repository_key_as_repository_dependency in repository_dependencies:
+ found = False
+ for tup in circular_repository_dependencies:
+ if repository_dependency in tup and repository_key_as_repository_dependency in tup:
+ # The circular dependency has already been included.
+ found = True
+ if not found:
+ new_circular_tup = [ repository_dependency, repository_key_as_repository_dependency ]
+ circular_repository_dependencies.append( new_circular_tup )
+ return circular_repository_dependencies
def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ):
"""
    Update an existing tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates
@@ -1889,8 +2065,7 @@
sa_session.flush()
new_tool_dependency = tool_dependency
else:
- # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record
- # from the database.
+ # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record from the database.
log.debug( "Deleting tool dependency with name '%s', type '%s' and version '%s' from the database since it is no longer defined." % \
( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ) ) )
sa_session.delete( tool_dependency )
diff -r c460f284077f0b400901e4cd791d94a311425751 -r e19bf2b117638221414239698f840730a2cd0569 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1782,14 +1782,14 @@
is_malicious = repository_metadata.malicious
if repository_metadata:
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
- handled=None )
+ handled_key_rd_dicts=None )
if is_malicious:
if trans.app.security_agent.can_push( trans.app, trans.user, repository ):
message += malicious_error_can_push
@@ -1895,14 +1895,14 @@
repository_metadata_id = trans.security.encode_id( repository_metadata.id ),
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
- handled=None )
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
@@ -2417,14 +2417,14 @@
repository_metadata_id = trans.security.encode_id( repository_metadata.id )
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = get_repository_dependencies_for_changeset_revision( trans,
- repo,
- repository,
- repository_metadata,
- str( url_for( '/', qualified=True ) ).rstrip( '/' ),
- repository_dependencies=None,
+ repository_dependencies = get_repository_dependencies_for_changeset_revision( trans=trans,
+ repo=repo,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
- handled=None )
+ handled_key_rd_dicts=None )
else:
repository_metadata_id = None
metadata = None
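For anyone skimming the new dependency-walking helpers above, the following is a minimal, hypothetical Python sketch of the structures they operate on. The STRSEP value, repository tuples, and URLs below are illustrative assumptions, not copied from container_util or the tool shed database.

STRSEP = '__ESEP__'  # assumed separator for illustration; container_util defines the real one

def get_repository_dependency_as_key( repository_dependency ):
    # Flatten a [ toolshed, name, owner, changeset_revision ] list into a single flat key.
    return STRSEP.join( repository_dependency )

# Two repositories that each declare a dependency on the other (a circular dependency).
repo_a = [ 'http://toolshed.example.org', 'repo_a', 'some_owner', 'rev_a' ]
repo_b = [ 'http://toolshed.example.org', 'repo_b', 'some_owner', 'rev_b' ]

all_repository_dependencies = { get_repository_dependency_as_key( repo_a ): [ repo_b ],
                                get_repository_dependency_as_key( repo_b ): [ repo_a ] }

def is_circular( repository_key, repository_dependency, all_repository_dependencies ):
    # Mirrors is_circular_repository_dependency above: the dependency is circular when its
    # own entry in all_repository_dependencies lists the current repository.
    dependency_key = get_repository_dependency_as_key( repository_dependency )
    current_repository_as_dependency = repository_key.split( STRSEP )
    return current_repository_as_dependency in all_repository_dependencies.get( dependency_key, [] )

assert is_circular( get_repository_dependency_as_key( repo_a ), repo_b, all_repository_dependencies )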
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for value_to_display_text in DatasetToolparameter when encountering a non-set optional multiple dataset; inspired by a patch from Kyle Ellrott.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c460f284077f/
changeset: c460f284077f
user: dan
date: 2012-12-10 22:14:05
summary: Fix for value_to_display_text in DatasetToolparameter when encountering a non-set optional multiple dataset; inspired by a patch from Kyle Ellrott.
affected #: 1 file
diff -r fc84a8b469c3181be64083a91d0fc2faae5e73ef -r c460f284077f0b400901e4cd791d94a311425751 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1605,7 +1605,7 @@
return value.file_name
def value_to_display_text( self, value, app ):
- if not isinstance(value, list):
+ if value and not isinstance( value, list ):
value = [ value ]
if value:
return ", ".join( [ "%s: %s" % ( item.hid, item.name ) for item in value ] )
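A minimal, standalone sketch of the case this one-word guard addresses; the FakeDataset class and the "No dataset" fallback are made up for illustration, since the actual else branch of value_to_display_text is outside this diff. When an optional multiple-dataset parameter is unset, value is None; without the "value and" check, None was wrapped in a list and the join raised an AttributeError.

class FakeDataset( object ):
    # Stand-in for a history dataset; only hid and name are needed here.
    def __init__( self, hid, name ):
        self.hid = hid
        self.name = name

def value_to_display_text( value ):
    if value and not isinstance( value, list ):
        value = [ value ]
    if value:
        return ", ".join( [ "%s: %s" % ( item.hid, item.name ) for item in value ] )
    return "No dataset"  # illustrative fallback only

assert value_to_display_text( None ) == "No dataset"
assert value_to_display_text( FakeDataset( 1, 'reads.fastq' ) ) == "1: reads.fastq"
assert value_to_display_text( [ FakeDataset( 1, 'a' ), FakeDataset( 2, 'b' ) ] ) == "1: a, 2: b"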
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Trackster: use tile region in place of tile index for simplicity.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fc84a8b469c3/
changeset: fc84a8b469c3
user: jgoecks
date: 2012-12-10 22:08:44
summary: Trackster: use tile region in place of tile index for simplicity.
affected #: 1 file
diff -r c3acc86490780d1949c4a3abf1263ace7c5ece6c -r fc84a8b469c3181be64083a91d0fc2faae5e73ef static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2762,23 +2762,27 @@
}
*/
},
+
/**
* Generate a key for the tile cache.
* TODO: create a TileCache object (like DataCache) and generate key internally.
*/
- _gen_tile_cache_key: function(w_scale, tile_index) {
- return w_scale + '_' + tile_index;
+ _gen_tile_cache_key: function(w_scale, tile_region) {
+ return w_scale + '_' + tile_region;
},
+
/**
* Request that track be drawn.
*/
request_draw: function(force, clear_after) {
this.view.request_redraw(false, force, clear_after, this);
},
+
/**
* Actions to be taken before drawing.
*/
before_draw: function() {},
+
/**
* Draw track. It is possible to force a redraw rather than use cached tiles and/or clear old
* tiles after drawing new tiles.
@@ -2826,7 +2830,8 @@
is_tile = function(o) { return (o && 'track' in o); };
// Draw tiles.
while ( ( tile_index * TILE_SIZE * resolution ) < high ) {
- var draw_result = this.draw_helper( force, tile_index, resolution, this.tiles_div, w_scale );
+ var tile_region = this._get_tile_bounds(tile_index, resolution),
+ draw_result = this.draw_helper( force, tile_region, resolution, this.tiles_div, w_scale );
if ( is_tile(draw_result) ) {
drawn_tiles.push( draw_result );
} else {
@@ -2871,10 +2876,9 @@
* Retrieves from cache, draws, or sets up drawing for a single tile. Returns either a Tile object or a
* jQuery.Deferred object that is fulfilled when tile can be drawn again.
*/
- draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, region, resolution, parent_element, w_scale, kwargs) {
var track = this,
- key = this._gen_tile_cache_key(w_scale, tile_index),
- region = this._get_tile_bounds(tile_index, resolution);
+ key = this._gen_tile_cache_key(w_scale, region);
// Init kwargs if necessary to avoid having to check if kwargs defined.
if (!kwargs) { kwargs = {}; }
@@ -3022,7 +3026,7 @@
/**
* Returns a genome region that corresponds to a tile at a particular resolution
- */
+ */
_get_tile_bounds: function(tile_index, resolution) {
var tile_low = Math.floor( tile_index * TILE_SIZE * resolution ),
tile_length = Math.ceil( TILE_SIZE * resolution ),
@@ -3223,11 +3227,10 @@
this.action_icons.param_space_viz_icon.hide();
},
can_draw: Drawable.prototype.can_draw,
- draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, region, resolution, parent_element, w_scale, kwargs) {
// FIXME: this function is similar to TiledTrack.draw_helper -- can the two be merged/refactored?
var track = this,
- key = this._gen_tile_cache_key(w_scale, tile_index),
- region = this._get_tile_bounds(tile_index, resolution);
+ key = this._gen_tile_cache_key(w_scale, region);
// Init kwargs if necessary to avoid having to check if kwargs defined.
if (!kwargs) { kwargs = {}; }
@@ -3446,9 +3449,9 @@
/**
* Only retrieves data and draws tile if reference data can be displayed.
*/
- draw_helper: function(force, tile_index, resolution, parent_element, w_scale, kwargs) {
+ draw_helper: function(force, region, resolution, parent_element, w_scale, kwargs) {
if (w_scale > this.view.canvas_manager.char_width_px) {
- return TiledTrack.prototype.draw_helper.call(this, force, tile_index, resolution, parent_element, w_scale, kwargs);
+ return TiledTrack.prototype.draw_helper.call(this, force, region, resolution, parent_element, w_scale, kwargs);
}
else {
this.hide_contents();
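A rough Python sketch (the real code above is JavaScript in tracks.js) of the idea behind this refactor: the tile cache is keyed by the genomic region a tile covers rather than by its integer index, so a caller that already has a region never recomputes the bounds. The TILE_SIZE value and key format below are illustrative assumptions.

import math

TILE_SIZE = 400  # illustrative value only

def get_tile_bounds( tile_index, resolution ):
    # Genome interval covered by a tile at the given resolution.
    tile_low = int( math.floor( tile_index * TILE_SIZE * resolution ) )
    tile_length = int( math.ceil( TILE_SIZE * resolution ) )
    return ( tile_low, tile_low + tile_length )

def gen_tile_cache_key( w_scale, tile_region ):
    # Key on zoom level plus region, mirroring the new _gen_tile_cache_key signature.
    return '%s_%s' % ( w_scale, tile_region )

tile_cache = {}
region = get_tile_bounds( tile_index=3, resolution=10 )
tile_cache[ gen_tile_cache_key( 0.05, region ) ] = 'rendered tile placeholder'
assert gen_tile_cache_key( 0.05, region ) in tile_cache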
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Trackster: use underscore methods to simplify tile search/iteration and remove old debugging statement.
by Bitbucket 10 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c3acc8649078/
changeset: c3acc8649078
user: jgoecks
date: 2012-12-10 21:47:05
summary: Trackster: use underscore methods to simplify tile search/iteration and remove old debugging statement.
affected #: 1 file
diff -r 3ee0e5ee1b375c0fd580c26a93850f2fa44f93f0 -r c3acc86490780d1949c4a3abf1263ace7c5ece6c static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -2854,21 +2854,17 @@
//
// If some tiles have icons, set padding of tiles without icons so features and rows align.
//
- var icons_present = false;
- for (var tile_index = 0; tile_index < tiles.length; tile_index++) {
- if (tiles[tile_index].has_icons) {
- icons_present = true;
- break;
- }
- }
+ var icons_present = _.find(tiles, function(tile) {
+ return tile.has_icons;
+ });
+
if (icons_present) {
- for (var tile_index = 0; tile_index < tiles.length; tile_index++) {
- tile = tiles[tile_index];
+ _.each(tiles, function(tile) {
if (!tile.has_icons) {
// Need to align with other tile(s) that have icons.
tile.html_elt.css("padding-top", ERROR_PADDING);
}
- }
+ });
}
},
/**
@@ -4045,7 +4041,6 @@
var painter = new (this.painter)(filtered, tile_low, tile_high, this.prefs, mode, filter_alpha_scaler, filter_height_scaler, ref_seq);
var feature_mapper = null;
- // console.log(( tile_low - this.view.low ) * w_scale, tile_index, w_scale);
ctx.fillStyle = this.prefs.block_color;
ctx.font = ctx.canvas.manager.default_font;
ctx.textAlign = "right";
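The refactor above swaps hand-rolled index loops for underscore's _.find and _.each. A rough Python analogue of the same pattern, with a made-up Tile class and ERROR_PADDING value for illustration:

ERROR_PADDING = 20  # illustrative value only

class Tile( object ):
    def __init__( self, has_icons ):
        self.has_icons = has_icons
        self.padding_top = 0

tiles = [ Tile( False ), Tile( True ), Tile( False ) ]

# Predicate search instead of a manual indexed loop with a break.
icons_present = any( tile.has_icons for tile in tiles )

if icons_present:
    # Pad only the tiles without icons so features and rows align.
    for tile in tiles:
        if not tile.has_icons:
            tile.padding_top = ERROR_PADDING

assert [ t.padding_top for t in tiles ] == [ ERROR_PADDING, 0, ERROR_PADDING ]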
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.