commit/galaxy-central: inithello: Functional tests for n levels of circular repository dependencies.
1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/376a3714dc15/
changeset: 376a3714dc15
user:      inithello
date:      2012-12-11 20:29:28
summary:   Functional tests for n levels of circular repository dependencies.
affected #: 7 files

diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/base/common.py
--- a/test/tool_shed/base/common.py
+++ b/test/tool_shed/base/common.py
@@ -10,6 +10,8 @@
 new_repository_dependencies_xml = '''<?xml version="1.0"?>
 <repositories${description}>
-    <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />
+${dependency_lines}
 </repositories>
 '''
+
+new_repository_dependencies_line = '''    <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''
\ No newline at end of file

diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -22,12 +22,6 @@
         self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
         self.tool_shed_test_file = None
         self.shed_tools_dict = {}
-        self.keepOutdir = os.environ.get( 'TOOL_SHED_TEST_SAVE', '' )
-        if self.keepOutdir > '':
-            try:
-                os.makedirs( self.keepOutdir )
-            except:
-                pass
         self.home()
     def browse_repository( self, repository, strings_displayed=[], strings_not_displayed=[] ):
         url = '/repository/browse_repository?id=%s' % self.security.encode_id( repository.id )
@@ -50,9 +44,11 @@
         url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
         self.visit_url( url )
         self.check_for_strings( strings_displayed, strings_not_displayed )
-    def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision ):
+    def check_repository_dependency( self, repository, depends_on_repository, depends_on_changeset_revision, changeset_revision=None ):
+        if changeset_revision is None:
+            changeset_revision = self.get_repository_tip( repository )
         strings_displayed = [ depends_on_repository.name, depends_on_repository.user.username, depends_on_changeset_revision ]
-        self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+        self.display_manage_repository_page( repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed )
     def check_repository_metadata( self, repository, tip_only=True ):
         if tip_only:
             assert self.tip_has_metadata( repository ) and len( self.get_repository_metadata_revisions( repository ) ) == 1, \
@@ -216,21 +212,24 @@
             else:
                 string = string.replace( character, replacement )
         return string
-    def generate_repository_dependency_xml( self, repository, xml_filename, dependency_description='' ):
+    def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='' ):
         file_path = os.path.split( xml_filename )[0]
         if not os.path.exists( file_path ):
             os.makedirs( file_path )
-        changeset_revision = self.get_repository_tip( repository )
+        dependency_entries = []
+        for repository in repositories:
+            changeset_revision = self.get_repository_tip( repository )
+            template = string.Template( common.new_repository_dependencies_line )
+            dependency_entries.append( template.safe_substitute( toolshed_url=self.url,
+                                                                 owner=repository.user.username,
+                                                                 repository_name=repository.name,
+                                                                 changeset_revision=changeset_revision ) )
         if dependency_description:
             description = ' description="%s"' % dependency_description
         else:
             description = dependency_description
         template_parser = string.Template( common.new_repository_dependencies_xml )
-        repository_dependency_xml = template_parser.safe_substitute( toolshed_url=self.url,
-                                                                      owner=repository.user.username,
-                                                                      repository_name=repository.name,
-                                                                      changeset_revision=changeset_revision,
-                                                                      description=description )
+        repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
         # Save the generated xml to the specified location.
         file( xml_filename, 'w' ).write( repository_dependency_xml )
     def generate_temp_path( self, test_script_path, additional_paths=[] ):
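The two templates above compose in a straightforward way: generate_repository_dependency_xml() now renders one <repository .../> line per repository in the list it receives and substitutes the joined lines into the outer document. Below is a minimal, standalone sketch of that pattern using the template strings from common.py; the URL, repository tuples and changeset hashes are made-up placeholders, not values from the test suite.

import string

# Outer document and per-repository line, as defined in test/tool_shed/base/common.py.
new_repository_dependencies_xml = '''<?xml version="1.0"?>
<repositories${description}>
${dependency_lines}
</repositories>
'''
new_repository_dependencies_line = '''    <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''

def build_dependency_xml( toolshed_url, repositories, description='' ):
    # repositories is a list of ( name, owner, changeset_revision ) tuples -- placeholder data here.
    lines = []
    for name, owner, changeset_revision in repositories:
        line_template = string.Template( new_repository_dependencies_line )
        lines.append( line_template.safe_substitute( toolshed_url=toolshed_url,
                                                     repository_name=name,
                                                     owner=owner,
                                                     changeset_revision=changeset_revision ) )
    if description:
        description = ' description="%s"' % description
    doc_template = string.Template( new_repository_dependencies_xml )
    return doc_template.safe_substitute( description=description,
                                         dependency_lines='\n'.join( lines ) )

print build_dependency_xml( 'http://localhost:9009',
                            [ ( 'emboss_datatypes_0050', 'user1', 'abc123' ),
                              ( 'emboss_0050', 'user1', 'def456' ) ],
                            description='Example dependencies' )

Because safe_substitute() is used for both templates, any placeholder that is not supplied is simply left in place rather than raising a KeyError.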
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -55,7 +55,7 @@
         repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
         datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
         repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
-        self.generate_repository_dependency_xml( datatypes_repository,
+        self.generate_repository_dependency_xml( [ datatypes_repository ],
                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
         self.upload_file( repository,
                           'repository_dependencies.xml',

diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -73,7 +73,7 @@
         '''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
         datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
         repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
-        self.generate_repository_dependency_xml( datatypes_repository,
+        self.generate_repository_dependency_xml( [ datatypes_repository ],
                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
         emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
         self.upload_file( emboss_5_repository,
@@ -93,7 +93,7 @@
         emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
         emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
         repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
-        self.generate_repository_dependency_xml( emboss_5_repository,
+        self.generate_repository_dependency_xml( [ emboss_5_repository ],
                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
                                                  dependency_description='Emboss requires the Emboss 5 repository.' )
         self.upload_file( emboss_repository,
@@ -105,7 +105,7 @@
         emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
         emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
         repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
-        self.generate_repository_dependency_xml( emboss_6_repository,
+        self.generate_repository_dependency_xml( [ emboss_6_repository ],
                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
                                                  dependency_description='Emboss requires the Emboss 6 repository.' )
         self.upload_file( emboss_repository,
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -63,7 +63,7 @@
         repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
         filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
         repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
-        self.generate_repository_dependency_xml( repository,
+        self.generate_repository_dependency_xml( [ repository ],
                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
                                                  dependency_description='Filtering 1.1.0 depends on the freebayes repository.' )
         self.upload_file( filtering_repository,
@@ -79,7 +79,7 @@
         repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
         freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
         repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
-        self.generate_repository_dependency_xml( repository,
+        self.generate_repository_dependency_xml( [ repository ],
                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
                                                  dependency_description='Freebayes depends on the filtering repository.' )
         self.upload_file( freebayes_repository,
@@ -95,8 +95,5 @@
         # Freebayes revision 0 -> filtering revision 1.
         # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
         # In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
-        filtering_strings_displayed = [ freebayes_repository_name, common.test_user_1_name, self.get_repository_tip( freebayes_repository ) ]
-        freebayes_strings_displayed = [ filtering_repository_name, common.test_user_1_name, self.get_repository_tip( filtering_repository ) ]
-        self.display_manage_repository_page( filtering_repository, strings_displayed=filtering_strings_displayed )
-        self.display_manage_repository_page( freebayes_repository, strings_displayed=freebayes_strings_displayed )
-
+        self.check_repository_dependency( filtering_repository, freebayes_repository, self.get_repository_tip( freebayes_repository ) )
+        self.check_repository_dependency( freebayes_repository, filtering_repository, self.get_repository_tip( filtering_repository ) )
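The last hunk swaps the hand-built string lists for the new check_repository_dependency() helper, exercised in both directions of the filtering/freebayes circle. What these tests ultimately guard against is an endless walk around that circle when dependency pages are rendered; the usual way to make such a traversal terminate is to remember which repositories have already been visited. The sketch below illustrates that general idea only, not the Tool Shed's actual implementation, and the graph literal is made up to mirror test_0040.

def walk_dependencies( graph, start ):
    # Return every repository reachable from start, visiting each node at most
    # once so that a cycle cannot cause an infinite loop.
    seen = set()
    stack = [ start ]
    while stack:
        current = stack.pop()
        if current in seen:
            continue
        seen.add( current )
        stack.extend( graph.get( current, [] ) )
    return seen

# filtering and freebayes depend on each other, as uploaded in test_0040.
graph = { 'filtering_0040': [ 'freebayes_0040' ],
          'freebayes_0040': [ 'filtering_0040' ] }
print walk_dependencies( graph, 'filtering_0040' )   # terminates despite the cycle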
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional/test_0050_circular_n_levels.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -0,0 +1,129 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+
+emboss_datatypes_repository_name = 'emboss_datatypes_0050'
+emboss_datatypes_repository_description = "Datatypes for emboss"
+emboss_datatypes_repository_long_description = "Long description of Emboss' datatypes"
+
+emboss_repository_name = 'emboss_0050'
+emboss_repository_description = "Galaxy's emboss tool"
+emboss_repository_long_description = "Long description of Galaxy's emboss tool"
+
+freebayes_repository_name = 'freebayes_0050'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+filtering_repository_name = 'filtering_0050'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+default_category = 'test_0050_repository_n_level_circular_dependencies'
+default_category_description = 'Testing handling of circular repository dependencies to n levels.'
+
+class TestRepositoryCircularDependenciesToNLevels( ShedTwillTestCase ):
+    '''Verify that the code correctly handles circular dependencies down to n levels.'''
+    def test_0000_initiate_users( self ):
+        """Create necessary user accounts."""
+        self.logout()
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        test_user_1 = get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+        test_user_1_private_role = get_private_role( test_user_1 )
+        self.logout()
+        self.login( email=common.admin_email, username=common.admin_username )
+        admin_user = get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+        admin_user_private_role = get_private_role( admin_user )
+    def test_0005_create_category( self ):
+        """Create a category for this test suite"""
+        self.create_category( default_category, default_category_description )
+    def test_0010_create_emboss_datatypes_repository( self ):
+        '''Create and populate emboss_datatypes_0050.'''
+        self.logout()
+        self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+        self.create_repository( emboss_datatypes_repository_name,
+                                emboss_datatypes_repository_description,
+                                repository_long_description=emboss_datatypes_repository_long_description,
+                                categories=[ default_category ],
+                                strings_displayed=[] )
+        repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          'emboss/datatypes/datatypes_conf.xml',
+                          strings_displayed=[],
+                          commit_message='Uploaded datatypes_conf.xml.' )
+    def test_0015_create_emboss_repository( self ):
+        '''Create and populate emboss_0050.'''
+        self.create_repository( emboss_repository_name,
+                                emboss_repository_description,
+                                repository_long_description=emboss_repository_long_description,
+                                categories=[ default_category ],
+                                strings_displayed=[] )
+        repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          'emboss/emboss.tar',
+                          strings_displayed=[],
+                          commit_message='Uploaded tool tarball.' )
+        datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'emboss' ] )
+        self.generate_repository_dependency_xml( [ datatypes_repository ],
+                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+                                                  dependency_description='Emboss depends on the emboss_datatypes repository.' )
+        self.upload_file( repository,
+                          'repository_dependencies.xml',
+                          filepath=repository_dependencies_path,
+                          commit_message='Uploaded dependency on emboss_datatypes.' )
+    def test_0020_create_filtering_repository( self ):
+        '''Create and populate filtering_0050.'''
+        self.create_repository( filtering_repository_name,
+                                filtering_repository_description,
+                                repository_long_description=filtering_repository_long_description,
+                                categories=[ default_category ],
+                                strings_displayed=[] )
+        repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          'filtering/filtering_1.1.0.tar',
+                          strings_displayed=[],
+                          commit_message='Uploaded filtering.tar.' )
+        emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'filtering' ] )
+        self.generate_repository_dependency_xml( [ emboss_repository ],
+                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+                                                  dependency_description='Filtering depends on the emboss repository.' )
+        self.upload_file( repository,
+                          'repository_dependencies.xml',
+                          filepath=repository_dependencies_path,
+                          commit_message='Uploaded dependency on emboss.' )
+    def test_0025_create_freebayes_repository( self ):
+        '''Create and populate freebayes_0050.'''
+        self.create_repository( freebayes_repository_name,
+                                freebayes_repository_description,
+                                repository_long_description=freebayes_repository_long_description,
+                                categories=[ default_category ],
+                                strings_displayed=[] )
+        repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        self.upload_file( repository,
+                          'freebayes/freebayes.tar',
+                          strings_displayed=[],
+                          commit_message='Uploaded freebayes.tar.' )
+        emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
+        previous_tip = self.get_repository_tip( repository )
+        self.generate_repository_dependency_xml( [ emboss_datatypes_repository, emboss_repository, filtering_repository, repository ],
+                                                  self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+                                                  dependency_description='Freebayes depends on the filtering repository.' )
+        self.upload_file( repository,
+                          'repository_dependencies.xml',
+                          filepath=repository_dependencies_path,
+                          commit_message='Uploaded dependency on filtering.' )
+        self.display_manage_repository_page( repository, strings_not_displayed=[ previous_tip ] )
+    def test_0030_verify_repository_dependencies( self ):
+        '''Verify that the generated dependency circle does not cause an infinite loop.'''
+        emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+        emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+        filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+        freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+        for repository in [ emboss_datatypes_repository, emboss_repository, filtering_repository ]:
+            self.check_repository_dependency( freebayes_repository, repository, self.get_repository_tip( repository ) )
+        self.display_manage_repository_page( freebayes_repository, strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
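test_0050_circular_n_levels.py builds a four-repository chain (emboss depends on emboss_datatypes, filtering on emboss, and freebayes on everything, itself included), so the circle now spans several levels rather than a single pair. The following is only a rough illustration of the declared structure and the chain depth the tests exercise, not code from the commit; the graph literal is transcribed from the repository_dependencies.xml files uploaded above.

# Direct dependencies as declared by the repository_dependencies.xml files in test_0050.
graph = { 'emboss_datatypes_0050': [],
          'emboss_0050': [ 'emboss_datatypes_0050' ],
          'filtering_0050': [ 'emboss_0050' ],
          'freebayes_0050': [ 'emboss_datatypes_0050', 'emboss_0050', 'filtering_0050', 'freebayes_0050' ] }

def chain_depth( graph, node, path=() ):
    # Longest dependency chain starting at node; repositories already on the
    # current path are skipped, so the circle back to freebayes_0050 terminates.
    if node in path:
        return 0
    depths = [ chain_depth( graph, dep, path + ( node, ) ) for dep in graph[ node ] ]
    return 1 + max( depths or [ 0 ] )

print chain_depth( graph, 'freebayes_0050' )   # 4: freebayes -> filtering -> emboss -> emboss_datatypes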
diff -r a7ea7728829dcc5395606bd402805d2ac972d6c6 -r 376a3714dc1510d3a1405575fc2667b68a28a695 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -67,9 +67,7 @@
     # ---- Configuration ------------------------------------------------------
     tool_shed_test_host = os.environ.get( 'TOOL_SHED_TEST_HOST', default_tool_shed_test_host )
     tool_shed_test_port = os.environ.get( 'TOOL_SHED_TEST_PORT', None )
-    tool_shed_test_save = os.environ.get( 'TOOL_SHED_TEST_SAVE', None )
     tool_path = os.environ.get( 'TOOL_SHED_TEST_TOOL_PATH', 'tools' )
-    start_server = 'TOOL_SHED_TEST_EXTERNAL' not in os.environ
     if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
         os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_tool_shed_locales
     tool_shed_test_file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', default_tool_shed_test_file_dir )
@@ -84,57 +82,27 @@
     tool_dependency_dir = os.environ.get( 'TOOL_SHED_TOOL_DEPENDENCY_DIR', None )
     use_distributed_object_store = os.environ.get( 'TOOL_SHED_USE_DISTRIBUTED_OBJECT_STORE', False )
-    if start_server:
-        if not os.path.isdir( tool_shed_test_tmp_dir ):
-            os.mkdir( tool_shed_test_tmp_dir )
-        psu_production = False
-        tool_shed_test_proxy_port = None
-        if 'TOOL_SHED_TEST_PSU_PRODUCTION' in os.environ:
-            if not tool_shed_test_port:
-                raise Exception( 'Set TOOL_SHED_TEST_PORT to the port to which the proxy server will proxy' )
-            tool_shed_test_proxy_port = os.environ.get( 'TOOL_SHED_TEST_PROXY_PORT', None )
-            if not tool_shed_test_proxy_port:
-                raise Exception( 'Set TOOL_SHED_TEST_PROXY_PORT to the port on which the proxy server is listening' )
-            base_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_FILE_PATH', None )
-            if not base_file_path:
-                raise Exception( 'Set TOOL_SHED_TEST_BASE_FILE_PATH to the directory which will contain the dataset files directory' )
-            base_new_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_NEW_FILE_PATH', None )
-            if not base_new_file_path:
-                raise Exception( 'Set TOOL_SHED_TEST_BASE_NEW_FILE_PATH to the directory which will contain the temporary directory' )
-            database_connection = os.environ.get( 'TOOL_SHED_TEST_DBURI', None )
-            if not database_connection:
-                raise Exception( 'Set TOOL_SHED_TEST_DBURI to the URI of the database to be used for tests' )
-            nginx_upload_store = os.environ.get( 'TOOL_SHED_TEST_NGINX_UPLOAD_STORE', None )
-            if not nginx_upload_store:
-                raise Exception( 'Set TOOL_SHED_TEST_NGINX_UPLOAD_STORE to the path where the nginx upload module places uploaded files' )
-            file_path = tempfile.mkdtemp( dir=base_file_path )
-            new_repos_path = tempfile.mkdtemp( dir=base_new_file_path )
-            hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
-            kwargs = dict( database_engine_option_pool_size = '10',
-                           database_engine_option_max_overflow = '20',
-                           database_engine_option_strategy = 'threadlocal',
-                           static_enabled = 'False',
-                           debug = 'False' )
-            psu_production = True
-        else:
-            if 'TOOL_SHED_TEST_DBPATH' in os.environ:
-                db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
-            else:
-                tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
-                db_path = os.path.join( tempdir, 'database' )
-            file_path = os.path.join( db_path, 'files' )
-            hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
-            new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
-            if 'TOOL_SHED_TEST_DBURI' in os.environ:
-                database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
-            else:
-                database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
-            kwargs = {}
-        for dir in [ tool_shed_test_tmp_dir ]:
-            try:
-                os.makedirs( dir )
-            except OSError:
-                pass
+    if not os.path.isdir( tool_shed_test_tmp_dir ):
+        os.mkdir( tool_shed_test_tmp_dir )
+    tool_shed_test_proxy_port = None
+    if 'TOOL_SHED_TEST_DBPATH' in os.environ:
+        db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
+    else:
+        tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+        db_path = os.path.join( tempdir, 'database' )
+    file_path = os.path.join( db_path, 'files' )
+    hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+    new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+    if 'TOOL_SHED_TEST_DBURI' in os.environ:
+        database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
+    else:
+        database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
+    kwargs = {}
+    for dir in [ tool_shed_test_tmp_dir ]:
+        try:
+            os.makedirs( dir )
+        except OSError:
+            pass
     print "Database connection:", database_connection
@@ -145,89 +113,78 @@
     # ---- Build Application --------------------------------------------------
     app = None
-    if start_server:
-        global_conf = { '__file__' : 'community_wsgi.ini.sample' }
-        if psu_production:
-            global_conf = None
-        if not database_connection.startswith( 'sqlite://' ):
-            kwargs[ 'database_engine_option_max_overflow' ] = '20'
-        if tool_dependency_dir is not None:
-            kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
-        if use_distributed_object_store:
-            kwargs[ 'object_store' ] = 'distributed'
-            kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
+    global_conf = { '__file__' : 'community_wsgi.ini.sample' }
+    if not database_connection.startswith( 'sqlite://' ):
+        kwargs[ 'database_engine_option_max_overflow' ] = '20'
+    if tool_dependency_dir is not None:
+        kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
+    if use_distributed_object_store:
+        kwargs[ 'object_store' ] = 'distributed'
+        kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
-        app = UniverseApplication( job_queue_workers = 5,
-                                   id_secret = 'changethisinproductiontoo',
-                                   template_path = 'templates',
-                                   database_connection = database_connection,
-                                   database_engine_option_pool_size = '10',
-                                   file_path = file_path,
-                                   new_file_path = new_repos_path,
-                                   tool_path=tool_path,
-                                   datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
-                                   tool_parse_help = False,
-                                   tool_data_table_config_path = tool_data_table_config_path,
-                                   shed_tool_data_table_config = shed_tool_data_table_config,
-                                   log_destination = "stdout",
-                                   use_heartbeat = False,
-                                   allow_user_creation = True,
-                                   allow_user_deletion = True,
-                                   admin_users = 'test@bx.psu.edu',
-                                   global_conf = global_conf,
-                                   running_functional_tests = True,
-                                   hgweb_config_dir = hgweb_config_dir,
-                                   **kwargs )
+    app = UniverseApplication( job_queue_workers = 5,
+                               id_secret = 'changethisinproductiontoo',
+                               template_path = 'templates',
+                               database_connection = database_connection,
+                               database_engine_option_pool_size = '10',
+                               file_path = file_path,
+                               new_file_path = new_repos_path,
+                               tool_path=tool_path,
+                               datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
+                               tool_parse_help = False,
+                               tool_data_table_config_path = tool_data_table_config_path,
+                               shed_tool_data_table_config = shed_tool_data_table_config,
+                               log_destination = "stdout",
+                               use_heartbeat = False,
+                               allow_user_creation = True,
+                               allow_user_deletion = True,
+                               admin_users = 'test@bx.psu.edu',
+                               global_conf = global_conf,
+                               running_functional_tests = True,
+                               hgweb_config_dir = hgweb_config_dir,
+                               **kwargs )
-        log.info( "Embedded Universe application started" )
+    log.info( "Embedded Universe application started" )
     # ---- Run webserver ------------------------------------------------------
     server = None
-    if start_server:
-        webapp = buildapp.app_factory( dict( database_file=database_connection ),
-                                       use_translogger=False,
-                                       static_enabled=False,
-                                       app=app )
-        if tool_shed_test_port is not None:
-            server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+    webapp = buildapp.app_factory( dict( database_file=database_connection ),
+                                   use_translogger=False,
+                                   static_enabled=False,
+                                   app=app )
+    if tool_shed_test_port is not None:
+        server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+    else:
+        random.seed()
+        for i in range( 0, 9 ):
+            try:
+                tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+                log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
+                server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+                break
+            except socket.error, e:
+                if e[0] == 98:
+                    continue
+                raise
         else:
-            random.seed()
-            for i in range( 0, 9 ):
-                try:
-                    tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
-                    log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
-                    server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
-                    break
-                except socket.error, e:
-                    if e[0] == 98:
-                        continue
-                    raise
-            else:
-                raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
-        if tool_shed_test_proxy_port:
-            os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
-        else:
-            os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
-        t = threading.Thread( target=server.serve_forever )
-        t.start()
-        # Test if the server is up
-        for i in range( 10 ):
-            # Directly test the app, not the proxy.
-            conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
-            conn.request( "GET", "/" )
-            if conn.getresponse().status == 200:
-                break
-            time.sleep( 0.1 )
-        else:
-            raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
-        # Test if the proxy server is up.
-        if psu_production:
-            # Directly test the app, not the proxy.
-            conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_proxy_port )
-            conn.request( "GET", "/" )
-            if not conn.getresponse().status == 200:
-                raise Exception( "Test HTTP proxy server did not return '200 OK'" )
-        log.info( "Embedded web server started" )
+            raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+    if tool_shed_test_proxy_port:
+        os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
+    else:
+        os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
+    t = threading.Thread( target=server.serve_forever )
+    t.start()
+    # Test if the server is up
+    for i in range( 10 ):
+        # Directly test the app, not the proxy.
+        conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
+        conn.request( "GET", "/" )
+        if conn.getresponse().status == 200:
+            break
+        time.sleep( 0.1 )
+    else:
+        raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+    log.info( "Embedded web server started" )
     # We don't add the tests to the path until everything is up and running
     new_path = [ os.path.join( cwd, 'test' ) ]
     new_path.extend( sys.path[1:] )
@@ -239,9 +196,6 @@
     log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
     success = False
     try:
-        # What requires these? Handy for (eg) functional tests to save outputs?
-        if tool_shed_test_save:
-            os.environ[ 'TOOL_SHED_TEST_SAVE' ] = tool_shed_test_save
         # Pass in through script set env, will leave a copy of ALL test validate files.
         os.environ[ 'TOOL_SHED_TEST_HOST' ] = tool_shed_test_host
         if tool_shed_test_file_dir:
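The rewritten runner always starts the embedded server; when TOOL_SHED_TEST_PORT is unset it tries random ports and retries on 'Address already in use' (compared above as errno 98, its value on Linux). For reference, here is a self-contained sketch of that retry pattern that checks errno.EADDRINUSE instead of a hard-coded 98; the host, port range and attempt count are illustrative, not the runner's defaults.

import errno
import random
import socket

def find_free_port( host='localhost', port_min=8000, port_max=9000, attempts=10 ):
    # Try random ports in [port_min, port_max]; skip any that are already bound.
    random.seed()
    for attempt in range( attempts ):
        port = random.randint( port_min, port_max )
        sock = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
        try:
            sock.bind( ( host, port ) )
            return port
        except socket.error as e:
            if e.errno == errno.EADDRINUSE:
                continue
            raise
        finally:
            sock.close()
    raise Exception( "Unable to find a free port between %s and %s" % ( port_min, port_max ) )

print find_free_port()

As with the loop in the diff, there is a small window between probing a port and the real server binding it; the retry logic is what makes that race tolerable in practice.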
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.