galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
December 2012
- 1 participants
- 142 discussions
commit/galaxy-central: jgoecks: Decouple set metadata tool from a web transaction and require only an app.
by Bitbucket 14 Dec '12
by Bitbucket 14 Dec '12
14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a749bcb13792/
changeset: a749bcb13792
user: jgoecks
date: 2012-12-14 16:31:19
summary: Decouple set metadata tool from a web transaction and require only an app.
affected #: 1 file
diff -r 2d7a64c143d1136fdfa6ed2203c22527b04b5ff1 -r a749bcb137928ede156899ca5ef4d2eef6e9f45c lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py
+++ b/lib/galaxy/tools/actions/metadata.py
@@ -8,29 +8,50 @@
class SetMetadataToolAction( ToolAction ):
"""Tool action used for setting external metadata on an existing dataset"""
+
+ def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, job_params=None ):
+ """
+ Execute using a web transaction.
+ """
+ user_id = None
+ if trans.user:
+ user_id = trans.user.id
+ job, odict = self.execute_via_app( tool, trans.app, trans.get_galaxy_session().id,
+ trans.history.id, user_id, incoming, set_output_hid,
+ overwrite, history, job_params )
+ # FIXME: can remove this when logging in execute_via_app method.
+ trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return job, odict
- def execute( self, tool, trans, incoming = {}, set_output_hid = False, overwrite = True, history=None, job_params=None ):
+ def execute_via_app( self, tool, app, session_id, history_id, user_id = None,
+ incoming = {}, set_output_hid = False, overwrite = True,
+ history=None, job_params=None ):
+ """
+ Execute using application.
+ """
for name, value in incoming.iteritems():
- if isinstance( value, trans.app.model.HistoryDatasetAssociation ):
+ if isinstance( value, app.model.HistoryDatasetAssociation ):
dataset = value
dataset_name = name
type = 'hda'
break
- elif isinstance( value, trans.app.model.LibraryDatasetDatasetAssociation ):
+ elif isinstance( value, app.model.LibraryDatasetDatasetAssociation ):
dataset = value
dataset_name = name
type = 'ldda'
break
else:
raise Exception( 'The dataset to set metadata on could not be determined.' )
+
+ sa_session = app.model.context
# Create the job object
- job = trans.app.model.Job()
- job.session_id = trans.get_galaxy_session().id
- job.history_id = trans.history.id
+ job = app.model.Job()
+ job.session_id = session_id
+ job.history_id = history_id
job.tool_id = tool.id
- if trans.user:
- job.user_id = trans.user.id
+ if user_id:
+ job.user_id = user_id
if job_params:
job.params = to_json_string( job_params )
start_job_state = job.state #should be job.states.NEW
@@ -40,26 +61,26 @@
except:
job.tool_version = "1.0.1"
job.state = job.states.WAITING #we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
- trans.sa_session.add( job )
- trans.sa_session.flush() #ensure job.id is available
+ sa_session.add( job )
+ sa_session.flush() #ensure job.id is available
#add parameters to job_parameter table
# Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state
external_metadata_wrapper = JobExternalOutputMetadataWrapper( job )
cmd_line = external_metadata_wrapper.setup_external_metadata( dataset,
- trans.sa_session,
+ sa_session,
exec_dir = None,
- tmp_dir = trans.app.config.new_file_path,
- dataset_files_path = trans.app.model.Dataset.file_path,
+ tmp_dir = app.config.new_file_path,
+ dataset_files_path = app.model.Dataset.file_path,
output_fnames = None,
- config_root = trans.app.config.root,
- config_file = trans.app.config.config_file,
- datatypes_config = trans.app.datatypes_registry.integrated_datatypes_configs,
+ config_root = app.config.root,
+ config_file = app.config.config_file,
+ datatypes_config = app.datatypes_registry.integrated_datatypes_configs,
job_metadata = None,
kwds = { 'overwrite' : overwrite } )
incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
- for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ for name, value in tool.params_to_strings( incoming, app ).iteritems():
job.add_parameter( name, value )
#add the dataset to job_to_input_dataset table
if type == 'hda':
@@ -70,11 +91,12 @@
# i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
dataset._state = dataset.states.SETTING_METADATA
job.state = start_job_state #job inputs have been configured, restore initial job state
- trans.sa_session.flush()
+ sa_session.flush()
# Queue the job for execution
- trans.app.job_queue.put( job.id, tool )
- trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ app.job_queue.put( job.id, tool )
+ # FIXME: need to add event logging to app and log events there rather than trans.
+ #trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
#clear e.g. converted files
dataset.datatype.before_setting_metadata( dataset )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Ensure that installed tool dependency information is renderable.
by Bitbucket 14 Dec '12
by Bitbucket 14 Dec '12
14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/2d7a64c143d1/
changeset: 2d7a64c143d1
user: greg
date: 2012-12-14 16:28:29
summary: Ensure that installed tool dependency information is renderable.
affected #: 2 files
diff -r 4516a0bae905696584040bf9237f1a628400ad37 -r 2d7a64c143d1136fdfa6ed2203c22527b04b5ff1 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -705,10 +705,6 @@
original_repository_metadata = None
readme_file_names = get_readme_file_names( repository.name )
metadata_dict = { 'shed_config_filename' : shed_config_dict.get( 'config_filename' ) }
- # If we're regenerating metadata for a repository that contains tools, make sure we keep the tool panel section information.
- # Fixme: do we need this?
- #if original_repository_metadata and 'tool_panel_section' in original_repository_metadata:
- # metadata_dict[ 'tool_panel_section' ] = original_repository_metadata[ 'tool_panel_section' ]
readme_files = []
invalid_file_tups = []
invalid_tool_configs = []
diff -r 4516a0bae905696584040bf9237f1a628400ad37 -r 2d7a64c143d1136fdfa6ed2203c22527b04b5ff1 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -414,6 +414,11 @@
message += "if necessary, and try installing the dependency again."
status = "error"
tool_shed_repository = tool_dependency.tool_shed_repository
+ tool_dependency.name = suc.to_safe_string( tool_dependency.name )
+ tool_dependency.version = suc.to_safe_string( tool_dependency.version )
+ tool_dependency.type = suc.to_safe_string( tool_dependency.type )
+ tool_dependency.status = suc.to_safe_string( tool_dependency.status )
+ tool_dependency.error_message = suc.to_safe_string( tool_dependency.error_message )
return trans.fill_template( '/admin/tool_shed_repository/browse_tool_dependency.mako',
repository=tool_shed_repository,
tool_dependency=tool_dependency,
@@ -620,7 +625,7 @@
tool_dependencies=tool_dependencies )
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
- message += ' %s' % installed_tool_dependency.error_message
+ message += ' %s' % suc.to_safe_string( installed_tool_dependency.error_message )
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
if message:
status = 'error'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Functional tests for repositories with workflows.
by Bitbucket 14 Dec '12
by Bitbucket 14 Dec '12
14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4516a0bae905/
changeset: 4516a0bae905
user: inithello
date: 2012-12-14 15:38:23
summary: Functional tests for repositories with workflows.
affected #: 4 files
diff -r 0ec7f0b6dec8707b6f4d114468367c9131e61200 -r 4516a0bae905696584040bf9237f1a628400ad37 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -2,6 +2,7 @@
import galaxy.model as galaxy_model
import common, string, os, re, test_db_util
from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
+from galaxy.tool_shed.encoding_util import tool_shed_encode
from galaxy import eggs
eggs.require('mercurial')
@@ -52,8 +53,12 @@
self.check_repository_changelog( repository )
self.check_string_count_in_page( 'Repository metadata is associated with this change set.', metadata_count )
def check_installed_repository_tool_dependencies( self, installed_repository, dependencies_installed=False ):
+ # Tool dependencies are not being installed in these functional tests. If this is changed, the test method will also need to be updated.
strings_not_displayed = []
- strings_displayed = []
+ if not dependencies_installed:
+ strings_displayed = [ 'Missing tool dependencies' ]
+ else:
+ strings_displayed = [ 'Tool dependencies' ]
for dependency in installed_repository.metadata[ 'tool_dependencies' ]:
tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ]
strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] )
@@ -417,6 +422,12 @@
( self.security.encode_id( repository.id ), tool_xml_path, changeset_revision )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def load_workflow_image( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
+ metadata = self.get_repository_metadata( repository )
+ url = '/workflow/generate_workflow_image?repository_metadata_id=%s&workflow_name=%s' % \
+ ( self.security.encode_id( metadata[0].id ), tool_shed_encode( workflow_name ) )
+ self.visit_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
repository = test_db_util.get_repository_by_name_and_owner( name, owner )
if changeset_revision is None:
diff -r 0ec7f0b6dec8707b6f4d114468367c9131e61200 -r 4516a0bae905696584040bf9237f1a628400ad37 test/tool_shed/functional/test_0060_workflows.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0060_workflows.py
@@ -0,0 +1,56 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+repository_name = 'filtering_0060'
+repository_description = "Galaxy's filtering tool"
+repository_long_description = "Long description of Galaxy's filtering tool"
+workflow_repository_name = 'filtering_workflow_0060'
+workflow_filename = 'Workflow_for_0060_filter_workflow_repository.ga'
+workflow_name = 'Workflow for 0060_filter_workflow_repository'
+
+class TestToolShedWorkflowFeatures( ShedTwillTestCase ):
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_categories( self ):
+ """Create categories for this test suite"""
+ self.create_category( 'Test 0060 Workflow Features', 'Test 0060 - Workflow Features' )
+ def test_0010_create_repository( self ):
+ """Create and populate the filtering repository"""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ self.create_repository( repository_name,
+ repository_description,
+ repository_long_description=repository_long_description,
+ categories=[ 'Test 0060 Workflow Features' ],
+ strings_displayed=[] )
+ def test_0015_upload_workflow( self ):
+ '''Upload a workflow with a missing tool, and verify that the tool specified is marked as missing.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ workflow = file( self.get_filename( 'filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga' ), 'r' ).read()
+ workflow = workflow.replace( '__TEST_TOOL_SHED_URL__', self.url.replace( 'http://', '' ) )
+ workflow_filepath = self.generate_temp_path( 'test_0060', additional_paths=[ 'filtering_workflow' ] )
+ os.makedirs( workflow_filepath )
+ file( os.path.join( workflow_filepath, workflow_filename ), 'w+' ).write( workflow )
+ self.upload_file( repository,
+ workflow_filename,
+ filepath=workflow_filepath,
+ commit_message='Uploaded filtering workflow.' )
+ self.load_workflow_image( repository, workflow_name, strings_displayed=[ '#EBBCB2' ] )
+ def test_0020_upload_tool( self ):
+ '''Upload the missing tool for the workflow in the previous step, and verify that the error is no longer present.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'filtering/filtering_2.2.0.tar',
+ commit_message="Uploaded filtering 2.2.0",
+ remove_repo_files_not_in_tar='No' )
+ self.load_workflow_image( repository, workflow_name, strings_not_displayed=[ '#EBBCB2' ] )
diff -r 0ec7f0b6dec8707b6f4d114468367c9131e61200 -r 4516a0bae905696584040bf9237f1a628400ad37 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -224,7 +224,7 @@
log.info( "Embedded web server started" )
# ---- Optionally start up a Galaxy instance ------------------------------------------------------
- if 'TEST_TOOL_SHED_START_GALAXY' in os.environ:
+ if 'TOOL_SHED_TEST_OMIT_GALAXY' not in os.environ:
# Generate the shed_tool_conf.xml and tool_sheds_conf.xml files
tool_sheds_conf_template_parser = string.Template( tool_sheds_conf_xml_template )
tool_sheds_conf_xml = tool_sheds_conf_template_parser.safe_substitute( shed_url=tool_shed_test_host, shed_port=tool_shed_test_port )
@@ -346,16 +346,17 @@
toolshedapp.shutdown()
toolshedapp = None
log.info( "Embedded tool shed application stopped" )
- if galaxy_server:
- log.info( "Shutting down galaxy web server" )
- galaxy_server.server_close()
- galaxy_server = None
- log.info( "Embedded galaxy server stopped" )
- if galaxyapp:
- log.info( "Shutting down galaxy app" )
- galaxyapp.shutdown()
- galaxyapp = None
- log.info( "Embedded galaxy application stopped" )
+ if 'TOOL_SHED_TEST_OMIT_GALAXY' not in os.environ:
+ if galaxy_server:
+ log.info( "Shutting down galaxy web server" )
+ galaxy_server.server_close()
+ galaxy_server = None
+ log.info( "Embedded galaxy server stopped" )
+ if galaxyapp:
+ log.info( "Shutting down galaxy app" )
+ galaxyapp.shutdown()
+ galaxyapp = None
+ log.info( "Embedded galaxy application stopped" )
if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
try:
for dir in [ tool_shed_test_tmp_dir ]:
diff -r 0ec7f0b6dec8707b6f4d114468367c9131e61200 -r 4516a0bae905696584040bf9237f1a628400ad37 test/tool_shed/test_data/filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga
--- /dev/null
+++ b/test/tool_shed/test_data/filtering_workflow/Workflow_for_0060_filter_workflow_repository.ga
@@ -0,0 +1,60 @@
+{
+ "a_galaxy_workflow": "true",
+ "annotation": "",
+ "format-version": "0.1",
+ "name": "Workflow for 0060_filter_workflow_repository",
+ "steps": {
+ "0": {
+ "annotation": "",
+ "id": 0,
+ "input_connections": {},
+ "inputs": [
+ {
+ "description": "",
+ "name": "Input Dataset"
+ }
+ ],
+ "name": "Input dataset",
+ "outputs": [],
+ "position": {
+ "left": 10,
+ "top": 10
+ },
+ "tool_errors": null,
+ "tool_id": null,
+ "tool_state": "{\"name\": \"Input Dataset\"}",
+ "tool_version": null,
+ "type": "data_input",
+ "user_outputs": []
+ },
+ "1": {
+ "annotation": "",
+ "id": 1,
+ "input_connections": {
+ "input": {
+ "id": 0,
+ "output_name": "output"
+ }
+ },
+ "inputs": [],
+ "name": "Filter",
+ "outputs": [
+ {
+ "name": "out_file1",
+ "type": "input"
+ }
+ ],
+ "position": {
+ "left": 230,
+ "top": 10
+ },
+ "post_job_actions": {},
+ "tool_errors": null,
+ "tool_id": "__TEST_TOOL_SHED_URL__/repos/user1/filtering_0060/Filter1/2.2.0",
+ "tool_state": "{\"__page__\": 0, \"cond\": \"\\\"c1=='chr22'\\\"\", \"chromInfo\": \"\\\"/Users/dave/Documents/workspace/dev-galaxy/tool-data/shared/ucsc/chrom/?.len\\\"\", \"input\": \"null\"}",
+ "tool_version": null,
+ "type": "tool",
+ "user_outputs": []
+ }
+ }
+}
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Include custom genomes in dbkey field when uploading library datasets.
by Bitbucket 14 Dec '12
by Bitbucket 14 Dec '12
14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0ec7f0b6dec8/
changeset: 0ec7f0b6dec8
user: jgoecks
date: 2012-12-14 14:47:08
summary: Include custom genomes in dbkey field when uploading library datasets.
affected #: 1 file
diff -r 1d73c973c9adfa27e1759c280e94c712145e1e03 -r 0ec7f0b6dec8707b6f4d114468367c9131e61200 lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -944,11 +944,7 @@
# Send list of data formats to the upload form so the "extension" select list can be populated dynamically
file_formats = trans.app.datatypes_registry.upload_file_formats
- # Send list of genome builds to the form so the "dbkey" select list can be populated dynamically
- def get_dbkey_options( last_used_build ):
- for dbkey, build_name in util.dbnames:
- yield build_name, dbkey, ( dbkey==last_used_build )
- dbkeys = get_dbkey_options( last_used_build )
+ dbkeys = trans.app.genomes.get_dbkeys_with_chrom_info( trans )
# Send the current history to the form to enable importing datasets from history to library
history = trans.get_history()
if history is not None:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: When getting updates to an installed tool shed repository that contains tools, automatically load any updated tools into the Galaxy tool panel.
by Bitbucket 13 Dec '12
by Bitbucket 13 Dec '12
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1d73c973c9ad/
changeset: 1d73c973c9ad
user: greg
date: 2012-12-13 23:13:40
summary: When getting updates to an installed tool shed repository that contains tools, automatically load any updated tools into the Galaxy tool panel.
affected #: 3 files
diff -r 97ec62934b6b17e674706f2c41ce4b34032fa338 -r 1d73c973c9adfa27e1759c280e94c712145e1e03 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -397,7 +397,7 @@
# If there is not yet a tool_shed_repository record, we're in the process of installing
# a new repository, so any included tools can be loaded into the tool panel.
can_load_into_panel_dict = True
- tool = self.load_tool( os.path.join( tool_path, path ), guid=guid )
+ tool = self.load_tool( os.path.join( tool_path, path ), guid=guid )
key = 'tool_%s' % str( tool.id )
if can_load_into_panel_dict:
if guid is not None:
@@ -435,9 +435,12 @@
tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
self.sa_session.add( tta )
self.sa_session.flush()
- if tool.id not in self.tools_by_id:
- # Allow for the same tool to be loaded into multiple places in the tool panel.
- self.tools_by_id[ tool.id ] = tool
+ #if tool.id not in self.tools_by_id:
+ # Allow for the same tool to be loaded into multiple places in the tool panel. We have to handle the case where the tool is contained
+ # in a repository installed from the tool shed, and the Galaxy administrator has retrieved updates to the installed repository. In this
+ # case, the tool may have been updated, but the version was not changed, so the tool should always be reloaded here. We used to only load
+ # the tool if it's it was not found in self.tools_by_id, but performing that check did not enable this scenario.
+ self.tools_by_id[ tool.id ] = tool
if load_panel_dict:
self.__add_tool_to_tool_panel( tool.id, panel_dict, section=isinstance( panel_dict, galaxy.tools.ToolSection ) )
# Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
diff -r 97ec62934b6b17e674706f2c41ce4b34032fa338 -r 1d73c973c9adfa27e1759c280e94c712145e1e03 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1036,7 +1036,7 @@
return metadata_dict
def generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository ):
"""
- Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
+ Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed_tool_conf in
which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
"""
@@ -2232,8 +2232,10 @@
sa_session.flush()
return new_tool_dependency
def update_in_shed_tool_config( app, repository ):
- # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
- # of config_elems instead of using the in-memory list.
+ """
+ A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ of config_elems instead of using the in-memory list.
+ """
shed_conf_dict = repository.get_shed_config_dict( app )
shed_tool_conf = shed_conf_dict[ 'config_filename' ]
tool_path = shed_conf_dict[ 'tool_path' ]
diff -r 97ec62934b6b17e674706f2c41ce4b34032fa338 -r 1d73c973c9adfa27e1759c280e94c712145e1e03 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -327,7 +327,7 @@
shed_util.add_to_tool_panel( trans.app,
repository.name,
repository_clone_url,
- repository.changeset_revision,
+ repository.installed_changeset_revision,
repository_tools_tups,
repository.owner,
shed_tool_conf,
@@ -810,7 +810,7 @@
shed_util.add_to_tool_panel( app=trans.app,
repository_name=tool_shed_repository.name,
repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
repository_tools_tups=repository_tools_tups,
owner=tool_shed_repository.owner,
shed_tool_conf=shed_tool_conf,
@@ -1777,7 +1777,20 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
- # Fixme: call shed_util.add_to_tool_panel here?
+ if 'tools' in metadata_dict:
+ tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
+ if tool_panel_dict is None:
+ tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
+ repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
+ shed_util.add_to_tool_panel( app=trans.app,
+ repository_name=repository.name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=repository.installed_changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=repository.owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=False )
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Eliminate unnecessary trans object when only the app is needed downstream. More tool shed util refactoring.
by Bitbucket 13 Dec '12
by Bitbucket 13 Dec '12
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/97ec62934b6b/
changeset: 97ec62934b6b
user: greg
date: 2012-12-13 22:19:54
summary: Eliminate unnecessary trans object when only the app is needed downstream. More tool shed util refactoring.
affected #: 5 files
diff -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 -r 97ec62934b6b17e674706f2c41ce4b34032fa338 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -39,7 +39,7 @@
root = tree.getroot()
self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = common_util.REPOSITORY_OWNER
- index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
+ index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
# tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
# The default behavior is that the tool shed is down.
@@ -202,7 +202,7 @@
tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
- repository_tools_tups = shed_util.get_repository_tools_tups( self.app, metadata_dict )
+ repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
diff -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 -r 97ec62934b6b17e674706f2c41ce4b34032fa338 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -34,12 +34,12 @@
for elem_entry in elem_list:
config_elems.append( elem_entry )
# Persist the altered shed_tool_config file.
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
+ suc.config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def add_to_tool_panel( app, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, owner, shed_tool_conf, tool_panel_dict,
new_install=True ):
"""A tool shed repository is being installed or updated so handle tool panel alterations accordingly."""
# We need to change the in-memory version and the file system version of the shed_tool_conf file.
- index, shed_tool_conf_dict = get_shed_tool_conf_dict( app, shed_tool_conf )
+ index, shed_tool_conf_dict = suc.get_shed_tool_conf_dict( app, shed_tool_conf )
tool_path = shed_tool_conf_dict[ 'tool_path' ]
# Generate the list of ElementTree Element objects for each section or tool.
elem_list = generate_tool_panel_elem_list( repository_name,
@@ -155,17 +155,6 @@
except:
pass
return converter_path, display_path
-def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
- # Persist the current in-memory list of config_elems to a file named by the value of config_filename.
- fd, filename = tempfile.mkstemp()
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
- for elem in config_elems:
- os.write( fd, '%s' % util.xml_to_string( elem, pretty=True ) )
- os.write( fd, '</toolbox>\n' )
- os.close( fd )
- shutil.move( filename, os.path.abspath( config_filename ) )
- os.chmod( config_filename, 0644 )
def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all appropriate files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
@@ -289,26 +278,6 @@
set_status=set_status )
tool_dependency_objects.append( tool_dependency )
return tool_dependency_objects
-def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
- if tool_section is not None:
- tool_elem = SubElement( tool_section, 'tool' )
- else:
- tool_elem = Element( 'tool' )
- tool_elem.attrib[ 'file' ] = tool_file_path
- tool_elem.attrib[ 'guid' ] = tool.guid
- tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
- tool_shed_elem.text = tool_shed
- repository_name_elem = SubElement( tool_elem, 'repository_name' )
- repository_name_elem.text = repository_name
- repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
- repository_owner_elem.text = owner
- changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
- changeset_revision_elem.text = changeset_revision
- id_elem = SubElement( tool_elem, 'id' )
- id_elem.text = tool.id
- version_elem = SubElement( tool_elem, 'version' )
- version_elem.text = tool.version
- return tool_elem
def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner='' ):
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list = []
@@ -340,9 +309,9 @@
if tup_guid == guid:
break
if inside_section:
- tool_elem = generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section )
+ tool_elem = suc.generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section )
else:
- tool_elem = generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, None )
+ tool_elem = suc.generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, None )
if inside_section:
if section_in_elem_list:
elem_list[ index ] = tool_section
@@ -497,34 +466,6 @@
tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
-def get_repository_tools_tups( app, metadata_dict ):
- repository_tools_tups = []
- index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
- if 'tools' in metadata_dict:
- for tool_dict in metadata_dict[ 'tools' ]:
- load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
- if shed_conf_dict.get( 'tool_path' ):
- load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
- guid = tool_dict.get( 'guid', None )
- if relative_path and guid:
- tool = app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid )
- else:
- tool = None
- if tool:
- repository_tools_tups.append( ( relative_path, guid, tool ) )
- return repository_tools_tups
-def get_shed_tool_conf_dict( app, shed_tool_conf ):
- """
- Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
- in the shed_tool_conf_dict associated with the file.
- """
- for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
- if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
- return index, shed_tool_conf_dict
- else:
- file_name = suc.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
- if shed_tool_conf == file_name:
- return index, shed_tool_conf_dict
def get_tool_index_sample_files( sample_files ):
"""Try to return the list of all appropriate tool data sample files included in the repository."""
tool_index_sample_files = []
@@ -839,12 +780,12 @@
for config_elem in config_elems_to_remove:
config_elems.remove( config_elem )
# Persist the altered in-memory version of the tool config.
- config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
+ suc.config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
# in the same way when the repository is activated or reinstalled.
- tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
trans.sa_session.add( repository )
trans.sa_session.flush()
@@ -854,7 +795,7 @@
for guid_to_remove in guids_to_remove:
if guid_to_remove in trans.app.toolbox.tools_by_id:
del trans.app.toolbox.tools_by_id[ guid_to_remove ]
- index, shed_tool_conf_dict = get_shed_tool_conf_dict( trans.app, shed_tool_conf )
+ index, shed_tool_conf_dict = suc.get_shed_tool_conf_dict( trans.app, shed_tool_conf )
if uninstall:
# Remove from the shed_tool_conf file on disk.
remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove )
diff -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 -r 97ec62934b6b17e674706f2c41ce4b34032fa338 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -479,6 +479,17 @@
else:
message = ''
return message
+def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
+ # Persist the current in-memory list of config_elems to a file named by the value of config_filename.
+ fd, filename = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<toolbox tool_path="%s">\n' % str( tool_path ) )
+ for elem in config_elems:
+ os.write( fd, '%s' % util.xml_to_string( elem, pretty=True ) )
+ os.write( fd, '</toolbox>\n' )
+ os.close( fd )
+ shutil.move( filename, os.path.abspath( config_filename ) )
+ os.chmod( config_filename, 0644 )
def copy_disk_sample_files_to_dir( trans, repo_files_dir, dest_path ):
"""Copy all files currently on disk that end with the .sample extension to the directory to which dest_path refers."""
sample_files = []
@@ -570,9 +581,9 @@
repository_dependencies,
metadata.get( 'tool_dependencies', None ) )
return repo_info_dict
-def generate_clone_url_for_installed_repository( trans, repository ):
+def generate_clone_url_for_installed_repository( app, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ tool_shed_url = get_url_from_repository_tool_shed( app, repository )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
@@ -949,6 +960,26 @@
handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_tool_dependencies_dict, tool_dependencies_dict )
metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict
return metadata_dict
+def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
+ if tool_section is not None:
+ tool_elem = SubElement( tool_section, 'tool' )
+ else:
+ tool_elem = Element( 'tool' )
+ tool_elem.attrib[ 'file' ] = tool_file_path
+ tool_elem.attrib[ 'guid' ] = tool.guid
+ tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = SubElement( tool_elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ id_elem = SubElement( tool_elem, 'id' )
+ id_elem.text = tool.id
+ version_elem = SubElement( tool_elem, 'version' )
+ version_elem.text = tool.version
+ return tool_elem
def generate_tool_guid( repository_clone_url, tool ):
"""
Generate a guid for the installed tool. It is critical that this guid matches the guid for
@@ -1003,14 +1034,14 @@
else:
metadata_dict[ 'tools' ] = [ tool_dict ]
return metadata_dict
-def generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ):
+def generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository ):
"""
Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
"""
tool_panel_dict = {}
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( app, repository )
metadata = repository.metadata
# Create a dictionary of tool guid and tool config file name for each tool in the repository.
guids_and_configs = {}
@@ -1386,6 +1417,22 @@
.filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
.first()
+def get_repository_tools_tups( app, metadata_dict ):
+ repository_tools_tups = []
+ index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
+ if 'tools' in metadata_dict:
+ for tool_dict in metadata_dict[ 'tools' ]:
+ load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
+ if shed_conf_dict.get( 'tool_path' ):
+ load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
+ guid = tool_dict.get( 'guid', None )
+ if relative_path and guid:
+ tool = app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid )
+ else:
+ tool = None
+ if tool:
+ repository_tools_tups.append( ( relative_path, guid, tool ) )
+ return repository_tools_tups
def get_relative_path_to_repository_file( root, name, relative_install_dir, work_dir, shed_config_dict, resetting_all_metadata_on_repository ):
if resetting_all_metadata_on_repository:
full_path_to_file = os.path.join( root, name )
@@ -1428,6 +1475,18 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_shed_tool_conf_dict( app, shed_tool_conf ):
+ """
+ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
+ in the shed_tool_conf_dict associated with the file.
+ """
+ for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
+ if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
+ return index, shed_tool_conf_dict
+ else:
+ file_name = suc.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
+ if shed_tool_conf == file_name:
+ return index, shed_tool_conf_dict
def get_tool_panel_config_tool_path_install_dir( app, repository ):
# Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
# repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
@@ -1852,7 +1911,7 @@
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
repository = get_installed_tool_shed_repository( trans, id )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
@@ -2178,15 +2237,9 @@
shed_conf_dict = repository.get_shed_config_dict( app )
shed_tool_conf = shed_conf_dict[ 'config_filename' ]
tool_path = shed_conf_dict[ 'tool_path' ]
-
- # TODO Fix this - we should be able to pass only app - we should not need trans...
- #hack for 'trans.app' used in lots of places. These places should just directly use app
- trans = util.bunch.Bunch()
- trans.app = app
-
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository )
repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( app, repository ) )
tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
owner = repository.owner
if not owner:
diff -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 -r 97ec62934b6b17e674706f2c41ce4b34032fa338 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -2,6 +2,7 @@
from time import gmtime, strftime
from datetime import date, datetime
from galaxy import util
+from galaxy.util.odict import odict
from galaxy.web.base.controller import *
from galaxy.web.form_builder import CheckboxField
from galaxy.webapps.community import model
diff -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 -r 97ec62934b6b17e674706f2c41ce4b34032fa338 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -310,13 +310,13 @@
"""Activate a repository that was deactivated but not uninstalled."""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
repository.deleted = False
repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools:
metadata = repository.metadata
try:
- repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata )
+ repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata )
except Exception, e:
error = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.debug( error )
@@ -795,7 +795,7 @@
tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
- repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata_dict )
+ repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
repository_tools_tups = shed_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
@@ -1295,7 +1295,7 @@
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
@@ -1466,7 +1466,7 @@
metadata = tool_shed_repository.metadata
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
repository_name=tool_shed_repository.name,
@@ -1572,7 +1572,7 @@
"""Reset all metadata on a single installed tool shed repository."""
repository = suc.get_installed_tool_shed_repository( trans, id )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1
0
commit/galaxy-central: greg: Tool shed util refactoring and import fixes and cleanup.
by Bitbucket 13 Dec '12
by Bitbucket 13 Dec '12
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/90b28e58bb11/
changeset: 90b28e58bb11
user: greg
date: 2012-12-13 22:00:34
summary: Tool shed util refactoring and import fixes and cleanup.
affected #: 5 files
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -3,6 +3,7 @@
"""
import os
import galaxy.util.shed_util
+import galaxy.util.shed_util_common
from galaxy.model.orm import and_
from galaxy import eggs
@@ -27,7 +28,7 @@
ElementInclude.include( root )
tool_path = root.get( 'tool_path', None )
if tool_path:
- tool_shed = galaxy.util.shed_util.clean_tool_shed_url( tool_shed_repository.tool_shed )
+ tool_shed = galaxy.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed )
relative_path = os.path.join( tool_path,
tool_shed,
'repos',
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -37,7 +37,7 @@
self.tool_shed_install_config = tool_shed_install_config
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
- self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
+ self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = common_util.REPOSITORY_OWNER
index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
@@ -198,6 +198,7 @@
tool_dependencies = None
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
+ sample_files = [ str( s ) for s in sample_files ]
tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -166,11 +166,6 @@
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
os.chmod( config_filename, 0644 )
-def clean_tool_shed_url( tool_shed_url ):
- if tool_shed_url.find( ':' ) > 0:
- # Eliminate the port, if any, since it will result in an invalid directory name.
- return tool_shed_url.split( ':' )[ 0 ]
- return tool_shed_url.rstrip( '/' )
def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all appropriate files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
@@ -379,53 +374,6 @@
else:
tool_panel_dict[ guid ] = [ tool_section_dict ]
return tool_panel_dict
-def generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ):
- """
- Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
- which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
- only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
- """
- tool_panel_dict = {}
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- metadata = repository.metadata
- # Create a dictionary of tool guid and tool config file name for each tool in the repository.
- guids_and_configs = {}
- for tool_dict in metadata[ 'tools' ]:
- guid = tool_dict[ 'guid' ]
- tool_config = tool_dict[ 'tool_config' ]
- file_name = suc.strip_path( tool_config )
- guids_and_configs[ guid ] = file_name
- # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
- tree = util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- guid = elem.get( 'guid' )
- if guid in guids_and_configs:
- # The tool is displayed in the tool panel outside of any tool sections.
- tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
- if guid in tool_panel_dict:
- tool_panel_dict[ guid ].append( tool_section_dict )
- else:
- tool_panel_dict[ guid ] = [ tool_section_dict ]
- elif elem.tag == 'section':
- section_id = elem.get( 'id' ) or ''
- section_name = elem.get( 'name' ) or ''
- section_version = elem.get( 'version' ) or ''
- for section_elem in elem:
- if section_elem.tag == 'tool':
- guid = section_elem.get( 'guid' )
- if guid in guids_and_configs:
- # The tool is displayed in the tool panel inside the current tool section.
- tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
- id=section_id,
- name=section_name,
- version=section_version )
- if guid in tool_panel_dict:
- tool_panel_dict[ guid ].append( tool_section_dict )
- else:
- tool_panel_dict[ guid ] = [ tool_section_dict ]
- return tool_panel_dict
def generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=None ):
"""
Create a dictionary of the following type for a single tool config file name. The intent is to call this method for every tool config
@@ -583,7 +531,7 @@
for s in sample_files:
# The problem with this is that Galaxy does not follow a standard naming convention for file names.
if s.endswith( '.loc.sample' ) or s.endswith( '.xml.sample' ) or s.endswith( '.txt.sample' ):
- tool_index_sample_files.append( s )
+ tool_index_sample_files.append( str( s ) )
return tool_index_sample_files
def get_tool_dependency( trans, id ):
"""Get a tool_dependency from the database via id"""
@@ -598,23 +546,6 @@
if as_string:
return ','.join( tool_dependency_ids )
return tool_dependency_ids
-def get_tool_panel_config_tool_path_install_dir( app, repository ):
- # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
- # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
- tool_shed = clean_tool_shed_url( repository.tool_shed )
- partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
- # Get the relative tool installation paths from each of the shed tool configs.
- relative_install_dir = None
- shed_config_dict = repository.get_shed_config_dict( app )
- if not shed_config_dict:
- #just pick a semi-random shed config
- for shed_config_dict in app.toolbox.shed_tool_confs:
- if ( repository.dist_to_shed and shed_config_dict['config_filename'] == app.config.migrated_tools_config ) or ( not repository.dist_to_shed and shed_config_dict['config_filename'] != app.config.migrated_tools_config ):
- break
- shed_tool_conf = shed_config_dict[ 'config_filename' ]
- tool_path = shed_config_dict[ 'tool_path' ]
- relative_install_dir = partial_install_dir
- return shed_tool_conf, tool_path, relative_install_dir
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
for elem in config_elems:
if elem.tag == 'tool':
@@ -913,7 +844,7 @@
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
# in the same way when the repository is activated or reinstalled.
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
trans.sa_session.add( repository )
trans.sa_session.flush()
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -328,6 +328,11 @@
if can_delete:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
+def clean_tool_shed_url( tool_shed_url ):
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ return tool_shed_url.split( ':' )[ 0 ]
+ return tool_shed_url.rstrip( '/' )
def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
"""Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
try:
@@ -567,8 +572,8 @@
return repo_info_dict
def generate_clone_url_for_installed_repository( trans, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -689,6 +694,10 @@
original_repository_metadata = None
readme_file_names = get_readme_file_names( repository.name )
metadata_dict = { 'shed_config_filename' : shed_config_dict.get( 'config_filename' ) }
+ # If we're regenerating metadata for a repository that contains tools, make sure we keep the tool panel section information.
+ # Fixme: do we need this?
+ #if original_repository_metadata and 'tool_panel_section' in original_repository_metadata:
+ # metadata_dict[ 'tool_panel_section' ] = original_repository_metadata[ 'tool_panel_section' ]
readme_files = []
invalid_file_tups = []
invalid_tool_configs = []
@@ -994,6 +1003,53 @@
else:
metadata_dict[ 'tools' ] = [ tool_dict ]
return metadata_dict
+def generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ):
+ """
+ Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
+ which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
+ only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
+ """
+ tool_panel_dict = {}
+ shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ metadata = repository.metadata
+ # Create a dictionary of tool guid and tool config file name for each tool in the repository.
+ guids_and_configs = {}
+ for tool_dict in metadata[ 'tools' ]:
+ guid = tool_dict[ 'guid' ]
+ tool_config = tool_dict[ 'tool_config' ]
+ file_name = strip_path( tool_config )
+ guids_and_configs[ guid ] = file_name
+ # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ guid = elem.get( 'guid' )
+ if guid in guids_and_configs:
+ # The tool is displayed in the tool panel outside of any tool sections.
+ tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
+ if guid in tool_panel_dict:
+ tool_panel_dict[ guid ].append( tool_section_dict )
+ else:
+ tool_panel_dict[ guid ] = [ tool_section_dict ]
+ elif elem.tag == 'section':
+ section_id = elem.get( 'id' ) or ''
+ section_name = elem.get( 'name' ) or ''
+ section_version = elem.get( 'version' ) or ''
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ guid = section_elem.get( 'guid' )
+ if guid in guids_and_configs:
+ # The tool is displayed in the tool panel inside the current tool section.
+ tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
+ id=section_id,
+ name=section_name,
+ version=section_version )
+ if guid in tool_panel_dict:
+ tool_panel_dict[ guid ].append( tool_section_dict )
+ else:
+ tool_panel_dict[ guid ] = [ tool_section_dict ]
+ return tool_panel_dict
def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ):
"""Update the received metadata_dict with changes that have been applied to the received exported_workflow_dict."""
if 'workflows' in metadata_dict:
@@ -1372,6 +1428,23 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_tool_panel_config_tool_path_install_dir( app, repository ):
+ # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
+ # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
+ tool_shed = clean_tool_shed_url( repository.tool_shed )
+ partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
+ # Get the relative tool installation paths from each of the shed tool configs.
+ relative_install_dir = None
+ shed_config_dict = repository.get_shed_config_dict( app )
+ if not shed_config_dict:
+ #just pick a semi-random shed config
+ for shed_config_dict in app.toolbox.shed_tool_confs:
+ if ( repository.dist_to_shed and shed_config_dict['config_filename'] == app.config.migrated_tools_config ) or ( not repository.dist_to_shed and shed_config_dict['config_filename'] != app.config.migrated_tools_config ):
+ break
+ shed_tool_conf = shed_config_dict[ 'config_filename' ]
+ tool_path = shed_config_dict[ 'tool_path' ]
+ relative_install_dir = partial_install_dir
+ return shed_tool_conf, tool_path, relative_install_dir
def get_tool_shed_from_clone_url( repository_clone_url ):
tmp_url = clean_repository_clone_url( repository_clone_url )
return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
@@ -2019,7 +2092,7 @@
str( markupsafe.escape( ''.join( translated ) ) )
return ''.join( translated )
def tool_shed_from_repository_clone_url( repository_clone_url ):
- return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
+ return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def tool_shed_is_this_tool_shed( toolshed_base_url ):
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
def translate_string( raw_text, to_html=True ):
@@ -2106,13 +2179,14 @@
shed_tool_conf = shed_conf_dict[ 'config_filename' ]
tool_path = shed_conf_dict[ 'tool_path' ]
+ # TODO Fix this - we should be able to pass only app - we should not need trans...
#hack for 'trans.app' used in lots of places. These places should just directly use app
trans = util.bunch.Bunch()
trans.app = app
tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = suc.clean_repository_clone_url( suc.generate_clone_url_for_installed_repository( trans, repository ) )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
owner = repository.owner
if not owner:
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -309,7 +309,7 @@
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
repository.deleted = False
repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
@@ -460,7 +460,7 @@
remove_from_disk = params.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
if tool_path:
relative_install_dir = os.path.join( tool_path, relative_install_dir )
@@ -553,7 +553,7 @@
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
- tool_shed_url = shed_util.clean_tool_shed_url( tool_shed_url )
+ tool_shed_url = suc.clean_tool_shed_url( tool_shed_url )
return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
@@ -794,7 +794,7 @@
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
- sample_files_copied = [ s for s in tool_index_sample_files ]
+ sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
@@ -866,7 +866,7 @@
( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = util.restore_text( params.get( 'description', repository.description ) )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
else:
@@ -1294,7 +1294,7 @@
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
@@ -1660,7 +1660,7 @@
message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
message += "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
@@ -1749,7 +1749,7 @@
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
else:
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
if tool_path:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
@@ -1759,7 +1759,7 @@
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
suc.update_repository( repo, latest_ctx_rev )
- tool_shed = shed_util.clean_tool_shed_url( tool_shed_url )
+ tool_shed = suc.clean_tool_shed_url( tool_shed_url )
# Update the repository metadata.
metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
@@ -1777,12 +1777,13 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
+ # Fixme: call shed_util.add_to_tool_panel here?
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Cleaned up imports in tool_shed/twilltestcase.py.
by Bitbucket 13 Dec '12
by Bitbucket 13 Dec '12
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7fb4047a3d6b/
changeset: 7fb4047a3d6b
user: inithello
date: 2012-12-13 21:19:29
summary: Cleaned up imports in tool_shed/twilltestcase.py.
affected #: 1 file
diff -r b00822d6733f9a4fa93dac20a67d63e56fcc51be -r 7fb4047a3d6be5847f38acba440e4113eb21e12c test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,9 +1,7 @@
import galaxy.webapps.community.util.hgweb_config
import galaxy.model as galaxy_model
-import common, string, os, re
+import common, string, os, re, test_db_util
from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
-from test_db_util import get_repository_by_name_and_owner, get_repository_metadata_by_repository_id_changeset_revision, \
- get_galaxy_repository_by_name_owner_changeset_revision, get_installed_repository_by_name_owner
from galaxy import eggs
eggs.require('mercurial')
@@ -88,7 +86,7 @@
Loop through each tool dictionary in the repository metadata associated with the received changeset_revision.
For each of these, check for a tools attribute, and load the tool metadata page if it exists, then display that tool's page.
'''
- repository_metadata = get_repository_metadata_by_repository_id_changeset_revision( repository.id, changeset_revision )
+ repository_metadata = test_db_util.get_repository_metadata_by_repository_id_changeset_revision( repository.id, changeset_revision )
metadata = repository_metadata.metadata
if 'tools' not in metadata:
raise AssertionError( 'No tools in %s revision %s.' % ( repository.name, changeset_revision ) )
@@ -109,7 +107,7 @@
strings_not_displayed=[] )
def check_repository_invalid_tools_for_changeset_revision( self, repository, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
'''Load the invalid tool page for each invalid tool associated with this changeset revision and verify the received error messages.'''
- repository_metadata = get_repository_metadata_by_repository_id_changeset_revision( repository.id, changeset_revision )
+ repository_metadata = test_db_util.get_repository_metadata_by_repository_id_changeset_revision( repository.id, changeset_revision )
metadata = repository_metadata.metadata
if 'invalid_tools' not in metadata:
return
@@ -382,7 +380,7 @@
tc.submit( 'user_access_button' )
self.check_for_strings( strings_displayed, strings_not_displayed )
def install_repository( self, name, owner, install_tool_dependencies=False, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
- repository = get_repository_by_name_and_owner( name, owner )
+ repository = test_db_util.get_repository_by_name_and_owner( name, owner )
repository_id = self.security.encode_id( repository.id )
if changeset_revision is None:
changeset_revision = self.get_repository_tip( repository )
@@ -420,7 +418,7 @@
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
def preview_repository_in_tool_shed( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
- repository = get_repository_by_name_and_owner( name, owner )
+ repository = test_db_util.get_repository_by_name_and_owner( name, owner )
if changeset_revision is None:
changeset_revision = self.get_repository_tip( repository )
self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' % \
@@ -456,7 +454,7 @@
self.check_for_strings( strings_displayed, strings_not_displayed )
def tip_has_metadata( self, repository ):
tip = self.get_repository_tip( repository )
- return get_repository_metadata_by_repository_id_changeset_revision( repository.id, tip )
+ return test_db_util.get_repository_metadata_by_repository_id_changeset_revision( repository.id, tip )
def upload_file( self,
repository,
filename,
@@ -502,7 +500,7 @@
owner = repository.user.username
if changeset_revision is None:
changeset_revision = self.get_repository_tip( repository )
- galaxy_repository = get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
+ galaxy_repository = test_db_util.get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
timeout_counter = 0
while galaxy_repository.status not in final_states:
ga_refresh( galaxy_repository )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Functional tests for installing tool shed repositories.
by Bitbucket 13 Dec '12
by Bitbucket 13 Dec '12
13 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b00822d6733f/
changeset: b00822d6733f
user: inithello
date: 2012-12-13 21:14:51
summary: Functional tests for installing tool shed repositories.
affected #: 14 files
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/base/common.py
--- a/test/tool_shed/base/common.py
+++ b/test/tool_shed/base/common.py
@@ -14,4 +14,7 @@
</repositories>
'''
-new_repository_dependencies_line = ''' <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''
\ No newline at end of file
+new_repository_dependencies_line = ''' <repository toolshed="${toolshed_url}" name="${repository_name}" owner="${owner}" changeset_revision="${changeset_revision}" />'''
+
+# Set a 3 minute timeout for repository installation. This should be sufficient, since we're not installing tool dependencies.
+repository_installation_timeout = 180
\ No newline at end of file
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/base/test_db_util.py
--- a/test/tool_shed/base/test_db_util.py
+++ b/test/tool_shed/base/test_db_util.py
@@ -32,6 +32,11 @@
galaxy.model.ToolShedRepository.table.c.owner == owner,
galaxy.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
+def get_installed_repository_by_name_owner( repository_name, owner ):
+ return ga_session.query( galaxy.model.ToolShedRepository ) \
+ .filter( and_( galaxy.model.ToolShedRepository.table.c.name == repository_name,
+ galaxy.model.ToolShedRepository.table.c.owner == owner ) ) \
+ .first()
def get_private_role( user ):
for role in user.all_roles():
if role.name == user.email and role.description == 'Private Role for %s' % user.email:
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,9 +1,9 @@
import galaxy.webapps.community.util.hgweb_config
import galaxy.model as galaxy_model
import common, string, os, re
-from base.twilltestcase import tc, from_json_string, TwillTestCase, security
+from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
from test_db_util import get_repository_by_name_and_owner, get_repository_metadata_by_repository_id_changeset_revision, \
- get_galaxy_repository_by_name_owner_changeset_revision
+ get_galaxy_repository_by_name_owner_changeset_revision, get_installed_repository_by_name_owner
from galaxy import eggs
eggs.require('mercurial')
@@ -53,6 +53,19 @@
def check_count_of_metadata_revisions_associated_with_repository( self, repository, metadata_count ):
self.check_repository_changelog( repository )
self.check_string_count_in_page( 'Repository metadata is associated with this change set.', metadata_count )
+ def check_installed_repository_tool_dependencies( self, installed_repository, dependencies_installed=False ):
+ strings_not_displayed = []
+ strings_displayed = []
+ for dependency in installed_repository.metadata[ 'tool_dependencies' ]:
+ tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ]
+ strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] )
+ if dependencies_installed:
+ strings_displayed.append( 'Installed' )
+ else:
+ strings_displayed.append( 'Never installed' )
+ url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def check_repository_changelog( self, repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/view_changelog?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
@@ -180,6 +193,15 @@
tc.fv( "1", "selected_files_to_delete", ','.join( files_to_delete ) )
tc.submit( 'select_files_to_delete_button' )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def display_installed_repository_manage_page( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
+ self.visit_galaxy_url( url )
+ strings_displayed.extend( [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ] )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def display_manage_repository_page( self, repository, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
base_url = '/repository/manage_repository?id=%s' % self.security.encode_id( repository.id )
if changeset_revision is not None:
@@ -359,7 +381,7 @@
tc.fv( "3", "allow_push", '+%s' % username )
tc.submit( 'user_access_button' )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def install_repository( self, name, owner, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
+ def install_repository( self, name, owner, install_tool_dependencies=False, changeset_revision=None, strings_displayed=[], strings_not_displayed=[] ):
repository = get_repository_by_name_and_owner( name, owner )
repository_id = self.security.encode_id( repository.id )
if changeset_revision is None:
@@ -368,6 +390,11 @@
( changeset_revision, repository_id, self.galaxy_url )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ if 'install_tool_dependencies' in self.last_page():
+ if install_tool_dependencies:
+ tc.fv( '1', 'install_tool_dependencies', True )
+ else:
+ tc.fv( '1', 'install_tool_dependencies', False )
tc.submit( 'select_tool_panel_section_button' )
html = self.last_page()
# Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
@@ -381,7 +408,7 @@
url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
( iri_ids, encoded_kwd, reinstalling )
self.visit_galaxy_url( url )
- self.wait_for_repository_installation( repository )
+ self.wait_for_repository_installation( repository, changeset_revision )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), tool_xml, changeset_revision )
@@ -446,18 +473,42 @@
tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
tc.submit( "upload_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def verify_installed_repository_on_browse_page( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/admin_toolshed/browse_repositories'
+ self.visit_galaxy_url( url )
+ strings_displayed.extend( [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ] )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
+ def verify_tool_metadata_for_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
+ repository_id = self.security.encode_id( installed_repository.id )
+ for tool in installed_repository.metadata[ 'tools' ]:
+ strings = list( strings_displayed )
+ strings.extend( [ tool[ 'id' ], tool[ 'description' ], tool[ 'version' ], tool[ 'guid' ], tool[ 'name' ] ] )
+ url = '/admin_toolshed/view_tool_metadata?repository_id=%s&tool_id=%s' % ( repository_id, urllib.quote_plus( tool[ 'id' ] ) )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( strings, strings_not_displayed )
def visit_galaxy_url( self, url ):
url = '%s%s' % ( self.galaxy_url, url )
self.visit_url( url )
- def wait_for_repository_installation( self, repository ):
+ def wait_for_repository_installation( self, repository, changeset_revision ):
final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
galaxy_model.ToolShedRepository.installation_status.INSTALLED,
galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
galaxy_model.ToolShedRepository.installation_status.DEACTIVATED ]
repository_name = repository.name
owner = repository.user.username
- changeset_revision = self.get_repository_tip( repository )
+ if changeset_revision is None:
+ changeset_revision = self.get_repository_tip( repository )
galaxy_repository = get_galaxy_repository_by_name_owner_changeset_revision( repository_name, owner, changeset_revision )
+ timeout_counter = 0
while galaxy_repository.status not in final_states:
ga_refresh( galaxy_repository )
+ timeout_counter = timeout_counter + 1
+ if timeout_counter > common.repository_installation_timeout:
+ raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
+ ( timeout_counter, repository.status ) )
+ break
time.sleep( 1 )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -1,5 +1,5 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+import tool_shed.base.test_db_util as test_db_util
repository_name = 'filtering_0000'
repository_description = "Galaxy's filtering tool"
@@ -10,14 +10,14 @@
def test_0000_initiate_users( self ):
"""Create necessary user accounts and login as an admin user."""
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
- admin_user = get_user( common.admin_email )
+ admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
- admin_user_private_role = get_private_role( admin_user )
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_categories( self ):
"""Create categories for this test suite"""
self.create_category( 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 1' )
@@ -26,9 +26,9 @@
"""Create the filtering repository"""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
strings_displayed = [ 'Repository %s' % "'%s'" % repository_name,
'Repository %s has been created' % "'%s'" % repository_name ]
self.create_repository( repository_name,
@@ -38,29 +38,29 @@
strings_displayed=strings_displayed )
def test_0015_edit_repository( self ):
"""Edit the repository name, description, and long description"""
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
new_name = "renamed_filtering"
new_description = "Edited filtering tool"
new_long_description = "Edited long description"
self.edit_repository_information( repository, repo_name=new_name, description=new_description, long_description=new_long_description )
def test_0020_change_repository_category( self ):
"""Change the categories associated with the filtering repository"""
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.edit_repository_categories( repository,
categories_to_add=[ "Test 0000 Basic Repository Features 2" ],
categories_to_remove=[ "Test 0000 Basic Repository Features 1" ] )
def test_0025_grant_write_access( self ):
'''Grant write access to another user'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.grant_write_access( repository, usernames=[ common.admin_username ] )
self.revoke_write_access( repository, common.admin_username )
def test_0030_upload_filtering_1_1_0( self ):
"""Upload filtering_1.1.0.tar to the repository"""
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'filtering/filtering_1.1.0.tar', commit_message="Uploaded filtering 1.1.0" )
def test_0035_verify_repository( self ):
'''Display basic repository pages'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
latest_changeset_revision = self.get_repository_tip( repository )
self.check_for_valid_tools( repository, strings_displayed=[ 'Filter1' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=1 )
@@ -73,7 +73,7 @@
strings_displayed=[ 'Uploaded filtering 1.1.0', latest_changeset_revision ] )
def test_0040_alter_repository_states( self ):
'''Test toggling the malicious and deprecated repository flags.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
self.set_repository_malicious( repository, set_malicious=True, strings_displayed=[ 'The repository tip has been defined as malicious.' ] )
@@ -93,7 +93,7 @@
set_deprecated=False )
def test_0045_display_repository_tip_file( self ):
'''Display the contents of filtering.xml in the repository tip revision'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_repository_file_contents( repository=repository,
filename='filtering.xml',
filepath=None,
@@ -101,7 +101,7 @@
strings_not_displayed=[] )
def test_0050_upload_filtering_txt_file( self ):
'''Upload filtering.txt file associated with tool version 1.1.0.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'filtering/filtering_0000.txt',
commit_message="Uploaded filtering.txt",
@@ -110,7 +110,7 @@
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0055_upload_filtering_test_data( self ):
'''Upload filtering test data.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'filtering/filtering_test_data.tar', commit_message="Uploaded filtering test data", remove_repo_files_not_in_tar='No' )
self.display_repository_file_contents( repository=repository,
filename='1.bed',
@@ -120,14 +120,14 @@
self.check_repository_metadata( repository, tip_only=True )
def test_0060_upload_filtering_2_2_0( self ):
'''Upload filtering version 2.2.0'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'filtering/filtering_2.2.0.tar',
commit_message="Uploaded filtering 2.2.0",
remove_repo_files_not_in_tar='No' )
def test_0065_verify_filtering_repository( self ):
'''Verify the new tool versions and repository metadata.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip = self.get_repository_tip( repository )
self.check_for_valid_tools( repository )
strings_displayed = self.get_repository_metadata_revisions( repository ).append( 'Select a revision' )
@@ -137,7 +137,7 @@
self.check_repository_metadata( repository, tip_only=False )
def test_0070_upload_readme_txt_file( self ):
'''Upload readme.txt file associated with tool version 2.2.0.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository, 'readme.txt', commit_message="Uploaded readme.txt" )
self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] )
# Verify that there is a different readme file for each metadata revision.
@@ -145,13 +145,13 @@
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0', 'This is a readme file.' ] )
def test_0075_delete_readme_txt_file( self ):
'''Delete the readme.txt file.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] )
self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 )
self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0080_search_for_valid_filter_tool( self ):
'''Search for the filtering tool by tool ID, name, and version.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
tip_changeset = self.get_repository_tip( repository )
search_fields = dict( tool_id='Filter1', tool_name='filter', tool_version='2.2.0' )
self.search_for_valid_tools( search_fields=search_fields, strings_displayed=[ tip_changeset ], strings_not_displayed=[] )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -1,5 +1,5 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+import tool_shed.base.test_db_util as test_db_util
repository_name = 'freebayes_0010'
repository_description = "Galaxy's freebayes tool"
@@ -11,14 +11,14 @@
"""Create necessary user accounts and login as an admin user."""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
- admin_user = get_user( common.admin_email )
+ admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
- admin_user_private_role = get_private_role( admin_user )
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( 'Test 0010 Repository With Tool Dependencies', 'Tests for a repository with tool dependencies.' )
@@ -31,7 +31,7 @@
repository_long_description=repository_long_description,
categories=[ 'Test 0010 Repository With Tool Dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/freebayes.xml',
valid_tools_only=False,
@@ -44,7 +44,7 @@
strings_displayed=[ 'requires an entry', 'tool_data_table_conf.xml' ] )
def test_0015_upload_missing_tool_data_table_conf_file( self ):
'''Upload the missing tool_data_table_conf.xml.sample file to the repository.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/tool_data_table_conf.xml.sample',
valid_tools_only=False,
@@ -57,27 +57,27 @@
strings_displayed=[ 'refers to a file', 'sam_fa_indices.loc' ] )
def test_0020_upload_missing_sample_loc_file( self ):
'''Upload the missing sam_fa_indices.loc.sample file to the repository.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/sam_fa_indices.loc.sample',
strings_displayed=[],
commit_message='Uploaded tool data table .loc file.' )
def test_0025_upload_invalid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.5 of the freebayes package.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
strings_displayed=[ 'Name, version and type from a tool requirement tag does not match' ],
commit_message='Uploaded invalid tool dependency XML.' )
def test_0030_upload_valid_tool_dependency_xml( self ):
'''Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.upload_file( repository,
os.path.join( 'freebayes', 'tool_dependencies.xml' ),
commit_message='Uploaded valid tool dependency XML.' )
def test_0035_verify_tool_dependencies( self ):
'''Verify that the uploaded tool_dependencies.xml specifies the correct package versions.'''
- repository = get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
self.display_manage_repository_page( repository,
strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools' ],
strings_not_displayed=[ 'Invalid tools' ] )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -1,5 +1,5 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+import tool_shed.base.test_db_util as test_db_util
datatypes_repository_name = 'emboss_datatypes_0020'
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
@@ -15,14 +15,14 @@
"""Create necessary user accounts and login as an admin user."""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
- admin_user = get_user( common.admin_email )
+ admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
- admin_user_private_role = get_private_role( admin_user )
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( 'Test 0020 Basic Repository Dependencies', 'Testing basic repository dependency features.' )
@@ -35,11 +35,11 @@
repository_long_description=datatypes_repository_long_description,
categories=[ 'Test 0020 Basic Repository Dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
def test_0015_verify_datatypes_in_datatypes_repository( self ):
'''Verify that the emboss_datatypes repository contains datatype entries.'''
- repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
def test_0020_create_emboss_5_repository_and_upload_files( self ):
'''Create and populate the emboss_5_0020 repository.'''
@@ -48,12 +48,12 @@
repository_long_description=emboss_repository_long_description,
categories=[ 'Test 0020 Basic Repository Dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
def test_0025_generate_and_upload_repository_dependencies_xml( self ):
'''Generate and upload the repository_dependencies.xml file'''
- repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0020', additional_paths=[ 'emboss', '5' ] )
self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
@@ -63,8 +63,8 @@
commit_message='Uploaded repository_dependencies.xml' )
def test_0030_verify_emboss_5_repository_dependency_on_emboss_datatypes( self ):
'''Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.'''
- repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
changeset_revision = self.get_repository_tip( datatypes_repository )
strings_displayed = [ datatypes_repository_name, common.test_user_1_name, changeset_revision, 'Repository dependencies' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -1,5 +1,5 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+import tool_shed.base.test_db_util as test_db_util
datatypes_repository_name = 'emboss_datatypes_0030'
datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
@@ -17,14 +17,14 @@
"""Create necessary user accounts."""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
- admin_user = get_user( common.admin_email )
+ admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
- admin_user_private_role = get_private_role( admin_user )
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( 'Test 0030 Repository Dependency Revisions', 'Testing repository dependencies by revision.' )
@@ -32,57 +32,57 @@
'''Create the emboss_5_0030, emboss_6_0030, emboss_datatypes_0030, and emboss_0030 repositories and populate the emboss_datatypes repository.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ emboss_5_repository = test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
if emboss_5_repository is None:
self.create_repository( emboss_5_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
- emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ emboss_5_repository = test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
- emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ emboss_6_repository = test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
if emboss_6_repository is None:
self.create_repository( emboss_6_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
- emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ emboss_6_repository = test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
self.upload_file( emboss_6_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball..' )
- datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
if datatypes_repository is None:
self.create_repository( datatypes_repository_name,
datatypes_repository_description,
repository_long_description=datatypes_repository_long_description,
categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
- datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
if self.repository_is_new( datatypes_repository ):
self.upload_file( datatypes_repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
if emboss_repository is None:
self.create_repository( emboss_repository_name,
emboss_repository_description,
repository_long_description=emboss_repository_long_description,
categories=[ 'Test 0030 Repository Dependency Revisions' ],
strings_displayed=[] )
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ self.upload_file( emboss_repository, 'emboss/emboss.tar', commit_message='Uploaded tool tarball.' )
def test_0015_generate_repository_dependencies_for_emboss_5( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_5 repository.'''
- datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
- emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ emboss_5_repository = test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
self.upload_file( emboss_5_repository,
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded repository_depepndencies.xml.' )
def test_0020_generate_repository_dependencies_for_emboss_6( self ):
'''Generate a repository_dependencies.xml file specifying emboss_datatypes and upload it to the emboss_6 repository.'''
- emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ emboss_6_repository = test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss' ] )
self.upload_file( emboss_6_repository,
'repository_dependencies.xml',
@@ -90,8 +90,8 @@
commit_message='Uploaded repository_depepndencies.xml.' )
def test_0025_generate_repository_dependency_on_emboss_5( self ):
'''Create and upload repository_dependencies.xml for the emboss_5_0030 repository.'''
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- emboss_5_repository = get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ emboss_5_repository = test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '5' ] )
self.generate_repository_dependency_xml( [ emboss_5_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -102,8 +102,8 @@
commit_message='Uploaded dependency configuration specifying emboss_5' )
def test_0030_generate_repository_dependency_on_emboss_6( self ):
'''Create and upload repository_dependencies.xml for the emboss_6_0030 repository.'''
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- emboss_6_repository = get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ emboss_6_repository = test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0030', additional_paths=[ 'emboss', '6' ] )
self.generate_repository_dependency_xml( [ emboss_6_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -114,9 +114,9 @@
commit_message='Uploaded dependency configuration specifying emboss_6' )
def test_0035_verify_repository_dependency_revisions( self ):
'''Verify that different metadata revisions of the emboss repository have different repository dependencies.'''
- repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
repository_metadata = [ ( metadata.metadata, metadata.changeset_revision ) for metadata in self.get_repository_metadata( repository ) ]
- datatypes_repository = get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
datatypes_tip = self.get_repository_tip( datatypes_repository )
# Iterate through all metadata revisions and check for repository dependencies.
for metadata, changeset_revision in repository_metadata:
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -1,5 +1,5 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+import tool_shed.base.test_db_util as test_db_util
freebayes_repository_name = 'freebayes_0040'
freebayes_repository_name_description = "Galaxy's freebayes tool"
@@ -15,14 +15,14 @@
"""Create necessary user accounts."""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
- admin_user = get_user( common.admin_email )
+ admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
- admin_user_private_role = get_private_role( admin_user )
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( 'test_0040_repository_circular_dependencies', 'Testing handling of circular repository dependencies.' )
@@ -35,7 +35,7 @@
repository_long_description=freebayes_repository_name_long_description,
categories=[ 'test_0040_repository_circular_dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/freebayes.tar',
strings_displayed=[],
@@ -49,7 +49,7 @@
repository_long_description=filtering_repository_long_description,
categories=[ 'test_0040_repository_circular_dependencies' ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
self.upload_file( repository,
'filtering/filtering_1.1.0.tar',
strings_displayed=[],
@@ -60,8 +60,8 @@
# Filtering revision 0 -> freebayes revision 0.
# Freebayes revision 0 -> filtering revision 1.
# Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
- repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
- filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
self.generate_repository_dependency_xml( [ repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -76,8 +76,8 @@
# Filtering revision 0 -> freebayes revision 0.
# Freebayes revision 0 -> filtering revision 1.
# Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
- repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
- freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
self.generate_repository_dependency_xml( [ repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -88,8 +88,8 @@
commit_message='Uploaded dependency on filtering' )
def test_0030_verify_repository_dependencies( self ):
'''Verify that each repository can depend on the other without causing an infinite loop.'''
- filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
- freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
# The dependency structure should look like:
# Filtering revision 0 -> freebayes revision 0.
# Freebayes revision 0 -> filtering revision 1.
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_0050_circular_n_levels.py
--- a/test/tool_shed/functional/test_0050_circular_n_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -1,5 +1,5 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_user, get_private_role
+import tool_shed.base.test_db_util as test_db_util
emboss_datatypes_repository_name = 'emboss_datatypes_0050'
emboss_datatypes_repository_description = "Datatypes for emboss"
@@ -26,14 +26,14 @@
"""Create necessary user accounts."""
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_user( common.test_user_1_email )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_private_role( test_user_1 )
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
self.logout()
self.login( email=common.admin_email, username=common.admin_username )
- admin_user = get_user( common.admin_email )
+ admin_user = test_db_util.get_user( common.admin_email )
assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
- admin_user_private_role = get_private_role( admin_user )
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( default_category, default_category_description )
@@ -46,7 +46,7 @@
repository_long_description=emboss_datatypes_repository_long_description,
categories=[ default_category ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
self.upload_file( repository,
'emboss/datatypes/datatypes_conf.xml',
strings_displayed=[],
@@ -58,12 +58,12 @@
repository_long_description=emboss_repository_long_description,
categories=[ default_category ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
self.upload_file( repository,
'emboss/emboss.tar',
strings_displayed=[],
commit_message='Uploaded tool tarball.' )
- datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'emboss' ] )
self.generate_repository_dependency_xml( [ datatypes_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -79,12 +79,12 @@
repository_long_description=filtering_repository_long_description,
categories=[ default_category ],
strings_displayed=[] )
- filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
self.upload_file( filtering_repository,
'filtering/filtering_1.1.0.tar',
strings_displayed=[],
commit_message='Uploaded filtering.tar.' )
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'filtering' ] )
self.generate_repository_dependency_xml( [ emboss_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -100,14 +100,14 @@
repository_long_description=freebayes_repository_long_description,
categories=[ default_category ],
strings_displayed=[] )
- repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
self.upload_file( repository,
'freebayes/freebayes.tar',
strings_displayed=[],
commit_message='Uploaded freebayes.tar.' )
- emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_0050', additional_paths=[ 'freebayes' ] )
self.generate_repository_dependency_xml( [ filtering_repository ],
self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
@@ -138,10 +138,10 @@
id: 4 key: http://localhost:8634__ESEP__emboss_0050__ESEP__user1__ESEP__9f1503046640
['http://localhost:8634', 'emboss_datatypes_0050', 'user1', '596029c334b1']
'''
- emboss_datatypes_repository = get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
- emboss_repository = get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
- filtering_repository = get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
- freebayes_repository = get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
for repository in [ emboss_datatypes_repository, emboss_repository, filtering_repository ]:
self.check_repository_dependency( freebayes_repository, repository, self.get_repository_tip( repository ) )
for changeset_revision in self.get_repository_metadata_revisions( emboss_repository ):
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_1000_install_basic_repository.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1000_install_basic_repository.py
@@ -0,0 +1,31 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+class BasicToolShedFeatures( ShedTwillTestCase ):
+ '''Test installing a basic repository.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ def test_0005_browse_tool_sheds( self ):
+ """Browse the available tool sheds in this Galaxy instance."""
+ self.visit_galaxy_url( '/admin_toolshed/browse_tool_sheds' )
+ self.check_page_for_string( 'Embedded tool shed for functional tests' )
+ self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 2' ] )
+ def test_0010_browse_test_0000_category( self ):
+ '''Browse the category created in test 0000. It should contain the filtering_0000 repository also created in that test.'''
+ category = test_db_util.get_category_by_name( 'Test 0000 Basic Repository Features 1' )
+ self.browse_category( category, strings_displayed=[ 'filtering_0000' ] )
+ def test_0015_preview_filtering_repository( self ):
+ '''Load the preview page for the filtering_0000 repository in the tool shed.'''
+ self.preview_repository_in_tool_shed( 'filtering_0000', common.test_user_1_name, strings_displayed=[ 'filtering_0000', 'Valid tools' ] )
+ def test_0020_install_filtering_repository( self ):
+ self.install_repository( 'filtering_0000', common.test_user_1_name )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
+ self.verify_installed_repository_on_browse_page( installed_repository )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Tools', 'Filter1' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
\ No newline at end of file
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_1000_install_filtering_repository.py
--- a/test/tool_shed/functional/test_1000_install_filtering_repository.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
-from tool_shed.base.test_db_util import get_repository_by_name_and_owner, get_galaxy_user, get_galaxy_private_role, get_category_by_name
-
-class BasicToolShedFeatures( ShedTwillTestCase ):
- '''Test installing a basic repository.'''
- def test_0000_initiate_users( self ):
- """Create necessary user accounts."""
- self.galaxy_logout()
- self.galaxy_login( email=common.test_user_1_email, username=common.test_user_1_name )
- test_user_1 = get_galaxy_user( common.test_user_1_email )
- assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
- test_user_1_private_role = get_galaxy_private_role( test_user_1 )
- self.galaxy_logout()
- self.galaxy_login( email=common.admin_email, username=common.admin_username )
- admin_user = get_galaxy_user( common.admin_email )
- assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
- admin_user_private_role = get_galaxy_private_role( admin_user )
- def test_0005_browse_tool_sheds( self ):
- """Browse the available tool sheds in this Galaxy instance."""
- self.visit_galaxy_url( '/admin_toolshed/browse_tool_sheds' )
- self.check_page_for_string( 'Embedded tool shed for functional tests' )
- self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0000 Basic Repository Features 1', 'Test 0000 Basic Repository Features 2' ] )
- def test_0010_browse_test_0000_category( self ):
- '''Browse the category created in test 0000. It should contain the filtering_0000 repository also created in that test.'''
- category = get_category_by_name( 'Test 0000 Basic Repository Features 1' )
- self.browse_category( category, strings_displayed=[ 'filtering_0000' ] )
- def test_0015_preview_filtering_repository( self ):
- '''Load the preview page for the filtering_0000 repository in the tool shed.'''
- self.preview_repository_in_tool_shed( 'filtering_0000', common.test_user_1_name, strings_displayed=[ 'filtering_0000', 'Valid tools' ] )
- def test_0020_install_filtering_repository( self ):
- self.install_repository( 'filtering_0000', common.test_user_1_name )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -0,0 +1,27 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+class ToolWithToolDependencies( ShedTwillTestCase ):
+ '''Test installing a repository with tool dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ def test_0005_browse_tool_shed( self ):
+ """Browse the available tool sheds in this Galaxy instance and preview the freebayes tool."""
+ self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0010 Repository With Tool Dependencies' ] )
+ category = test_db_util.get_category_by_name( 'Test 0010 Repository With Tool Dependencies' )
+ self.browse_category( category, strings_displayed=[ 'freebayes_0010' ] )
+ self.preview_repository_in_tool_shed( 'freebayes_0010', common.test_user_1_name, strings_displayed=[ 'freebayes_0010', 'Valid tools' ] )
+ def test_0015_install_freebayes_repository( self ):
+ '''Install the freebayes repository without installing tool dependencies.'''
+ self.install_repository( 'freebayes_0010', common.test_user_1_name, install_tool_dependencies=False )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.verify_installed_repository_on_browse_page( installed_repository )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Tools', 'FreeBayes' ] )
+ self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -0,0 +1,27 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+class ToolWithRepositoryDependencies( ShedTwillTestCase ):
+ '''Test installing a repository with repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ def test_0005_browse_tool_shed( self ):
+ """Browse the available tool sheds in this Galaxy instance and preview the emboss tool."""
+ self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0020 Basic Repository Dependencies' ] )
+ category = test_db_util.get_category_by_name( 'Test 0020 Basic Repository Dependencies' )
+ self.browse_category( category, strings_displayed=[ 'emboss_0020' ] )
+ self.preview_repository_in_tool_shed( 'emboss_0020', common.test_user_1_name, strings_displayed=[ 'emboss_0020', 'Valid tools' ] )
+ def test_0015_install_emboss_repository( self ):
+ '''Install the emboss repository without installing tool dependencies.'''
+ self.install_repository( 'emboss_0020', common.test_user_1_name, install_tool_dependencies=False )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0020', common.test_user_1_name )
+ self.verify_installed_repository_on_browse_page( installed_repository )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] )
+ self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 -r b00822d6733f9a4fa93dac20a67d63e56fcc51be test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -0,0 +1,29 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+class RepositoryWithDependencyRevisions( ShedTwillTestCase ):
+ '''Test installing a repository with dependency revisions.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ def test_0005_browse_tool_shed( self ):
+ """Browse the available tool sheds in this Galaxy instance and preview the emboss tool."""
+ self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0030 Repository Dependency Revisions' ] )
+ category = test_db_util.get_category_by_name( 'Test 0030 Repository Dependency Revisions' )
+ self.browse_category( category, strings_displayed=[ 'emboss_0030' ] )
+ self.preview_repository_in_tool_shed( 'emboss_0030', common.test_user_1_name, strings_displayed=[ 'emboss_0030', 'Valid tools' ] )
+ def test_0015_install_emboss_repository( self ):
+ '''Install the emboss repository without installing tool dependencies.'''
+ repository = test_db_util.get_repository_by_name_and_owner( 'emboss_0030', common.test_user_1_name )
+ revisions = self.get_repository_metadata_revisions( repository )
+ self.install_repository( 'emboss_0030', common.test_user_1_name, changeset_revision=revisions[1], install_tool_dependencies=False )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
+ self.verify_installed_repository_on_browse_page( installed_repository )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] )
+ self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/34f61e53a563/
changeset: 34f61e53a563
user: greg
date: 2012-12-13 18:00:53
summary: More import tweaks.
affected #: 6 files
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -2,13 +2,14 @@
Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
"""
-import urllib2, tempfile
+import os, urllib2, tempfile
+from galaxy import util
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
-from galaxy.tool_shed.common_util import *
+from galaxy.tool_shed import common_util
class InstallManager( object ):
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
@@ -37,17 +38,17 @@
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = REPOSITORY_OWNER
+ self.repository_owner = common_util.REPOSITORY_OWNER
index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
# tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
# The default behavior is that the tool shed is down.
tool_shed_accessible = False
- tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
# The missing_tool_configs_dict contents are something like:
# {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
else:
# It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
# we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
@@ -112,7 +113,7 @@
# Tools outside of sections.
file_path = elem.get( 'file', None )
if file_path:
- name = strip_path( file_path )
+ name = suc.strip_path( file_path )
if name in migrated_tool_configs:
if elem not in tool_panel_elems:
tool_panel_elems.append( elem )
@@ -122,7 +123,7 @@
if section_elem.tag == 'tool':
file_path = section_elem.get( 'file', None )
if file_path:
- name = strip_path( file_path )
+ name = suc.strip_path( file_path )
if name in migrated_tool_configs:
# Append the section, not the tool.
if elem not in tool_panel_elems:
@@ -139,7 +140,7 @@
if proprietary_tool_panel_elem.tag == 'tool':
# The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
- proprietary_name = strip_path( proprietary_tool_config )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
if tool_config == proprietary_name:
# The tool is loaded outside of any sections.
tool_sections.append( None )
@@ -151,7 +152,7 @@
if section_elem.tag == 'tool':
# The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
proprietary_tool_config = section_elem.get( 'file' )
- proprietary_name = strip_path( proprietary_tool_config )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
if tool_config == proprietary_name:
# The tool is loaded inside of the section_elem.
tool_sections.append( ToolSection( proprietary_tool_panel_elem ) )
@@ -349,7 +350,7 @@
shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
- return get_non_shed_tool_panel_configs( self.app )
+ return common_util.get_non_shed_tool_panel_configs( self.app )
def __get_url_from_tool_shed( self, tool_shed ):
# The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
# http://toolshed.g2.bx.psu.edu/
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -6,6 +6,7 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
from galaxy.util.odict import odict
+from galaxy.tool_shed import common_util
log = logging.getLogger( __name__ )
@@ -48,11 +49,11 @@
# New installations will not be missing tools, so we don't need to worry about them.
missing_tool_configs_dict = odict()
else:
- tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
# The missing_tool_configs_dict contents are something like:
# {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
else:
# It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
# we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -32,7 +32,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
-import galaxy.util.shed_util as shed_util
+import galaxy.util.shed_util
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
@@ -887,11 +887,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return galaxy.util.shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -393,7 +393,7 @@
for tool_dict in metadata[ 'tools' ]:
guid = tool_dict[ 'guid' ]
tool_config = tool_dict[ 'tool_config' ]
- file_name = strip_path( tool_config )
+ file_name = suc.strip_path( tool_config )
guids_and_configs[ guid ] = file_name
# Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
tree = util.parse_xml( shed_tool_conf )
@@ -434,7 +434,7 @@
{<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}]}
"""
tool_panel_dict = {}
- file_name = strip_path( tool_config )
+ file_name = suc.strip_path( tool_config )
tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections )
tool_panel_dict[ guid ] = tool_section_dicts
return tool_panel_dict
@@ -471,11 +471,11 @@
return tool_section
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
- config_file = strip_path( config_file )
+ config_file = suc.strip_path( config_file )
for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ):
changeset_ctx = repo.changectx( changeset )
for ctx_file in changeset_ctx.files():
- ctx_file_name = strip_path( ctx_file )
+ ctx_file_name = suc.strip_path( ctx_file )
if ctx_file_name == config_file:
return suc.get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
@@ -491,7 +491,7 @@
for converter in elem.findall( 'converter' ):
converter_config = converter.get( 'file', None )
if converter_config:
- converter_config_file_name = strip_path( converter_config )
+ converter_config_file_name = suc.strip_path( converter_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
for name in files:
@@ -508,7 +508,7 @@
for display_app in elem.findall( 'display' ):
display_config = display_app.get( 'file', None )
if display_config:
- display_config_file_name = strip_path( display_config )
+ display_config_file_name = suc.strip_path( display_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
for name in files:
@@ -574,7 +574,7 @@
if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
return index, shed_tool_conf_dict
else:
- file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] )
+ file_name = suc.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
if shed_tool_conf == file_name:
return index, shed_tool_conf_dict
def get_tool_index_sample_files( sample_files ):
@@ -722,11 +722,11 @@
params_with_missing_index_file = repository_tool.params_with_missing_index_file
for param in params_with_missing_index_file:
options = param.options
- missing_file_name = strip_path( options.missing_index_file )
+ missing_file_name = suc.strip_path( options.missing_index_file )
if missing_file_name not in sample_files_copied:
# The repository must contain the required xxx.loc.sample file.
for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
+ sample_file_name = suc.strip_path( sample_file )
if sample_file_name == '%s.sample' % missing_file_name:
suc.copy_sample_file( app, sample_file )
if options.tool_data_table and options.tool_data_table.missing_index_file:
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1316,7 +1316,7 @@
return suc.get_repository_file_contents( file_path )
def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
"""Return file_name from the received changeset_revision of the repository manifest."""
- stripped_file_name = strip_path( file_name )
+ stripped_file_name = suc.strip_path( file_name )
repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1109,7 +1109,7 @@
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
else:
- file_name = strip_path( config_filename )
+ file_name = suc.strip_path( config_filename )
if file_name == shed_tool_conf:
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0