1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/320c9b7f08c4/
changeset: 320c9b7f08c4
user: greg
date: 2013-03-15 21:01:09
summary: Refactor shed_util and shed_util_common into appropriate tool shed Galaxy utilities components.
affected #: 24 files

diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -5,10 +5,10 @@
 from galaxy.web.framework.helpers import iff, grids
 from galaxy.util import json
 from galaxy.model.orm import or_
-import tool_shed.util.shed_util as shed_util
 import tool_shed.util.shed_util_common as suc
-import tool_shed.util.metadata_util as metadata_util
-from tool_shed.util import encoding_util
+from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
+from tool_shed.util import repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.galaxy_install import repository_util
 from galaxy.webapps.tool_shed.util import workflow_util
 import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
 import pkg_resources
@@ -36,7 +36,7 @@
         repository_id = kwd[ 'id' ]
         repository = suc.get_installed_tool_shed_repository( trans, repository_id )
         try:
-            shed_util.activate_repository( trans, repository )
+            common_install_util.activate_repository( trans, repository )
         except Exception, e:
             error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
             log.debug( error_message )
@@ -92,7 +92,7 @@
         # can reset the metadata if necessary. This will ensure that information about repository dependencies and tool dependencies
         # will be current. Only allow selecting a different section in the tool panel if the repository was uninstalled and it contained
         # tools that should be displayed in the tool panel.
-        changeset_revision_dict = shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
+        changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
         current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
         current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
         if current_changeset_revision and current_ctx_rev:
@@ -142,7 +142,7 @@
         params = util.Params( kwd )
         message = util.restore_text( params.get( 'message', '' ) )
         status = params.get( 'status', 'done' )
-        tool_dependency = shed_util.get_tool_dependency( trans, kwd[ 'id' ] )
+        tool_dependency = tool_dependency_util.get_tool_dependency( trans, kwd[ 'id' ] )
         if tool_dependency.in_error_state:
             message = "This tool dependency is not installed correctly (see the <b>Tool dependency installation error</b> below). "
             message += "Choose <b>Uninstall this tool dependency</b> from the <b>Repository Actions</b> menu, correct problems "
@@ -217,16 +217,16 @@
         if params.get( 'deactivate_or_uninstall_repository_button', False ):
             if tool_shed_repository.includes_tools_for_display_in_tool_panel:
                 # Handle tool panel alterations.
- shed_util.remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked ) + tool_util.remove_from_tool_panel( trans, tool_shed_repository, shed_tool_conf, uninstall=remove_from_disk_checked ) if tool_shed_repository.includes_data_managers: - shed_util.remove_from_data_manager( trans.app, tool_shed_repository ) + data_manager_util.remove_from_data_manager( trans.app, tool_shed_repository ) if tool_shed_repository.includes_datatypes: # Deactivate proprietary datatypes. - installed_repository_dict = shed_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True ) + installed_repository_dict = datatype_util.load_installed_datatypes( trans.app, tool_shed_repository, repository_install_dir, deactivate=True ) if installed_repository_dict and 'converter_path' in installed_repository_dict: - shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True ) + datatype_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=True ) if installed_repository_dict and 'display_path' in installed_repository_dict: - shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True ) + datatype_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=True ) if remove_from_disk_checked: try: # Remove the repository from disk. @@ -244,7 +244,7 @@ tool_shed_repository.uninstalled = True # Remove all installed tool dependencies, but don't touch any repository dependencies.. for tool_dependency in tool_shed_repository.installed_tool_dependencies: - uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency ) + uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency ) if error_message: errors = '%s %s' % ( errors, error_message ) tool_shed_repository.deleted = True @@ -371,7 +371,7 @@ return repo_information_dict def get_versions_of_tool( self, app, guid ): - tool_version = shed_util.get_tool_version( app, guid ) + tool_version = tool_util.get_tool_version( app, guid ) return tool_version.get_version_ids( app, reverse=True ) def handle_repository_contents( self, trans, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None, @@ -395,26 +395,26 @@ trans.sa_session.add( tool_shed_repository ) trans.sa_session.flush() if 'tool_dependencies' in metadata_dict and not reinstalling: - tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True ) + tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True ) if 'tools' in metadata_dict: - tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section ) + tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section ) sample_files = metadata_dict.get( 'sample_files', [] ) - tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files ) - shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path ) + tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files ) + tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path ) sample_files_copied = [ str( s ) for s in tool_index_sample_files ] repository_tools_tups = 
suc.get_repository_tools_tups( trans.app, metadata_dict ) if repository_tools_tups: # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = shed_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups ) + repository_tools_tups = tool_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups ) # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( trans.app, + repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups, sample_files_copied ) # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance. - shed_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied ) - shed_util.add_to_tool_panel( app=trans.app, + tool_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied ) + tool_util.add_to_tool_panel( app=trans.app, repository_name=tool_shed_repository.name, repository_clone_url=repository_clone_url, changeset_revision=tool_shed_repository.installed_changeset_revision, @@ -424,8 +424,13 @@ tool_panel_dict=tool_panel_dict, new_install=True ) if 'data_manager' in metadata_dict: - new_data_managers = shed_util.install_data_managers( trans.app, trans.app.config.shed_data_manager_config_file, metadata_dict, shed_config_dict, relative_install_dir, - tool_shed_repository, repository_tools_tups ) + new_data_managers = data_manager_util.install_data_managers( trans.app, + trans.app.config.shed_data_manager_config_file, + metadata_dict, + shed_config_dict, + relative_install_dir, + tool_shed_repository, + repository_tools_tups ) if 'datatypes' in metadata_dict: tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES if not tool_shed_repository.includes_datatypes: @@ -437,16 +442,16 @@ files_dir = os.path.join( shed_config_dict['tool_path'], files_dir ) datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir ) # Load data types required by tools. - converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False ) + converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False ) if converter_path or display_path: # Create a dictionary of tool shed repository related information. 
- repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed, - name=tool_shed_repository.name, - owner=tool_shed_repository.owner, - installed_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dicts=metadata_dict.get( 'tools', [] ), - converter_path=converter_path, - display_path=display_path ) + repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed, + name=tool_shed_repository.name, + owner=tool_shed_repository.owner, + installed_changeset_revision=tool_shed_repository.installed_changeset_revision, + tool_dicts=metadata_dict.get( 'tools', [] ), + converter_path=converter_path, + display_path=display_path ) if converter_path: # Load proprietary datatype converters trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict ) @@ -533,10 +538,10 @@ tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) ) - installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_dependencies ) + installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app, + tool_shed_repository=tool_shed_repository, + tool_dependencies_config=tool_dependencies_config, + tool_dependencies=tool_dependencies ) for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR: message += ' %s' % suc.to_safe_string( installed_tool_dependency.error_message ) @@ -564,7 +569,7 @@ tool_dependency_ids = util.listify( params.get( 'id', None ) ) tool_dependencies = [] for tool_dependency_id in tool_dependency_ids: - tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id ) + tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id ) tool_dependencies.append( tool_dependency ) if kwd.get( 'install_tool_dependencies_button', False ): # Filter tool dependencies to only those that are installed. @@ -616,10 +621,10 @@ if isinstance( repo_info_dict, basestring ): repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict ) # Clone each repository to the configured location. 
- shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING ) + suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING ) repo_info_tuple = repo_info_dict[ tool_shed_repository.name ] description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple - relative_clone_dir = shed_util.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision ) + relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision ) clone_dir = os.path.join( tool_path, relative_clone_dir ) relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name ) install_dir = os.path.join( tool_path, relative_install_dir ) @@ -627,12 +632,12 @@ if cloned_ok: if reinstalling: # Since we're reinstalling the repository we need to find the latest changeset revision to which is can be updated. - changeset_revision_dict = shed_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository ) + changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository ) current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None ) current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None ) if current_ctx_rev != ctx_rev: repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) ) - shed_util.pull_repository( repo, repository_clone_url, current_changeset_revision ) + repository_util.pull_repository( repo, repository_clone_url, current_changeset_revision ) suc.update_repository( repo, ctx_rev=current_ctx_rev ) self.handle_repository_contents( trans, tool_shed_repository=tool_shed_repository, @@ -647,9 +652,9 @@ metadata = tool_shed_repository.metadata if 'tools' in metadata: # Get the tool_versions from the tool shed for each tool in the installed change set. - shed_util.update_tool_shed_repository_status( trans.app, - tool_shed_repository, - trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) + suc.update_tool_shed_repository_status( trans.app, + tool_shed_repository, + trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) url = suc.url_join( tool_shed_url, '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ @@ -659,7 +664,7 @@ response.close() if text: tool_version_dicts = json.from_json_string( text ) - shed_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository ) + tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository ) else: message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions " @@ -668,20 +673,20 @@ if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata: work_dir = tempfile.mkdtemp() # Install tool dependencies. 
- shed_util.update_tool_shed_repository_status( trans.app, - tool_shed_repository, - trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) + suc.update_tool_shed_repository_status( trans.app, + tool_shed_repository, + trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir ) - installed_tool_dependencies = shed_util.handle_tool_dependencies( app=trans.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_shed_repository.tool_dependencies ) + installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app, + tool_shed_repository=tool_shed_repository, + tool_dependencies_config=tool_dependencies_config, + tool_dependencies=tool_shed_repository.tool_dependencies ) try: shutil.rmtree( work_dir ) except: pass - shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED ) + suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED ) else: # An error occurred while cloning the repository, so reset everything necessary to enable another attempt. self.set_repository_attributes( trans, @@ -739,12 +744,12 @@ trans.sa_session.add( repository ) trans.sa_session.flush() message = "The repository information has been updated." - containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - repository=repository, - reinstalling=False, - required_repo_info_dicts=None ) + containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository=repository, + reinstalling=False, + required_repo_info_dicts=None ) return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', repository=repository, description=description, @@ -825,9 +830,9 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd ) + tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd ) # We need a tool_shed_repository, so get it from one of the tool_dependencies. 
- tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_ids[ 0 ] ) + tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_ids[ 0 ] ) tool_shed_repository = tool_dependency.tool_shed_repository self.tool_dependency_grid.title = "Tool shed repository '%s' tool dependencies" % tool_shed_repository.name self.tool_dependency_grid.global_actions = \ @@ -868,7 +873,7 @@ elif operation == 'uninstall': tool_dependencies_for_uninstallation = [] for tool_dependency_id in tool_dependency_ids: - tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id ) + tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id ) if tool_dependency.status in [ trans.model.ToolDependency.installation_status.INSTALLED, trans.model.ToolDependency.installation_status.ERROR ]: tool_dependencies_for_uninstallation.append( tool_dependency ) @@ -883,7 +888,7 @@ if trans.app.config.tool_dependency_dir: tool_dependencies_for_installation = [] for tool_dependency_id in tool_dependency_ids: - tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id ) + tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id ) if tool_dependency.status in [ trans.model.ToolDependency.installation_status.NEVER_INSTALLED, trans.model.ToolDependency.installation_status.UNINSTALLED ]: tool_dependencies_for_installation.append( tool_dependency ) @@ -978,15 +983,15 @@ install_tool_dependencies = False tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf ) created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \ - shed_util.create_repository_dependency_objects( trans, - tool_path, - tool_shed_url, - repo_info_dicts, - reinstalling=False, - install_repository_dependencies=install_repository_dependencies, - no_changes_checked=False, - tool_panel_section=tool_panel_section, - new_tool_panel_section=new_tool_panel_section ) + repository_dependency_util.create_repository_dependency_objects( trans, + tool_path, + tool_shed_url, + repo_info_dicts, + reinstalling=False, + install_repository_dependencies=install_repository_dependencies, + no_changes_checked=False, + tool_panel_section=tool_panel_section, + new_tool_panel_section=new_tool_panel_section ) if message and len( repo_info_dicts ) == 1: installed_tool_shed_repository = created_or_updated_tool_shed_repositories[ 0 ] message+= 'Click <a href="%s">here</a> to manage the repository. 
' % \ @@ -1069,39 +1074,39 @@ repo_info_dict = repo_info_dicts[ 0 ] name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \ - shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ) - readme_files_dict = shed_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ) + common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ) + readme_files_dict = suc.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ) # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed # and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process, # so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository # dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since # each displayed repository dependency will display a status, whether installed or missing. - containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies ) + containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies ) else: # We're installing a list of repositories, each of which may have tool dependencies or repository dependencies. 
containers_dicts = [] for repo_info_dict in repo_info_dicts: name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \ - shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ) - containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=None, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies ) + common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ) + containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=None, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies ) containers_dicts.append( containers_dict ) # Merge all containers into a single container. - containers_dict = shed_util.merge_containers_dicts_for_new_install( containers_dicts ) + containers_dict = repository_util.merge_containers_dicts_for_new_install( containers_dicts ) # Handle tool dependencies check box. if trans.app.config.tool_dependency_dir is None: if includes_tool_dependencies: @@ -1165,8 +1170,8 @@ install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository ) repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository ) - clone_dir = os.path.join( tool_path, shed_util.generate_tool_shed_repository_install_dir( repository_clone_url, - tool_shed_repository.installed_changeset_revision ) ) + clone_dir = os.path.join( tool_path, suc.generate_tool_shed_repository_install_dir( repository_clone_url, + tool_shed_repository.installed_changeset_revision ) ) relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name ) tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository ) tool_section = None @@ -1179,12 +1184,11 @@ includes_tool_dependencies = tool_shed_repository.includes_tool_dependencies if tool_shed_repository.includes_tools_for_display_in_tool_panel: # Handle the selected tool panel location for loading tools included in the tool shed repository. 
- tool_section, new_tool_panel_section, tool_panel_section_key = \ - shed_util.handle_tool_panel_selection( trans=trans, - metadata=metadata, - no_changes_checked=no_changes_checked, - tool_panel_section=tool_panel_section, - new_tool_panel_section=new_tool_panel_section ) + tool_section, new_tool_panel_section, tool_panel_section_key = tool_util.handle_tool_panel_selection( trans=trans, + metadata=metadata, + no_changes_checked=no_changes_checked, + tool_panel_section=tool_panel_section, + new_tool_panel_section=new_tool_panel_section ) # The repository's status must be updated from 'Uninstall' to 'New' when initiating reinstall so the repository_installation_updater will function. tool_shed_repository = suc.create_or_update_tool_shed_repository( trans.app, tool_shed_repository.name, @@ -1217,28 +1221,28 @@ tool_dependencies = metadata.get( 'tool_dependencies', None ) else: tool_dependencies = None - repo_info_dict = suc.create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=tool_shed_repository.changeset_revision, - ctx_rev=ctx_rev, - repository_owner=tool_shed_repository.owner, - repository_name=tool_shed_repository.name, - repository=None, - repository_metadata=None, - tool_dependencies=tool_dependencies, - repository_dependencies=repository_dependencies ) + repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=tool_shed_repository.changeset_revision, + ctx_rev=ctx_rev, + repository_owner=tool_shed_repository.owner, + repository_name=tool_shed_repository.name, + repository=None, + repository_metadata=None, + tool_dependencies=tool_dependencies, + repository_dependencies=repository_dependencies ) repo_info_dicts.append( repo_info_dict ) # Make sure all tool_shed_repository records exist. created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \ - shed_util.create_repository_dependency_objects( trans=trans, - tool_path=tool_path, - tool_shed_url=tool_shed_url, - repo_info_dicts=repo_info_dicts, - reinstalling=True, - install_repository_dependencies=install_repository_dependencies, - no_changes_checked=no_changes_checked, - tool_panel_section=tool_panel_section, - new_tool_panel_section=new_tool_panel_section ) + repository_dependency_util.create_repository_dependency_objects( trans=trans, + tool_path=tool_path, + tool_shed_url=tool_shed_url, + repo_info_dicts=repo_info_dicts, + reinstalling=True, + install_repository_dependencies=install_repository_dependencies, + no_changes_checked=no_changes_checked, + tool_panel_section=tool_panel_section, + new_tool_panel_section=new_tool_panel_section ) # Default the selected tool panel location for loading tools included in each newly installed required tool shed repository to the location # selected for the repository selected for reinstallation. 
for index, tps_key in enumerate( tool_panel_section_keys ): @@ -1365,19 +1369,19 @@ repository_name=tool_shed_repository.name, repository_owner=tool_shed_repository.owner, changeset_revision=tool_shed_repository.changeset_revision ) - repo_info_dict = suc.create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=tool_shed_repository.changeset_revision, - ctx_rev=tool_shed_repository.ctx_rev, - repository_owner=tool_shed_repository.owner, - repository_name=tool_shed_repository.name, - repository=None, - repository_metadata=None, - tool_dependencies=tool_dependencies, - repository_dependencies=repository_dependencies ) + repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=tool_shed_repository.changeset_revision, + ctx_rev=tool_shed_repository.ctx_rev, + repository_owner=tool_shed_repository.owner, + repository_name=tool_shed_repository.name, + repository=None, + repository_metadata=None, + tool_dependencies=tool_dependencies, + repository_dependencies=repository_dependencies ) repository_name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \ - shed_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ) + common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ) if installed_repository_dependencies or missing_repository_dependencies: has_repository_dependencies = True else: @@ -1387,7 +1391,7 @@ if 'tool_panel_section' in metadata: tool_panel_dict = metadata[ 'tool_panel_section' ] if tool_panel_dict: - if shed_util.panel_entry_per_tool( tool_panel_dict ): + if tool_util.panel_entry_per_tool( tool_panel_dict ): # The following forces everything to be loaded into 1 section (or no section) in the tool panel. tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ] tool_section_dict = tool_section_dicts[ 0 ] @@ -1415,17 +1419,17 @@ original_section_name = '' tool_panel_section_select_field = None shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans ) - containers_dict = shed_util.populate_containers_dict_for_new_install( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - readme_files_dict=readme_files_dict, - installed_repository_dependencies=installed_repository_dependencies, - missing_repository_dependencies=missing_repository_dependencies, - installed_tool_dependencies=installed_tool_dependencies, - missing_tool_dependencies=missing_tool_dependencies ) + containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + readme_files_dict=readme_files_dict, + installed_repository_dependencies=installed_repository_dependencies, + missing_repository_dependencies=missing_repository_dependencies, + installed_tool_dependencies=installed_tool_dependencies, + missing_tool_dependencies=missing_tool_dependencies ) # Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed # repository dependency will display a status, whether installed or missing. 
- containers_dict = suc.merge_missing_repository_dependencies_to_installed_container( containers_dict ) + containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict ) # Handle repository dependencies check box. install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True ) # Handle tool dependencies check box. @@ -1566,7 +1570,7 @@ response.close() if text: tool_version_dicts = json.from_json_string( text ) - shed_util.handle_tool_versions( trans.app, tool_version_dicts, repository ) + tool_util.handle_tool_versions( trans.app, tool_version_dicts, repository ) message = "Tool versions have been set for all included tools." status = 'done' else: @@ -1576,12 +1580,12 @@ status = 'error' shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository ) repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) ) - containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans=trans, - tool_shed_url=tool_shed_url, - tool_path=tool_path, - repository=repository, - reinstalling=False, - required_repo_info_dicts=None ) + containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository=repository, + reinstalling=False, + required_repo_info_dicts=None ) return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako', repository=repository, description=repository.description, @@ -1622,7 +1626,7 @@ tool_dependency_ids = util.listify( params.get( 'id', None ) ) tool_dependencies = [] for tool_dependency_id in tool_dependency_ids: - tool_dependency = shed_util.get_tool_dependency( trans, tool_dependency_id ) + tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id ) tool_dependencies.append( tool_dependency ) tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository if kwd.get( 'uninstall_tool_dependencies_button', False ): @@ -1633,7 +1637,7 @@ if tool_dependency.can_uninstall: tool_dependencies_for_uninstallation.append( tool_dependency ) for tool_dependency in tool_dependencies_for_uninstallation: - uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency ) + uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency ) if error_message: errors = True message = '%s %s' % ( message, error_message ) @@ -1681,12 +1685,12 @@ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) ) repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir ) repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name ) - shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev ) + repository_util.pull_repository( repo, repository_clone_url, latest_ctx_rev ) suc.update_repository( repo, latest_ctx_rev ) tool_shed = suc.clean_tool_shed_url( tool_shed_url ) # Remove old Data Manager entries if repository.includes_data_managers: - shed_util.remove_from_data_manager( trans.app, repository ) + data_manager_util.remove_from_data_manager( trans.app, repository ) # Update the repository metadata. 
metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app, repository=repository, @@ -1710,7 +1714,7 @@ if tool_panel_dict is None: tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository ) repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict ) - shed_util.add_to_tool_panel( app=trans.app, + tool_util.add_to_tool_panel( app=trans.app, repository_name=repository.name, repository_clone_url=repository_clone_url, changeset_revision=repository.installed_changeset_revision, @@ -1721,12 +1725,16 @@ new_install=False ) # Add new Data Manager entries if 'data_manager' in metadata_dict: - new_data_managers = shed_util.install_data_managers( trans.app, trans.app.config.shed_data_manager_config_file, metadata_dict, - repository.get_shed_config_dict( trans.app ), os.path.join( relative_install_dir, name ), - repository, repository_tools_tups ) + new_data_managers = data_manager_util.install_data_managers( trans.app, + trans.app.config.shed_data_manager_config_file, + metadata_dict, + repository.get_shed_config_dict( trans.app ), + os.path.join( relative_install_dir, name ), + repository, + repository_tools_tups ) # Create tool_dependency records if necessary. if 'tool_dependencies' in metadata_dict: - tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False ) + tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False ) message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision ) # See if any tool dependencies can be installed. shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository ) @@ -1829,10 +1837,6 @@ def can_select_tool_panel_section(): pass -def get_tool_dependency( trans, id ): - """Get a tool_dependency from the database via id""" - return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) ) - def have_shed_tool_conf_for_install( trans ): if not trans.app.toolbox.shed_tool_confs: return False diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py --- a/lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py +++ b/lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py @@ -1,6 +1,7 @@ import logging from galaxy.web.framework.helpers import time_ago import tool_shed.util.shed_util_common as suc +from tool_shed.util import metadata_util from galaxy import web from galaxy.web.base.controller import BaseAPIController @@ -29,7 +30,7 @@ rval = [] repository_metadata_id = kwd.get( 'repository_metadata_id', None ) try: - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) repository_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) ) repository_dict[ 'url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=repository_metadata_id ) rval.append( repository_dict ) diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/api/repository_revisions.py --- 
a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py +++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py @@ -1,6 +1,7 @@ import datetime from galaxy.web.framework.helpers import time_ago import tool_shed.util.shed_util_common as suc +from tool_shed.util import metadata_util from galaxy import web, util from galaxy.model.orm import and_, or_ from galaxy.web.base.controller import BaseAPIController @@ -73,7 +74,7 @@ Displays information about a repository_metadata record in the Tool Shed. """ try: - repository_metadata = suc.get_repository_metadata_by_id( trans, id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) repository_data = repository_metadata.get_api_value( view='element', value_mapper=default_value_mapper( trans, repository_metadata ) ) repository_data[ 'contents_url' ] = web.url_for( 'repository_revision_contents', repository_metadata_id=id ) @@ -91,7 +92,7 @@ """ repository_metadata_id = kwd.get( 'id', None ) try: - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) flush_needed = False for key, new_value in payload.items(): if hasattr( repository_metadata, key ): diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/controllers/admin.py --- a/lib/galaxy/webapps/tool_shed/controllers/admin.py +++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py @@ -106,7 +106,7 @@ # The received id is a RepositoryMetadata object id, so we need to get the # associated Repository and redirect to view_or_manage_repository with the # changeset_revision. - repository_metadata = suc.get_repository_metadata_by_id( trans, kwd[ 'id' ] ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, kwd[ 'id' ] ) repository = repository_metadata.repository kwd[ 'id' ] = trans.security.encode_id( repository.id ) kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision @@ -196,7 +196,7 @@ ids = util.listify( id ) count = 0 for repository_metadata_id in ids: - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) trans.sa_session.delete( repository_metadata ) trans.sa_session.flush() count += 1 @@ -332,7 +332,7 @@ for repository_metadata in repository.metadata_revisions: metadata = repository_metadata.metadata if metadata: - if suc.is_downloadable( metadata ): + if metadata_util.is_downloadable( metadata ): repository_metadata.downloadable = True trans.sa_session.add( repository_metadata ) repository.deleted = False diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -11,10 +11,9 @@ from galaxy.util import json from galaxy.model.orm import and_, or_ import tool_shed.util.shed_util_common as suc -import tool_shed.util.metadata_util as metadata_util -from tool_shed.util import encoding_util -from galaxy.webapps.tool_shed.util import workflow_util -from galaxy.webapps.tool_shed.util import common_util +from tool_shed.util import encoding_util, metadata_util, repository_dependency_util, tool_dependency_util +from tool_shed.galaxy_install import repository_util +from 
galaxy.webapps.tool_shed.util import common_util, workflow_util import galaxy.tools import tool_shed.grids.repository_grids as repository_grids import tool_shed.grids.util as grids_util @@ -85,7 +84,7 @@ operation = kwd[ 'operation' ].lower() # The received id is a RepositoryMetadata id. repository_metadata_id = kwd[ 'id' ] - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) repository_id = trans.security.encode_id( repository_metadata.repository_id ) changeset_revision = repository_metadata.changeset_revision new_kwd = dict( id=repository_id, @@ -266,7 +265,7 @@ operation = kwd[ 'operation' ].lower() # The received id is a RepositoryMetadata id. repository_metadata_id = kwd[ 'id' ] - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) repository_id = trans.security.encode_id( repository_metadata.repository_id ) changeset_revision = repository_metadata.changeset_revision new_kwd = dict( id=repository_id, @@ -287,7 +286,7 @@ operation = kwd[ 'operation' ].lower() # The received id is a RepositoryMetadata id. repository_metadata_id = kwd['id' ] - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) repository_id = trans.security.encode_id( repository_metadata.repository_id ) changeset_revision = repository_metadata.changeset_revision new_kwd = dict( id=repository_id, @@ -304,7 +303,7 @@ operation = kwd[ 'operation' ].lower() # The received id is a RepositoryMetadata id. repository_metadata_id = kwd[ 'id' ] - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) repository_id = trans.security.encode_id( repository_metadata.repository_id ) changeset_revision = repository_metadata.changeset_revision new_kwd = dict( id=repository_id, @@ -689,7 +688,7 @@ status = params.get( 'status', 'done' ) repository = suc.get_repository_by_id( trans, repository_id ) if repository: - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) changeset_revision = repository_metadata.changeset_revision if repository_metadata: metadata = repository_metadata.metadata @@ -757,7 +756,7 @@ is_admin = trans.user_is_admin() if operation == "view_or_manage_repository": # The received id is a RepositoryMetadata id, so we have to get the repository id. 
- repository_metadata = suc.get_repository_metadata_by_id( trans, item_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, item_id ) repository_id = trans.security.encode_id( repository_metadata.repository.id ) repository = suc.get_repository_in_tool_shed( trans, repository_id ) kwd[ 'id' ] = repository_id @@ -774,7 +773,7 @@ encoded_repository_ids = [] changeset_revisions = [] for repository_metadata_id in util.listify( item_id ): - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) ) changeset_revisions.append( repository_metadata.changeset_revision ) new_kwd = {} @@ -844,7 +843,7 @@ is_admin = trans.user_is_admin() if operation == "view_or_manage_repository": # The received id is a RepositoryMetadata id, so we have to get the repository id. - repository_metadata = suc.get_repository_metadata_by_id( trans, item_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, item_id ) repository_id = trans.security.encode_id( repository_metadata.repository.id ) repository = suc.get_repository_in_tool_shed( trans, repository_id ) kwd[ 'id' ] = repository_id @@ -861,7 +860,7 @@ encoded_repository_ids = [] changeset_revisions = [] for repository_metadata_id in util.listify( item_id ): - repository_metadata = suc.get_repository_metadata_by_id( trans, item_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, item_id ) encoded_repository_ids.append( trans.security.encode_id( repository_metadata.repository.id ) ) changeset_revisions.append( repository_metadata.changeset_revision ) new_kwd = {} @@ -1085,14 +1084,15 @@ if repository_metadata: metadata = repository_metadata.metadata if metadata: - repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None, - circular_repository_dependencies=None ) + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None, + circular_repository_dependencies=None ) if repository_dependencies: return encoding_util.tool_shed_encode( repository_dependencies ) return '' @@ -1144,16 +1144,16 @@ repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) - repo_info_dict = suc.create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=changeset_revision, - ctx_rev=str( ctx.rev() ), - repository_owner=repository.user.username, - repository_name=repository.name, - repository=repository, - repository_metadata=repository_metadata, - tool_dependencies=None, - repository_dependencies=None ) + repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=changeset_revision, + 
ctx_rev=str( ctx.rev() ), + repository_owner=repository.user.username, + repository_name=repository.name, + repository=repository, + repository_metadata=repository_metadata, + tool_dependencies=None, + repository_dependencies=None ) repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) ) return dict( includes_tools=includes_tools, includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel, @@ -1257,16 +1257,16 @@ repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) ctx = suc.get_changectx_for_changeset( repo, changeset_revision ) - repo_info_dict = suc.create_repo_info_dict( trans=trans, - repository_clone_url=repository_clone_url, - changeset_revision=changeset_revision, - ctx_rev=str( ctx.rev() ), - repository_owner=repository.user.username, - repository_name=repository.name, - repository=repository, - repository_metadata=repository_metadata, - tool_dependencies=None, - repository_dependencies=None ) + repo_info_dict = repository_util.create_repo_info_dict( trans=trans, + repository_clone_url=repository_clone_url, + changeset_revision=changeset_revision, + ctx_rev=str( ctx.rev() ), + repository_owner=repository.user.username, + repository_name=repository.name, + repository=repository, + repository_metadata=repository_metadata, + tool_dependencies=None, + repository_dependencies=None ) includes_data_managers = False includes_datatypes = False includes_tools = False @@ -1686,15 +1686,16 @@ if repository_metadata: metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. - repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None ) + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) # Handle messaging for orphan tool dependencies. - orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata ) + orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( metadata ) if orphan_message: message += orphan_message status = 'warning' @@ -1809,13 +1810,14 @@ repository_metadata_id = trans.security.encode_id( repository_metadata.id ), metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. 
- repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None ) + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) if metadata: if 'repository_dependencies' in metadata and not repository_dependencies: message += 'The repository dependency definitions for this repository are invalid and will be ignored.' @@ -2254,7 +2256,7 @@ repository = None if repository: repository_id = trans.security.encode_id( repository.id ) - repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) if not repository_metadata: # Get updates to the received changeset_revision if any exist. repo_dir = repository.repo_path( trans.app ) @@ -2262,7 +2264,7 @@ upper_bound_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision ) if upper_bound_changeset_revision: changeset_revision = upper_bound_changeset_revision - repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) + repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, repository_id, changeset_revision ) if repository_metadata: return trans.response.send_redirect( web.url_for( controller='repository', action='index', @@ -2471,15 +2473,16 @@ if repository_metadata: metadata = repository_metadata.metadata # Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend. - repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans, - repository=repository, - repository_metadata=repository_metadata, - toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), - key_rd_dicts_to_be_processed=None, - all_repository_dependencies=None, - handled_key_rd_dicts=None ) + repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None ) # Handle messaging for orphan tool dependencies. 
- orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata ) + orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( metadata ) if orphan_message: message += orphan_message status = 'warning' @@ -2621,7 +2624,7 @@ status = params.get( 'status', 'done' ) if workflow_name: workflow_name = encoding_util.tool_shed_decode( workflow_name ) - repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) repository = suc.get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) ) changeset_revision = repository_metadata.changeset_revision metadata = repository_metadata.metadata diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/controllers/upload.py --- a/lib/galaxy/webapps/tool_shed/controllers/upload.py +++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py @@ -3,7 +3,7 @@ from galaxy import web, util from galaxy.datatypes import checkers import tool_shed.util.shed_util_common as suc -import tool_shed.util.metadata_util as metadata_util +from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util from galaxy import eggs eggs.require('mercurial') @@ -198,17 +198,17 @@ # so warning messages are important because orphans are always valid. The repository owner must be warned in case they did not intend to define an # orphan dependency, but simply provided incorrect information (tool shed, name owner, changeset_revision) for the definition. # Handle messaging for orphan tool dependencies. - orphan_message = suc.generate_message_for_orphan_tool_dependencies( metadata_dict ) + orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( metadata_dict ) if orphan_message: message += orphan_message status = 'warning' # Handle messaging for invalid tool dependencies. - invalid_tool_dependencies_message = suc.generate_message_for_invalid_tool_dependencies( metadata_dict ) + invalid_tool_dependencies_message = tool_dependency_util.generate_message_for_invalid_tool_dependencies( metadata_dict ) if invalid_tool_dependencies_message: message += invalid_tool_dependencies_message status = 'error' # Handle messaging for invalid repository dependencies. 
-                invalid_repository_dependencies_message = suc.generate_message_for_invalid_repository_dependencies( metadata_dict )
+                invalid_repository_dependencies_message = repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata_dict )
                 if invalid_repository_dependencies_message:
                     message += invalid_repository_dependencies_message
                     status = 'error'
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/galaxy/webapps/tool_shed/util/workflow_util.py
--- a/lib/galaxy/webapps/tool_shed/util/workflow_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/workflow_util.py
@@ -6,7 +6,7 @@
 import logging, svgfig
 from galaxy.util import json
 import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util
+from tool_shed.util import encoding_util, metadata_util
 from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
 import galaxy.webapps.galaxy.controllers.workflow
 import galaxy.tools
@@ -152,7 +152,7 @@
         workflow_name = encoding_util.tool_shed_decode( workflow_name )
     if trans.webapp.name == 'tool_shed':
         # We're in the tool shed.
-        repository_metadata = suc.get_repository_metadata_by_id( trans, repository_metadata_id )
+        repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
         repository_id = trans.security.encode_id( repository_metadata.repository_id )
         changeset_revision = repository_metadata.changeset_revision
         metadata = repository_metadata.metadata
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/__init__.py
--- a/lib/tool_shed/galaxy_install/__init__.py
+++ b/lib/tool_shed/galaxy_install/__init__.py
@@ -2,8 +2,8 @@
 Classes encapsulating the management of repositories installed from Galaxy tool sheds.
""" import os, logging -import tool_shed.util.shed_util import tool_shed.util.shed_util_common +import tool_shed.util.datatype_util from galaxy.model.orm import and_ from galaxy import eggs @@ -51,13 +51,13 @@ .order_by( self.model.ToolShedRepository.table.c.id ): relative_install_dir = self.get_repository_install_dir( tool_shed_repository ) if relative_install_dir: - installed_repository_dict = tool_shed.util.shed_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir ) + installed_repository_dict = tool_shed.util.datatype_util.load_installed_datatypes( self.app, tool_shed_repository, relative_install_dir ) if installed_repository_dict: self.installed_repository_dicts.append( installed_repository_dict ) def load_proprietary_converters_and_display_applications( self, deactivate=False ): for installed_repository_dict in self.installed_repository_dicts: if installed_repository_dict[ 'converter_path' ]: - tool_shed.util.shed_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate ) + tool_shed.util.datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=deactivate ) if installed_repository_dict[ 'display_path' ]: - tool_shed.util.shed_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate ) + tool_shed.util.datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=deactivate ) \ No newline at end of file diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py --- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py +++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py @@ -3,7 +3,7 @@ from galaxy import model, util from galaxy.web.framework.helpers import iff, grids from galaxy.model.orm import or_ -import tool_shed.util.shed_util as shed_util +from tool_shed.util import tool_dependency_util log = logging.getLogger( __name__ ) @@ -345,7 +345,7 @@ ] def build_initial_query( self, trans, **kwd ): - tool_dependency_ids = shed_util.get_tool_dependency_ids( as_string=False, **kwd ) + tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd ) if tool_dependency_ids: clause_list = [] for tool_dependency_id in tool_dependency_ids: diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -6,11 +6,9 @@ from galaxy import util from galaxy.tools import ToolSection from galaxy.util.json import from_json_string, to_json_string -import tool_shed.util.shed_util as shed_util import tool_shed.util.shed_util_common as suc -import tool_shed.util.metadata_util as metadata_util +from tool_shed.util import common_install_util, common_util, datatype_util, metadata_util, tool_dependency_util, tool_util from galaxy.util.odict import odict -from tool_shed.util import common_util class InstallManager( object ): def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ): @@ -174,7 +172,7 @@ # See if tool_config is defined inside of a section in self.proprietary_tool_panel_elems. 
is_displayed, tool_sections = self.get_containing_tool_sections( tool_config ) if is_displayed: - tool_panel_dict_for_tool_config = shed_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections ) + tool_panel_dict_for_tool_config = tool_util.generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections ) for k, v in tool_panel_dict_for_tool_config.items(): tool_panel_dict_for_display[ k ] = v else: @@ -195,43 +193,43 @@ self.app.sa_session.flush() if 'tool_dependencies' in metadata_dict: # All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed. - tool_dependencies = shed_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True ) + tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True ) else: tool_dependencies = None if 'tools' in metadata_dict: sample_files = metadata_dict.get( 'sample_files', [] ) sample_files = [ str( s ) for s in sample_files ] - tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files ) - shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path ) + tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files ) + tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path ) sample_files_copied = [ s for s in tool_index_sample_files ] repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata_dict ) if repository_tools_tups: # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = shed_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups ) + repository_tools_tups = tool_util.handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups ) # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( self.app, + repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups, sample_files_copied ) # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance. - shed_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied ) + tool_util.copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied ) if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict: # Install tool dependencies. - shed_util.update_tool_shed_repository_status( self.app, - tool_shed_repository, - self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) + suc.update_tool_shed_repository_status( self.app, + tool_shed_repository, + self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from disk. 
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repo_install_dir ) - installed_tool_dependencies = shed_util.handle_tool_dependencies( app=self.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_dependencies ) + installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=self.app, + tool_shed_repository=tool_shed_repository, + tool_dependencies_config=tool_dependencies_config, + tool_dependencies=tool_dependencies ) for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status == self.app.model.ToolDependency.installation_status.ERROR: print '\nThe following error occurred from the InstallManager while installing tool dependency ', installed_tool_dependency.name, ':' print installed_tool_dependency.error_message, '\n\n' - shed_util.add_to_tool_panel( self.app, + tool_util.add_to_tool_panel( self.app, tool_shed_repository.name, repository_clone_url, tool_shed_repository.installed_changeset_revision, @@ -250,16 +248,16 @@ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', repo_install_dir ) # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started # after this installation completes. - converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir + converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir if converter_path or display_path: # Create a dictionary of tool shed repository related information. 
- repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed, - name=tool_shed_repository.name, - owner=self.repository_owner, - installed_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dicts=metadata_dict.get( 'tools', [] ), - converter_path=converter_path, - display_path=display_path ) + repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed, + name=tool_shed_repository.name, + owner=self.repository_owner, + installed_changeset_revision=tool_shed_repository.installed_changeset_revision, + tool_dicts=metadata_dict.get( 'tools', [] ), + converter_path=converter_path, + display_path=display_path ) if converter_path: # Load proprietary datatype converters self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict ) @@ -297,7 +295,7 @@ current_changeset_revision=None, owner=self.repository_owner, dist_to_shed=True ) - shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING ) + suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING ) cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev ) if cloned_ok: self.handle_repository_contents( tool_shed_repository=tool_shed_repository, @@ -308,9 +306,9 @@ self.app.sa_session.refresh( tool_shed_repository ) metadata_dict = tool_shed_repository.metadata if 'tools' in metadata_dict: - shed_util.update_tool_shed_repository_status( self.app, - tool_shed_repository, - self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) + suc.update_tool_shed_repository_status( self.app, + tool_shed_repository, + self.app.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS ) # Get the tool_versions from the tool shed for each tool in the installed change set. url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \ ( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision ) @@ -319,7 +317,7 @@ response.close() if text: tool_version_dicts = from_json_string( text ) - shed_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository ) + tool_util.handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository ) else: # Set the tool versions since they seem to be missing for this repository in the tool shed. # CRITICAL NOTE: These default settings may not properly handle all parent/child associations. @@ -328,8 +326,8 @@ tool_id = tool_dict[ 'guid' ] old_tool_id = tool_dict[ 'id' ] tool_version = tool_dict[ 'version' ] - tool_version_using_old_id = shed_util.get_tool_version( self.app, old_tool_id ) - tool_version_using_guid = shed_util.get_tool_version( self.app, tool_id ) + tool_version_using_old_id = tool_util.get_tool_version( self.app, old_tool_id ) + tool_version_using_guid = tool_util.get_tool_version( self.app, tool_id ) if not tool_version_using_old_id: tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id, tool_shed_repository=tool_shed_repository ) @@ -341,7 +339,7 @@ self.app.sa_session.add( tool_version_using_guid ) self.app.sa_session.flush() # Associate the two versions as parent / child. 
- tool_version_association = shed_util.get_tool_version_association( self.app, + tool_version_association = tool_util.get_tool_version_association( self.app, tool_version_using_old_id, tool_version_using_guid ) if not tool_version_association: @@ -349,7 +347,7 @@ parent_id=tool_version_using_old_id.id ) self.app.sa_session.add( tool_version_association ) self.app.sa_session.flush() - shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED ) + suc.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED ) @property def non_shed_tool_panel_configs( self ): return common_util.get_non_shed_tool_panel_configs( self.app ) diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/repository_util.py --- a/lib/tool_shed/galaxy_install/repository_util.py +++ b/lib/tool_shed/galaxy_install/repository_util.py @@ -1,89 +1,230 @@ -import tool_shed.util.shed_util as shed_util +import os, logging, threading, urllib2 +from galaxy.web import url_for +from galaxy.webapps.tool_shed.util import container_util import tool_shed.util.shed_util_common as suc -import tool_shed.util.metadata_util as metadata_util +from tool_shed.util import encoding_util, repository_dependency_util, tool_dependency_util, tool_util -def handle_repository_contents( app, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, - shed_tool_conf=None, reinstalling=False ): +from galaxy import eggs +import pkg_resources + +pkg_resources.require( 'mercurial' ) +from mercurial import hg, ui, commands + +log = logging.getLogger( __name__ ) + +def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None, + repository_metadata=None, tool_dependencies=None, repository_dependencies=None ): """ - Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed) - when an admin is installing a new repository or reinstalling an uninstalled repository. + Return a dictionary that includes all of the information needed to install a repository into a local Galaxy instance. The dictionary will also + contain the recursive list of repository dependencies defined for the repository, as well as the defined tool dependencies. + + This method is called from Galaxy under three scenarios: + 1. During the tool shed repository installation process via the tool shed's get_repository_information() method. In this case both the received + repository and repository_metadata will be objects., but tool_dependencies and repository_dependencies will be None + 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no updates available. In this case, both + repository and repository_metadata will be None, but tool_dependencies and repository_dependencies will be objects previously retrieved from the + tool shed if the repository includes definitions for them. + 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates available. 
In this case, this + method is reached via the tool shed's get_updated_repository_information() method, and both repository and repository_metadata will be objects + but tool_dependencies and repository_dependencies will be None. """ - sa_session = app.model.context.current - shed_config_dict = app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf ) - metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=app, - repository=tool_shed_repository, - changeset_revision=tool_shed_repository.changeset_revision, - repository_clone_url=repository_clone_url, - shed_config_dict=shed_config_dict, - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=True ) - tool_shed_repository.metadata = metadata_dict - sa_session.add( tool_shed_repository ) - sa_session.flush() - if 'tool_dependencies' in metadata_dict and not reinstalling: - tool_dependencies = shed_util.create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ) - if 'tools' in metadata_dict: - tool_panel_dict = shed_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section ) - sample_files = metadata_dict.get( 'sample_files', [] ) - tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files ) - shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path ) - sample_files_copied = [ str( s ) for s in tool_index_sample_files ] - repository_tools_tups = suc.get_repository_tools_tups( app, metadata_dict ) - if repository_tools_tups: - # Handle missing data table entries for tool parameters that are dynamically generated select lists. - repository_tools_tups = shed_util.handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ) - # Handle missing index files for tool parameters that are dynamically generated select lists. - repository_tools_tups, sample_files_copied = shed_util.handle_missing_index_file( app, - tool_path, - sample_files, - repository_tools_tups, - sample_files_copied ) - # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance. 
- shed_util.copy_sample_files( app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied ) - shed_util.add_to_tool_panel( app=app, - repository_name=tool_shed_repository.name, - repository_clone_url=repository_clone_url, - changeset_revision=tool_shed_repository.installed_changeset_revision, - repository_tools_tups=repository_tools_tups, - owner=tool_shed_repository.owner, - shed_tool_conf=shed_tool_conf, - tool_panel_dict=tool_panel_dict, - new_install=True ) - if 'data_manager' in metadata_dict: - new_data_managers = shed_util.install_data_managers( app, - app.config.shed_data_manager_config_file, - metadata_dict, - shed_config_dict, - relative_install_dir, - tool_shed_repository, - repository_tools_tups ) - if 'datatypes' in metadata_dict: - tool_shed_repository.status = app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES - if not tool_shed_repository.includes_datatypes: - tool_shed_repository.includes_datatypes = True - sa_session.add( tool_shed_repository ) - sa_session.flush() - files_dir = relative_install_dir - if shed_config_dict.get( 'tool_path' ): - files_dir = os.path.join( shed_config_dict['tool_path'], files_dir ) - datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir ) - # Load data types required by tools. - converter_path, display_path = shed_util.alter_config_and_load_prorietary_datatypes( app, datatypes_config, files_dir, override=False ) - if converter_path or display_path: - # Create a dictionary of tool shed repository related information. - repository_dict = shed_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed, - name=tool_shed_repository.name, - owner=tool_shed_repository.owner, - installed_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dicts=metadata_dict.get( 'tools', [] ), - converter_path=converter_path, - display_path=display_path ) - if converter_path: - # Load proprietary datatype converters - app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=repository_dict ) - if display_path: - # Load proprietary datatype display applications - app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict ) + repo_info_dict = {} + repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner ) + if trans.webapp.name == 'tool_shed': + # We're in the tool shed. + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision ) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + # Get a dictionary of all repositories upon which the contents of the received repository depends. 
+ repository_dependencies = \ + repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans, + repository=repository, + repository_metadata=repository_metadata, + toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ), + key_rd_dicts_to_be_processed=None, + all_repository_dependencies=None, + handled_key_rd_dicts=None, + circular_repository_dependencies=None ) + tool_dependencies = metadata.get( 'tool_dependencies', None ) + if tool_dependencies: + new_tool_dependencies = {} + for dependency_key, requirements_dict in tool_dependencies.items(): + if dependency_key in [ 'set_environment' ]: + new_set_environment_dict_list = [] + for set_environment_dict in requirements_dict: + set_environment_dict[ 'repository_name' ] = repository_name + set_environment_dict[ 'repository_owner' ] = repository_owner + set_environment_dict[ 'changeset_revision' ] = changeset_revision + new_set_environment_dict_list.append( set_environment_dict ) + new_tool_dependencies[ dependency_key ] = new_set_environment_dict_list + else: + requirements_dict[ 'repository_name' ] = repository_name + requirements_dict[ 'repository_owner' ] = repository_owner + requirements_dict[ 'changeset_revision' ] = changeset_revision + new_tool_dependencies[ dependency_key ] = requirements_dict + tool_dependencies = new_tool_dependencies + # Cast unicode to string. + repo_info_dict[ str( repository.name ) ] = ( str( repository.description ), + str( repository_clone_url ), + str( changeset_revision ), + str( ctx_rev ), + str( repository_owner ), + repository_dependencies, + tool_dependencies ) + return repo_info_dict + +def get_update_to_changeset_revision_and_ctx_rev( trans, repository ): + """Return the changeset revision hash to which the repository can be updated.""" + changeset_revision_dict = {} + tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository ) + url = suc.url_join( tool_shed_url, 'repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % \ + ( repository.name, repository.owner, repository.installed_changeset_revision ) ) + try: + response = urllib2.urlopen( url ) + encoded_update_dict = response.read() + if encoded_update_dict: + update_dict = encoding_util.tool_shed_decode( encoded_update_dict ) + includes_data_managers = update_dict.get( 'includes_data_managers', False ) + includes_datatypes = update_dict.get( 'includes_datatypes', False ) + includes_tools = update_dict.get( 'includes_tools', False ) + includes_tools_for_display_in_tool_panel = update_dict.get( 'includes_tools_for_display_in_tool_panel', False ) + includes_tool_dependencies = update_dict.get( 'includes_tool_dependencies', False ) + includes_workflows = update_dict.get( 'includes_workflows', False ) + has_repository_dependencies = update_dict.get( 'has_repository_dependencies', False ) + changeset_revision = update_dict.get( 'changeset_revision', None ) + ctx_rev = update_dict.get( 'ctx_rev', None ) + response.close() + changeset_revision_dict[ 'includes_data_managers' ] = includes_data_managers + changeset_revision_dict[ 'includes_datatypes' ] = includes_datatypes + changeset_revision_dict[ 'includes_tools' ] = includes_tools + changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = includes_tools_for_display_in_tool_panel + changeset_revision_dict[ 'includes_tool_dependencies' ] = includes_tool_dependencies + changeset_revision_dict[ 'includes_workflows' ] = includes_workflows + changeset_revision_dict[ 'has_repository_dependencies' ] = 
has_repository_dependencies + changeset_revision_dict[ 'changeset_revision' ] = changeset_revision + changeset_revision_dict[ 'ctx_rev' ] = ctx_rev + except Exception, e: + log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) ) + changeset_revision_dict[ 'includes_data_managers' ] = False + changeset_revision_dict[ 'includes_datatypes' ] = False + changeset_revision_dict[ 'includes_tools' ] = False + changeset_revision_dict[ 'includes_tools_for_display_in_tool_panel' ] = False + changeset_revision_dict[ 'includes_tool_dependencies' ] = False + changeset_revision_dict[ 'includes_workflows' ] = False + changeset_revision_dict[ 'has_repository_dependencies' ] = False + changeset_revision_dict[ 'changeset_revision' ] = None + changeset_revision_dict[ 'ctx_rev' ] = None + return changeset_revision_dict + +def merge_containers_dicts_for_new_install( containers_dicts ): + """ + When installing one or more tool shed repositories for the first time, the received list of containers_dicts contains a containers_dict for + each repository being installed. Since the repositories are being installed for the first time, all entries are None except the repository + dependencies and tool dependencies. The entries for missing dependencies are all None since they have previously been merged into the installed + dependencies. This method will merge the dependencies entries into a single container and return it for display. + """ + new_containers_dict = dict( readme_files=None, + datatypes=None, + missing_repository_dependencies=None, + repository_dependencies=None, + missing_tool_dependencies=None, + tool_dependencies=None, + invalid_tools=None, + valid_tools=None, + workflows=None ) + if containers_dicts: + lock = threading.Lock() + lock.acquire( True ) + try: + repository_dependencies_root_folder = None + tool_dependencies_root_folder = None + # Use a unique folder id (hopefully the following is). + folder_id = 867 + for old_container_dict in containers_dicts: + # Merge repository_dependencies. + old_container_repository_dependencies_root = old_container_dict[ 'repository_dependencies' ] + if old_container_repository_dependencies_root: + if repository_dependencies_root_folder is None: + repository_dependencies_root_folder = container_util.Folder( id=folder_id, key='root', label='root', parent=None ) + folder_id += 1 + repository_dependencies_folder = container_util.Folder( id=folder_id, + key='merged', + label='Repository dependencies', + parent=repository_dependencies_root_folder ) + folder_id += 1 + # The old_container_repository_dependencies_root will be a root folder containing a single sub_folder. + old_container_repository_dependencies_folder = old_container_repository_dependencies_root.folders[ 0 ] + # Change the folder id so it won't confict with others being merged. + old_container_repository_dependencies_folder.id = folder_id + folder_id += 1 + # Generate the label by retrieving the repository name. + toolshed, name, owner, changeset_revision = container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) + old_container_repository_dependencies_folder.label = str( name ) + repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder ) + # Merge tool_dependencies. 
+ old_container_tool_dependencies_root = old_container_dict[ 'tool_dependencies' ] + if old_container_tool_dependencies_root: + if tool_dependencies_root_folder is None: + tool_dependencies_root_folder = container_util.Folder( id=folder_id, key='root', label='root', parent=None ) + folder_id += 1 + tool_dependencies_folder = container_util.Folder( id=folder_id, + key='merged', + label='Tool dependencies', + parent=tool_dependencies_root_folder ) + folder_id += 1 + else: + td_list = [ td.listify for td in tool_dependencies_folder.tool_dependencies ] + # The old_container_tool_dependencies_root will be a root folder containing a single sub_folder. + old_container_tool_dependencies_folder = old_container_tool_dependencies_root.folders[ 0 ] + for td in old_container_tool_dependencies_folder.tool_dependencies: + if td.listify not in td_list: + tool_dependencies_folder.tool_dependencies.append( td ) + if repository_dependencies_root_folder: + repository_dependencies_root_folder.folders.append( repository_dependencies_folder ) + new_containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder + if tool_dependencies_root_folder: + tool_dependencies_root_folder.folders.append( tool_dependencies_folder ) + new_containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder + except Exception, e: + log.debug( "Exception in merge_containers_dicts_for_new_install: %s" % str( e ) ) + finally: + lock.release() + return new_containers_dict + +def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, missing_repository_dependencies, + installed_tool_dependencies, missing_tool_dependencies ): + """Return the populated containers for a repository being installed for the first time.""" + installed_tool_dependencies, missing_tool_dependencies = \ + tool_dependency_util.populate_tool_dependencies_dicts( trans=trans, + tool_shed_url=tool_shed_url, + tool_path=tool_path, + repository_installed_tool_dependencies=installed_tool_dependencies, + repository_missing_tool_dependencies=missing_tool_dependencies, + required_repo_info_dicts=None ) + # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are. + containers_dict = suc.build_repository_containers_for_galaxy( trans=trans, + repository=None, + datatypes=None, + invalid_tools=None, + missing_repository_dependencies=missing_repository_dependencies, + missing_tool_dependencies=missing_tool_dependencies, + readme_files_dict=readme_files_dict, + repository_dependencies=installed_repository_dependencies, + tool_dependencies=installed_tool_dependencies, + valid_tools=None, + workflows=None, + valid_data_managers=None, + invalid_data_managers=None, + data_managers_errors=None, + new_install=True, + reinstalling=False ) + # Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container. + containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict ) + # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container. 
+ containers_dict = tool_dependency_util.merge_missing_tool_dependencies_to_installed_container( containers_dict ) + return containers_dict + +def pull_repository( repo, repository_clone_url, ctx_rev ): + """Pull changes from a remote repository to a local one.""" + commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] ) diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py @@ -1,7 +1,7 @@ import sys, os, subprocess, tempfile, urllib2 import common_util import fabric_util -from tool_shed.util import encoding_util +from tool_shed.util import encoding_util, tool_dependency_util from galaxy.model.orm import and_ from galaxy.web import url_for @@ -16,24 +16,6 @@ protocol, base = base_url.split( '://' ) return base.rstrip( '/' ) -def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type, status, set_status=True ): - # Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled. - sa_session = app.model.context.current - # First see if an appropriate tool_dependency record exists for the received tool_shed_repository. - if version: - tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type ) - else: - tool_dependency = get_tool_dependency_by_name_type_repository( app, tool_shed_repository, name, type ) - if tool_dependency: - if set_status: - tool_dependency.status = status - else: - # Create a new tool_dependency record for the tool_shed_repository. - tool_dependency = app.model.ToolDependency( tool_shed_repository.id, name, version, type, status ) - sa_session.add( tool_dependency ) - sa_session.flush() - return tool_dependency - def create_temporary_tool_dependencies_config( tool_shed_url, name, owner, changeset_revision ): """Make a call to the tool shed to get the required repository's tool_dependencies.xml file.""" url = url_join( tool_shed_url, @@ -95,23 +77,6 @@ return tool_shed_repository return None -def get_tool_dependency_by_name_type_repository( app, repository, name, type ): - sa_session = app.model.context.current - return sa_session.query( app.model.ToolDependency ) \ - .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id, - app.model.ToolDependency.table.c.name == name, - app.model.ToolDependency.table.c.type == type ) ) \ - .first() - -def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ): - sa_session = app.model.context.current - return sa_session.query( app.model.ToolDependency ) \ - .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id, - app.model.ToolDependency.table.c.name == name, - app.model.ToolDependency.table.c.version == version, - app.model.ToolDependency.table.c.type == type ) ) \ - .first() - def get_tool_dependency_install_dir( app, repository_name, repository_owner, repository_changeset_revision, tool_dependency_type, tool_dependency_name, tool_dependency_version ): if tool_dependency_type == 'package': @@ -147,13 +112,13 @@ for package_elem in elem: if package_elem.tag == 'install': # Create the tool_dependency record in the database. 
- tool_dependency = create_or_update_tool_dependency( app=app, - tool_shed_repository=tool_shed_repository, - name=package_name, - version=package_version, - type='package', - status=app.model.ToolDependency.installation_status.INSTALLING, - set_status=True ) + tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app, + tool_shed_repository=tool_shed_repository, + name=package_name, + version=package_version, + type='package', + status=app.model.ToolDependency.installation_status.INSTALLING, + set_status=True ) # Get the installation method version from a tag like: <install version="1.0"> package_install_version = package_elem.get( 'version', '1.0' ) if package_install_version == '1.0': @@ -296,13 +261,13 @@ elif package_elem.tag == 'install': # <install version="1.0"> package_install_version = package_elem.get( 'version', '1.0' ) - tool_dependency = create_or_update_tool_dependency( app=app, - tool_shed_repository=tool_shed_repository, - name=package_name, - version=package_version, - type='package', - status=app.model.ToolDependency.installation_status.INSTALLING, - set_status=True ) + tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app, + tool_shed_repository=tool_shed_repository, + name=package_name, + version=package_version, + type='package', + status=app.model.ToolDependency.installation_status.INSTALLING, + set_status=True ) if package_install_version == '1.0': # Handle tool dependency installation using a fabric method included in the Galaxy framework. for actions_elem in package_elem: @@ -324,7 +289,11 @@ # print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path else: print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n' - tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, package_name, package_version, 'package' ) + tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app, + tool_shed_repository, + package_name, + package_version, + 'package' ) tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED sa_session.add( tool_dependency ) sa_session.flush() @@ -550,13 +519,13 @@ if env_var_dict: if not os.path.exists( install_dir ): os.makedirs( install_dir ) - tool_dependency = create_or_update_tool_dependency( app=app, - tool_shed_repository=tool_shed_repository, - name=env_var_name, - version=None, - type='set_environment', - status=app.model.ToolDependency.installation_status.INSTALLING, - set_status=True ) + tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app, + tool_shed_repository=tool_shed_repository, + name=env_var_name, + version=None, + type='set_environment', + status=app.model.ToolDependency.installation_status.INSTALLING, + set_status=True ) cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict ) if env_var_version == '1.0': # Handle setting environment variables using a fabric method. 
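The install_util.py hunks above drop the module-local create_or_update_tool_dependency() and its two lookup helpers in favor of the shared tool_shed.util.tool_dependency_util module. A minimal sketch of the resulting call-site pattern, assuming a Galaxy app object and a ToolShedRepository record are already in hand; the wrapper function name here is illustrative and not part of the commit:

    from tool_shed.util import tool_dependency_util

    def record_package_dependency( app, tool_shed_repository, package_name, package_version ):
        # The tool_dependency record is created (or its status updated) through the
        # shared utility module instead of a helper defined locally in install_util.py.
        tool_dependency = tool_dependency_util.create_or_update_tool_dependency(
            app=app,
            tool_shed_repository=tool_shed_repository,
            name=package_name,
            version=package_version,
            type='package',
            status=app.model.ToolDependency.installation_status.INSTALLING,
            set_status=True )
        return tool_dependency

The keyword arguments mirror the ones shown in the diff; only the wrapper around them is invented for illustration.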
diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/galaxy_install/update_manager.py --- a/lib/tool_shed/galaxy_install/update_manager.py +++ b/lib/tool_shed/galaxy_install/update_manager.py @@ -3,7 +3,6 @@ """ import threading, urllib2, logging from galaxy.util import string_as_bool -import tool_shed.util.shed_util as shed_util import tool_shed.util.shed_util_common as suc from galaxy.model.orm import and_ diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/grids/repository_grids.py --- a/lib/tool_shed/grids/repository_grids.py +++ b/lib/tool_shed/grids/repository_grids.py @@ -5,6 +5,7 @@ from galaxy.util import json import tool_shed.util.shed_util_common as suc import tool_shed.grids.util as grids_util +from tool_shed.util import metadata_util from galaxy import eggs eggs.require('markupsafe') @@ -776,12 +777,16 @@ required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) if required_repository: required_repository_id = trans.security.encode_id( required_repository.id ) - required_repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, required_repository_id, changeset_revision ) + required_repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + required_repository_id, + changeset_revision ) if not required_repository_metadata: repo_dir = required_repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) updated_changeset_revision = suc.get_next_downloadable_changeset_revision( required_repository, repo, changeset_revision ) - required_repository_metadata = suc.get_repository_metadata_by_repository_id_changeset_revision( trans, required_repository_id, updated_changeset_revision ) + required_repository_metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, + required_repository_id, + updated_changeset_revision ) required_repository_metadata_id = trans.security.encode_id( required_repository_metadata.id ) rd_str += '<a href="browse_repository_dependencies?operation=view_or_manage_repository&id=%s">' % ( required_repository_metadata_id ) rd_str += 'Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>' % ( escape_html( rd_tup[ 1 ] ), escape_html( rd_tup[ 3 ] ), escape_html( rd_tup[ 2 ] ) ) diff -r 62bd6fe0e9aa297b4448dfbcae32d27c8e313c38 -r 320c9b7f08c42c16ac942f05329c134dcdbb2770 lib/tool_shed/grids/repository_review_grids.py --- a/lib/tool_shed/grids/repository_review_grids.py +++ b/lib/tool_shed/grids/repository_review_grids.py @@ -4,7 +4,7 @@ from galaxy.model.orm import and_, or_ from tool_shed.grids.repository_grids import RepositoryGrid import tool_shed.util.shed_util_common as suc -import tool_shed.util.metadata_util as metadata_util +from tool_shed.util import metadata_util from galaxy import eggs eggs.require('mercurial') @@ -73,7 +73,7 @@ class WithoutReviewsRevisionColumn( grids.GridColumn ): def get_value( self, trans, grid, repository ): # Restrict the options to revisions that have not yet been reviewed. 
-            repository_metadata_revisions = suc.get_repository_metadata_revisions_for_review( repository, reviewed=False )
+            repository_metadata_revisions = metadata_util.get_repository_metadata_revisions_for_review( repository, reviewed=False )
             if repository_metadata_revisions:
                 rval = ''
                 for repository_metadata in repository_metadata_revisions:

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
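Taken together, the changeset replaces the catch-all shed_util module with narrowly scoped utilities (tool_util, datatype_util, tool_dependency_util, metadata_util, common_install_util, repository_dependency_util, and galaxy_install.repository_util). A hedged before/after sketch of what that means for an importing module; the individual calls are taken from the hunks above, but the surrounding function is invented for illustration and is not Galaxy code:

    # Before this changeset: one monolithic helper module.
    #   import tool_shed.util.shed_util as shed_util
    #   shed_util.activate_repository( trans, repository )
    #   shed_util.get_tool_dependency( trans, encoded_td_id )

    # After this changeset: imports name the specific utility component.
    from tool_shed.util import common_install_util, tool_dependency_util
    from tool_shed.galaxy_install import repository_util

    def activate_and_check_updates( trans, repository, encoded_td_id ):
        # Activation moved from shed_util to common_install_util.
        common_install_util.activate_repository( trans, repository )
        # Tool dependency lookups moved to tool_dependency_util.
        tool_dependency = tool_dependency_util.get_tool_dependency( trans, encoded_td_id )
        # Update checks against the tool shed moved to galaxy_install.repository_util.
        changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
        return tool_dependency, changeset_revision_dict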