commit/galaxy-central: greg: Additional refactoring of shed_util_common into appropriate tool shed Galaxy utility components.
1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/e40606362e96/
changeset: e40606362e96
user:      greg
date:      2013-03-16 15:40:26
summary:   Additional refactoring of shed_util_common into appropriate tool shed Galaxy utility components.
affected #: 12 files

diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -7,7 +7,7 @@
 from galaxy.model.orm import or_
 import tool_shed.util.shed_util_common as suc
 from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
-from tool_shed.util import repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.util import readme_util, repository_dependency_util, tool_dependency_util, tool_util
 from tool_shed.galaxy_install import repository_util
 from galaxy.webapps.tool_shed.util import workflow_util
 import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
@@ -1075,7 +1075,7 @@
         name, repository_owner, changeset_revision, includes_tool_dependencies, installed_repository_dependencies, \
             missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies = \
             common_install_util.get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies )
-        readme_files_dict = suc.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
+        readme_files_dict = readme_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
         # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
         # and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
         # so we have a tool_shed_repository record in the database with associated repository dependency records.  Since we have the repository
@@ -1362,7 +1362,7 @@
             includes_tool_dependencies = True
         if 'workflows' in metadata:
             includes_workflows = True
-        readme_files_dict = suc.build_readme_files_dict( metadata )
+        readme_files_dict = readme_util.build_readme_files_dict( metadata )
         tool_dependencies = metadata.get( 'tool_dependencies', None )
         repository_dependencies = self.get_repository_dependencies( trans=trans,
                                                                     repository_id=repository_id,

diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -11,9 +11,9 @@
 from galaxy.util import json
 from galaxy.model.orm import and_, or_
 import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util, repository_dependency_util, tool_dependency_util
+from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, tool_dependency_util, tool_util
 from tool_shed.galaxy_install import repository_util
-from galaxy.webapps.tool_shed.util import common_util, workflow_util
+from galaxy.webapps.tool_shed.util import common_util, container_util, workflow_util
 import galaxy.tools
 import tool_shed.grids.repository_grids as repository_grids
 import tool_shed.grids.util as grids_util
@@ -638,7 +638,7 @@
         params = util.Params( kwd )
         message = util.restore_text( params.get( 'message', '' ) )
         status = params.get( 'status', 'done' )
-        repository, tool, message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+        repository, tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
         if message:
             status = 'error'
         tool_state = self.__new_state( trans )
@@ -1069,7 +1069,7 @@
         changeset_revision = kwd[ 'changeset_revision' ]
         repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
         repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
-        return suc.build_readme_files_dict( repository_metadata.metadata )
+        return readme_util.build_readme_files_dict( repository_metadata.metadata )
 
     @web.json
     def get_repository_dependencies( self, trans, **kwd ):
@@ -1289,7 +1289,7 @@
                     break
         if 'workflows' in metadata:
             includes_workflows = True
-        readme_files_dict = suc.build_readme_files_dict( metadata )
+        readme_files_dict = readme_util.build_readme_files_dict( metadata )
         # See if the repo_info_dict was populated with repository_dependencies or tool_dependencies.
         for name, repo_info_tuple in repo_info_dict.items():
             description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
@@ -1461,18 +1461,18 @@
         params = util.Params( kwd )
         message = util.restore_text( params.get( 'message', '' ) )
         status = params.get( 'status', 'error' )
-        repository, tool, error_message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+        repository, tool, error_message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
         tool_state = self.__new_state( trans )
         is_malicious = suc.changeset_is_malicious( trans, repository_id, repository.tip( trans.app ) )
         invalid_file_tups = []
         if tool:
-            invalid_file_tups = suc.check_tool_input_params( trans.app,
-                                                             repository.repo_path( trans.app ),
-                                                             tool_config,
-                                                             tool,
-                                                             [] )
+            invalid_file_tups = tool_util.check_tool_input_params( trans.app,
+                                                                   repository.repo_path( trans.app ),
+                                                                   tool_config,
+                                                                   tool,
+                                                                   [] )
         if invalid_file_tups:
-            message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
+            message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
         elif error_message:
             message = error_message
         try:
@@ -1719,7 +1719,7 @@
         else:
             review_id = None
         can_browse_repository_reviews = suc.can_browse_repository_reviews( trans, repository )
-        containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+        containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
         return trans.fill_template( '/webapps/tool_shed/repository/manage_repository.mako',
                                     repo_name=repo_name,
                                     description=description,
@@ -1832,7 +1832,7 @@
                                                                              selected_value=changeset_revision,
                                                                              add_id_to_name=False,
                                                                              downloadable=False )
-        containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+        containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
         return trans.fill_template( '/webapps/tool_shed/repository/preview_tools_in_changeset.mako',
                                     repository=repository,
                                     containers_dict=containers_dict,
@@ -1920,7 +1920,7 @@
         invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
         if invalid_file_tups:
             repository = suc.get_repository_in_tool_shed( trans, id )
-            message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+            message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
             status = 'error'
         else:
             message = "All repository metadata has been reset.  "
@@ -2505,7 +2505,7 @@
             review_id = trans.security.encode_id( review.id )
         else:
             review_id = None
-        containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
+        containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
         can_browse_repository_reviews = suc.can_browse_repository_reviews( trans, repository )
         return trans.fill_template( '/webapps/tool_shed/repository/view_repository.mako',
                                     repo=repo,
@@ -2559,21 +2559,21 @@
                     guid = tool_metadata_dict[ 'guid' ]
                     full_path_to_tool_config = os.path.abspath( relative_path_to_tool_config )
                     full_path_to_dir, tool_config_filename = os.path.split( full_path_to_tool_config )
-                    can_use_disk_file = suc.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
+                    can_use_disk_file = tool_util.can_use_tool_config_disk_file( trans, repository, repo, full_path_to_tool_config, changeset_revision )
                     if can_use_disk_file:
                         trans.app.config.tool_data_path = work_dir
-                        tool, valid, message, sample_files = suc.handle_sample_files_and_load_tool_from_disk( trans,
+                        tool, valid, message, sample_files = tool_util.handle_sample_files_and_load_tool_from_disk( trans,
                                                                                                               repo_files_dir,
                                                                                                               full_path_to_tool_config,
                                                                                                               work_dir )
                         if message:
                             status = 'error'
                     else:
-                        tool, message, sample_files = suc.handle_sample_files_and_load_tool_from_tmp_config( trans,
-                                                                                                             repo,
-                                                                                                             changeset_revision,
-                                                                                                             tool_config_filename,
-                                                                                                             work_dir )
+                        tool, message, sample_files = tool_util.handle_sample_files_and_load_tool_from_tmp_config( trans,
+                                                                                                                   repo,
+                                                                                                                   changeset_revision,
+                                                                                                                   tool_config_filename,
+                                                                                                                   work_dir )
                     if message:
                         status = 'error'
                     break
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -3,7 +3,7 @@
 from galaxy import web, util
 from galaxy.datatypes import checkers
 import tool_shed.util.shed_util_common as suc
-from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util
+from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util, tool_util
 from galaxy import eggs
 
 eggs.require('mercurial')
@@ -153,7 +153,7 @@
                     if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
                         # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                         # to the in-memory trans.app.tool_data_tables dictionary.
-                        error, error_message = suc.handle_sample_tool_data_table_conf_file( trans.app, full_path )
+                        error, error_message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, full_path )
                         if error:
                             message = '%s<br/>%s' % ( message, error_message )
                 # See if the content of the change set was valid.
@@ -213,7 +213,7 @@
                         message += invalid_repository_dependencies_message
                     status = 'error'
                 # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
-                suc.reset_tool_data_tables( trans.app )
+                tool_util.reset_tool_data_tables( trans.app )
                 trans.response.send_redirect( web.url_for( controller='repository',
                                                            action='browse_repository',
                                                            id=repository_id,
@@ -223,7 +223,7 @@
             else:
                 status = 'error'
             # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
-            suc.reset_tool_data_tables( trans.app )
+            tool_util.reset_tool_data_tables( trans.app )
         selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
         return trans.fill_template( '/webapps/tool_shed/repository/upload.mako',
                                     repository=repository,
@@ -360,7 +360,7 @@
             if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
                 # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
                 # to the in-memory trans.app.tool_data_tables dictionary.
-                error, message = suc.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+                error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
                 if error:
                     return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
         commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
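Every controller change above follows the same mechanical pattern: call arguments are unchanged and only the owning module moves from shed_util_common (suc) to a focused utility module. A minimal sketch of an updated call site, assuming a hypothetical wrapper function (the imported helpers are the ones relocated in this commit):

    import tool_shed.util.shed_util_common as suc
    from tool_shed.util import readme_util, tool_util

    def display_tool_readmes( trans, repository_id, changeset_revision, tool_config ):
        # Hypothetical example, not from the commit.  Tool loading moved from
        # suc to tool_util; the signature is unchanged.
        repository, tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
        # README handling moved from suc to the new readme_util module.
        repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
        return readme_util.build_readme_files_dict( repository_metadata.metadata )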
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -1,4 +1,5 @@
-import os, logging
+import os, logging, threading
+from tool_shed.util import readme_util
 
 log = logging.getLogger( __name__ )
 
@@ -170,6 +171,34 @@
         self.repository_metadata_id = repository_metadata_id
         self.repository_id = repository_id
 
+def add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies ):
+    """Inspect all received tool dependencies and label those that are orphans within the repository."""
+    orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None )
+    new_tool_dependencies = {}
+    if tool_dependencies:
+        for td_key, requirements_dict in tool_dependencies.items():
+            if td_key in [ 'set_environment' ]:
+                # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}]
+                if orphan_env_dependencies:
+                    new_set_environment_dict_list = []
+                    for set_environment_dict in requirements_dict:
+                        if set_environment_dict in orphan_env_dependencies:
+                            set_environment_dict[ 'is_orphan' ] = True
+                        else:
+                            set_environment_dict[ 'is_orphan' ] = False
+                        new_set_environment_dict_list.append( set_environment_dict )
+                    new_tool_dependencies[ td_key ] = new_set_environment_dict_list
+                else:
+                    new_tool_dependencies[ td_key ] = requirements_dict
+            else:
+                # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"}
+                if td_key in orphan_tool_dependencies:
+                    requirements_dict[ 'is_orphan' ] = True
+                else:
+                    requirements_dict[ 'is_orphan' ] = False
+                new_tool_dependencies[ td_key ] = requirements_dict
+    return new_tool_dependencies
+
 def build_data_managers_folder( trans, folder_id, data_managers, label=None ):
     """Return a folder hierarchy containing Data Managers."""
     if data_managers:
@@ -199,6 +228,7 @@
     else:
         data_managers_root_folder = None
     return folder_id, data_managers_root_folder
+
 def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
     """Return a folder hierarchy containing datatypes."""
     if datatypes:
@@ -249,6 +279,7 @@
     else:
         datatypes_root_folder = None
     return folder_id, datatypes_root_folder
+
 def build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages=None, label=None ):
     """Return a folder hierarchy containing invalid Data Managers."""
     if data_managers or error_messages:
@@ -285,6 +316,7 @@
     else:
         data_managers_root_folder = None
     return folder_id, data_managers_root_folder
+
 def build_invalid_repository_dependencies_root_folder( trans, folder_id, invalid_repository_dependencies_dict ):
     """Return a folder hierarchy containing invalid repository dependencies."""
     label = 'Invalid repository dependencies'
@@ -320,6 +352,7 @@
     else:
         invalid_repository_dependencies_root_folder = None
     return folder_id, invalid_repository_dependencies_root_folder
+
 def build_invalid_tool_dependencies_root_folder( trans, folder_id, invalid_tool_dependencies_dict ):
     """Return a folder hierarchy containing invalid tool dependencies."""
     # # INvalid tool dependencies are always packages like:
@@ -358,6 +391,7 @@
     else:
         invalid_tool_dependencies_root_folder = None
     return folder_id, invalid_tool_dependencies_root_folder
+
 def build_invalid_tools_folder( trans, folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools' ):
     """Return a folder hierarchy containing invalid tools."""
     # TODO: Should we display invalid tools on the tool panel selection page when installing the repository into Galaxy?
@@ -388,6 +422,7 @@
     else:
         invalid_tools_root_folder = None
     return folder_id, invalid_tools_root_folder
+
 def build_readme_files_folder( trans, folder_id, readme_files_dict, label='Readme files' ):
     """Return a folder hierarchy containing readme text files."""
     if readme_files_dict:
@@ -415,6 +450,254 @@
     else:
         readme_files_root_folder = None
     return folder_id, readme_files_root_folder
+
+def build_repository_containers_for_galaxy( trans, repository, datatypes, invalid_tools, missing_repository_dependencies, missing_tool_dependencies,
+                                            readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, valid_data_managers,
+                                            invalid_data_managers, data_managers_errors, new_install=False, reinstalling=False ):
+    """Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
+    containers_dict = dict( datatypes=None,
+                            invalid_tools=None,
+                            missing_tool_dependencies=None,
+                            readme_files=None,
+                            repository_dependencies=None,
+                            missing_repository_dependencies=None,
+                            tool_dependencies=None,
+                            valid_tools=None,
+                            workflows=None,
+                            valid_data_managers=None,
+                            invalid_data_managers=None )
+    # Some of the tool dependency folders will include links to display tool dependency information, and some of these links require the repository
+    # id.  However we need to be careful because sometimes the repository object is None.
+    if repository:
+        repository_id = repository.id
+        changeset_revision = repository.changeset_revision
+    else:
+        repository_id = None
+        changeset_revision = None
+    lock = threading.Lock()
+    lock.acquire( True )
+    try:
+        folder_id = 0
+        # Datatypes container.
+        if datatypes:
+            folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
+            containers_dict[ 'datatypes' ] = datatypes_root_folder
+        # Invalid tools container.
+        if invalid_tools:
+            folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans,
+                                                                               folder_id,
+                                                                               invalid_tools,
+                                                                               changeset_revision,
+                                                                               repository=repository,
+                                                                               label='Invalid tools' )
+            containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+        # Readme files container.
+        if readme_files_dict:
+            folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict )
+            containers_dict[ 'readme_files' ] = readme_files_root_folder
+        # Installed repository dependencies container.
+        if repository_dependencies:
+            if new_install:
+                label = 'Repository dependencies'
+            else:
+                label = 'Installed repository dependencies'
+            folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans,
                                                                                                    folder_id=folder_id,
                                                                                                    repository_dependencies=repository_dependencies,
                                                                                                    label=label,
                                                                                                    installed=True )
+            containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+        # Missing repository dependencies container.
+        if missing_repository_dependencies:
+            folder_id, missing_repository_dependencies_root_folder = \
+                build_repository_dependencies_folder( trans=trans,
+                                                      folder_id=folder_id,
+                                                      repository_dependencies=missing_repository_dependencies,
+                                                      label='Missing repository dependencies',
+                                                      installed=False )
+            containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
+        # Installed tool dependencies container.
+        if tool_dependencies:
+            if new_install:
+                label = 'Tool dependencies'
+            else:
+                label = 'Installed tool dependencies'
+            # We only want to display the Status column if the tool_dependency is missing.
+            folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
+                                                                                       folder_id,
+                                                                                       tool_dependencies,
+                                                                                       label=label,
+                                                                                       missing=False,
+                                                                                       new_install=new_install,
+                                                                                       reinstalling=reinstalling )
+            containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+        # Missing tool dependencies container.
+        if missing_tool_dependencies:
+            # We only want to display the Status column if the tool_dependency is missing.
+            folder_id, missing_tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
+                                                                                               folder_id,
+                                                                                               missing_tool_dependencies,
+                                                                                               label='Missing tool dependencies',
+                                                                                               missing=True,
+                                                                                               new_install=new_install,
+                                                                                               reinstalling=reinstalling )
+            containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
+        # Valid tools container.
+        if valid_tools:
+            folder_id, valid_tools_root_folder = build_tools_folder( trans,
+                                                                     folder_id,
+                                                                     valid_tools,
+                                                                     repository,
+                                                                     changeset_revision,
+                                                                     label='Valid tools' )
+            containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+        # Workflows container.
+        if workflows:
+            folder_id, workflows_root_folder = build_workflows_folder( trans=trans,
+                                                                       folder_id=folder_id,
+                                                                       workflows=workflows,
+                                                                       repository_metadata_id=None,
+                                                                       repository_id=repository_id,
+                                                                       label='Workflows' )
+            containers_dict[ 'workflows' ] = workflows_root_folder
+        if valid_data_managers:
+            folder_id, valid_data_managers_root_folder = build_data_managers_folder( trans=trans,
+                                                                                     folder_id=folder_id,
+                                                                                     data_managers=valid_data_managers,
+                                                                                     label='Valid Data Managers' )
+            containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder
+        if invalid_data_managers or data_managers_errors:
+            folder_id, invalid_data_managers_root_folder = build_invalid_data_managers_folder( trans=trans,
+                                                                                               folder_id=folder_id,
+                                                                                               data_managers=invalid_data_managers,
+                                                                                               error_messages=data_managers_errors,
+                                                                                               label='Invalid Data Managers' )
+            containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder
+    except Exception, e:
+        log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
+    finally:
+        lock.release()
+    return containers_dict
+
+def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ):
+    """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
+    containers_dict = dict( datatypes=None,
+                            invalid_tools=None,
+                            readme_files=None,
+                            repository_dependencies=None,
+                            tool_dependencies=None,
+                            valid_tools=None,
+                            workflows=None,
+                            valid_data_managers=None
+                            )
+    if repository_metadata:
+        metadata = repository_metadata.metadata
+        lock = threading.Lock()
+        lock.acquire( True )
+        try:
+            folder_id = 0
+            # Datatypes container.
+            if metadata:
+                if 'datatypes' in metadata:
+                    datatypes = metadata[ 'datatypes' ]
+                    folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
+                    containers_dict[ 'datatypes' ] = datatypes_root_folder
+            # Invalid repository dependencies container.
+            if metadata:
+                if 'invalid_repository_dependencies' in metadata:
+                    invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
+                    folder_id, invalid_repository_dependencies_root_folder = \
+                        build_invalid_repository_dependencies_root_folder( trans,
+                                                                           folder_id,
+                                                                           invalid_repository_dependencies )
+                    containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
+            # Invalid tool dependencies container.
+            if metadata:
+                if 'invalid_tool_dependencies' in metadata:
+                    invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
+                    folder_id, invalid_tool_dependencies_root_folder = \
+                        build_invalid_tool_dependencies_root_folder( trans,
+                                                                     folder_id,
+                                                                     invalid_tool_dependencies )
+                    containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
+            # Invalid tools container.
+            if metadata:
+                if 'invalid_tools' in metadata:
+                    invalid_tool_configs = metadata[ 'invalid_tools' ]
+                    folder_id, invalid_tools_root_folder = build_invalid_tools_folder( trans,
+                                                                                       folder_id,
+                                                                                       invalid_tool_configs,
+                                                                                       changeset_revision,
+                                                                                       repository=repository,
+                                                                                       label='Invalid tools' )
+                    containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+            # Readme files container.
+            if metadata:
+                if 'readme_files' in metadata:
+                    readme_files_dict = readme_util.build_readme_files_dict( metadata )
+                    folder_id, readme_files_root_folder = build_readme_files_folder( trans, folder_id, readme_files_dict )
+                    containers_dict[ 'readme_files' ] = readme_files_root_folder
+            # Repository dependencies container.
+            folder_id, repository_dependencies_root_folder = build_repository_dependencies_folder( trans=trans,
+                                                                                                   folder_id=folder_id,
+                                                                                                   repository_dependencies=repository_dependencies,
+                                                                                                   label='Repository dependencies',
+                                                                                                   installed=False )
+            if repository_dependencies_root_folder:
+                containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+            # Tool dependencies container.
+            if metadata:
+                if 'tool_dependencies' in metadata:
+                    tool_dependencies = metadata[ 'tool_dependencies' ]
+                    if trans.webapp.name == 'tool_shed':
+                        if 'orphan_tool_dependencies' in metadata:
+                            orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
+                            tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies )
+                    folder_id, tool_dependencies_root_folder = build_tool_dependencies_folder( trans,
+                                                                                               folder_id,
+                                                                                               tool_dependencies,
+                                                                                               missing=False,
+                                                                                               new_install=False )
+                    containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+            # Valid tools container.
+            if metadata:
+                if 'tools' in metadata:
+                    valid_tools = metadata[ 'tools' ]
+                    folder_id, valid_tools_root_folder = build_tools_folder( trans,
+                                                                             folder_id,
+                                                                             valid_tools,
+                                                                             repository,
+                                                                             changeset_revision,
+                                                                             label='Valid tools' )
+                    containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+            # Workflows container.
+            if metadata:
+                if 'workflows' in metadata:
+                    workflows = metadata[ 'workflows' ]
+                    folder_id, workflows_root_folder = build_workflows_folder( trans=trans,
+                                                                               folder_id=folder_id,
+                                                                               workflows=workflows,
+                                                                               repository_metadata_id=repository_metadata.id,
+                                                                               repository_id=None,
+                                                                               label='Workflows' )
+                    containers_dict[ 'workflows' ] = workflows_root_folder
+            # Valid Data Managers container
+            if metadata:
+                if 'data_manager' in metadata:
+                    data_managers = metadata['data_manager'].get( 'data_managers', None )
+                    folder_id, data_managers_root_folder = build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" )
+                    containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
+                    error_messages = metadata['data_manager'].get( 'error_messages', None )
+                    data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
+                    folder_id, data_managers_root_folder = build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" )
+                    containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
+
+        except Exception, e:
+            log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
+        finally:
+            lock.release()
+    return containers_dict
+
 def build_repository_dependencies_folder( trans, folder_id, repository_dependencies, label='Repository dependencies', installed=False ):
     """Return a folder hierarchy containing repository dependencies."""
     if repository_dependencies:
@@ -438,6 +721,7 @@
     else:
         repository_dependencies_root_folder = None
     return folder_id, repository_dependencies_root_folder
+
 def build_tools_folder( trans, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
     """Return a folder hierarchy containing valid tools."""
     if tool_dicts:
@@ -494,6 +778,7 @@
     else:
         tools_root_folder = None
     return folder_id, tools_root_folder
+
 def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', missing=False, new_install=False, reinstalling=False ):
     """Return a folder hierarchy containing tool dependencies."""
     # When we're in Galaxy (not the tool shed) and the tool dependencies are not installed or are in an error state, they are considered missing.  The tool
@@ -603,6 +888,7 @@
     else:
         tool_dependencies_root_folder = None
     return folder_id, tool_dependencies_root_folder
+
 def build_workflows_folder( trans, folder_id, workflows, repository_metadata_id=None, repository_id=None, label='Workflows' ):
     """
     Return a folder hierarchy containing workflow objects for each workflow dictionary in the received workflows list.  When
@@ -646,6 +932,7 @@
     else:
         workflows_root_folder = None
     return folder_id, workflows_root_folder
+
 def cast_empty_repository_dependency_folders( folder, repository_dependency_id ):
     """
     Change any empty folders contained within the repository dependencies container into a repository dependency since it has no repository dependencies
@@ -660,6 +947,7 @@
     for sub_folder in folder.folders:
         return cast_empty_repository_dependency_folders( sub_folder, repository_dependency_id )
     return folder, repository_dependency_id
+
 def generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, key ):
     """Return a repository dependency label based on the repository dependency key."""
     if key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
@@ -667,6 +955,7 @@
     else:
         label = "Repository <b>%s</b> revision <b>%s</b> owned by <b>%s</b>" % ( repository_name, changeset_revision, repository_owner )
     return label
+
 def generate_repository_dependencies_key_for_repository( toolshed_base_url, repository_name, repository_owner, changeset_revision ):
     # FIXME: assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.
     return '%s%s%s%s%s%s%s' % ( str( toolshed_base_url ).rstrip( '/' ),
@@ -676,14 +965,17 @@
                                 str( repository_owner ),
                                 STRSEP,
                                 str( changeset_revision ) )
+
 def generate_tool_dependencies_key( name, version, type ):
     return '%s%s%s%s%s' % ( str( name ), STRSEP, str( version ), STRSEP, str( type ) )
+
 def get_folder( folder, key ):
     if folder.key == key:
         return folder
     for sub_folder in folder.folders:
         return get_folder( sub_folder, key )
     return None
+
 def get_components_from_key( key ):
     # FIXME: assumes tool shed is current tool shed since repository dependencies across tool sheds is not yet supported.
     items = key.split( STRSEP )
@@ -692,6 +984,7 @@
     repository_owner = items[ 2 ]
     changeset_revision = items[ 3 ]
     return toolshed_base_url, repository_name, repository_owner, changeset_revision
+
 def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
     toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
     folder = get_folder( repository_dependencies_folder, rd_key )
@@ -744,6 +1037,7 @@
             # Insert the repository_dependency into the folder.
             sub_folder.repository_dependencies.append( repository_dependency )
     return repository_dependencies_folder, folder_id, repository_dependency_id
+
 def is_subfolder_of( folder, repository_dependency ):
     toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
     key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision )
@@ -751,15 +1045,18 @@
         if key == sub_folder.key:
             return True
     return False
+
 def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
     toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
     return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
+
 def populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
     folder_keys = repository_dependencies.keys()
     for key, value in repository_dependencies.items():
         repository_dependencies_folder, folder_id, repository_dependency_id = \
             handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
     return repository_dependencies_folder, folder_id, repository_dependency_id
+
 def print_folders( pad, folder ):
     # For debugging...
     pad_str = ''
@@ -770,6 +1067,7 @@
         print '  %s%s' % ( pad_str, repository_dependency.listify )
     for sub_folder in folder.folders:
         print_folders( pad+5, sub_folder )
+
 def prune_repository_dependencies( folder ):
     """
     Since the object used to generate a repository dependencies container is a dictionary and not an odict() (it must be json-serialize-able), the

diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/galaxy/webapps/tool_shed/util/workflow_util.py
--- a/lib/galaxy/webapps/tool_shed/util/workflow_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/workflow_util.py
@@ -6,7 +6,7 @@
 import logging, svgfig
 from galaxy.util import json
 import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util
+from tool_shed.util import encoding_util, metadata_util, tool_util
 from galaxy.workflow.modules import InputDataModule, ToolModule, WorkflowModuleFactory
 import galaxy.webapps.galaxy.controllers.workflow
 import galaxy.tools
@@ -52,7 +52,7 @@
             if self.tool_id in [ tool_dict[ 'id' ], tool_dict[ 'guid' ] ]:
                 if trans.webapp.name == 'tool_shed':
                     # We're in the tool shed.
-                    repository, self.tool, message = suc.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
+                    repository, self.tool, message = tool_util.load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_dict[ 'tool_config' ] )
                     if message and self.tool is None:
                         self.errors = 'unavailable'
                         break

diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -203,22 +203,22 @@
                                                   repository_missing_tool_dependencies=missing_tool_dependencies,
                                                   required_repo_info_dicts=None )
     # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
-    containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
-                                                                  repository=None,
-                                                                  datatypes=None,
-                                                                  invalid_tools=None,
-                                                                  missing_repository_dependencies=missing_repository_dependencies,
-                                                                  missing_tool_dependencies=missing_tool_dependencies,
-                                                                  readme_files_dict=readme_files_dict,
-                                                                  repository_dependencies=installed_repository_dependencies,
-                                                                  tool_dependencies=installed_tool_dependencies,
-                                                                  valid_tools=None,
-                                                                  workflows=None,
-                                                                  valid_data_managers=None,
-                                                                  invalid_data_managers=None,
-                                                                  data_managers_errors=None,
-                                                                  new_install=True,
-                                                                  reinstalling=False )
+    containers_dict = container_util.build_repository_containers_for_galaxy( trans=trans,
+                                                                             repository=None,
+                                                                             datatypes=None,
+                                                                             invalid_tools=None,
+                                                                             missing_repository_dependencies=missing_repository_dependencies,
+                                                                             missing_tool_dependencies=missing_tool_dependencies,
+                                                                             readme_files_dict=readme_files_dict,
+                                                                             repository_dependencies=installed_repository_dependencies,
+                                                                             tool_dependencies=installed_tool_dependencies,
+                                                                             valid_tools=None,
+                                                                             workflows=None,
+                                                                             valid_data_managers=None,
+                                                                             invalid_data_managers=None,
+                                                                             data_managers_errors=None,
+                                                                             new_install=True,
+                                                                             reinstalling=False )
     # Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container.
     containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
     # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
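Both relocated container builders guard folder construction with a threading.Lock, log any exception, and release the lock in a finally clause so that whatever subset of containers was built before a failure is still returned. A condensed sketch of that pattern, reduced to a single container (this wrapper is hypothetical; build_datatypes_folder is the container_util helper shown above and is assumed to be in scope):

    import logging, threading

    log = logging.getLogger( __name__ )

    def build_containers_safely( trans, datatypes ):
        containers_dict = dict( datatypes=None )
        lock = threading.Lock()
        lock.acquire( True )
        try:
            folder_id = 0
            if datatypes:
                # Each builder consumes and returns the running folder_id counter.
                folder_id, datatypes_root_folder = build_datatypes_folder( trans, folder_id, datatypes )
                containers_dict[ 'datatypes' ] = datatypes_root_folder
        except Exception, e:
            log.debug( "Exception building containers: %s" % str( e ) )
        finally:
            lock.release()
        return containers_dict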
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -1,8 +1,9 @@
-import os, logging
+import os, logging, urllib2
 import tool_shed.util.shed_util_common as suc
-from tool_shed.util import data_manager_util, datatype_util, tool_util
+from tool_shed.util import encoding_util, data_manager_util, datatype_util, tool_util
 from tool_shed.galaxy_install.tool_dependencies.install_util import install_package, set_environment
 from galaxy import util
+from galaxy.util import json
 from galaxy.webapps.tool_shed.util import container_util
 from galaxy import eggs
@@ -78,7 +79,7 @@
     else:
         installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple )
     # Discover all repository dependencies and retrieve information for installing them.
-    required_repo_info_dicts = suc.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+    required_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
     # Display tool dependencies defined for each of the repository dependencies.
     if required_repo_info_dicts:
         all_tool_dependencies = {}
@@ -234,6 +235,55 @@
         missing_tool_dependencies = None
     return tool_dependencies, missing_tool_dependencies
 
+def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
+    """
+    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list.  All
+    repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
+    this method is required to retrieve all repository dependencies.
+    """
+    all_repo_info_dicts = []
+    if repo_info_dicts:
+        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
+        required_repository_tups = []
+        for repo_info_dict in repo_info_dicts:
+            for repository_name, repo_info_tup in repo_info_dict.items():
+                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+                    suc.get_repo_info_tuple_contents( repo_info_tup )
+                if repository_dependencies:
+                    for key, val in repository_dependencies.items():
+                        if key in [ 'root_key', 'description' ]:
+                            continue
+                        toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+                        components_list = [ toolshed, name, owner, changeset_revision ]
+                        if components_list not in required_repository_tups:
+                            required_repository_tups.append( components_list )
+                        for components_list in val:
+                            if components_list not in required_repository_tups:
+                                required_repository_tups.append( components_list )
+        if required_repository_tups:
+            # The value of required_repository_tups is a list of tuples, so we need to encode it.
+            encoded_required_repository_tups = []
+            for required_repository_tup in required_repository_tups:
+                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+            url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
+            response = urllib2.urlopen( url )
+            text = response.read()
+            response.close()
+            if text:
+                required_repo_info_dict = json.from_json_string( text )
+                required_repo_info_dicts = []
+                encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+                for encoded_dict_str in encoded_dict_strings:
+                    decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+                    required_repo_info_dicts.append( decoded_dict )
+                if required_repo_info_dicts:
+                    for required_repo_info_dict in required_repo_info_dicts:
+                        if required_repo_info_dict not in all_repo_info_dicts:
+                            all_repo_info_dicts.append( required_repo_info_dict )
+    return all_repo_info_dicts
+
 def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
     """
     Install and build tool dependencies defined in the tool_dependencies_config.  This config's tag sets can currently refer to installation
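The wire format used by get_required_repo_info_dicts is worth spelling out: each ( toolshed, name, owner, changeset_revision ) list is joined with encoding_util.encoding_sep, the joined tuples are concatenated with encoding_sep2, and the whole string is tool_shed_encode()-d into the request URL. A sketch with hypothetical repository values (the encoding_util helpers are the ones used in the function above):

    from tool_shed.util import encoding_util

    # Hypothetical example values; real tuples come from repository_dependencies metadata.
    required_repository_tups = [ [ 'http://toolshed.example.org', 'some_repo', 'some_owner', 'abc123def456' ] ]
    encoded_tups = [ encoding_util.encoding_sep.join( tup ) for tup in required_repository_tups ]
    encoded_str = encoding_util.tool_shed_encode( encoding_util.encoding_sep2.join( encoded_tups ) )
    # encoded_str is then sent as the encoded_str parameter of /repository/get_required_repo_info_dict.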
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -5,7 +5,7 @@
 from galaxy.datatypes import checkers
 from galaxy.tools.data_manager.manager import DataManager
 import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_install_util, tool_dependency_util
+from tool_shed.util import common_install_util, readme_util, tool_dependency_util, tool_util
 from galaxy.model.orm import and_
 
 from galaxy import eggs
@@ -417,7 +417,7 @@
                 # Parse the tool_config to get the guid.
                 tool_config_path = suc.get_config_from_disk( tool_config, repository_files_dir )
                 full_path = os.path.abspath( tool_config_path )
-                tool, valid, error_message = suc.load_tool_from_config( app, full_path )
+                tool, valid, error_message = tool_util.load_tool_from_config( app, full_path )
                 if tool is None:
                     guid = None
                 else:
@@ -533,7 +533,7 @@
         metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
         # Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
         for sample_file in sample_file_copy_paths:
-            suc.copy_sample_file( app, sample_file, dest_path=work_dir )
+            tool_util.copy_sample_file( app, sample_file, dest_path=work_dir )
             # If the list of sample files includes a tool_data_table_conf.xml.sample file, laad it's table elements into memory.
             relative_path, filename = os.path.split( sample_file )
             if filename == 'tool_data_table_conf.xml.sample':
@@ -578,13 +578,13 @@
                     log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
                     is_tool = False
                 if is_tool:
-                    tool, valid, error_message = suc.load_tool_from_config( app, full_path )
+                    tool, valid, error_message = tool_util.load_tool_from_config( app, full_path )
                     if tool is None:
                         if not valid:
                             invalid_tool_configs.append( name )
                             invalid_file_tups.append( ( name, error_message ) )
                     else:
-                        invalid_files_and_errors_tups = suc.check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
+                        invalid_files_and_errors_tups = tool_util.check_tool_input_params( app, files_dir, name, tool, sample_file_copy_paths )
                         can_set_metadata = True
                         for tup in invalid_files_and_errors_tups:
                             if name in tup:
@@ -1370,7 +1370,7 @@
             response.close()
             readme_files_dict = json.from_json_string( raw_text )
         else:
-            readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
+            readme_files_dict = readme_util.build_readme_files_dict( repository.metadata, tool_path )
     else:
         readme_files_dict = None
     # Handle repository dependencies.
@@ -1403,22 +1403,22 @@
         valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
         invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
         data_managers_errors = metadata['data_manager'].get( 'messages', None )
-        containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
-                                                                      repository=repository,
-                                                                      datatypes=datatypes,
-                                                                      invalid_tools=invalid_tools,
-                                                                      missing_repository_dependencies=missing_repository_dependencies,
-                                                                      missing_tool_dependencies=missing_tool_dependencies,
-                                                                      readme_files_dict=readme_files_dict,
-                                                                      repository_dependencies=installed_repository_dependencies,
-                                                                      tool_dependencies=installed_tool_dependencies,
-                                                                      valid_tools=valid_tools,
-                                                                      workflows=workflows,
-                                                                      valid_data_managers=valid_data_managers,
-                                                                      invalid_data_managers=invalid_data_managers,
-                                                                      data_managers_errors=data_managers_errors,
-                                                                      new_install=False,
-                                                                      reinstalling=reinstalling )
+        containers_dict = container_util.build_repository_containers_for_galaxy( trans=trans,
+                                                                                 repository=repository,
+                                                                                 datatypes=datatypes,
+                                                                                 invalid_tools=invalid_tools,
+                                                                                 missing_repository_dependencies=missing_repository_dependencies,
+                                                                                 missing_tool_dependencies=missing_tool_dependencies,
+                                                                                 readme_files_dict=readme_files_dict,
+                                                                                 repository_dependencies=installed_repository_dependencies,
+                                                                                 tool_dependencies=installed_tool_dependencies,
+                                                                                 valid_tools=valid_tools,
+                                                                                 workflows=workflows,
+                                                                                 valid_data_managers=valid_data_managers,
+                                                                                 invalid_data_managers=invalid_data_managers,
+                                                                                 data_managers_errors=data_managers_errors,
+                                                                                 new_install=False,
+                                                                                 reinstalling=reinstalling )
     else:
         containers_dict = dict( datatypes=None,
                                 invalid_tools=None,
@@ -1583,7 +1583,7 @@
     # Set tool version information for all downloadable changeset revisions.  Get the list of changeset revisions from the changelog.
     reset_all_tool_versions( trans, id, repo )
     # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
-    suc.reset_tool_data_tables( trans.app )
+    tool_util.reset_tool_data_tables( trans.app )
     return invalid_file_tups, metadata_dict
 
 def reset_metadata_on_selected_repositories( trans, **kwd ):
@@ -1608,7 +1608,7 @@
             repository = suc.get_installed_tool_shed_repository( trans, repository_id )
             invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
             if invalid_file_tups:
-                message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+                message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
                 log.debug( message )
                 unsuccessful_count += 1
             else:
@@ -1733,10 +1733,10 @@
             message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
             status = "error"
     if invalid_file_tups:
-        message = suc.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+        message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
        status = 'error'
     # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
-    suc.reset_tool_data_tables( trans.app )
+    tool_util.reset_tool_data_tables( trans.app )
     return message, status
 
 def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
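The metadata_util changes preserve the same recovery discipline at the relocated call sites: collect invalid tool tuples, turn them into a user-facing message via tool_util, and reload the empty tool_data_table_conf.xml afterwards. A condensed sketch, assuming a hypothetical wrapper (every helper called here appears in the diffs above):

    import tool_shed.util.shed_util_common as suc
    from tool_shed.util import metadata_util, tool_util

    def reset_and_report( trans, repository_id ):
        # Resets metadata; internally this ends by calling tool_util.reset_tool_data_tables.
        invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
        if invalid_file_tups:
            repository = suc.get_repository_in_tool_shed( trans, repository_id )
            return tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
        return None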
diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/readme_util.py
--- /dev/null
+++ b/lib/tool_shed/util/readme_util.py
@@ -0,0 +1,40 @@
+import os, logging, urllib2
+import tool_shed.util.shed_util_common as suc
+from galaxy.util import json
+
+log = logging.getLogger( __name__ )
+
+def build_readme_files_dict( metadata, tool_path=None ):
+    """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata."""
+    readme_files_dict = {}
+    if metadata:
+        if 'readme_files' in metadata:
+            for relative_path_to_readme_file in metadata[ 'readme_files' ]:
+                readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
+                if tool_path:
+                    full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) )
+                else:
+                    full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
+                try:
+                    f = open( full_path_to_readme_file, 'r' )
+                    text = f.read()
+                    f.close()
+                    readme_files_dict[ readme_file_name ] = suc.translate_string( text, to_html=False )
+                except Exception, e:
+                    log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
+    return readme_files_dict
+
+def get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ):
+    """Return a dictionary of README files contained in the single repository being installed so they can be displayed on the tool panel section selection page."""
+    name = repo_info_dict.keys()[ 0 ]
+    repo_info_tuple = repo_info_dict[ name ]
+    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = suc.get_repo_info_tuple_contents( repo_info_tuple )
+    # Handle README files.
+    url = suc.url_join( tool_shed_url,
+                        'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+                        ( name, repository_owner, changeset_revision ) )
+    response = urllib2.urlopen( url )
+    raw_text = response.read()
+    response.close()
+    readme_files_dict = json.from_json_string( raw_text )
+    return readme_files_dict

diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -104,7 +104,7 @@
     filtered_repo_info_dicts = []
     # Discover all repository dependencies and retrieve information for installing them.  Even if the user elected to not install repository dependencies we have
     # to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
-    all_repo_info_dicts = suc.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+    all_repo_info_dicts = common_install_util.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
    if not all_repo_info_dicts:
         # No repository dependencies were discovered so process the received repositories.
         all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
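The new readme_util module has two symmetrical entry points: build_readme_files_dict works from metadata already on disk, while get_readme_files_dict_for_display fetches the same name -> content structure over HTTP from the tool shed during installation. A sketch of the local path, assuming a repository_metadata record is already in hand (the logging setup is illustrative):

    import logging
    from tool_shed.util import readme_util

    log = logging.getLogger( __name__ )

    # Maps each README file name to its translated text content.
    readme_files_dict = readme_util.build_readme_files_dict( repository_metadata.metadata )
    for readme_file_name, text in readme_files_dict.items():
        log.debug( 'README %s: %d characters' % ( readme_file_name, len( text ) ) )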
all_repo_info_dicts = [ rid for rid in repo_info_dicts ] diff -r 8703329f3716e7c4603e3a3a57286f67bc3b89af -r e40606362e962b4c452960c7c5f9b33b5b13f96c lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -1,18 +1,14 @@ -import os, shutil, tempfile, logging, string, threading, urllib2, filecmp +import os, shutil, tempfile, logging, string, urllib2 from datetime import datetime from time import gmtime, strftime from galaxy import util -from galaxy.tools import parameters from galaxy.util import json from galaxy.util.odict import odict from galaxy.web import url_for from galaxy.web.form_builder import SelectField -from galaxy.webapps.tool_shed.util import container_util from galaxy.datatypes import checkers from galaxy.model.orm import and_ import sqlalchemy.orm.exc -from galaxy.tools.parameters import dynamic_options -from tool_shed.util import encoding_util from galaxy import eggs import pkg_resources @@ -89,297 +85,6 @@ '${host}' """ -def add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies ): - """Inspect all received tool dependencies and label those that are orphans within the repository.""" - orphan_env_dependencies = orphan_tool_dependencies.get( 'set_environment', None ) - new_tool_dependencies = {} - if tool_dependencies: - for td_key, requirements_dict in tool_dependencies.items(): - if td_key in [ 'set_environment' ]: - # "set_environment": [{"name": "R_SCRIPT_PATH", "type": "set_environment"}] - if orphan_env_dependencies: - new_set_environment_dict_list = [] - for set_environment_dict in requirements_dict: - if set_environment_dict in orphan_env_dependencies: - set_environment_dict[ 'is_orphan' ] = True - else: - set_environment_dict[ 'is_orphan' ] = False - new_set_environment_dict_list.append( set_environment_dict ) - new_tool_dependencies[ td_key ] = new_set_environment_dict_list - else: - new_tool_dependencies[ td_key ] = requirements_dict - else: - # {"R/2.15.1": {"name": "R", "readme": "some string", "type": "package", "version": "2.15.1"} - if td_key in orphan_tool_dependencies: - requirements_dict[ 'is_orphan' ] = True - else: - requirements_dict[ 'is_orphan' ] = False - new_tool_dependencies[ td_key ] = requirements_dict - return new_tool_dependencies -def build_readme_files_dict( metadata, tool_path=None ): - """Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received metadata.""" - readme_files_dict = {} - if metadata: - if 'readme_files' in metadata: - for relative_path_to_readme_file in metadata[ 'readme_files' ]: - readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ] - if tool_path: - full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) ) - else: - full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file ) - try: - f = open( full_path_to_readme_file, 'r' ) - text = f.read() - f.close() - readme_files_dict[ readme_file_name ] = translate_string( text, to_html=False ) - except Exception, e: - log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) ) - return readme_files_dict -def build_repository_containers_for_galaxy( trans, repository, datatypes, invalid_tools, missing_repository_dependencies, missing_tool_dependencies, - readme_files_dict, repository_dependencies, tool_dependencies, valid_tools, workflows, valid_data_managers, - 
invalid_data_managers, data_managers_errors, new_install=False, reinstalling=False ): - """Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy.""" - containers_dict = dict( datatypes=None, - invalid_tools=None, - missing_tool_dependencies=None, - readme_files=None, - repository_dependencies=None, - missing_repository_dependencies=None, - tool_dependencies=None, - valid_tools=None, - workflows=None, - valid_data_managers=None, - invalid_data_managers=None ) - # Some of the tool dependency folders will include links to display tool dependency information, and some of these links require the repository - # id. However we need to be careful because sometimes the repository object is None. - if repository: - repository_id = repository.id - changeset_revision = repository.changeset_revision - else: - repository_id = None - changeset_revision = None - lock = threading.Lock() - lock.acquire( True ) - try: - folder_id = 0 - # Datatypes container. - if datatypes: - folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes ) - containers_dict[ 'datatypes' ] = datatypes_root_folder - # Invalid tools container. - if invalid_tools: - folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans, - folder_id, - invalid_tools, - changeset_revision, - repository=repository, - label='Invalid tools' ) - containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder - # Readme files container. - if readme_files_dict: - folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict ) - containers_dict[ 'readme_files' ] = readme_files_root_folder - # Installed repository dependencies container. - if repository_dependencies: - if new_install: - label = 'Repository dependencies' - else: - label = 'Installed repository dependencies' - folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans, - folder_id=folder_id, - repository_dependencies=repository_dependencies, - label=label, - installed=True ) - containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder - # Missing repository dependencies container. - if missing_repository_dependencies: - folder_id, missing_repository_dependencies_root_folder = \ - container_util.build_repository_dependencies_folder( trans=trans, - folder_id=folder_id, - repository_dependencies=missing_repository_dependencies, - label='Missing repository dependencies', - installed=False ) - containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder - # Installed tool dependencies container. - if tool_dependencies: - if new_install: - label = 'Tool dependencies' - else: - label = 'Installed tool dependencies' - # We only want to display the Status column if the tool_dependency is missing. - folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans, - folder_id, - tool_dependencies, - label=label, - missing=False, - new_install=new_install, - reinstalling=reinstalling ) - containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder - # Missing tool dependencies container. - if missing_tool_dependencies: - # We only want to display the Status column if the tool_dependency is missing. 
- folder_id, missing_tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans, - folder_id, - missing_tool_dependencies, - label='Missing tool dependencies', - missing=True, - new_install=new_install, - reinstalling=reinstalling ) - containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder - # Valid tools container. - if valid_tools: - folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans, - folder_id, - valid_tools, - repository, - changeset_revision, - label='Valid tools' ) - containers_dict[ 'valid_tools' ] = valid_tools_root_folder - # Workflows container. - if workflows: - folder_id, workflows_root_folder = container_util.build_workflows_folder( trans=trans, - folder_id=folder_id, - workflows=workflows, - repository_metadata_id=None, - repository_id=repository_id, - label='Workflows' ) - containers_dict[ 'workflows' ] = workflows_root_folder - if valid_data_managers: - folder_id, valid_data_managers_root_folder = container_util.build_data_managers_folder( trans=trans, - folder_id=folder_id, - data_managers=valid_data_managers, - label='Valid Data Managers' ) - containers_dict[ 'valid_data_managers' ] = valid_data_managers_root_folder - if invalid_data_managers or data_managers_errors: - folder_id, invalid_data_managers_root_folder = container_util.build_invalid_data_managers_folder( trans=trans, - folder_id=folder_id, - data_managers=invalid_data_managers, - error_messages=data_managers_errors, - label='Invalid Data Managers' ) - containers_dict[ 'invalid_data_managers' ] = invalid_data_managers_root_folder - except Exception, e: - log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) ) - finally: - lock.release() - return containers_dict -def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ): - """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed.""" - containers_dict = dict( datatypes=None, - invalid_tools=None, - readme_files=None, - repository_dependencies=None, - tool_dependencies=None, - valid_tools=None, - workflows=None, - valid_data_managers=None - ) - if repository_metadata: - metadata = repository_metadata.metadata - lock = threading.Lock() - lock.acquire( True ) - try: - folder_id = 0 - # Datatypes container. - if metadata: - if 'datatypes' in metadata: - datatypes = metadata[ 'datatypes' ] - folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes ) - containers_dict[ 'datatypes' ] = datatypes_root_folder - # Invalid repository dependencies container. - if metadata: - if 'invalid_repository_dependencies' in metadata: - invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ] - folder_id, invalid_repository_dependencies_root_folder = \ - container_util.build_invalid_repository_dependencies_root_folder( trans, - folder_id, - invalid_repository_dependencies ) - containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder - # Invalid tool dependencies container. 
-def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ):
-    """Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
-    containers_dict = dict( datatypes=None,
-                            invalid_tools=None,
-                            readme_files=None,
-                            repository_dependencies=None,
-                            tool_dependencies=None,
-                            valid_tools=None,
-                            workflows=None,
-                            valid_data_managers=None
-                            )
-    if repository_metadata:
-        metadata = repository_metadata.metadata
-        lock = threading.Lock()
-        lock.acquire( True )
-        try:
-            folder_id = 0
-            # Datatypes container.
-            if metadata:
-                if 'datatypes' in metadata:
-                    datatypes = metadata[ 'datatypes' ]
-                    folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
-                    containers_dict[ 'datatypes' ] = datatypes_root_folder
-            # Invalid repository dependencies container.
-            if metadata:
-                if 'invalid_repository_dependencies' in metadata:
-                    invalid_repository_dependencies = metadata[ 'invalid_repository_dependencies' ]
-                    folder_id, invalid_repository_dependencies_root_folder = \
-                        container_util.build_invalid_repository_dependencies_root_folder( trans,
-                                                                                          folder_id,
-                                                                                          invalid_repository_dependencies )
-                    containers_dict[ 'invalid_repository_dependencies' ] = invalid_repository_dependencies_root_folder
-            # Invalid tool dependencies container.
-            if metadata:
-                if 'invalid_tool_dependencies' in metadata:
-                    invalid_tool_dependencies = metadata[ 'invalid_tool_dependencies' ]
-                    folder_id, invalid_tool_dependencies_root_folder = \
-                        container_util.build_invalid_tool_dependencies_root_folder( trans,
-                                                                                    folder_id,
-                                                                                    invalid_tool_dependencies )
-                    containers_dict[ 'invalid_tool_dependencies' ] = invalid_tool_dependencies_root_folder
-            # Invalid tools container.
-            if metadata:
-                if 'invalid_tools' in metadata:
-                    invalid_tool_configs = metadata[ 'invalid_tools' ]
-                    folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
-                                                                                                      folder_id,
-                                                                                                      invalid_tool_configs,
-                                                                                                      changeset_revision,
-                                                                                                      repository=repository,
-                                                                                                      label='Invalid tools' )
-                    containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
-            # Readme files container.
-            if metadata:
-                if 'readme_files' in metadata:
-                    readme_files_dict = build_readme_files_dict( metadata )
-                    folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
-                    containers_dict[ 'readme_files' ] = readme_files_root_folder
-            # Repository dependencies container.
-            folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
-                                                                                                                  folder_id=folder_id,
-                                                                                                                  repository_dependencies=repository_dependencies,
-                                                                                                                  label='Repository dependencies',
-                                                                                                                  installed=False )
-            if repository_dependencies_root_folder:
-                containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
-            # Tool dependencies container.
-            if metadata:
-                if 'tool_dependencies' in metadata:
-                    tool_dependencies = metadata[ 'tool_dependencies' ]
-                    if trans.webapp.name == 'tool_shed':
-                        if 'orphan_tool_dependencies' in metadata:
-                            orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
-                            tool_dependencies = add_orphan_settings_to_tool_dependencies( tool_dependencies, orphan_tool_dependencies )
-                    folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
-                                                                                                              folder_id,
-                                                                                                              tool_dependencies,
-                                                                                                              missing=False,
-                                                                                                              new_install=False )
-                    containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
-            # Valid tools container.
-            if metadata:
-                if 'tools' in metadata:
-                    valid_tools = metadata[ 'tools' ]
-                    folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
-                                                                                            folder_id,
-                                                                                            valid_tools,
-                                                                                            repository,
-                                                                                            changeset_revision,
-                                                                                            label='Valid tools' )
-                    containers_dict[ 'valid_tools' ] = valid_tools_root_folder
-            # Workflows container.
-            if metadata:
-                if 'workflows' in metadata:
-                    workflows = metadata[ 'workflows' ]
-                    folder_id, workflows_root_folder = container_util.build_workflows_folder( trans=trans,
-                                                                                              folder_id=folder_id,
-                                                                                              workflows=workflows,
-                                                                                              repository_metadata_id=repository_metadata.id,
-                                                                                              repository_id=None,
-                                                                                              label='Workflows' )
-                    containers_dict[ 'workflows' ] = workflows_root_folder
-            # Valid Data Managers container
-            if metadata:
-                if 'data_manager' in metadata:
-                    data_managers = metadata['data_manager'].get( 'data_managers', None )
-                    folder_id, data_managers_root_folder = container_util.build_data_managers_folder( trans, folder_id, data_managers, label="Data Managers" )
-                    containers_dict[ 'valid_data_managers' ] = data_managers_root_folder
-                    error_messages = metadata['data_manager'].get( 'error_messages', None )
-                    data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
-                    folder_id, data_managers_root_folder = container_util.build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" )
-                    containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder
-
-        except Exception, e:
-            log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
-        finally:
-            lock.release()
-    return containers_dict
 def build_repository_ids_select_field( trans, name='repository_ids', multiple=True, display='checkboxes' ):
     """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
     repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
@@ -412,25 +117,6 @@
         if trans.app.security_agent.user_can_browse_component_review( trans.app, repository, component_review, user ):
             return True
     return False
-def can_use_tool_config_disk_file( trans, repository, repo, file_path, changeset_revision ):
-    """
-    Determine if repository's tool config file on disk can be used. This method is restricted to tool config files since, with the
-    exception of tool config files, multiple files with the same name will likely be in various directories in the repository and we're
-    comparing file names only (not relative paths).
-    """
-    if not file_path or not os.path.exists( file_path ):
-        # The file no longer exists on disk, so it must have been deleted at some previous point in the change log.
-        return False
-    if changeset_revision == repository.tip( trans.app ):
-        return True
-    file_name = strip_path( file_path )
-    latest_version_of_file = get_latest_tool_config_revision_from_repository_manifest( repo, file_name, changeset_revision )
-    can_use_disk_file = filecmp.cmp( file_path, latest_version_of_file )
-    try:
-        os.unlink( latest_version_of_file )
-    except:
-        pass
-    return can_use_disk_file
 def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
     """Check the malicious flag in repository metadata for a specified change set"""
     repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -450,50 +136,6 @@
         if review.changeset_revision == changeset_revision and review.user == user:
            return True
    return False
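The can_use_tool_config_disk_file() removal above reduces to a simple decision: a missing file is unusable, the tip revision is always current, and anything else is byte-compared against the latest manifest copy. A standalone sketch of that check, where fetch_latest_from_manifest is a hypothetical callable standing in for get_latest_tool_config_revision_from_repository_manifest, and the None guard is an added assumption not present in the original:

    import filecmp
    import os

    def can_use_disk_file(file_path, is_tip, fetch_latest_from_manifest):
        if not file_path or not os.path.exists(file_path):
            return False  # deleted somewhere earlier in the changelog
        if is_tip:
            return True   # the tip is current by definition
        latest_copy = fetch_latest_from_manifest()  # temp file holding the manifest version
        if latest_copy is None:
            return False  # guard added here; the original assumed a path came back
        try:
            return filecmp.cmp(file_path, latest_copy)
        finally:
            try:
                os.unlink(latest_copy)  # always clean up the temp copy
            except OSError:
                pass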
- """ - invalid_files_and_errors_tups = [] - correction_msg = '' - for input_param in tool.input_params: - if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic: - # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist. - options = input_param.dynamic_options or input_param.options - if options and isinstance( options, dynamic_options.DynamicOptions ): - if options.tool_data_table or options.missing_tool_data_table_name: - # Make sure the repository contains a tool_data_table_conf.xml.sample file. - sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir ) - if sample_tool_data_table_conf: - error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf ) - if error: - invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) ) - else: - options.missing_tool_data_table_name = None - else: - correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample " - correction_msg += "to the repository that includes the required entry to correct this error.<br/>" - invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) ) - if options.index_file or options.missing_index_file: - # Make sure the repository contains the required xxx.loc.sample file. - index_file = options.index_file or options.missing_index_file - index_file_name = strip_path( index_file ) - sample_found = False - for sample_file in sample_files: - sample_file_name = strip_path( sample_file ) - if sample_file_name == '%s.sample' % index_file_name: - options.index_file = index_file_name - options.missing_index_file = None - if options.tool_data_table: - options.tool_data_table.missing_index_file = None - sample_found = True - break - if not sample_found: - correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file_name ) - correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) - invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) ) - return invalid_files_and_errors_tups def clean_repository_clone_url( repository_clone_url ): if repository_clone_url.find( '@' ) > 0: # We have an url that includes an authenticated user, something like: @@ -527,17 +169,6 @@ error_message = 'Error cloning repository: %s' % str( e ) log.debug( error_message ) return False, error_message -def concat_messages( msg1, msg2 ): - if msg1: - if msg2: - message = '%s %s' % ( msg1, msg2 ) - else: - message = msg1 - elif msg2: - message = msg2 - else: - message = '' - return message def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ): # Persist the current in-memory list of config_elems to a file named by the value of config_filename. 
 def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
     # Persist the current in-memory list of config_elems to a file named by the value of config_filename.
     fd, filename = tempfile.mkstemp()
@@ -549,17 +180,6 @@
     os.close( fd )
     shutil.move( filename, os.path.abspath( config_filename ) )
     os.chmod( config_filename, 0644 )
-def copy_disk_sample_files_to_dir( trans, repo_files_dir, dest_path ):
-    """Copy all files currently on disk that end with the .sample extension to the directory to which dest_path refers."""
-    sample_files = []
-    for root, dirs, files in os.walk( repo_files_dir ):
-        if root.find( '.hg' ) < 0:
-            for name in files:
-                if name.endswith( '.sample' ):
-                    relative_path = os.path.join( root, name )
-                    copy_sample_file( trans.app, relative_path, dest_path=dest_path )
-                    sample_files.append( name )
-    return sample_files
 def copy_file_from_manifest( repo, ctx, filename, dir ):
     """Copy the latest version of the file named filename from the repository manifest to the directory to which dir refers."""
     for changeset in reversed_upper_bounded_changelog( repo, ctx ):
@@ -572,21 +192,6 @@
         fh.close()
         return file_path
     return None
-def copy_sample_file( app, filename, dest_path=None ):
-    """Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx. The default value for dest_path is ~/tool-data."""
-    if dest_path is None:
-        dest_path = os.path.abspath( app.config.tool_data_path )
-    sample_file_name = strip_path( filename )
-    copied_file = sample_file_name.replace( '.sample', '' )
-    full_source_path = os.path.abspath( filename )
-    full_destination_path = os.path.join( dest_path, sample_file_name )
-    # Don't copy a file to itself - not sure how this happens, but sometimes it does...
-    if full_source_path != full_destination_path:
-        # It's ok to overwrite the .sample version of the file.
-        shutil.copy( full_source_path, full_destination_path )
-    # Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
-    if not os.path.exists( os.path.join( dest_path, copied_file ) ):
-        shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
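The .sample convention encoded in copy_sample_file() above is worth spelling out: the .sample copy is always refreshed, but the live file (typically a .loc) is only created if absent so a locally customized version is never clobbered. A minimal standalone sketch, where dest is a hypothetical stand-in for Galaxy's tool-data directory (app.config.tool_data_path in the real code):

    import os
    import shutil

    def install_sample(sample_path, dest):
        name = os.path.basename(sample_path)      # e.g. blast2go.loc.sample
        real_name = name.replace('.sample', '')   # e.g. blast2go.loc
        # The .sample copy is safe to overwrite on every install.
        shutil.copy(sample_path, os.path.join(dest, name))
        target = os.path.join(dest, real_name)
        if not os.path.exists(target):
            # Never clobber a .loc the local admin may have customized.
            shutil.copy(sample_path, target)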
- """ - for key, val in repository_dependencies.items(): - if key in [ 'root_key', 'description' ]: - continue - tool_shed, name, owner, changeset_revision = container_util.get_components_from_key( key ) - repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ) - for repository_components_list in val: - tool_shed, name, owner, changeset_revision = repository_components_list - repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ) def generate_clone_url_for_installed_repository( app, repository ): """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" tool_shed_url = get_url_from_repository_tool_shed( app, repository ) @@ -675,42 +266,6 @@ toolshed, name, owner, changeset_revision = repo_info_tup # Don't include the changeset_revision in clone urls. return url_join( toolshed, 'repos', owner, name ) -def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ): - if as_html: - new_line = '<br/>' - bold_start = '<b>' - bold_end = '</b>' - else: - new_line = '\n' - bold_start = '' - bold_end = '' - message = '' - if not displaying_invalid_tool: - if metadata_dict: - message += "Metadata may have been defined for some items in revision '%s'. " % str( repository.tip( trans.app ) ) - message += "Correct the following problems if necessary and reset metadata.%s" % new_line - else: - message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) ) - message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line - for itc_tup in invalid_file_tups: - tool_file, exception_msg = itc_tup - if exception_msg.find( 'No such file or directory' ) >= 0: - exception_items = exception_msg.split() - missing_file_items = exception_items[ 7 ].split( '/' ) - missing_file = missing_file_items[ -1 ].rstrip( '\'' ) - if missing_file.endswith( '.loc' ): - sample_ext = '%s.sample' % missing_file - else: - sample_ext = missing_file - correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end ) - correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end ) - else: - if as_html: - correction_msg = exception_msg - else: - correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end ) - message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line ) - return message def generate_sharable_link_for_repository_in_tool_shed( trans, repository, changeset_revision=None ): """Generate the URL for sharing a repository that is in the tool shed.""" base_url = url_for( '/', qualified=True ).rstrip( '/' ) @@ -925,64 +480,6 @@ def get_installed_tool_shed_repository( trans, id ): """Get a repository on the Galaxy side from the database via id""" return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) ) -def get_latest_tool_config_revision_from_repository_manifest( repo, filename, changeset_revision ): - """ - Get the latest revision of a tool config file named filename from the repository manifest up to the value of changeset_revision. 
-def get_latest_tool_config_revision_from_repository_manifest( repo, filename, changeset_revision ):
-    """
-    Get the latest revision of a tool config file named filename from the repository manifest up to the value of changeset_revision.
-    This method is restricted to tool_config files rather than any file since it is likely that, with the exception of tool config files,
-    multiple files will have the same name in various directories within the repository.
-    """
-    stripped_filename = strip_path( filename )
-    for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
-        manifest_ctx = repo.changectx( changeset )
-        for ctx_file in manifest_ctx.files():
-            ctx_file_name = strip_path( ctx_file )
-            if ctx_file_name == stripped_filename:
-                try:
-                    fctx = manifest_ctx[ ctx_file ]
-                except LookupError:
-                    # The ctx_file may have been moved in the change set. For example, 'ncbi_blastp_wrapper.xml' was moved to
-                    # 'tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml', so keep looking for the file until we find the new location.
-                    continue
-                fh = tempfile.NamedTemporaryFile( 'wb' )
-                tmp_filename = fh.name
-                fh.close()
-                fh = open( tmp_filename, 'wb' )
-                fh.write( fctx.data() )
-                fh.close()
-                return tmp_filename
-    return None
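The function above walks the changelog newest-first and matches on bare file names so a tool config can be found even after it moves between directories. A standalone sketch of that walk, with no Mercurial dependency; history is a hypothetical list of (revision, {path: contents}) pairs standing in for the repository manifest:

    import os

    def latest_version_of_file(history, wanted_name, up_to_rev):
        # Walk revisions from up_to_rev back toward revision 0, newest first.
        for rev, files in sorted(history, key=lambda rc: rc[0], reverse=True):
            if rev > up_to_rev:
                continue
            for path, data in files.items():
                # Compare file names only, not relative paths, so moved files still match.
                if os.path.basename(path) == wanted_name:
                    return data
        return None

    history = [(0, {'filtering.xml': 'v1'}), (2, {'tools/filtering.xml': 'v2'})]
    print(latest_version_of_file(history, 'filtering.xml', up_to_rev=2))  # -> 'v2'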
-def get_list_of_copied_sample_files( repo, ctx, dir ):
-    """
-    Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. Copy
-    each discovered file to dir and return the list of filenames. If a .sample file was added in a changeset and then deleted in a later
-    changeset, it will be returned in the deleted_sample_files list. The caller will set the value of app.config.tool_data_path to dir in
-    order to load the tools and generate metadata for them.
-    """
-    deleted_sample_files = []
-    sample_files = []
-    for changeset in reversed_upper_bounded_changelog( repo, ctx ):
-        changeset_ctx = repo.changectx( changeset )
-        for ctx_file in changeset_ctx.files():
-            ctx_file_name = strip_path( ctx_file )
-            # If we decide in the future that files deleted later in the changelog should not be used, we can use the following if statement.
-            # if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files and ctx_file_name not in deleted_sample_files:
-            if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files:
-                fctx = get_file_context_from_ctx( changeset_ctx, ctx_file )
-                if fctx in [ 'DELETED' ]:
-                    # Since the possibly future used if statement above is commented out, the same file that was initially added will be
-                    # discovered in an earlier changeset in the change log and fall through to the else block below. In other words, if
-                    # a file named blast2go.loc.sample was added in change set 0 and then deleted in changeset 3, the deleted file in changeset
-                    # 3 will be handled here, but the later discovered file in changeset 0 will be handled in the else block below. In this
-                    # way, the file contents will always be found for future tools even though the file was deleted.
-                    if ctx_file_name not in deleted_sample_files:
-                        deleted_sample_files.append( ctx_file_name )
-                else:
-                    sample_files.append( ctx_file_name )
-                    tmp_ctx_file_name = os.path.join( dir, ctx_file_name.replace( '.sample', '' ) )
-                    fh = open( tmp_ctx_file_name, 'wb' )
-                    fh.write( fctx.data() )
-                    fh.close()
-    return sample_files, deleted_sample_files
 def get_named_tmpfile_from_ctx( ctx, filename, dir ):
     filename = strip_path( filename )
     for ctx_file in ctx.files():
@@ -1092,20 +589,6 @@
         previous_reviews_dict[ previous_changeset_revision ] = dict( changeset_revision_label=previous_changeset_revision_label,
                                                                      reviews=revision_reviews )
     return previous_reviews_dict
-def get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict ):
-    """Return a dictionary of README files contained in the single repository being installed so they can be displayed on the tool panel section selection page."""
-    name = repo_info_dict.keys()[ 0 ]
-    repo_info_tuple = repo_info_dict[ name ]
-    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = get_repo_info_tuple_contents( repo_info_tuple )
-    # Handle README files.
-    url = url_join( tool_shed_url,
-                    'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
-                    ( name, repository_owner, changeset_revision ) )
-    response = urllib2.urlopen( url )
-    raw_text = response.read()
-    response.close()
-    readme_files_dict = json.from_json_string( raw_text )
-    return readme_files_dict
 def get_repo_info_tuple_contents( repo_info_tuple ):
     # Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced.
     if len( repo_info_tuple ) == 6:
@@ -1235,54 +718,6 @@
             if tool:
                 repository_tools_tups.append( ( relative_path, guid, tool ) )
     return repository_tools_tups
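get_readme_files_dict_for_display() (now in readme_util, per the imports at the top of this commit) is a plain HTTP round trip: build the get_readme_files URL, fetch it, and decode the JSON payload. A standalone sketch of that round trip, Python 2 style to match the code above; the URL values are placeholders:

    import json
    import urllib2

    def fetch_readme_files(tool_shed_url, name, owner, changeset_revision):
        # Mirrors the query string built by the removed function.
        url = '%s/repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
            (tool_shed_url.rstrip('/'), name, owner, changeset_revision)
        response = urllib2.urlopen(url)
        try:
            return json.loads(response.read())
        finally:
            response.close()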
-def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
-    """
-    Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
-    repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
-    this method is required to retrieve all repository dependencies.
-    """
-    all_repo_info_dicts = []
-    if repo_info_dicts:
-        # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
-        required_repository_tups = []
-        for repo_info_dict in repo_info_dicts:
-            for repository_name, repo_info_tup in repo_info_dict.items():
-                description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
-                    get_repo_info_tuple_contents( repo_info_tup )
-                if repository_dependencies:
-                    for key, val in repository_dependencies.items():
-                        if key in [ 'root_key', 'description' ]:
-                            continue
-                        toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
-                        components_list = [ toolshed, name, owner, changeset_revision ]
-                        if components_list not in required_repository_tups:
-                            required_repository_tups.append( components_list )
-                        for components_list in val:
-                            if components_list not in required_repository_tups:
-                                required_repository_tups.append( components_list )
-        if required_repository_tups:
-            # The value of required_repository_tups is a list of tuples, so we need to encode it.
-            encoded_required_repository_tups = []
-            for required_repository_tup in required_repository_tups:
-                encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
-            encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
-            encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
-            url = url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
-            response = urllib2.urlopen( url )
-            text = response.read()
-            response.close()
-            if text:
-                required_repo_info_dict = json.from_json_string( text )
-                required_repo_info_dicts = []
-                encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
-                for encoded_dict_str in encoded_dict_strings:
-                    decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
-                    required_repo_info_dicts.append( decoded_dict )
-                if required_repo_info_dicts:
-                    for required_repo_info_dict in required_repo_info_dicts:
-                        if required_repo_info_dict not in all_repo_info_dicts:
-                            all_repo_info_dicts.append( required_repo_info_dict )
-    return all_repo_info_dicts
 def get_reversed_changelog_changesets( repo ):
     reversed_changelog = []
     for changeset in repo.changelog:
@@ -1519,57 +954,6 @@
         util.send_mail( frm, to, subject, body, trans.app.config )
     except Exception, e:
         log.exception( "An error occurred sending a tool shed repository update alert by email." )
-def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
-    # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
-    message = ''
-    sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
-    if sample_files:
-        if 'tool_data_table_conf.xml.sample' in sample_files:
-            # Load entries into the tool_data_tables if the tool requires them.
-            tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
-            error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
-    tool, valid, message2 = load_tool_from_config( trans.app, tool_config_filepath )
-    message = concat_messages( message, message2 )
-    return tool, valid, message, sample_files
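The wire format used by get_required_repo_info_dicts() above is a two-level join: each (tool_shed, name, owner, changeset_revision) tuple is joined with one separator, the tuples are joined with a second, and the whole string is then encoded for the URL. A standalone sketch of the join/split symmetry; SEP and SEP2 are illustrative stand-ins for encoding_util.encoding_sep and encoding_util.encoding_sep2, and the final tool_shed_encode step is omitted:

    SEP, SEP2 = '__esep__', '__esepii__'

    def encode_tups(tups):
        # Inner join per tuple, outer join across tuples.
        return SEP2.join(SEP.join(t) for t in tups)

    def decode_tups(s):
        return [tuple(chunk.split(SEP)) for chunk in s.split(SEP2)] if s else []

    tups = [('http://shed', 'blast', 'devteam', 'abc123'),
            ('http://shed', 'bwa', 'devteam', 'def456')]
    assert decode_tups(encode_tups(tups)) == tups

The separators must simply be strings that can never occur inside a repository name, owner, or changeset hash; anything with that property round-trips.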
-def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
-    tool = None
-    message = ''
-    ctx = get_changectx_for_changeset( repo, changeset_revision )
-    # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
-    # the manifest, but have been deleted from disk.
-    sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
-    if sample_files:
-        trans.app.config.tool_data_path = work_dir
-        if 'tool_data_table_conf.xml.sample' in sample_files:
-            # Load entries into the tool_data_tables if the tool requires them.
-            tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
-            if tool_data_table_config:
-                error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
-                if error:
-                    log.debug( message )
-    manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
-    if manifest_ctx and ctx_file:
-        tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
-        message = concat_messages( message, message2 )
-    return tool, message, sample_files
-def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
-    """
-    Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
-    if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
-    """
-    error = False
-    message = ''
-    try:
-        new_table_elems, message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
-                                                                                          tool_data_path=app.config.tool_data_path,
-                                                                                          shed_tool_data_table_config=app.config.shed_tool_data_table_config,
-                                                                                          persist=persist )
-        if message:
-            error = True
-    except Exception, e:
-        message = str( e )
-        error = True
-    return error, message
 def has_previous_repository_reviews( trans, repository, changeset_revision ):
     """Determine if a repository has a changeset revision review prior to the received changeset revision."""
     repo = hg.repository( get_configured_ui(), repository.repo_path( trans.app ) )
@@ -1579,87 +963,6 @@
         if previous_changeset_revision in reviewed_revision_hashes:
             return True
     return False
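handle_sample_tool_data_table_conf_file() above returns an (error, message) pair instead of raising, treating both a non-empty message and an exception as failure. A standalone sketch of that convention; parse_config is a hypothetical parser callable, not a Galaxy API:

    def add_entries(parse_config, filename):
        # Returns (error, message): a non-empty message from the parser counts
        # as a soft failure, and an exception becomes (True, str(e)).
        try:
            message = parse_config(filename)
            return bool(message), message
        except Exception as e:
            return True, str(e)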
- """ - original_tool_data_path = trans.app.config.tool_data_path - repository = get_repository_in_tool_shed( trans, repository_id ) - repo_files_dir = repository.repo_path( trans.app ) - repo = hg.repository( get_configured_ui(), repo_files_dir ) - message = '' - tool = None - can_use_disk_file = False - tool_config_filepath = get_absolute_path_to_file_in_repository( repo_files_dir, tool_config_filename ) - work_dir = tempfile.mkdtemp() - can_use_disk_file = can_use_tool_config_disk_file( trans, repository, repo, tool_config_filepath, changeset_revision ) - if can_use_disk_file: - trans.app.config.tool_data_path = work_dir - tool, valid, message, sample_files = handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ) - if tool is not None: - invalid_files_and_errors_tups = check_tool_input_params( trans.app, - repo_files_dir, - tool_config_filename, - tool, - sample_files ) - if invalid_files_and_errors_tups: - message2 = generate_message_for_invalid_tools( trans, - invalid_files_and_errors_tups, - repository, - metadata_dict=None, - as_html=True, - displaying_invalid_tool=True ) - message = concat_messages( message, message2 ) - else: - tool, message, sample_files = handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ) - remove_dir( work_dir ) - trans.app.config.tool_data_path = original_tool_data_path - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - reset_tool_data_tables( trans.app ) - return repository, tool, message -def load_tool_from_config( app, full_path ): - try: - tool = app.toolbox.load_tool( full_path ) - valid = True - error_message = None - except KeyError, e: - tool = None - valid = False - error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e ) - error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct ' - error_message += 'this error. ' - except Exception, e: - tool = None - valid = False - error_message = str( e ) - return tool, valid, error_message -def load_tool_from_tmp_config( trans, repo, ctx, ctx_file, work_dir ): - tool = None - message = '' - tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir ) - if tmp_tool_config: - element_tree = util.parse_xml( tmp_tool_config ) - element_tree_root = element_tree.getroot() - # Look for code files required by the tool config. - tmp_code_files = [] - for code_elem in element_tree_root.findall( 'code' ): - code_file_name = code_elem.get( 'file' ) - tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir ) - if tmp_code_file_name: - tmp_code_files.append( tmp_code_file_name ) - tool, valid, message = load_tool_from_config( trans.app, tmp_tool_config ) - for tmp_code_file in tmp_code_files: - try: - os.unlink( tmp_code_file ) - except: - pass - try: - os.unlink( tmp_tool_config ) - except: - pass - return tool, message def open_repository_files_folder( trans, folder_path ): try: files_list = get_repository_files( trans, folder_path ) @@ -1714,9 +1017,6 @@ if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]: return tool_shed_repository, previous_changeset_revision return None, None -def reset_tool_data_tables( app ): - # Reset the tool_data_tables to an empty dictionary. 
-    app.tool_data_tables.data_tables = {}
 def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
     """
     Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.