1 new commit in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/cb6dffff2797/
Changeset:   cb6dffff2797
User:        davebgx
Date:        2014-09-15 22:34:18
Summary:     Merged in greg/gvk-galaxy-central (pull request #485)

             Encapsulate the Tool Shed's metadata generator.

Affected #:  13 files

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -417,12 +417,13 @@
         query = irmm.get_query_for_setting_metadata_on_repositories( order=False )
         # Now reset metadata on all remaining repositories.
         for repository in query:
-            repository_id = trans.security.encode_id( repository.id )
             try:
-                invalid_file_tups, metadata_dict = irmm.reset_all_metadata_on_installed_repository( repository_id )
-                if invalid_file_tups:
+                irmm.set_repository( repository )
+                irmm.reset_all_metadata_on_installed_repository()
+                irmm_invalid_file_tups = irmm.get_invalid_file_tups()
+                if irmm_invalid_file_tups:
                     message = tool_util.generate_message_for_invalid_tools( trans.app,
-                                                                            invalid_file_tups,
+                                                                            irmm_invalid_file_tups,
                                                                             repository,
                                                                             None,
                                                                             as_html=False )

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1732,20 +1732,21 @@
         tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
         if relative_install_dir:
             original_metadata_dict = repository.metadata
-            irmm = InstalledRepositoryMetadataManager( trans.app )
-            metadata_dict, invalid_file_tups = \
-                irmm.generate_metadata_for_changeset_revision( repository=repository,
-                                                               changeset_revision=repository.changeset_revision,
-                                                               repository_clone_url=repository_clone_url,
-                                                               shed_config_dict = repository.get_shed_config_dict( trans.app ),
-                                                               relative_install_dir=relative_install_dir,
-                                                               repository_files_dir=None,
-                                                               resetting_all_metadata_on_repository=False,
-                                                               updating_installed_repository=False,
-                                                               persist=False )
-            repository.metadata = metadata_dict
-            if metadata_dict != original_metadata_dict:
-                irmm.update_in_shed_tool_config( repository )
+            irmm = InstalledRepositoryMetadataManager( app=trans.app,
+                                                       repository=repository,
+                                                       changeset_revision=repository.changeset_revision,
+                                                       repository_clone_url=repository_clone_url,
+                                                       shed_config_dict = repository.get_shed_config_dict( trans.app ),
+                                                       relative_install_dir=relative_install_dir,
+                                                       repository_files_dir=None,
+                                                       resetting_all_metadata_on_repository=False,
+                                                       updating_installed_repository=False,
+                                                       persist=False )
+            irmm.generate_metadata_for_changeset_revision()
+            irmm_metadata_dict = irmm.get_metadata_dict()
+            if irmm_metadata_dict != original_metadata_dict:
+                repository.metadata = irmm_metadata_dict
+                irmm.update_in_shed_tool_config()
                 trans.install_model.context.add( repository )
                 trans.install_model.context.flush()
                 message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
@@ -1938,22 +1939,24 @@
                         dmh.remove_from_data_manager( repository )
                     # Update the repository metadata.
                    tpm = tool_panel_manager.ToolPanelManager( trans.app )
-                    irmm = InstalledRepositoryMetadataManager( trans.app, tpm )
-                    metadata_dict, invalid_file_tups = \
-                        irmm.generate_metadata_for_changeset_revision( repository=repository,
-                                                                       changeset_revision=latest_changeset_revision,
-                                                                       repository_clone_url=repository_clone_url,
-                                                                       shed_config_dict=repository.get_shed_config_dict( trans.app ),
-                                                                       relative_install_dir=relative_install_dir,
-                                                                       repository_files_dir=None,
-                                                                       resetting_all_metadata_on_repository=False,
-                                                                       updating_installed_repository=True,
-                                                                       persist=True )
-                    if 'tools' in metadata_dict:
-                        tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
+                    irmm = InstalledRepositoryMetadataManager( app=trans.app,
+                                                               tpm=tpm,
+                                                               repository=repository,
+                                                               changeset_revision=latest_changeset_revision,
+                                                               repository_clone_url=repository_clone_url,
+                                                               shed_config_dict=repository.get_shed_config_dict( trans.app ),
+                                                               relative_install_dir=relative_install_dir,
+                                                               repository_files_dir=None,
+                                                               resetting_all_metadata_on_repository=False,
+                                                               updating_installed_repository=True,
+                                                               persist=True )
+                    irmm.generate_metadata_for_changeset_revision()
+                    irmm_metadata_dict = irmm.get_metadata_dict()
+                    if 'tools' in irmm_metadata_dict:
+                        tool_panel_dict = irmm_metadata_dict.get( 'tool_panel_section', None )
                         if tool_panel_dict is None:
                             tool_panel_dict = tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
-                        repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
+                        repository_tools_tups = irmm.get_repository_tools_tups()
                         tpm.add_to_tool_panel( repository_name=str( repository.name ),
                                                repository_clone_url=repository_clone_url,
                                                changeset_revision=str( repository.installed_changeset_revision ),
@@ -1963,18 +1966,18 @@
                                                tool_panel_dict=tool_panel_dict,
                                                new_install=False )
                     # Add new Data Manager entries
-                    if 'data_manager' in metadata_dict:
+                    if 'data_manager' in irmm_metadata_dict:
                         dmh = data_manager.DataManagerHandler( trans.app )
                         new_data_managers = dmh.install_data_managers( trans.app.config.shed_data_manager_config_file,
-                                                                       metadata_dict,
+                                                                       irmm_metadata_dict,
                                                                        repository.get_shed_config_dict( trans.app ),
                                                                        os.path.join( relative_install_dir, name ),
                                                                        repository,
                                                                        repository_tools_tups )
-                    if 'repository_dependencies' in metadata_dict or 'tool_dependencies' in metadata_dict:
-                        new_repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
+                    if 'repository_dependencies' in irmm_metadata_dict or 'tool_dependencies' in irmm_metadata_dict:
+                        new_repository_dependencies_dict = irmm_metadata_dict.get( 'repository_dependencies', {} )
                         new_repository_dependencies = new_repository_dependencies_dict.get( 'repository_dependencies', [] )
-                        new_tool_dependencies_dict = metadata_dict.get( 'tool_dependencies', {} )
+                        new_tool_dependencies_dict = irmm_metadata_dict.get( 'tool_dependencies', {} )
                         if new_repository_dependencies:
                             # [[http://localhost:9009', package_picard_1_56_0', devteam', 910b0b056666', False', False']]
                             proceed_to_install = False
@@ -2017,7 +2020,7 @@
                                                     updating_repository_id=trans.security.encode_id( repository.id ),
                                                     updating_to_ctx_rev=latest_ctx_rev,
                                                     updating_to_changeset_revision=latest_changeset_revision,
-                                                    encoded_updated_metadata=encoding_util.tool_shed_encode( metadata_dict ),
+                                                    encoded_updated_metadata=encoding_util.tool_shed_encode( irmm_metadata_dict ),
                                                     updating=True )
                                     return self.prepare_for_install( trans, **new_kwd )
                             # Updates received did not include any newly defined repository dependencies but did include
@@ -2033,12 +2036,12 @@
                                         proceed_to_install = True
                                         break
                            if proceed_to_install:
-                                encoded_tool_dependencies_dict = encoding_util.tool_shed_encode( metadata_dict.get( 'tool_dependencies', {} ) )
+                                encoded_tool_dependencies_dict = encoding_util.tool_shed_encode( irmm_metadata_dict.get( 'tool_dependencies', {} ) )
                                 encoded_relative_install_dir = encoding_util.tool_shed_encode( relative_install_dir )
                                 new_kwd = dict( updating_repository_id=trans.security.encode_id( repository.id ),
                                                 updating_to_ctx_rev=latest_ctx_rev,
                                                 updating_to_changeset_revision=latest_changeset_revision,
-                                                encoded_updated_metadata=encoding_util.tool_shed_encode( metadata_dict ),
+                                                encoded_updated_metadata=encoding_util.tool_shed_encode( irmm_metadata_dict ),
                                                 encoded_relative_install_dir=encoded_relative_install_dir,
                                                 encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
                                                 message=message,
@@ -2047,7 +2050,7 @@
                     # Updates received did not include any newly defined repository dependencies or newly defined
                     # tool dependencies that need to be installed.
                     repository = trans.app.update_repository_manager.update_repository_record( repository=repository,
-                                                                                               updated_metadata_dict=metadata_dict,
+                                                                                               updated_metadata_dict=irmm_metadata_dict,
                                                                                                updated_changeset_revision=latest_changeset_revision,
                                                                                                updated_ctx_rev=latest_ctx_rev )
                     message = "The installed repository named '%s' has been updated to change set revision '%s'. " % \

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -429,13 +429,13 @@

         def handle_repository( trans, rmm, repository, results ):
             log.debug( "Resetting metadata on repository %s" % str( repository.name ) )
-            repository_id = trans.security.encode_id( repository.id )
             try:
-                invalid_file_tups, metadata_dict = \
-                    rmm.reset_all_metadata_on_repository_in_tool_shed( repository_id )
-                if invalid_file_tups:
+                rmm.set_repository( repository )
+                rmm.reset_all_metadata_on_repository_in_tool_shed()
+                rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+                if rmm_invalid_file_tups:
                     message = tool_util.generate_message_for_invalid_tools( trans.app,
-                                                                            invalid_file_tups,
+                                                                            rmm_invalid_file_tups,
                                                                             repository,
                                                                             None,
                                                                             as_html=False )
@@ -451,7 +451,11 @@
                 status = '%s : %s' % ( str( repository.name ), message )
             results[ 'repository_status' ].append( status )
             return results
-        rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
+        rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                     user=trans.user,
+                                                                     resetting_all_metadata_on_repository=True,
+                                                                     updating_installed_repository=False,
+                                                                     persist=False )
        start_time = strftime( "%Y-%m-%d %H:%M:%S" )
        results = dict( start_time=start_time,
                        repository_status=[],
@@ -511,12 +515,17 @@
        results = dict( start_time=start_time,
                        repository_status=[] )
        try:
-            rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
-            invalid_file_tups, metadata_dict = \
-                rmm.reset_all_metadata_on_repository_in_tool_shed( trans.security.encode_id( repository.id ) )
-            if invalid_file_tups:
+            rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                         user=trans.user,
+                                                                         repository=repository,
+                                                                         resetting_all_metadata_on_repository=True,
+                                                                         updating_installed_repository=False,
+                                                                         persist=False )
+            rmm.reset_all_metadata_on_repository_in_tool_shed()
+            rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+            if rmm_invalid_file_tups:
                message = tool_util.generate_message_for_invalid_tools(
                                                                        trans.app,
-                                                                       invalid_file_tups,
+                                                                       rmm_invalid_file_tups,
                                                                        repository,
                                                                        None,
                                                                        as_html=False )

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/galaxy/webapps/tool_shed/controllers/hg.py
--- a/lib/galaxy/webapps/tool_shed/controllers/hg.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -1,6 +1,8 @@
 import os, logging
 from galaxy import web
 from galaxy.web.base.controller import BaseUIController
+
+from tool_shed.util.common_util import generate_clone_url_for_repository_in_tool_shed
 from tool_shed.util.shed_util_common import get_repository_by_name_and_owner
 from tool_shed.util.hg_util import update_repository
 from tool_shed.metadata import repository_metadata_manager
@@ -47,9 +49,19 @@
                 # interface will result in a new head being created.
                 repo = hg.repository( ui.ui(), repository.repo_path( trans.app ) )
                 update_repository( repo, ctx_rev=None )
+                repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
                 # Set metadata using the repository files on disk.
-                rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
-                error_message, status = rmm.set_repository_metadata( trans.request.host, repository )
+                rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                             user=trans.user,
+                                                                             repository=repository,
+                                                                             changeset_revision=repository.tip( trans.app ),
+                                                                             repository_clone_url=repository_clone_url,
+                                                                             relative_install_dir=repository.repo_path( trans.app ),
+                                                                             repository_files_dir=None,
+                                                                             resetting_all_metadata_on_repository=False,
+                                                                             updating_installed_repository=False,
+                                                                             persist=False )
+                error_message, status = rmm.set_repository_metadata( trans.request.host )
                 if status == 'ok' and error_message:
                     log.debug( "Successfully reset metadata on repository %s owned by %s, but encountered problem: %s" % \
                         ( str( repository.name ), str( repository.user.username ), error_message ) )

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2759,15 +2759,18 @@
     def reset_all_metadata( self, trans, id, **kwd ):
         """Reset all metadata on the complete changelog for a single repository in the tool shed."""
         # This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template.
-        rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
-        invalid_file_tups, metadata_dict = \
-            rmm.reset_all_metadata_on_repository_in_tool_shed( id )
-        if invalid_file_tups:
-            repository = suc.get_repository_in_tool_shed( trans.app, id )
+        repository = suc.get_repository_in_tool_shed( trans.app, id )
+        rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                     user=trans.user,
+                                                                     repository=repository )
+        rmm.reset_all_metadata_on_repository_in_tool_shed()
+        rmm_metadata_dict = rmm.get_metadata_dict()
+        rmm_invalid_file_tups = rmm.get_invalid_file_tups()
+        if rmm_invalid_file_tups:
             message = tool_util.generate_message_for_invalid_tools( trans.app,
-                                                                    invalid_file_tups,
+                                                                    rmm_invalid_file_tups,
                                                                     repository,
-                                                                    metadata_dict )
+                                                                    rmm_metadata_dict )
             status = 'error'
         else:
             message = "All repository metadata has been reset. "
@@ -2849,10 +2852,10 @@
             if tip == repository.tip( trans.app ):
                 message += 'No changes to repository. '
             else:
-                rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
-                status, error_message = rmm.set_repository_metadata_due_to_new_tip( trans.request.host,
-                                                                                    repository,
-                                                                                    **kwd )
+                rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                             user=trans.user,
+                                                                             repository=repository )
+                status, error_message = rmm.set_repository_metadata_due_to_new_tip( trans.request.host, **kwd )
                 if error_message:
                     message = error_message
                 else:

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -263,10 +263,11 @@
                                   ( len( files_to_remove ), upload_point )
                         else:
                             message += " %d files were removed from the repository root. " % len( files_to_remove )
-                    rmm = repository_metadata_manager.RepositoryMetadataManager( trans.app, trans.user )
+                    rmm = repository_metadata_manager.RepositoryMetadataManager( app=trans.app,
+                                                                                 user=trans.user,
+                                                                                 repository=repository )
                     status, error_message = \
                         rmm.set_repository_metadata_due_to_new_tip( trans.request.host,
-                                                                    repository,
                                                                     content_alert_str=content_alert_str,
                                                                     **kwd )
                     if error_message:

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/capsule/capsule_manager.py
--- a/lib/tool_shed/capsule/capsule_manager.py
+++ b/lib/tool_shed/capsule/capsule_manager.py
@@ -792,9 +792,10 @@
                     results_dict[ 'ok' ] = False
                     results_dict[ 'error_message' ] += error_message
             try:
-                rmm = repository_metadata_manager.RepositoryMetadataManager( self.app, self.user )
+                rmm = repository_metadata_manager.RepositoryMetadataManager( app=self.app,
+                                                                             user=self.user,
+                                                                             repository=repository )
                 status, error_message = rmm.set_repository_metadata_due_to_new_tip( self.host,
-                                                                                    repository,
                                                                                     content_alert_str=content_alert_str )
                 if error_message:
                     results_dict[ 'ok' ] = False

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -510,31 +510,33 @@
         """
         shed_config_dict = self.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
         tdtm = data_table_manager.ToolDataTableManager( self.app )
-        irmm = InstalledRepositoryMetadataManager( self.app, self.tpm )
-        metadata_dict, invalid_file_tups = \
-            irmm.generate_metadata_for_changeset_revision( repository=tool_shed_repository,
-                                                           changeset_revision=tool_shed_repository.changeset_revision,
-                                                           repository_clone_url=repository_clone_url,
-                                                           shed_config_dict=shed_config_dict,
-                                                           relative_install_dir=relative_install_dir,
-                                                           repository_files_dir=None,
-                                                           resetting_all_metadata_on_repository=False,
-                                                           updating_installed_repository=False,
-                                                           persist=True )
-        tool_shed_repository.metadata = metadata_dict
+        irmm = InstalledRepositoryMetadataManager( app=self.app,
+                                                   tpm=self.tpm,
+                                                   repository=tool_shed_repository,
+                                                   changeset_revision=tool_shed_repository.changeset_revision,
+                                                   repository_clone_url=repository_clone_url,
+                                                   shed_config_dict=shed_config_dict,
+                                                   relative_install_dir=relative_install_dir,
+                                                   repository_files_dir=None,
+                                                   resetting_all_metadata_on_repository=False,
+                                                   updating_installed_repository=False,
+                                                   persist=True )
+        irmm.generate_metadata_for_changeset_revision()
+        irmm_metadata_dict = irmm.get_metadata_dict()
+        tool_shed_repository.metadata = irmm_metadata_dict
         # Update the tool_shed_repository.tool_shed_status column in the database.
         tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( self.app, tool_shed_repository )
         if tool_shed_status_dict:
             tool_shed_repository.tool_shed_status = tool_shed_status_dict
         self.install_model.context.add( tool_shed_repository )
         self.install_model.context.flush()
-        if 'tool_dependencies' in metadata_dict and not reinstalling:
+        if 'tool_dependencies' in irmm_metadata_dict and not reinstalling:
             tool_dependencies = tool_dependency_util.create_tool_dependency_objects( self.app,
                                                                                      tool_shed_repository,
                                                                                      relative_install_dir,
                                                                                      set_status=True )
-        if 'sample_files' in metadata_dict:
-            sample_files = metadata_dict.get( 'sample_files', [] )
+        if 'sample_files' in irmm_metadata_dict:
+            sample_files = irmm_metadata_dict.get( 'sample_files', [] )
             tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
             tool_data_table_conf_filename, tool_data_table_elems = \
                 tdtm.install_tool_data_tables( tool_shed_repository, tool_index_sample_files )
@@ -543,13 +545,13 @@
                                                            None,
                                                            self.app.config.shed_tool_data_table_config,
                                                            persist=True )
-        if 'tools' in metadata_dict:
-            tool_panel_dict = self.tpm.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
-            sample_files = metadata_dict.get( 'sample_files', [] )
+        if 'tools' in irmm_metadata_dict:
+            tool_panel_dict = self.tpm.generate_tool_panel_dict_for_new_install( irmm_metadata_dict[ 'tools' ], tool_section )
+            sample_files = irmm_metadata_dict.get( 'sample_files', [] )
             tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
             tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
             sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
-            repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
+            repository_tools_tups = irmm.get_repository_tools_tups()
             if repository_tools_tups:
                 # Handle missing data table entries for tool parameters that are dynamically generated select lists.
                 repository_tools_tups = tdtm.handle_missing_data_table_entry( relative_install_dir,
@@ -575,15 +577,15 @@
                                                shed_tool_conf=shed_tool_conf,
                                                tool_panel_dict=tool_panel_dict,
                                                new_install=True )
-        if 'data_manager' in metadata_dict:
+        if 'data_manager' in irmm_metadata_dict:
             dmh = data_manager.DataManagerHandler( self.app )
             new_data_managers = dmh.install_data_managers( self.app.config.shed_data_manager_config_file,
-                                                           metadata_dict,
+                                                           irmm_metadata_dict,
                                                            shed_config_dict,
                                                            relative_install_dir,
                                                            tool_shed_repository,
                                                            repository_tools_tups )
-        if 'datatypes' in metadata_dict:
+        if 'datatypes' in irmm_metadata_dict:
             tool_shed_repository.status = self.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
             if not tool_shed_repository.includes_datatypes:
                 tool_shed_repository.includes_datatypes = True
@@ -604,7 +606,7 @@
                                                               name=tool_shed_repository.name,
                                                               owner=tool_shed_repository.owner,
                                                               installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
-                                                              tool_dicts=metadata_dict.get( 'tools', [] ),
+                                                              tool_dicts=irmm_metadata_dict.get( 'tools', [] ),
                                                               converter_path=converter_path,
                                                               display_path=display_path )
             if converter_path:

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -82,9 +82,12 @@
             repository.status = self.install_model.ToolShedRepository.installation_status.INSTALLED
             if repository.includes_tools_for_display_in_tool_panel:
                 tpm = tool_panel_manager.ToolPanelManager( self.app )
-                irmm = InstalledRepositoryMetadataManager( self.app, tpm )
-                metadata = repository.metadata
-                repository_tools_tups = irmm.get_repository_tools_tups( metadata )
+                irmm = InstalledRepositoryMetadataManager( app=self.app,
+                                                           tpm=tpm,
+                                                           repository=repository,
+                                                           changeset_revision=repository.changeset_revision,
+                                                           metadata_dict=repository.metadata )
+                repository_tools_tups = irmm.get_repository_tools_tups()
                 # Reload tools into the appropriate tool panel section.
                 tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
                 tpm.add_to_tool_panel( repository.name,
@@ -101,7 +104,7 @@
                     data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name )
                 dmh = data_manager.DataManagerHandler( self.app )
                 new_data_managers = dmh.install_data_managers( self.app.config.shed_data_manager_config_file,
-                                                               metadata,
+                                                               repository.metadata,
                                                                repository.get_shed_config_dict( self.app ),
                                                                data_manager_relative_install_dir,
                                                                repository,

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
--- a/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -19,9 +19,16 @@

 class InstalledRepositoryMetadataManager( metadata_generator.MetadataGenerator ):

-    def __init__( self, app, tpm=None ):
-        super( InstalledRepositoryMetadataManager, self ).__init__( app )
-        self.app = app
+    def __init__( self, app, tpm=None, repository=None, changeset_revision=None, repository_clone_url=None,
+                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+                  persist=False, metadata_dict=None ):
+        super( InstalledRepositoryMetadataManager, self ).__init__( app, repository, changeset_revision,
+                                                                    repository_clone_url, shed_config_dict,
+                                                                    relative_install_dir, repository_files_dir,
+                                                                    resetting_all_metadata_on_repository,
+                                                                    updating_installed_repository, persist,
+                                                                    metadata_dict=metadata_dict, user=None )
         if tpm is None:
             self.tpm = tool_panel_manager.ToolPanelManager( self.app )
         else:
             self.tpm = tpm
@@ -53,15 +60,15 @@
         return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ) \
                                              .filter( self.app.install_model.ToolShedRepository.table.c.uninstalled == False )

-    def get_repository_tools_tups( self, metadata_dict ):
+    def get_repository_tools_tups( self ):
         """
         Return a list of tuples of the form (relative_path, guid, tool) for each tool defined
         in the received tool shed repository metadata.
         """
         repository_tools_tups = []
-        index, shed_conf_dict = self.tpm.get_shed_tool_conf_dict( metadata_dict.get( 'shed_config_filename' ) )
-        if 'tools' in metadata_dict:
-            for tool_dict in metadata_dict[ 'tools' ]:
+        index, shed_conf_dict = self.tpm.get_shed_tool_conf_dict( self.metadata_dict.get( 'shed_config_filename' ) )
+        if 'tools' in self.metadata_dict:
+            for tool_dict in self.metadata_dict[ 'tools' ]:
                 load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
                 if shed_conf_dict.get( 'tool_path' ):
                     load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
@@ -74,36 +81,21 @@
                     repository_tools_tups.append( ( relative_path, guid, tool ) )
         return repository_tools_tups

-    def reset_all_metadata_on_installed_repository( self, id ):
+    def reset_all_metadata_on_installed_repository( self ):
         """Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
-        invalid_file_tups = []
-        metadata_dict = {}
-        repository = repository_util.get_installed_tool_shed_repository( self.app, id )
-        repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
-        tool_path, relative_install_dir = repository.get_tool_relative_path( self.app )
-        if relative_install_dir:
-            original_metadata_dict = repository.metadata
-            metadata_dict, invalid_file_tups = \
-                self.generate_metadata_for_changeset_revision( repository=repository,
-                                                               changeset_revision=repository.changeset_revision,
-                                                               repository_clone_url=repository_clone_url,
-                                                               shed_config_dict = repository.get_shed_config_dict( self.app ),
-                                                               relative_install_dir=relative_install_dir,
-                                                               repository_files_dir=None,
-                                                               resetting_all_metadata_on_repository=False,
-                                                               updating_installed_repository=False,
-                                                               persist=False )
-            repository.metadata = metadata_dict
-            if metadata_dict != original_metadata_dict:
-                self.update_in_shed_tool_config( repository )
-                self.app.install_model.context.add( repository )
+        if self.relative_install_dir:
+            original_metadata_dict = self.repository.metadata
+            self.generate_metadata_for_changeset_revision()
+            if self.metadata_dict != original_metadata_dict:
+                self.repository.metadata = self.metadata_dict
+                self.update_in_shed_tool_config()
+                self.app.install_model.context.add( self.repository )
                 self.app.install_model.context.flush()
-                log.debug( 'Metadata has been reset on repository %s.' % repository.name )
+                log.debug( 'Metadata has been reset on repository %s.' % self.repository.name )
             else:
-                log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name )
+                log.debug( 'Metadata did not need to be reset on repository %s.' % self.repository.name )
         else:
-            log.debug( 'Error locating installation directory for repository %s.' % repository.name )
-        return invalid_file_tups, metadata_dict
+            log.debug( 'Error locating installation directory for repository %s.' % self.repository.name )

     def reset_metadata_on_selected_repositories( self, user, **kwd ):
         """
@@ -119,19 +111,19 @@
         for repository_id in repository_ids:
             try:
                 repository = repository_util.get_installed_tool_shed_repository( self.app, repository_id )
-                owner = str( repository.owner )
-                invalid_file_tups, metadata_dict = \
-                    self.reset_all_metadata_on_installed_repository( repository_id )
-                if invalid_file_tups:
+                self.set_repository( repository )
+                self.reset_all_metadata_on_installed_repository()
+                if self.invalid_file_tups:
                     message = tool_util.generate_message_for_invalid_tools( self.app,
-                                                                            invalid_file_tups,
+                                                                            self.invalid_file_tups,
                                                                             repository,
                                                                             None,
                                                                             as_html=False )
                     log.debug( message )
                     unsuccessful_count += 1
                 else:
-                    log.debug( "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), owner ) )
+                    log.debug( "Successfully reset metadata on repository %s owned by %s" % \
+                        ( str( repository.name ), str( repository.owner ) ) )
                     successful_count += 1
             except:
                 log.exception( "Error attempting to reset metadata on repository %s", str( repository.name ) )
@@ -146,32 +138,37 @@
             status = 'error'
         return message, status

-    def tool_shed_from_repository_clone_url( self, repository_clone_url ):
+    def set_repository( self, repository ):
+        super( InstalledRepositoryMetadataManager, self ).set_repository( repository )
+        self.repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
+
+    def tool_shed_from_repository_clone_url( self ):
         """Given a repository clone URL, return the tool shed that contains the repository."""
-        return common_util.remove_protocol_and_user_from_clone_url( repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )
+        cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( self.repository_clone_url )
+        return common_util.remove_protocol_and_user_from_clone_url( cleaned_repository_clone_url ).split( '/repos/' )[ 0 ].rstrip( '/' )

-    def update_in_shed_tool_config( self, repository ):
+    def update_in_shed_tool_config( self ):
         """
         A tool shed repository is being updated so change the shed_tool_conf file.  Parse the
         config file to generate the entire list of config_elems instead of using the in-memory list.
         """
-        shed_conf_dict = repository.get_shed_config_dict( self.app )
+        shed_conf_dict = self.repository.get_shed_config_dict( self.app )
         shed_tool_conf = shed_conf_dict[ 'config_filename' ]
         tool_path = shed_conf_dict[ 'tool_path' ]
-        tool_panel_dict = self.tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( repository )
-        repository_tools_tups = self.get_repository_tools_tups( repository.metadata )
-        clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
-        cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( clone_url )
-        tool_shed = self.tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
-        owner = repository.owner
+        tool_panel_dict = self.tpm.generate_tool_panel_dict_from_shed_tool_conf_entries( self.repository )
+        repository_tools_tups = self.get_repository_tools_tups()
+        clone_url = common_util.generate_clone_url_for_installed_repository( self.app, self.repository )
+        tool_shed = self.tool_shed_from_repository_clone_url()
+        owner = self.repository.owner
         if not owner:
+            cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url( clone_url )
             owner = suc.get_repository_owner( cleaned_repository_clone_url )
         guid_to_tool_elem_dict = {}
         for tool_config_filename, guid, tool in repository_tools_tups:
             guid_to_tool_elem_dict[ guid ] = self.tpm.generate_tool_elem( tool_shed,
-                                                                          repository.name,
-                                                                          repository.changeset_revision,
-                                                                          repository.owner or '',
+                                                                          self.repository.name,
+                                                                          self.repository.changeset_revision,
+                                                                          self.repository.owner or '',
                                                                           tool_config_filename,
                                                                           tool,
                                                                           None )

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/galaxy_install/tool_migration_manager.py
--- a/lib/tool_shed/galaxy_install/tool_migration_manager.py
+++ b/lib/tool_shed/galaxy_install/tool_migration_manager.py
@@ -416,21 +416,23 @@
             log.exception( "Exception attempting to filter and persist non-shed-related tool panel configs:\n%s" % str( e ) )
         finally:
             lock.release()
-        irmm = InstalledRepositoryMetadataManager( self.app, self.tpm )
-        metadata_dict, invalid_file_tups = \
-            irmm.generate_metadata_for_changeset_revision( repository=tool_shed_repository,
-                                                           changeset_revision=tool_shed_repository.changeset_revision,
-                                                           repository_clone_url=repository_clone_url,
-                                                           shed_config_dict = self.shed_config_dict,
-                                                           relative_install_dir=relative_install_dir,
-                                                           repository_files_dir=None,
-                                                           resetting_all_metadata_on_repository=False,
-                                                           updating_installed_repository=False,
-                                                           persist=True )
-        tool_shed_repository.metadata = metadata_dict
+        irmm = InstalledRepositoryMetadataManager( app=self.app,
+                                                   tpm=self.tpm,
+                                                   repository=tool_shed_repository,
+                                                   changeset_revision=tool_shed_repository.changeset_revision,
+                                                   repository_clone_url=repository_clone_url,
+                                                   shed_config_dict = self.shed_config_dict,
+                                                   relative_install_dir=relative_install_dir,
+                                                   repository_files_dir=None,
+                                                   resetting_all_metadata_on_repository=False,
+                                                   updating_installed_repository=False,
+                                                   persist=True )
+        irmm.generate_metadata_for_changeset_revision()
+        irmm_metadata_dict = irmm.get_metadata_dict()
+        tool_shed_repository.metadata = irmm_metadata_dict
         self.app.install_model.context.add( tool_shed_repository )
         self.app.install_model.context.flush()
-        has_tool_dependencies = self.__has_tool_dependencies( metadata_dict )
+        has_tool_dependencies = self.__has_tool_dependencies( irmm_metadata_dict )
         if has_tool_dependencies:
             # All tool_dependency objects must be created before the tools are processed even if no
             # tool dependencies will be installed.
@@ -440,14 +442,14 @@
                                                                                      set_status=True )
         else:
             tool_dependencies = None
-        if 'tools' in metadata_dict:
+        if 'tools' in irmm_metadata_dict:
             tdtm = data_table_manager.ToolDataTableManager( self.app )
-            sample_files = metadata_dict.get( 'sample_files', [] )
+            sample_files = irmm_metadata_dict.get( 'sample_files', [] )
             sample_files = [ str( s ) for s in sample_files ]
             tool_index_sample_files = tdtm.get_tool_index_sample_files( sample_files )
             tool_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
             sample_files_copied = [ s for s in tool_index_sample_files ]
-            repository_tools_tups = irmm.get_repository_tools_tups( metadata_dict )
+            repository_tools_tups = irmm.get_repository_tools_tups()
             if repository_tools_tups:
                 # Handle missing data table entries for tool parameters that are dynamically
                 # generated select lists.
@@ -491,7 +493,7 @@
                     if installed_tool_dependency.status == self.app.install_model.ToolDependency.installation_status.ERROR:
                         print '\nThe ToolMigrationManager returned the following error while installing tool dependency ', installed_tool_dependency.name, ':'
                         print installed_tool_dependency.error_message, '\n\n'
-        if 'datatypes' in metadata_dict:
+        if 'datatypes' in irmm_metadata_dict:
             cdl = custom_datatype_manager.CustomDatatypeLoader( self.app )
             tool_shed_repository.status = self.app.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
             if not tool_shed_repository.includes_datatypes:
@@ -514,7 +516,7 @@
                                                               name=tool_shed_repository.name,
                                                               owner=self.repository_owner,
                                                               installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
-                                                              tool_dicts=metadata_dict.get( 'tools', [] ),
+                                                              tool_dicts=irmm_metadata_dict.get( 'tools', [] ),
                                                               converter_path=converter_path,
                                                               display_path=display_path )
             if converter_path:

diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/metadata/metadata_generator.py
--- a/lib/tool_shed/metadata/metadata_generator.py
+++ b/lib/tool_shed/metadata/metadata_generator.py
@@ -27,31 +27,89 @@

 class MetadataGenerator( object ):

-    def __init__( self, app ):
+    def __init__( self, app, repository=None, changeset_revision=None, repository_clone_url=None,
+                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+                  persist=False, metadata_dict=None, user=None ):
         self.app = app
+        self.user = user
+        self.repository = repository
+        if self.app.name == 'galaxy':
+            if changeset_revision is None and self.repository is not None:
+                self.changeset_revision = self.repository.changeset_revision
+            else:
+                self.changeset_revision = changeset_revision
+
+            if repository_clone_url is None and self.repository is not None:
+                self.repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, self.repository )
+            else:
+                self.repository_clone_url = repository_clone_url
+            if shed_config_dict is None:
+                if self.repository is not None:
+                    self.shed_config_dict = self.repository.get_shed_config_dict( self.app )
+                else:
+                    self.shed_config_dict = {}
+            else:
+                self.shed_config_dict = shed_config_dict
+            if relative_install_dir is None and self.repository is not None:
+                tool_path, relative_install_dir = self.repository.get_tool_relative_path( self.app )
+            if repository_files_dir is None and self.repository is not None:
+                repository_files_dir = self.repository.repo_files_directory( self.app )
+            if metadata_dict is None:
+                # Shed related tool panel configs are only relevant to Galaxy.
+                self.metadata_dict = { 'shed_config_filename' : self.shed_config_dict.get( 'config_filename', None ) }
+            else:
+                self.metadata_dict = metadata_dict
+        else:
+            # We're in the Tool Shed.
+            if changeset_revision is None and self.repository is not None:
+                self.changeset_revision = self.repository.tip( self.app )
+            else:
+                self.changeset_revision = changeset_revision
+            if repository_clone_url is None and self.repository is not None:
+                self.repository_clone_url = \
+                    common_util.generate_clone_url_for_repository_in_tool_shed( self.user, self.repository )
+            else:
+                self.repository_clone_url = repository_clone_url
+            if shed_config_dict is None:
+                self.shed_config_dict = {}
+            else:
+                self.shed_config_dict = shed_config_dict
+            if relative_install_dir is None and self.repository is not None:
+                relative_install_dir = self.repository.repo_path( self.app )
+            if repository_files_dir is None and self.repository is not None:
+                repository_files_dir = self.repository.repo_path( self.app )
+            if metadata_dict is None:
+                self.metadata_dict = {}
+            else:
+                self.metadata_dict = metadata_dict
+        self.relative_install_dir = relative_install_dir
+        self.repository_files_dir = repository_files_dir
+        self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository
+        self.updating_installed_repository = updating_installed_repository
+        self.persist = persist
+        self.invalid_file_tups = []
         self.sa_session = app.model.context.current
         self.NOT_TOOL_CONFIGS = [ suc.DATATYPES_CONFIG_FILENAME,
-                                 rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME,
-                                 rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME,
-                                 suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]
+                                  rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME,
+                                  rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME,
+                                  suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]

-    def generate_data_manager_metadata( self, repository, repo_dir, data_manager_config_filename, metadata_dict,
+    def generate_data_manager_metadata( self, repo_dir, data_manager_config_filename, metadata_dict,
                                         shed_config_dict=None ):
         """
         Update the received metadata_dict with information from the parsed data_manager_config_filename.
         """
         if data_manager_config_filename is None:
             return metadata_dict
-        repo_path = repository.repo_path( self.app )
+        repo_path = self.repository.repo_path( self.app )
         try:
             # Galaxy Side.
-            repo_files_directory = repository.repo_files_directory( self.app )
+            repo_files_directory = self.repository.repo_files_directory( self.app )
             repo_dir = repo_files_directory
-            repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
         except AttributeError:
             # Tool Shed side.
             repo_files_directory = repo_path
-            repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( None, repository )
         relative_data_manager_dir = util.relpath( os.path.split( data_manager_config_filename )[0], repo_dir )
         rel_data_manager_config_filename = os.path.join( relative_data_manager_dir, os.path.split( data_manager_config_filename )[1] )
@@ -91,7 +149,7 @@
                 # FIXME: default behavior is to fall back to tool.name.
                 data_manager_name = data_manager_elem.get( 'name', data_manager_id )
                 version = data_manager_elem.get( 'version', DataManager.DEFAULT_VERSION )
-                guid = self.generate_guid_for_object( repository_clone_url, DataManager.GUID_TYPE, data_manager_id, version )
+                guid = self.generate_guid_for_object( DataManager.GUID_TYPE, data_manager_id, version )
                 data_tables = []
                 if tool_file is None:
                     log.error( 'Data Manager entry is missing tool_file attribute in "%s".' % ( data_manager_config_filename ) )
@@ -130,8 +188,7 @@
             log.debug( 'Loaded Data Manager tool_files: %s' % ( tool_file ) )
         return metadata_dict

-    def generate_datatypes_metadata( self, tv, repository, repository_clone_url, repository_files_dir, datatypes_config,
-                                     metadata_dict ):
+    def generate_datatypes_metadata( self, tv, repository_files_dir, datatypes_config, metadata_dict ):
         """Update the received metadata_dict with information from the parsed datatypes_config."""
         tree, error_message = xml_util.parse_xml( datatypes_config )
         if tree is None:
@@ -180,11 +237,11 @@
                     tool_config_path = hg_util.get_config_from_disk( tool_config, repository_files_dir )
                     full_path = os.path.abspath( tool_config_path )
                     tool, valid, error_message = \
-                        tv.load_tool_from_config( self.app.security.encode_id( repository.id ), full_path )
+                        tv.load_tool_from_config( self.app.security.encode_id( self.repository.id ), full_path )
                     if tool is None:
                         guid = None
                     else:
-                        guid = suc.generate_tool_guid( repository_clone_url, tool )
+                        guid = suc.generate_tool_guid( self.repository_clone_url, tool )
                     converter_dict = dict( tool_config=tool_config,
                                            guid=guid,
                                            target_datatype=target_datatype )
@@ -226,76 +283,70 @@
             valid_tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
         return valid_tool_dependencies_dict

-    def generate_guid_for_object( self, repository_clone_url, guid_type, obj_id, version ):
-        tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
+    def generate_guid_for_object( self, guid_type, obj_id, version ):
+        tmp_url = common_util.remove_protocol_and_user_from_clone_url( self.repository_clone_url )
         return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version )

-    def generate_metadata_for_changeset_revision( self, repository, changeset_revision, repository_clone_url,
-                                                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
-                                                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
-                                                  persist=False ):
+    def generate_metadata_for_changeset_revision( self ):
         """
         Generate metadata for a repository using its files on disk.  To generate metadata
         for changeset revisions older than the repository tip, the repository will have been
         cloned to a temporary location and updated to a specified changeset revision to access
-        that changeset revision's disk files, so the value of repository_files_dir will not
-        always be repository.repo_path( self.app ) (it could be an absolute path to a temporary
-        directory containing a clone).  If it is an absolute path, the value of relative_install_dir
+        that changeset revision's disk files, so the value of self.repository_files_dir will not
+        always be self.repository.repo_path( self.app ) (it could be an absolute path to a temporary
+        directory containing a clone).  If it is an absolute path, the value of self.relative_install_dir
         must contain repository.repo_path( self.app ).
-        The value of persist will be True when the installed repository contains a valid
+        The value of self.persist will be True when the installed repository contains a valid
         tool_data_table_conf.xml.sample file, in which case the entries should ultimately be
         persisted to the file referred to by self.app.config.shed_tool_data_table_config.
         """
         tv = tool_validator.ToolValidator( self.app )
-        if shed_config_dict is None:
-            shed_config_dict = {}
-        if updating_installed_repository:
+        if self.shed_config_dict is None:
+            self.shed_config_dict = {}
+        if self.updating_installed_repository:
             # Keep the original tool shed repository metadata if setting metadata on a repository
             # installed into a local Galaxy instance for which we have pulled updates.
-            original_repository_metadata = repository.metadata
+            original_repository_metadata = self.repository.metadata
         else:
             original_repository_metadata = None
-        readme_file_names = readme_util.get_readme_file_names( str( repository.name ) )
+        readme_file_names = readme_util.get_readme_file_names( str( self.repository.name ) )
         if self.app.name == 'galaxy':
             # Shed related tool panel configs are only relevant to Galaxy.
-            metadata_dict = { 'shed_config_filename' : shed_config_dict.get( 'config_filename' ) }
+            metadata_dict = { 'shed_config_filename' : self.shed_config_dict.get( 'config_filename' ) }
         else:
             metadata_dict = {}
         readme_files = []
-        invalid_file_tups = []
         invalid_tool_configs = []
         tool_dependencies_config = None
         original_tool_data_path = self.app.config.tool_data_path
         original_tool_data_table_config_path = self.app.config.tool_data_table_config_path
-        if resetting_all_metadata_on_repository:
-            if not relative_install_dir:
-                raise Exception( "The value of repository.repo_path must be sent when resetting all metadata on a repository." )
+        if self.resetting_all_metadata_on_repository:
+            if not self.relative_install_dir:
+                raise Exception( "The value of self.repository.repo_path must be set when resetting all metadata on a repository." )
             # Keep track of the location where the repository is temporarily cloned so that we can
-            # strip the path when setting metadata.  The value of repository_files_dir is the full
-            # path to the temporary directory to which the repository was cloned.
-            work_dir = repository_files_dir
-            files_dir = repository_files_dir
+            # strip the path when setting metadata.  The value of self.repository_files_dir is the
+            # full path to the temporary directory to which self.repository was cloned.
+            work_dir = self.repository_files_dir
+            files_dir = self.repository_files_dir
             # Since we're working from a temporary directory, we can safely copy sample files included
             # in the repository to the repository root.
-            self.app.config.tool_data_path = repository_files_dir
-            self.app.config.tool_data_table_config_path = repository_files_dir
+            self.app.config.tool_data_path = self.repository_files_dir
+            self.app.config.tool_data_table_config_path = self.repository_files_dir
         else:
             # Use a temporary working directory to copy all sample files.
             work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-gmfcr" )
             # All other files are on disk in the repository's repo_path, which is the value of
-            # relative_install_dir.
-            files_dir = relative_install_dir
-            if shed_config_dict.get( 'tool_path' ):
-                files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
+            # self.relative_install_dir.
+            files_dir = self.relative_install_dir
+            if self.shed_config_dict.get( 'tool_path' ):
+                files_dir = os.path.join( self.shed_config_dict[ 'tool_path' ], files_dir )
         self.app.config.tool_data_path = work_dir #FIXME: Thread safe?
         self.app.config.tool_data_table_config_path = work_dir
         # Handle proprietary datatypes, if any.
         datatypes_config = hg_util.get_config_from_disk( suc.DATATYPES_CONFIG_FILENAME, files_dir )
         if datatypes_config:
             metadata_dict = self.generate_datatypes_metadata( tv,
-                                                              repository,
-                                                              repository_clone_url,
                                                               files_dir,
                                                               datatypes_config,
                                                               metadata_dict )
@@ -303,9 +354,8 @@
         # the repository's metadata.
         sample_file_metadata_paths, sample_file_copy_paths = \
             self.get_sample_files_from_disk( repository_files_dir=files_dir,
-                                             tool_path=shed_config_dict.get( 'tool_path' ),
-                                             relative_install_dir=relative_install_dir,
-                                             resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
+                                             tool_path=self.shed_config_dict.get( 'tool_path' ),
+                                             relative_install_dir=self.relative_install_dir )
         if sample_file_metadata_paths:
             metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
         # Copy all sample files included in the repository to a single directory location so we
@@ -322,7 +372,7 @@
                                                                  shed_tool_data_table_config=self.app.config.shed_tool_data_table_config,
                                                                  persist=False )
             if error_message:
-                invalid_file_tups.append( ( filename, error_message ) )
+                self.invalid_file_tups.append( ( filename, error_message ) )
         for root, dirs, files in os.walk( files_dir ):
             if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
                 if '.hg' in dirs:
@@ -333,18 +383,16 @@
                         path_to_repository_dependencies_config = os.path.join( root, name )
                         metadata_dict, error_message = \
                             self.generate_repository_dependency_metadata( path_to_repository_dependencies_config,
-                                                                          metadata_dict,
-                                                                          updating_installed_repository=updating_installed_repository )
+                                                                          metadata_dict )
                         if error_message:
-                            invalid_file_tups.append( ( name, error_message ) )
+                            self.invalid_file_tups.append( ( name, error_message ) )
                     # See if we have one or more READ_ME files.
                     elif name.lower() in readme_file_names:
                         relative_path_to_readme = self.get_relative_path_to_repository_file( root,
                                                                                              name,
-                                                                                             relative_install_dir,
+                                                                                             self.relative_install_dir,
                                                                                              work_dir,
-                                                                                             shed_config_dict,
-                                                                                             resetting_all_metadata_on_repository )
+                                                                                             self.shed_config_dict )
                         readme_files.append( relative_path_to_readme )
                     # See if we have a tool config.
                     elif name not in self.NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
@@ -365,12 +413,12 @@
                         is_tool = element_tree_root.tag == 'tool'
                         if is_tool:
                             tool, valid, error_message = \
-                                tv.load_tool_from_config( self.app.security.encode_id( repository.id ),
+                                tv.load_tool_from_config( self.app.security.encode_id( self.repository.id ),
                                                           full_path )
                             if tool is None:
                                 if not valid:
                                     invalid_tool_configs.append( name )
-                                    invalid_file_tups.append( ( name, error_message ) )
+                                    self.invalid_file_tups.append( ( name, error_message ) )
                             else:
                                 invalid_files_and_errors_tups = \
                                     tv.check_tool_input_params( files_dir,
@@ -387,17 +435,15 @@
                                     relative_path_to_tool_config = \
                                         self.get_relative_path_to_repository_file( root,
                                                                                    name,
-                                                                                   relative_install_dir,
+                                                                                   self.relative_install_dir,
                                                                                    work_dir,
-                                                                                   shed_config_dict,
-                                                                                   resetting_all_metadata_on_repository )
+                                                                                   self.shed_config_dict )
                                     metadata_dict = self.generate_tool_metadata( relative_path_to_tool_config,
                                                                                  tool,
-                                                                                 repository_clone_url,
                                                                                  metadata_dict )
                                 else:
                                     for tup in invalid_files_and_errors_tups:
-                                        invalid_file_tups.append( tup )
+                                        self.invalid_file_tups.append( tup )
                     # Find all exported workflows.
                     elif name.endswith( '.ga' ):
                         relative_path = os.path.join( root, name )
@@ -421,11 +467,10 @@
                                                                  metadata_dict )
         # Handle any data manager entries
         data_manager_config = hg_util.get_config_from_disk( suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, files_dir )
-        metadata_dict = self.generate_data_manager_metadata( repository,
-                                                             files_dir,
+        metadata_dict = self.generate_data_manager_metadata( files_dir,
                                                              data_manager_config,
                                                              metadata_dict,
-                                                             shed_config_dict=shed_config_dict )
+                                                             shed_config_dict=self.shed_config_dict )

         if readme_files:
             metadata_dict[ 'readme_files' ] = readme_files
@@ -433,21 +478,18 @@
         tool_dependencies_config = hg_util.get_config_from_disk( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, files_dir )
         if tool_dependencies_config:
             metadata_dict, error_message = \
-                self.generate_tool_dependency_metadata( repository,
-                                                        changeset_revision,
-                                                        repository_clone_url,
-                                                        tool_dependencies_config,
+                self.generate_tool_dependency_metadata( tool_dependencies_config,
                                                         metadata_dict,
                                                         original_repository_metadata=original_repository_metadata )
             if error_message:
-                invalid_file_tups.append( ( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, error_message ) )
+                self.invalid_file_tups.append( ( rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME, error_message ) )
         if invalid_tool_configs:
             metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
+        self.metadata_dict = metadata_dict
         # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values.
         self.app.config.tool_data_path = original_tool_data_path
         self.app.config.tool_data_table_config_path = original_tool_data_table_config_path
         basic_util.remove_dir( work_dir )
-        return metadata_dict, invalid_file_tups

     def generate_package_dependency_metadata( self, elem, valid_tool_dependencies_dict, invalid_tool_dependencies_dict ):
         """
@@ -475,8 +517,7 @@
                 # where a tool dependency definition is considered invalid.
                 repository_dependency_tup, repository_dependency_is_valid, error_message = \
                     self.handle_repository_elem( repository_elem=sub_elem,
-                                                 only_if_compiling_contained_td=False,
-                                                 updating_installed_repository=False )
+                                                 only_if_compiling_contained_td=False )
             elif sub_elem.tag == 'install':
                 package_install_version = sub_elem.get( 'version', '1.0' )
                 if package_install_version == '1.0':
@@ -509,8 +550,7 @@
                             # We have a complex repository dependency.
                             repository_dependency_tup, repository_dependency_is_valid, error_message = \
                                 self.handle_repository_elem( repository_elem=sub_action_elem,
-                                                             only_if_compiling_contained_td=True,
-                                                             updating_installed_repository=False )
+                                                             only_if_compiling_contained_td=True )
                     elif action_elem.tag == 'action':
                         # <action type="set_environment_for_install">
                         #    <repository changeset_revision="b107b91b3574" name="package_readline_6_2" owner="devteam" prior_installation_required="True" toolshed="http://localhost:9009">
@@ -522,8 +562,7 @@
                             # We have a complex repository dependency.
                             repository_dependency_tup, repository_dependency_is_valid, error_message = \
                                 self.handle_repository_elem( repository_elem=sub_action_elem,
-                                                             only_if_compiling_contained_td=True,
-                                                             updating_installed_repository=False )
+                                                             only_if_compiling_contained_td=True )
         if requirements_dict:
             dependency_key = '%s/%s' % ( package_name, package_version )
             if repository_dependency_is_valid:
@@ -538,8 +577,7 @@
                repository_dependency_is_valid, \
                error_message

-    def generate_repository_dependency_metadata( self, repository_dependencies_config, metadata_dict,
-                                                 updating_installed_repository=False ):
+    def generate_repository_dependency_metadata( self, repository_dependencies_config, metadata_dict ):
         """
         Generate a repository dependencies dictionary based on valid information defined in the received
         repository_dependencies_config.  This method is called from the tool shed as well as from Galaxy.
@@ -560,8 +598,7 @@
         for repository_elem in root.findall( 'repository' ):
             repository_dependency_tup, repository_dependency_is_valid, err_msg = \
                 self.handle_repository_elem( repository_elem,
-                                             only_if_compiling_contained_td=False,
-                                             updating_installed_repository=updating_installed_repository )
+                                             only_if_compiling_contained_td=False )
             if repository_dependency_is_valid:
                 valid_repository_dependency_tups.append( repository_dependency_tup )
             else:
@@ -585,10 +622,10 @@
             metadata_dict[ 'repository_dependencies' ] = valid_repository_dependencies_dict
         return metadata_dict, error_message

-    def generate_tool_metadata( self, tool_config, tool, repository_clone_url, metadata_dict ):
+    def generate_tool_metadata( self, tool_config, tool, metadata_dict ):
         """Update the received metadata_dict with changes that have been applied to the received tool."""
         # Generate the guid.
-        guid = suc.generate_tool_guid( repository_clone_url, tool )
+        guid = suc.generate_tool_guid( self.repository_clone_url, tool )
         # Handle tool.requirements.
         tool_requirements = []
         for tool_requirement in tool.requirements:
@@ -667,11 +704,10 @@
             metadata_dict[ 'tools' ] = [ tool_dict ]
         return metadata_dict

-    def generate_tool_dependency_metadata( self, repository, changeset_revision, repository_clone_url, tool_dependencies_config,
-                                           metadata_dict, original_repository_metadata=None ):
+    def generate_tool_dependency_metadata( self, tool_dependencies_config, metadata_dict, original_repository_metadata=None ):
         """
         If the combination of name, version and type of each element is defined in the <requirement> tag for
-        at least one tool in the repository, then update the received metadata_dict with information from the
+        at least one tool in self.repository, then update the received metadata_dict with information from the
         parsed tool_dependencies_config.
         """
         error_message = ''
@@ -738,7 +774,7 @@
                 # into a Galaxy instance, so handle changes to tool dependencies appropriately.
                 irm = self.app.installed_repository_manager
                 updated_tool_dependency_names, deleted_tool_dependency_names = \
-                    irm.handle_existing_tool_dependencies_that_changed_in_update( repository,
+                    irm.handle_existing_tool_dependencies_that_changed_in_update( self.repository,
                                                                                   original_valid_tool_dependencies_dict,
                                                                                   valid_tool_dependencies_dict )
         metadata_dict[ 'tool_dependencies' ] = valid_tool_dependencies_dict
@@ -769,9 +805,14 @@
             metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
         return metadata_dict

-    def get_relative_path_to_repository_file( self, root, name, relative_install_dir, work_dir, shed_config_dict,
-                                              resetting_all_metadata_on_repository ):
-        if resetting_all_metadata_on_repository:
+    def get_invalid_file_tups( self ):
+        return self.invalid_file_tups
+
+    def get_metadata_dict( self ):
+        return self.metadata_dict
+
+    def get_relative_path_to_repository_file( self, root, name, relative_install_dir, work_dir, shed_config_dict ):
+        if self.resetting_all_metadata_on_repository:
             full_path_to_file = os.path.join( root, name )
             stripped_path_to_file = full_path_to_file.replace( work_dir, '' )
             if stripped_path_to_file.startswith( '/' ):
@@ -785,9 +826,8 @@
             relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
         return relative_path_to_file

-    def get_sample_files_from_disk( self, repository_files_dir, tool_path=None, relative_install_dir=None,
-                                    resetting_all_metadata_on_repository=False ):
-        if resetting_all_metadata_on_repository:
+    def get_sample_files_from_disk( self, repository_files_dir, tool_path=None, relative_install_dir=None ):
+        if self.resetting_all_metadata_on_repository:
             # Keep track of the location where the repository is temporarily cloned so that we can strip
             # it when setting metadata.
             work_dir = repository_files_dir
@@ -797,7 +837,7 @@
             if root.find( '.hg' ) < 0:
                 for name in files:
                     if name.endswith( '.sample' ):
-                        if resetting_all_metadata_on_repository:
+                        if self.resetting_all_metadata_on_repository:
                             full_path_to_sample_file = os.path.join( root, name )
                             stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
                             if stripped_path_to_sample_file.startswith( '/' ):
@@ -816,7 +856,7 @@
                     sample_file_metadata_paths.append( relative_path_to_sample_file )
         return sample_file_metadata_paths, sample_file_copy_paths

-    def handle_repository_elem( self, repository_elem, only_if_compiling_contained_td=False, updating_installed_repository=False ):
+    def handle_repository_elem( self, repository_elem, only_if_compiling_contained_td=False ):
         """
         Process the received repository_elem which is a <repository> tag either from a
         repository_dependencies.xml file or a tool_dependencies.xml file.  If the former,
@@ -832,7 +872,7 @@
         changeset_revision = repository_elem.get( 'changeset_revision', None )
         prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
         if self.app.name == 'galaxy':
-            if updating_installed_repository:
+            if self.updating_installed_repository:
                 pass
             else:
                 # We're installing a repository into Galaxy, so make sure its contained repository
@@ -889,12 +929,12 @@
                                                            updated_changeset_revision )
                 if repository:
                     return repository_dependency_tup, is_valid, error_message
-                if updating_installed_repository:
+                if self.updating_installed_repository:
                     # The repository dependency was included in an update to the installed
                     # repository, so it will not yet be installed.  Return the tuple for later
                     # installation.
                return repository_dependency_tup, is_valid, error_message
-            if updating_installed_repository:
+            if self.updating_installed_repository:
                # The repository dependency was included in an update to the installed repository,
                # so it will not yet be installed.  Return the tuple for later installation.
                return repository_dependency_tup, is_valid, error_message
@@ -1006,6 +1046,46 @@
            return False
        return True

+    def set_changeset_revision( self, changeset_revision ):
+        self.changeset_revision = changeset_revision
+
+    def set_relative_install_dir( self, relative_install_dir ):
+        self.relative_install_dir = relative_install_dir
+
+    def set_repository( self, repository, relative_install_dir=None, changeset_revision=None ):
+        self.repository = repository
+        # Shed related tool panel configs are only relevant to Galaxy.
+        if self.app.name == 'galaxy':
+            if relative_install_dir is None and self.repository is not None:
+                tool_path, relative_install_dir = self.repository.get_tool_relative_path( self.app )
+            if changeset_revision is None and self.repository is not None:
+                self.set_changeset_revision( self.repository.changeset_revision )
+            else:
+                self.set_changeset_revision( changeset_revision )
+            self.shed_config_dict = repository.get_shed_config_dict( self.app )
+            self.metadata_dict = { 'shed_config_filename' : self.shed_config_dict.get( 'config_filename', None ) }
+        else:
+            if relative_install_dir is None and self.repository is not None:
+                relative_install_dir = repository.repo_path( self.app )
+            if changeset_revision is None and self.repository is not None:
+                self.set_changeset_revision( self.repository.tip( self.app ) )
+            else:
+                self.set_changeset_revision( changeset_revision )
+            self.shed_config_dict = {}
+            self.metadata_dict = {}
+        self.set_relative_install_dir( relative_install_dir )
+        self.set_repository_files_dir()
+        self.resetting_all_metadata_on_repository = False
+        self.updating_installed_repository = False
+        self.persist = False
+        self.invalid_file_tups = []
+
+    def set_repository_clone_url( self, repository_clone_url ):
+        self.repository_clone_url = repository_clone_url
+
+    def set_repository_files_dir( self, repository_files_dir=None ):
+        self.repository_files_dir = repository_files_dir
+
    def update_repository_dependencies_metadata( self, metadata, repository_dependency_tups, is_valid, description ):
        if is_valid:
            repository_dependencies_dict = metadata.get( 'repository_dependencies', None )
diff -r 9b8d3dae67427121793b2390baca699f2887d600 -r cb6dffff27973cff0afd22d9907e675fea948113 lib/tool_shed/metadata/repository_metadata_manager.py
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -22,8 +22,16 @@

 class RepositoryMetadataManager( metadata_generator.MetadataGenerator ):

-    def __init__( self, app, user ):
-        super( RepositoryMetadataManager, self ).__init__( app )
+    def __init__( self, app, user, repository=None, changeset_revision=None, repository_clone_url=None,
+                  shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
+                  resetting_all_metadata_on_repository=False, updating_installed_repository=False,
+                  persist=False, metadata_dict=None ):
+        super( RepositoryMetadataManager, self ).__init__( app, repository, changeset_revision,
+                                                           repository_clone_url, shed_config_dict,
+                                                           relative_install_dir, repository_files_dir,
+                                                           resetting_all_metadata_on_repository,
+                                                           updating_installed_repository, persist,
+                                                           metadata_dict=metadata_dict, user=user )
        self.app = app
        self.user = user
        # Repository metadata comparisons
        # for changeset revisions.
@@ -62,7 +70,7 @@
            repositories_select_field.add_option( option_label, option_value )
        return repositories_select_field

-    def clean_repository_metadata( self, id, changeset_revisions ):
+    def clean_repository_metadata( self, changeset_revisions ):
        # Delete all repository_metadata records associated with the repository that have
        # a changeset_revision that is not in changeset_revisions.  We sometimes see multiple
        # records with the same changeset revision value - no idea how this happens.  We'll
@@ -71,7 +79,7 @@
        changeset_revisions_checked = []
        for repository_metadata in \
            self.sa_session.query( self.app.model.RepositoryMetadata ) \
-                           .filter( self.app.model.RepositoryMetadata.table.c.repository_id == self.app.security.decode_id( id ) ) \
+                           .filter( self.app.model.RepositoryMetadata.table.c.repository_id == self.repository.id ) \
                           .order_by( self.app.model.RepositoryMetadata.table.c.changeset_revision,
                                      self.app.model.RepositoryMetadata.table.c.update_time.desc() ):
            changeset_revision = repository_metadata.changeset_revision
@@ -79,15 +87,14 @@
            self.sa_session.delete( repository_metadata )
            self.sa_session.flush()

-    def compare_changeset_revisions( self, ancestor_changeset_revision, ancestor_metadata_dict,
-                                     current_changeset_revision, current_metadata_dict ):
+    def compare_changeset_revisions( self, ancestor_changeset_revision, ancestor_metadata_dict ):
        """
        Compare the contents of two changeset revisions to determine if a new repository
        metadata revision should be created.
        """
        # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict.
-        # This changeset_revision is an ancestor of current_changeset_revision which is associated
-        # with current_metadata_dict.  A new repository_metadata record will be created only
+        # This changeset_revision is an ancestor of self.changeset_revision which is associated
+        # with self.metadata_dict.  A new repository_metadata record will be created only
        # when this method returns the constant value self.NOT_EQUAL_AND_NOT_SUBSET.
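The contract described here drives the ancestor walk in reset_all_metadata_on_repository_in_tool_shed() further down: only a NOT_EQUAL_AND_NOT_SUBSET result persists the ancestor's metadata as its own record. An illustrative condensation of that dispatch (not the actual loop; rmm stands for a RepositoryMetadataManager instance):

    comparison = rmm.compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict )
    if comparison in ( rmm.NO_METADATA, rmm.EQUAL, rmm.SUBSET ):
        # The current revision subsumes the ancestor, so keep walking from current.
        ancestor_changeset_revision = rmm.changeset_revision
        ancestor_metadata_dict = rmm.metadata_dict
    elif comparison == rmm.NOT_EQUAL_AND_NOT_SUBSET:
        # The ancestor's metadata stands on its own, so persist it.
        rmm.create_or_update_repository_metadata( ancestor_changeset_revision, ancestor_metadata_dict )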
        ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
        ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
@@ -99,16 +106,16 @@
        ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', {} )
        ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
        ancestor_data_manager = ancestor_metadata_dict.get( 'data_manager', {} )
-        current_datatypes = current_metadata_dict.get( 'datatypes', [] )
-        current_tools = current_metadata_dict.get( 'tools', [] )
+        current_datatypes = self.metadata_dict.get( 'datatypes', [] )
+        current_tools = self.metadata_dict.get( 'tools', [] )
        current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
        current_guids.sort()
-        current_readme_files = current_metadata_dict.get( 'readme_files', [] )
-        current_repository_dependencies_dict = current_metadata_dict.get( 'repository_dependencies', {} )
+        current_readme_files = self.metadata_dict.get( 'readme_files', [] )
+        current_repository_dependencies_dict = self.metadata_dict.get( 'repository_dependencies', {} )
        current_repository_dependencies = current_repository_dependencies_dict.get( 'repository_dependencies', [] )
-        current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', {} )
-        current_workflows = current_metadata_dict.get( 'workflows', [] )
-        current_data_manager = current_metadata_dict.get( 'data_manager', {} )
+        current_tool_dependencies = self.metadata_dict.get( 'tool_dependencies', {} )
+        current_workflows = self.metadata_dict.get( 'workflows', [] )
+        current_data_manager = self.metadata_dict.get( 'data_manager', {} )
        # Handle case where no metadata exists for either changeset.
        no_datatypes = not ancestor_datatypes and not current_datatypes
        no_readme_files = not ancestor_readme_files and not current_readme_files
@@ -334,7 +341,7 @@
            return self.SUBSET
        return self.NOT_EQUAL_AND_NOT_SUBSET

-    def create_or_update_repository_metadata( self, id, repository, changeset_revision, metadata_dict ):
+    def create_or_update_repository_metadata( self, changeset_revision, metadata_dict ):
        """Create or update a repository_metadata record in the tool shed."""
        has_repository_dependencies = False
        has_repository_dependencies_only_if_compiling_contained_td = False
@@ -364,7 +371,9 @@
            downloadable = True
        else:
            downloadable = False
-        repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app, id, changeset_revision )
+        repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                 self.app.security.encode_id( self.repository.id ),
+                                                                                 changeset_revision )
        if repository_metadata:
            # A repository metadata record already exists with the received changeset_revision,
            # so we don't need to check the skip_tool_test table.
@@ -381,7 +390,7 @@
            # need to update the skip_tool_test table.
            check_skip_tool_test = True
            repository_metadata = \
-                self.app.model.RepositoryMetadata( repository_id=repository.id,
+                self.app.model.RepositoryMetadata( repository_id=self.repository.id,
                                                    changeset_revision=changeset_revision,
                                                    metadata=metadata_dict,
                                                    downloadable=downloadable,
@@ -407,7 +416,7 @@
        # if it is contained in the skip_tool_test table.  If it is, but is not associated
        # with a repository_metadata record, reset that skip_tool_test record to the newly
        # created repository_metadata record.
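In create_or_update_repository_metadata() above, the downloadable flag (whose full condition is elided from this hunk) amounts to whether the revision's metadata defines at least one installable Galaxy utility. Schematically (an assumed condensation; the exact key set lives in the method and may differ):

    downloadable = any( key in metadata_dict
                        for key in ( 'datatypes', 'repository_dependencies', 'tools',
                                     'tool_dependencies', 'workflows' ) )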
-        repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+        repo = hg_util.get_repo_for_repository( self.app, repository=self.repository, repo_path=None, create=False )
        for changeset in repo.changelog:
            changeset_hash = str( repo.changectx( changeset ) )
            skip_tool_test = self.get_skip_tool_test_by_changeset_revision( changeset_hash )
@@ -531,16 +540,16 @@
                         .filter( self.app.model.SkipToolTest.table.c.initial_changeset_revision == changeset_revision ) \
                         .first()

-    def new_datatypes_metadata_required( self, repository_metadata, metadata_dict ):
+    def new_datatypes_metadata_required( self, repository_metadata ):
        """
        Compare the last saved metadata for each datatype in the repository with the new metadata
-        in metadata_dict to determine if a new repository_metadata table record is required or if
-        the last saved metadata record can be updated for datatypes instead.
+        in self.metadata_dict to determine if a new repository_metadata table record is required
+        or if the last saved metadata record can be updated for datatypes instead.
        """
        # Datatypes are stored in metadata as a list of dictionaries that looks like:
        # [{'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb'}]
-        if 'datatypes' in metadata_dict:
-            current_datatypes = metadata_dict[ 'datatypes' ]
+        if 'datatypes' in self.metadata_dict:
+            current_datatypes = self.metadata_dict[ 'datatypes' ]
        if repository_metadata:
            metadata = repository_metadata.metadata
            if metadata:
@@ -564,37 +573,31 @@
            # There is no stored repository metadata, so we need to create a new repository_metadata
            # table record.
            return True
-        # The received metadata_dict includes no metadata for datatypes, so a new repository_metadata
+        # self.metadata_dict includes no metadata for datatypes, so a new repository_metadata
        # table record is not needed.
        return False

-    def new_metadata_required_for_utilities( self, repository, new_tip_metadata_dict ):
+    def new_metadata_required_for_utilities( self ):
        """
-        Galaxy utilities currently consist of datatypes, repository_dependency definitions,
-        tools, tool_dependency definitions and exported Galaxy workflows.  This method compares
-        the last stored repository_metadata record associated with the received repository against
-        the contents of the received new_tip_metadata_dict and returns True or False for the union
-        set of Galaxy utilities contained in both metadata dictionaries.  The metadata contained
-        in new_tip_metadata_dict may not be a subset of that contained in the last stored
-        repository_metadata record associated with the received repository because one or more
-        Galaxy utilities may have been deleted from the repository in the new tip.
+        This method compares the last stored repository_metadata record associated with self.repository
+        against the contents of self.metadata_dict and returns True or False for the union set of Galaxy
+        utilities contained in both metadata dictionaries.  The metadata contained in self.metadata_dict
+        may not be a subset of that contained in the last stored repository_metadata record associated with
+        self.repository because one or more Galaxy utilities may have been deleted from self.repository in
+        the new tip.
""" repository_metadata = metadata_util.get_latest_repository_metadata( self.app, - repository.id, + self.repository.id, downloadable=False ) - datatypes_required = self.new_datatypes_metadata_required( repository_metadata, - new_tip_metadata_dict ) + datatypes_required = self.new_datatypes_metadata_required( repository_metadata ) # Uncomment the following if we decide that README files should affect how installable # repository revisions are defined. See the NOTE in the compare_readme_files() method. - # readme_files_required = sewlf.new_readme_files_metadata_required( repository_metadata, - # new_tip_metadata_dict ) + # readme_files_required = sewlf.new_readme_files_metadata_required( repository_metadata ) repository_dependencies_required = \ - self.new_repository_dependency_metadata_required( repository_metadata, - new_tip_metadata_dict ) - tools_required = self.new_tool_metadata_required( repository_metadata, new_tip_metadata_dict ) - tool_dependencies_required = self.new_tool_dependency_metadata_required( repository_metadata, - new_tip_metadata_dict ) - workflows_required = self.new_workflow_metadata_required( repository_metadata, new_tip_metadata_dict ) + self.new_repository_dependency_metadata_required( repository_metadata ) + tools_required = self.new_tool_metadata_required( repository_metadata ) + tool_dependencies_required = self.new_tool_dependency_metadata_required( repository_metadata ) + workflows_required = self.new_workflow_metadata_required( repository_metadata ) if datatypes_required or \ repository_dependencies_required or \ tools_required or \ @@ -603,17 +606,17 @@ return True return False - def new_readme_files_metadata_required( self, repository_metadata, metadata_dict ): + def new_readme_files_metadata_required( self, repository_metadata ): """ Compare the last saved metadata for each readme file in the repository with the new metadata - in metadata_dict to determine if a new repository_metadata table record is required or if the - last saved metadata record can be updated for readme files instead. + in self.metadata_dict to determine if a new repository_metadata table record is required or + if the last saved metadata record can be updated for readme files instead. """ # Repository README files are kind of a special case because they have no effect on reproducibility. # We'll simply inspect the file names to determine if any that exist in the saved metadata are - # eliminated from the new metadata in the received metadata_dict. - if 'readme_files' in metadata_dict: - current_readme_files = metadata_dict[ 'readme_files' ] + # eliminated from the new metadata in self.metadata_dict. + if 'readme_files' in self.metadata_dict: + current_readme_files = self.metadata_dict[ 'readme_files' ] if repository_metadata: metadata = repository_metadata.metadata if metadata: @@ -638,14 +641,14 @@ # There is no stored repository metadata, so we need to create a new repository_metadata # table record. return True - # The received metadata_dict includes no metadata for readme_files, so a new repository_metadata + # self.metadata_dict includes no metadata for readme_files, so a new repository_metadata # table record is not needed. 
        return False

-    def new_repository_dependency_metadata_required( self, repository_metadata, metadata_dict ):
+    def new_repository_dependency_metadata_required( self, repository_metadata ):
        """
        Compare the last saved metadata for each repository dependency in the repository
-        with the new metadata in metadata_dict to determine if a new repository_metadata
+        with the new metadata in self.metadata_dict to determine if a new repository_metadata
        table record is required or if the last saved metadata record can be updated for
        repository_dependencies instead.
        """
@@ -653,9 +656,9 @@
            metadata = repository_metadata.metadata
            if 'repository_dependencies' in metadata:
                saved_repository_dependencies = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ]
-                new_repository_dependencies_metadata = metadata_dict.get( 'repository_dependencies', None )
+                new_repository_dependencies_metadata = self.metadata_dict.get( 'repository_dependencies', None )
                if new_repository_dependencies_metadata:
-                    new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
+                    new_repository_dependencies = self.metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
                    # TODO: We used to include the following here to handle the case where repository
                    # dependency definitions were deleted.  However this erroneously returned True in
                    # cases where it should not have done so.  This usually occurred where multiple single
@@ -682,21 +685,21 @@
            else:
                return False
        else:
-            if 'repository_dependencies' in metadata_dict:
+            if 'repository_dependencies' in self.metadata_dict:
                # There is no saved repository metadata, so we need to create a new repository_metadata record.
                return True
            else:
-                # The received metadata_dict includes no metadata for repository dependencies, so
-                # a new repository_metadata record is not needed.
+                # self.metadata_dict includes no metadata for repository dependencies, so a new repository_metadata
+                # record is not needed.
                return False

-    def new_tool_metadata_required( self, repository_metadata, metadata_dict ):
+    def new_tool_metadata_required( self, repository_metadata ):
        """
        Compare the last saved metadata for each tool in the repository with the new metadata in
-        metadata_dict to determine if a new repository_metadata table record is required, or if
+        self.metadata_dict to determine if a new repository_metadata table record is required, or if
        the last saved metadata record can be updated instead.
        """
-        if 'tools' in metadata_dict:
+        if 'tools' in self.metadata_dict:
            if repository_metadata:
                metadata = repository_metadata.metadata
                if metadata:
@@ -704,9 +707,9 @@
                    saved_tool_ids = []
                    # The metadata for one or more tools was successfully generated in the past
                    # for this repository, so we first compare the version string for each tool id
-                    # in metadata_dict with what was previously saved to see if we need to create
+                    # in self.metadata_dict with what was previously saved to see if we need to create
                    # a new table record or if we can simply update the existing record.
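All of the new_*_metadata_required helpers in this file follow the same decision shape; a schematic rendering (pseudocode condensed from the methods in this diff, with a hypothetical comparison helper standing in for the per-type logic):

    def new_X_metadata_required( self, repository_metadata ):
        if 'X' not in self.metadata_dict:
            # The new tip defines no utilities of this type, so no new record is needed.
            return False
        if repository_metadata is None or not repository_metadata.metadata:
            # Nothing has been stored yet, so a new repository_metadata record is required.
            return True
        # Otherwise, compare what was saved against self.metadata_dict for this type.
        return saved_differs_from_current( repository_metadata.metadata, self.metadata_dict )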
-                    for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
+                    for new_tool_metadata_dict in self.metadata_dict[ 'tools' ]:
                        for saved_tool_metadata_dict in metadata[ 'tools' ]:
                            if saved_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
                                saved_tool_ids.append( saved_tool_metadata_dict[ 'id' ] )
@@ -714,10 +717,10 @@
                                if new_tool_metadata_dict[ 'version' ] != saved_tool_metadata_dict[ 'version' ]:
                                    return True
                    # So far, a new metadata record is not required, but we still have to check to see if
-                    # any new tool ids exist in metadata_dict that are not in the saved metadata.  We do
+                    # any new tool ids exist in self.metadata_dict that are not in the saved metadata.  We do
                    # this because if a new tarball was uploaded to a repository that included tools, it
                    # may have removed existing tool files if they were not included in the uploaded tarball.
-                    for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
+                    for new_tool_metadata_dict in self.metadata_dict[ 'tools' ]:
                        if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
                            return True
                    return False
@@ -733,22 +736,22 @@
            # There is no stored repository metadata, so we need to create a new repository_metadata
            # table record.
            return True
-        # The received metadata_dict includes no metadata for tools, so a new repository_metadata table
+        # self.metadata_dict includes no metadata for tools, so a new repository_metadata table
        # record is not needed.
        return False

-    def new_tool_dependency_metadata_required( self, repository_metadata, metadata_dict ):
+    def new_tool_dependency_metadata_required( self, repository_metadata ):
        """
        Compare the last saved metadata for each tool dependency in the repository with the new
-        metadata in metadata_dict to determine if a new repository_metadata table record is required
-        or if the last saved metadata record can be updated for tool_dependencies instead.
+        metadata in self.metadata_dict to determine if a new repository_metadata table record is
+        required or if the last saved metadata record can be updated for tool_dependencies instead.
        """
        if repository_metadata:
            metadata = repository_metadata.metadata
            if metadata:
                if 'tool_dependencies' in metadata:
                    saved_tool_dependencies = metadata[ 'tool_dependencies' ]
-                    new_tool_dependencies = metadata_dict.get( 'tool_dependencies', None )
+                    new_tool_dependencies = self.metadata_dict.get( 'tool_dependencies', None )
                    if new_tool_dependencies:
                        # TODO: We used to include the following here to handle the case where
                        # tool dependency definitions were deleted.  However, this erroneously
@@ -777,22 +780,22 @@
                    # in the repository, so we can update the existing repository metadata.
                    return False
            else:
-                if 'tool_dependencies' in metadata_dict:
+                if 'tool_dependencies' in self.metadata_dict:
                    # There is no saved repository metadata, so we need to create a new repository_metadata
                    # record.
                    return True
                else:
-                    # The received metadata_dict includes no metadata for tool dependencies, so a new
-                    # repository_metadata record is not needed.
+                    # self.metadata_dict includes no metadata for tool dependencies, so a new repository_metadata
+                    # record is not needed.
                    return False

-    def new_workflow_metadata_required( self, repository_metadata, metadata_dict ):
+    def new_workflow_metadata_required( self, repository_metadata ):
        """
        Currently everything about an exported workflow except the name is hard-coded, so
        there's no real way to differentiate versions of exported workflows.  If this changes
        at some future time, this method should be enhanced accordingly.
""" - if 'workflows' in metadata_dict: + if 'workflows' in self.metadata_dict: if repository_metadata: # The repository has metadata, so update the workflows value - # no new record is needed. @@ -801,17 +804,17 @@ # There is no saved repository metadata, so we need to create a # new repository_metadata table record. return True - # The received metadata_dict includes no metadata for workflows, so a new + # self.metadata_dict includes no metadata for workflows, so a new # repository_metadata table record is not needed. return False - def reset_all_metadata_on_repository_in_tool_shed( self, id ): + def reset_all_metadata_on_repository_in_tool_shed( self ): """Reset all metadata on a single repository in a tool shed.""" - repository = suc.get_repository_in_tool_shed( self.app, id ) - log.debug( "Resetting all metadata on repository: %s" % repository.name ) - repo_dir = repository.repo_path( self.app ) - repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_dir, create=False ) - repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( self.user, repository ) + log.debug( "Resetting all metadata on repository: %s" % self.repository.name ) + repo = hg_util.get_repo_for_repository( self.app, + repository=None, + repo_path=self.repository.repo_path( self.app ), + create=False ) # The list of changeset_revisions refers to repository_metadata records that have been created # or updated. When the following loop completes, we'll delete all repository_metadata records # for this repository that do not have a changeset_revision value in this list. @@ -822,33 +825,22 @@ metadata_dict = None ancestor_changeset_revision = None ancestor_metadata_dict = None - invalid_file_tups = [] - for changeset in repository.get_changesets_for_setting_metadata( self.app ): + for changeset in self.repository.get_changesets_for_setting_metadata( self.app ): work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ramorits" ) - current_changeset_revision = str( repo.changectx( changeset ) ) ctx = repo.changectx( changeset ) log.debug( "Cloning repository changeset revision: %s", str( ctx.rev() ) ) - cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) ) + cloned_ok, error_message = hg_util.clone_repository( self.repository_clone_url, work_dir, str( ctx.rev() ) ) if cloned_ok: log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) ) - current_metadata_dict, invalid_tups = \ - self.generate_metadata_for_changeset_revision( repository=repository, - changeset_revision=current_changeset_revision, - repository_clone_url=repository_clone_url, - relative_install_dir=repo_dir, - repository_files_dir=work_dir, - resetting_all_metadata_on_repository=True, - updating_installed_repository=False, - persist=False ) - # We'll only display error messages for the repository tip (it may be better to display error - # messages for each installable changeset revision). - if current_changeset_revision == repository.tip( self.app ): - invalid_file_tups.extend( invalid_tups ) - if current_metadata_dict: + self.set_changeset_revision( str( repo.changectx( changeset ) ) ) + self.set_relative_install_dir( work_dir ) + self.set_repository_files_dir( work_dir ) + self.generate_metadata_for_changeset_revision() + if self.metadata_dict: if metadata_changeset_revision is None and metadata_dict is None: # We're at the first change set in the change log. 
-                        metadata_changeset_revision = current_changeset_revision
-                        metadata_dict = current_metadata_dict
+                        metadata_changeset_revision = self.changeset_revision
+                        metadata_dict = self.metadata_dict
                    if ancestor_changeset_revision:
                        # Compare metadata from ancestor and current.  The value of comparison will be one of:
                        # self.NO_METADATA - no metadata for either ancestor or current, so continue from current
@@ -856,66 +848,56 @@
                        # self.SUBSET - ancestor metadata is a subset of current metadata, so continue from current
                        # self.NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current
                        # metadata, so persist ancestor metadata.
-                        comparison = self.compare_changeset_revisions( ancestor_changeset_revision,
-                                                                       ancestor_metadata_dict,
-                                                                       current_changeset_revision,
-                                                                       current_metadata_dict )
+                        comparison = self.compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict )
                        if comparison in [ self.NO_METADATA, self.EQUAL, self.SUBSET ]:
-                            ancestor_changeset_revision = current_changeset_revision
-                            ancestor_metadata_dict = current_metadata_dict
+                            ancestor_changeset_revision = self.changeset_revision
+                            ancestor_metadata_dict = self.metadata_dict
                        elif comparison == self.NOT_EQUAL_AND_NOT_SUBSET:
                            metadata_changeset_revision = ancestor_changeset_revision
                            metadata_dict = ancestor_metadata_dict
-                            repository_metadata = self.create_or_update_repository_metadata( id,
-                                                                                             repository,
-                                                                                             metadata_changeset_revision,
-                                                                                             metadata_dict )
+                            repository_metadata = self.create_or_update_repository_metadata( metadata_changeset_revision, metadata_dict )
                            changeset_revisions.append( metadata_changeset_revision )
-                            ancestor_changeset_revision = current_changeset_revision
-                            ancestor_metadata_dict = current_metadata_dict
+                            ancestor_changeset_revision = self.changeset_revision
+                            ancestor_metadata_dict = self.metadata_dict
                    else:
                        # We're at the beginning of the change log.
-                        ancestor_changeset_revision = current_changeset_revision
-                        ancestor_metadata_dict = current_metadata_dict
+                        ancestor_changeset_revision = self.changeset_revision
+                        ancestor_metadata_dict = self.metadata_dict
                    if not ctx.children():
-                        metadata_changeset_revision = current_changeset_revision
-                        metadata_dict = current_metadata_dict
+                        metadata_changeset_revision = self.changeset_revision
+                        metadata_dict = self.metadata_dict
                        # We're at the end of the change log.
-                        repository_metadata = self.create_or_update_repository_metadata( id,
-                                                                                         repository,
-                                                                                         metadata_changeset_revision,
-                                                                                         metadata_dict )
+                        repository_metadata = self.create_or_update_repository_metadata( metadata_changeset_revision, metadata_dict )
                        changeset_revisions.append( metadata_changeset_revision )
                        ancestor_changeset_revision = None
                        ancestor_metadata_dict = None
                elif ancestor_metadata_dict:
-                    # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+                    # We reach here only if self.metadata_dict is empty and ancestor_metadata_dict is not.
                    if not ctx.children():
                        # We're at the end of the change log.
-                        repository_metadata = self.create_or_update_repository_metadata( id,
-                                                                                         repository,
-                                                                                         metadata_changeset_revision,
-                                                                                         metadata_dict )
+                        repository_metadata = self.create_or_update_repository_metadata( metadata_changeset_revision, metadata_dict )
                        changeset_revisions.append( metadata_changeset_revision )
                        ancestor_changeset_revision = None
                        ancestor_metadata_dict = None
            basic_util.remove_dir( work_dir )
        # Delete all repository_metadata records for this repository that do not have a changeset_revision
        # value in changeset_revisions.
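Taken together, the encapsulated reset flow now binds the repository once and reads results back through the new accessors; condensed from reset_metadata_on_selected_repositories() below (error handling omitted):

    rmm = RepositoryMetadataManager( app, user )
    rmm.set_repository( repository )
    rmm.reset_all_metadata_on_repository_in_tool_shed()
    if rmm.get_invalid_file_tups():
        message = tool_util.generate_message_for_invalid_tools( app,
                                                                rmm.get_invalid_file_tups(),
                                                                repository,
                                                                None,
                                                                as_html=False )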
-        self.clean_repository_metadata( id, changeset_revisions )
+        self.clean_repository_metadata( changeset_revisions )
        # Set tool version information for all downloadable changeset revisions.  Get the list of changeset
        # revisions from the changelog.
-        self.reset_all_tool_versions( id, repo )
+        self.reset_all_tool_versions( repo )
        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
        self.app.tool_data_tables.data_tables = {}
-        return invalid_file_tups, metadata_dict

-    def reset_all_tool_versions( self, id, repo ):
+    def reset_all_tool_versions( self, repo ):
        """Reset tool version lineage for those changeset revisions that include valid tools."""
+        encoded_repository_id = self.app.security.encode_id( self.repository.id )
        changeset_revisions_that_contain_tools = []
        for changeset in repo.changelog:
            changeset_revision = str( repo.changectx( changeset ) )
-            repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app, id, changeset_revision )
+            repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                     encoded_repository_id,
+                                                                                     changeset_revision )
            if repository_metadata:
                metadata = repository_metadata.metadata
                if metadata:
@@ -926,7 +908,9 @@
        # { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
        for index, changeset_revision in enumerate( changeset_revisions_that_contain_tools ):
            tool_versions_dict = {}
-            repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app, id, changeset_revision )
+            repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app,
+                                                                                     encoded_repository_id,
+                                                                                     changeset_revision )
            metadata = repository_metadata.metadata
            tool_dicts = metadata[ 'tools' ]
            if index == 0:
@@ -937,7 +921,7 @@
                    tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
            else:
                for tool_dict in tool_dicts:
-                    parent_id = self.get_parent_id( id,
+                    parent_id = self.get_parent_id( encoded_repository_id,
                                                    tool_dict[ 'id' ],
                                                    tool_dict[ 'version' ],
                                                    tool_dict[ 'guid' ],
@@ -962,11 +946,11 @@
        for repository_id in repository_ids:
            try:
                repository = suc.get_repository_in_tool_shed( self.app, repository_id )
-                invalid_file_tups, metadata_dict = \
-                    self.reset_all_metadata_on_repository_in_tool_shed( repository_id )
-                if invalid_file_tups:
+                self.set_repository( repository )
+                self.reset_all_metadata_on_repository_in_tool_shed()
+                if self.invalid_file_tups:
                    message = tool_util.generate_message_for_invalid_tools( self.app,
-                                                                            invalid_file_tups,
+                                                                            self.invalid_file_tups,
                                                                            repository,
                                                                            None,
                                                                            as_html=False )
@@ -974,7 +958,7 @@
                    unsuccessful_count += 1
                else:
                    log.debug( "Successfully reset metadata on repository %s owned by %s" % \
-                        ( str( repository.name ), str( repository.user.username ) ) )
+                               ( str( repository.name ), str( repository.user.username ) ) )
                    successful_count += 1
            except:
                log.exception( "Error attempting to reset metadata on repository %s" % str( repository.name ) )
@@ -989,75 +973,68 @@
            status = 'error'
        return message, status

-    def set_repository_metadata( self, host, repository, content_alert_str='', **kwd ):
+    def set_repository( self, repository ):
+        super( RepositoryMetadataManager, self ).set_repository( repository )
+        self.repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( self.user, repository )
+
+    def set_repository_metadata( self, host, content_alert_str='', **kwd ):
        """
-        Set metadata using the repository's current disk files, returning specific error
+        Set metadata using the self.repository's current disk files, returning
        specific error messages (if any) to alert the repository owner that the changeset has problems.
        """
        message = ''
        status = 'done'
-        encoded_id = self.app.security.encode_id( repository.id )
-        repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( self.user, repository )
-        repo_dir = repository.repo_path( self.app )
+        encoded_id = self.app.security.encode_id( self.repository.id )
+        repo_dir = self.repository.repo_path( self.app )
        repo = hg_util.get_repo_for_repository( self.app, repository=None, repo_path=repo_dir, create=False )
-        metadata_dict, invalid_file_tups = \
-            self.generate_metadata_for_changeset_revision( repository=repository,
-                                                           changeset_revision=repository.tip( self.app ),
-                                                           repository_clone_url=repository_clone_url,
-                                                           relative_install_dir=repo_dir,
-                                                           repository_files_dir=None,
-                                                           resetting_all_metadata_on_repository=False,
-                                                           updating_installed_repository=False,
-                                                           persist=False )
-        if metadata_dict:
+        self.generate_metadata_for_changeset_revision()
+        if self.metadata_dict:
            repository_metadata = None
-            repository_type_class = self.app.repository_types_registry.get_class_by_label( repository.type )
+            repository_type_class = self.app.repository_types_registry.get_class_by_label( self.repository.type )
            tip_only = isinstance( repository_type_class, TipOnly )
-            if not tip_only and self.new_metadata_required_for_utilities( repository, metadata_dict ):
+            if not tip_only and self.new_metadata_required_for_utilities():
                # Create a new repository_metadata table row.
-                repository_metadata = self.create_or_update_repository_metadata( encoded_id,
-                                                                                 repository,
-                                                                                 repository.tip( self.app ),
-                                                                                 metadata_dict )
+                repository_metadata = self.create_or_update_repository_metadata( self.repository.tip( self.app ),
+                                                                                 self.metadata_dict )
                # If this is the first record stored for this repository, see if we need to send any email alerts.
-                if len( repository.downloadable_revisions ) == 1:
+                if len( self.repository.downloadable_revisions ) == 1:
                    suc.handle_email_alerts( self.app,
                                             host,
-                                             repository,
+                                             self.repository,
                                             content_alert_str='',
                                             new_repo_alert=True,
                                             admin_only=False )
            else:
-                # Update the latest stored repository metadata with the contents and attributes of metadata_dict.
+                # Update the latest stored repository metadata with the contents and attributes of self.metadata_dict.
                repository_metadata = metadata_util.get_latest_repository_metadata( self.app,
-                                                                                   repository.id,
+                                                                                   self.repository.id,
                                                                                    downloadable=False )
                if repository_metadata:
-                    downloadable = metadata_util.is_downloadable( metadata_dict )
+                    downloadable = metadata_util.is_downloadable( self.metadata_dict )
                    # Update the last saved repository_metadata table row.
-                    repository_metadata.changeset_revision = repository.tip( self.app )
-                    repository_metadata.metadata = metadata_dict
+                    repository_metadata.changeset_revision = self.repository.tip( self.app )
+                    repository_metadata.metadata = self.metadata_dict
                    repository_metadata.downloadable = downloadable
-                    if 'datatypes' in metadata_dict:
+                    if 'datatypes' in self.metadata_dict:
                        repository_metadata.includes_datatypes = True
                    else:
                        repository_metadata.includes_datatypes = False
                    # We don't store information about the special type of repository dependency that is needed only for
                    # compiling a tool dependency defined for the dependent repository.
-                    repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', {} )
+                    repository_dependencies_dict = self.metadata_dict.get( 'repository_dependencies', {} )
                    repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
                    has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \
                        suc.get_repository_dependency_types( repository_dependencies )
                    repository_metadata.has_repository_dependencies = has_repository_dependencies
-                    if 'tool_dependencies' in metadata_dict:
+                    if 'tool_dependencies' in self.metadata_dict:
                        repository_metadata.includes_tool_dependencies = True
                    else:
                        repository_metadata.includes_tool_dependencies = False
-                    if 'tools' in metadata_dict:
+                    if 'tools' in self.metadata_dict:
                        repository_metadata.includes_tools = True
                    else:
                        repository_metadata.includes_tools = False
-                    if 'workflows' in metadata_dict:
+                    if 'workflows' in self.metadata_dict:
                        repository_metadata.includes_workflows = True
                    else:
                        repository_metadata.includes_workflows = False
@@ -1070,11 +1047,9 @@
                    self.sa_session.flush()
                else:
                    # There are no metadata records associated with the repository.
-                    repository_metadata = self.create_or_update_repository_metadata( encoded_id,
-                                                                                     repository,
-                                                                                     repository.tip( self.app ),
-                                                                                     metadata_dict )
-            if 'tools' in metadata_dict and repository_metadata and status != 'error':
+                    repository_metadata = self.create_or_update_repository_metadata( self.repository.tip( self.app ),
+                                                                                     self.metadata_dict )
+            if 'tools' in self.metadata_dict and repository_metadata and status != 'error':
                # Set tool versions on the new downloadable change set.  The order of the list of changesets is
                # critical, so we use the repo's changelog.
                changeset_revisions = []
@@ -1083,25 +1058,22 @@
                if suc.get_repository_metadata_by_changeset_revision( self.app, encoded_id, changeset_revision ):
                    changeset_revisions.append( changeset_revision )
                self.add_tool_versions( encoded_id, repository_metadata, changeset_revisions )
-        elif len( repo ) == 1 and not invalid_file_tups:
+        elif len( repo ) == 1 and not self.invalid_file_tups:
            message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % \
-                str( repository.tip( self.app ) )
+                str( self.repository.tip( self.app ) )
            message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
            status = "error"
-        if invalid_file_tups:
+        if self.invalid_file_tups:
            message = tool_util.generate_message_for_invalid_tools( self.app,
-                                                                    invalid_file_tups,
-                                                                    repository,
-                                                                    metadata_dict )
+                                                                    self.invalid_file_tups,
+                                                                    self.repository,
+                                                                    self.metadata_dict )
            status = 'error'
        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
        self.app.tool_data_tables.data_tables = {}
        return message, status

-    def set_repository_metadata_due_to_new_tip( self, host, repository, content_alert_str=None, **kwd ):
-        """Set metadata on the repository tip in the tool shed."""
-        error_message, status = self.set_repository_metadata( host,
-                                                              repository,
-                                                              content_alert_str=content_alert_str,
-                                                              **kwd )
+    def set_repository_metadata_due_to_new_tip( self, host, content_alert_str=None, **kwd ):
+        """Set metadata on the tip of self.repository in the tool shed."""
+        error_message, status = self.set_repository_metadata( host, content_alert_str=content_alert_str, **kwd )
        return status, error_message

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org.
You are receiving this because you have the service enabled and are the addressed recipient of this email.
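For reference, a caller that previously threaded the repository through every call now binds it once; a minimal sketch of the new tip-change flow (assuming the constructor binds the repository as the signature in this diff suggests):

    rmm = RepositoryMetadataManager( app, user, repository=repository )
    status, error_message = rmm.set_repository_metadata_due_to_new_tip( host )
    if status != 'done':
        log.debug( error_message )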