1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/a43b16b64b1a/ changeset: a43b16b64b1a user: greg date: 2012-11-15 22:54:10 summary: More shed_util refactoring. affected #: 6 files diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -157,154 +157,6 @@ except: pass return converter_path, display_path -def can_generate_tool_dependency_metadata( root, metadata_dict ): - """ - Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml - file is defined in the <requirement> tag for at least one tool in the repository. - """ - can_generate_dependency_metadata = False - for elem in root: - tool_dependency_type = elem.tag - tool_dependency_version = elem.get( 'version', None ) - if tool_dependency_type == 'package': - can_generate_dependency_metadata = False - tool_dependency_name = elem.get( 'name', None ) - if tool_dependency_name and tool_dependency_version: - for tool_dict in metadata_dict.get( 'tools', [] ): - requirements = tool_dict.get( 'requirements', [] ) - for requirement_dict in requirements: - req_name = requirement_dict.get( 'name', None ) - req_version = requirement_dict.get( 'version', None ) - req_type = requirement_dict.get( 'type', None ) - if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type: - can_generate_dependency_metadata = True - break - if requirements and not can_generate_dependency_metadata: - # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement> - # tag for any tool in the repository. - break - if not can_generate_dependency_metadata: - break - elif tool_dependency_type == 'set_environment': - # Here elem is something like: <set_environment version="1.0"> - for env_var_elem in elem: - can_generate_dependency_metadata = False - # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable> - env_var_name = env_var_elem.get( 'name', None ) - if env_var_name: - for tool_dict in metadata_dict.get( 'tools', [] ): - requirements = tool_dict.get( 'requirements', [] ) - for requirement_dict in requirements: - # {"name": "R_SCRIPT_PATH", "type": "set_environment", "version": null} - req_name = requirement_dict.get( 'name', None ) - req_type = requirement_dict.get( 'type', None ) - if req_name==env_var_name and req_type==tool_dependency_type: - can_generate_dependency_metadata = True - break - if requirements and not can_generate_dependency_metadata: - # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement> - # tag for any tool in the repository. - break - return can_generate_dependency_metadata -def clean_repository_metadata( trans, id, changeset_revisions ): - # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. - # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older - # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later.. 
- changeset_revisions_checked = [] - for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ - .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ - .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision, - trans.model.RepositoryMetadata.table.c.update_time.desc() ): - changeset_revision = repository_metadata.changeset_revision - can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions - if can_delete: - trans.sa_session.delete( repository_metadata ) - trans.sa_session.flush() -def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): - # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of - # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only - # when this method returns the string 'not equal and not subset'. - ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] ) - ancestor_tools = ancestor_metadata_dict.get( 'tools', [] ) - ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ] - ancestor_guids.sort() - ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] ) - ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] ) - current_datatypes = current_metadata_dict.get( 'datatypes', [] ) - current_tools = current_metadata_dict.get( 'tools', [] ) - current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ] - current_guids.sort() - current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] ) - current_workflows = current_metadata_dict.get( 'workflows', [] ) - # Handle case where no metadata exists for either changeset. - if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes: - return 'no metadata' - workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) - datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) - # Handle case where all metadata is the same. - if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal': - return 'equal' - if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]: - is_subset = True - for guid in ancestor_guids: - if guid not in current_guids: - is_subset = False - break - if is_subset: - return 'subset' - return 'not equal and not subset' -def compare_datatypes( ancestor_datatypes, current_datatypes ): - # Determine if ancestor_datatypes is the same as current_datatypes - # or if ancestor_datatypes is a subset of current_datatypes. Each - # datatype dict looks something like: - # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} - if len( ancestor_datatypes ) <= len( current_datatypes ): - for ancestor_datatype in ancestor_datatypes: - # Currently the only way to differentiate datatypes is by name. 
- ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ] - ancestor_datatype_extension = ancestor_datatype[ 'extension' ] - ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None ) - found_in_current = False - for current_datatype in current_datatypes: - if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \ - current_datatype[ 'extension' ] == ancestor_datatype_extension and \ - current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype: - found_in_current = True - break - if not found_in_current: - return 'not equal and not subset' - if len( ancestor_datatypes ) == len( current_datatypes ): - return 'equal' - else: - return 'subset' - return 'not equal and not subset' -def compare_workflows( ancestor_workflows, current_workflows ): - # Determine if ancestor_workflows is the same as current_workflows - # or if ancestor_workflows is a subset of current_workflows. - if len( ancestor_workflows ) <= len( current_workflows ): - for ancestor_workflow_tup in ancestor_workflows: - # ancestor_workflows is a list of tuples where each contained tuple is - # [ <relative path to the .ga file in the repository>, <exported workflow dict> ] - ancestor_workflow_dict = ancestor_workflow_tup[1] - # Currently the only way to differentiate workflows is by name. - ancestor_workflow_name = ancestor_workflow_dict[ 'name' ] - num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] ) - found_in_current = False - for current_workflow_tup in current_workflows: - current_workflow_dict = current_workflow_tup[1] - # Assume that if the name and number of steps are euqal, - # then the workflows are the same. Of course, this may - # not be true... - if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: - found_in_current = True - break - if not found_in_current: - return 'not equal and not subset' - if len( ancestor_workflows ) == len( current_workflows ): - return 'equal' - else: - return 'subset' - return 'not equal and not subset' def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ): # Persist the current in-memory list of config_elems to a file named by the value of config_filename. fd, filename = tempfile.mkstemp() @@ -326,20 +178,6 @@ copy_sample_file( trans.app, relative_path, dest_path=dest_path ) sample_files.append( name ) return sample_files -def clean_repository_clone_url( repository_clone_url ): - if repository_clone_url.find( '@' ) > 0: - # We have an url that includes an authenticated user, something like: - # http://test@bx.psu.edu:9009/repos/some_username/column - items = repository_clone_url.split( '@' ) - tmp_url = items[ 1 ] - elif repository_clone_url.find( '//' ) > 0: - # We have an url that includes only a protocol, something like: - # http://bx.psu.edu:9009/repos/some_username/column - items = repository_clone_url.split( '//' ) - tmp_url = items[ 1 ] - else: - tmp_url = repository_clone_url - return tmp_url def clean_tool_shed_url( tool_shed_url ): if tool_shed_url.find( ':' ) > 0: # Eliminate the port, if any, since it will result in an invalid directory name. 
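
# Illustrative sketch, not part of this changeset: what the clean_repository_clone_url()
# function shown above (moved into shed_util_common.py by this commit) returns for the
# example URLs already cited in its comments.
from galaxy.util.shed_util_common import clean_repository_clone_url

# Clone URL that includes an authenticated user.
print clean_repository_clone_url( 'http://test@bx.psu.edu:9009/repos/some_username/column' )
# -> bx.psu.edu:9009/repos/some_username/column

# Clone URL that includes only a protocol.
print clean_repository_clone_url( 'http://bx.psu.edu:9009/repos/some_username/column' )
# -> bx.psu.edu:9009/repos/some_username/column

# generate_tool_guid() appends '/<tool id>/<tool version>' to this cleaned value to build the guid.
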
@@ -472,166 +310,6 @@ """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) return url_join( tool_shed_url, 'repos', repository.owner, repository.name ) -def generate_datatypes_metadata( datatypes_config, metadata_dict ): - """Update the received metadata_dict with information from the parsed datatypes_config.""" - tree = ElementTree.parse( datatypes_config ) - root = tree.getroot() - ElementInclude.include( root ) - repository_datatype_code_files = [] - datatype_files = root.find( 'datatype_files' ) - if datatype_files: - for elem in datatype_files.findall( 'datatype_file' ): - name = elem.get( 'name', None ) - repository_datatype_code_files.append( name ) - metadata_dict[ 'datatype_files' ] = repository_datatype_code_files - datatypes = [] - registration = root.find( 'registration' ) - if registration: - for elem in registration.findall( 'datatype' ): - datatypes_dict = {} - display_in_upload = elem.get( 'display_in_upload', None ) - if display_in_upload: - datatypes_dict[ 'display_in_upload' ] = display_in_upload - dtype = elem.get( 'type', None ) - if dtype: - datatypes_dict[ 'dtype' ] = dtype - extension = elem.get( 'extension', None ) - if extension: - datatypes_dict[ 'extension' ] = extension - max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None ) - if max_optional_metadata_filesize: - datatypes_dict[ 'max_optional_metadata_filesize' ] = max_optional_metadata_filesize - mimetype = elem.get( 'mimetype', None ) - if mimetype: - datatypes_dict[ 'mimetype' ] = mimetype - subclass = elem.get( 'subclass', None ) - if subclass: - datatypes_dict[ 'subclass' ] = subclass - if datatypes_dict: - datatypes.append( datatypes_dict ) - if datatypes: - metadata_dict[ 'datatypes' ] = datatypes - return metadata_dict -def generate_environment_dependency_metadata( elem, tool_dependencies_dict ): - """The value of env_var_name must match the value of the "set_environment" type in the tool config's <requirements> tag set.""" - requirements_dict = {} - for env_elem in elem: - env_name = env_elem.get( 'name', None ) - if env_name: - requirements_dict [ 'name' ] = env_name - requirements_dict [ 'type' ] = 'environment variable' - if requirements_dict: - if 'set_environment' in tool_dependencies_dict: - tool_dependencies_dict[ 'set_environment' ].append( requirements_dict ) - else: - tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ] - return tool_dependencies_dict -def generate_package_dependency_metadata( elem, tool_dependencies_dict ): - """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set.""" - requirements_dict = {} - package_name = elem.get( 'name', None ) - package_version = elem.get( 'version', None ) - if package_name and package_version: - dependency_key = '%s/%s' % ( package_name, package_version ) - requirements_dict [ 'name' ] = package_name - requirements_dict [ 'version' ] = package_version - requirements_dict [ 'type' ] = 'package' - for sub_elem in elem: - if sub_elem.tag == 'readme': - requirements_dict[ 'readme' ] = sub_elem.text - if requirements_dict: - tool_dependencies_dict[ dependency_key ] = requirements_dict - return tool_dependencies_dict -def generate_tool_dependency_metadata( app, repository, tool_dependencies_config, metadata_dict, original_repository_metadata=None ): - """ - If the combination of name, version and type of each element is defined in the 
<requirement> tag for at least one tool in the repository, - then update the received metadata_dict with information from the parsed tool_dependencies_config. - """ - if original_repository_metadata: - # Keep a copy of the original tool dependencies dictionary in the metadata. - original_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None ) - else: - original_tool_dependencies_dict = None - try: - tree = ElementTree.parse( tool_dependencies_config ) - except Exception, e: - log.debug( "Exception attempting to parse tool_dependencies.xml: %s" %str( e ) ) - return metadata_dict - root = tree.getroot() - ElementInclude.include( root ) - tool_dependencies_dict = {} - if can_generate_tool_dependency_metadata( root, metadata_dict ): - for elem in root: - if elem.tag == 'package': - tool_dependencies_dict = generate_package_dependency_metadata( elem, tool_dependencies_dict ) - elif elem.tag == 'set_environment': - tool_dependencies_dict = generate_environment_dependency_metadata( elem, tool_dependencies_dict ) - # Handle tool dependency installation via other means here (future). - if tool_dependencies_dict: - metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict - else: - log.debug( "Name, version and type from the <requirement> tag does not match the information in the tool_dependencies.xml file. Tool dependencies will be ignored." ) - if tool_dependencies_dict: - if original_tool_dependencies_dict: - # We're generating metadata on an update pulled to a tool shed repository installed into a Galaxy instance, so handle changes to - # tool dependencies appropriately. - handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_tool_dependencies_dict, tool_dependencies_dict ) - metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict - return metadata_dict -def generate_tool_guid( repository_clone_url, tool ): - """ - Generate a guid for the installed tool. It is critical that this guid matches the guid for - the tool in the Galaxy tool shed from which it is being installed. The form of the guid is - <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version> - """ - tmp_url = clean_repository_clone_url( repository_clone_url ) - return '%s/%s/%s' % ( tmp_url, tool.id, tool.version ) -def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ): - """Update the received metadata_dict with changes that have been applied to the received tool.""" - # Generate the guid - guid = generate_tool_guid( repository_clone_url, tool ) - # Handle tool.requirements. - tool_requirements = [] - for tr in tool.requirements: - requirement_dict = dict( name=tr.name, - type=tr.type, - version=tr.version ) - tool_requirements.append( requirement_dict ) - # Handle tool.tests. 
- tool_tests = [] - if tool.tests: - for ttb in tool.tests: - required_files = [] - for required_file in ttb.required_files: - value, extra = required_file - required_files.append( ( value ) ) - inputs = [] - for input in ttb.inputs: - name, value, extra = input - inputs.append( ( name, value ) ) - outputs = [] - for output in ttb.outputs: - name, file_name, extra = output - outputs.append( ( name, strip_path( file_name ) if file_name else None ) ) - test_dict = dict( name=ttb.name, - required_files=required_files, - inputs=inputs, - outputs=outputs ) - tool_tests.append( test_dict ) - tool_dict = dict( id=tool.id, - guid=guid, - name=tool.name, - version=tool.version, - description=tool.description, - version_string_cmd = tool.version_string_cmd, - tool_config=tool_config, - requirements=tool_requirements, - tests=tool_tests ) - if 'tools' in metadata_dict: - metadata_dict[ 'tools' ].append( tool_dict ) - else: - metadata_dict[ 'tools' ] = [ tool_dict ] - return metadata_dict def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ): if tool_section is not None: tool_elem = SubElement( tool_section, 'tool' ) @@ -652,7 +330,6 @@ version_elem = SubElement( tool_elem, 'version' ) version_elem.text = tool.version return tool_elem - def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner='' ): """Generate a list of ElementTree Element objects for each section or tool.""" elem_list = [] @@ -958,34 +635,6 @@ if tool: repository_tools_tups.append( ( relative_path, guid, tool ) ) return repository_tools_tups -def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ): - if resetting_all_metadata_on_repository: - # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata. 
- work_dir = repository_files_dir - sample_file_metadata_paths = [] - sample_file_copy_paths = [] - for root, dirs, files in os.walk( repository_files_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name.endswith( '.sample' ): - if resetting_all_metadata_on_repository: - full_path_to_sample_file = os.path.join( root, name ) - stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' ) - if stripped_path_to_sample_file.startswith( '/' ): - stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ] - relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file ) - if os.path.exists( relative_path_to_sample_file ): - sample_file_copy_paths.append( relative_path_to_sample_file ) - else: - sample_file_copy_paths.append( full_path_to_sample_file ) - else: - relative_path_to_sample_file = os.path.join( root, name ) - sample_file_copy_paths.append( relative_path_to_sample_file ) - if tool_path and relative_install_dir: - if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ): - relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :] - sample_file_metadata_paths.append( relative_path_to_sample_file ) - return sample_file_metadata_paths, sample_file_copy_paths def get_shed_tool_conf_dict( app, shed_tool_conf ): """ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry @@ -1120,13 +769,6 @@ return shed_url # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml. return None -def get_readme_file_names( repository_name ): - readme_files = [ 'readme', 'read_me', 'install' ] - valid_filenames = [ r for r in readme_files ] - for r in readme_files: - valid_filenames.append( '%s.txt' % r ) - valid_filenames.append( '%s.txt' % repository_name ) - return valid_filenames def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ): """ Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the @@ -1522,34 +1164,6 @@ removed = True error_message = '' return removed, error_message -def reset_all_metadata_on_installed_repository( trans, id ): - """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" - repository = get_installed_tool_shed_repository( trans, id ) - tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) - repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) - tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) - if relative_install_dir: - original_metadata_dict = repository.metadata - metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, - repository=repository, - repository_clone_url=repository_clone_url, - shed_config_dict = repository.get_shed_config_dict( trans.app ), - relative_install_dir=relative_install_dir, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False ) - repository.metadata = metadata_dict - if metadata_dict != original_metadata_dict: - update_in_shed_tool_config( trans.app, repository ) - trans.sa_session.add( repository ) - trans.sa_session.flush() - log.debug( 'Metadata has been reset on repository %s.' % repository.name ) - else: - log.debug( 'Metadata did not need to be reset on repository %s.' 
% repository.name ) - else: - log.debug( 'Error locating installation directory for repository %s.' % repository.name ) - return invalid_file_tups, metadata_dict def to_html_str( text ): """Translates the characters in text to an html string""" translated = [] diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 lib/galaxy/util/shed_util_common.py --- a/lib/galaxy/util/shed_util_common.py +++ b/lib/galaxy/util/shed_util_common.py @@ -1,4 +1,4 @@ -import os, shutil, tempfile, logging +import os, shutil, tempfile, logging, string from galaxy import util from galaxy.tools import parameters from galaxy.util import inflector @@ -23,7 +23,7 @@ '&' : '&', '\'' : ''' } MAX_CONTENT_SIZE = 32768 -NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ] +NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'repository_dependencies.xml', 'tool_dependencies.xml' ] GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER' TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER' VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" ) @@ -49,6 +49,55 @@ option_value = trans.security.encode_id( repository.id ) repositories_select_field.add_option( option_label, option_value ) return repositories_select_field +def can_generate_tool_dependency_metadata( root, metadata_dict ): + """ + Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml + file is defined in the <requirement> tag for at least one tool in the repository. + """ + can_generate_dependency_metadata = False + for elem in root: + tool_dependency_type = elem.tag + tool_dependency_version = elem.get( 'version', None ) + if tool_dependency_type == 'package': + can_generate_dependency_metadata = False + tool_dependency_name = elem.get( 'name', None ) + if tool_dependency_name and tool_dependency_version: + for tool_dict in metadata_dict.get( 'tools', [] ): + requirements = tool_dict.get( 'requirements', [] ) + for requirement_dict in requirements: + req_name = requirement_dict.get( 'name', None ) + req_version = requirement_dict.get( 'version', None ) + req_type = requirement_dict.get( 'type', None ) + if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type: + can_generate_dependency_metadata = True + break + if requirements and not can_generate_dependency_metadata: + # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement> + # tag for any tool in the repository. 
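
# Illustrative sketch, not part of this changeset: a self-contained example of the matching rule
# described in the docstring above. The package name and version are hypothetical, and the
# <tool_dependency> wrapper tag is irrelevant here - this function only inspects the child elements.
from xml.etree import ElementTree
from galaxy.util.shed_util_common import can_generate_tool_dependency_metadata

root = ElementTree.fromstring( '<tool_dependency><package name="samtools" version="0.1.18"/></tool_dependency>' )
metadata_dict = { 'tools': [ { 'requirements': [ { 'name': 'samtools', 'version': '0.1.18', 'type': 'package' } ] } ] }
print can_generate_tool_dependency_metadata( root, metadata_dict )
# -> True, because a tool <requirement> matches the package's name, version and type.
metadata_dict[ 'tools' ][ 0 ][ 'requirements' ][ 0 ][ 'version' ] = '0.1.19'
print can_generate_tool_dependency_metadata( root, metadata_dict )
# -> False, so the tool dependency metadata would be ignored.
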
+ break + if not can_generate_dependency_metadata: + break + elif tool_dependency_type == 'set_environment': + # Here elem is something like: <set_environment version="1.0"> + for env_var_elem in elem: + can_generate_dependency_metadata = False + # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable> + env_var_name = env_var_elem.get( 'name', None ) + if env_var_name: + for tool_dict in metadata_dict.get( 'tools', [] ): + requirements = tool_dict.get( 'requirements', [] ) + for requirement_dict in requirements: + # {"name": "R_SCRIPT_PATH", "type": "set_environment", "version": null} + req_name = requirement_dict.get( 'name', None ) + req_type = requirement_dict.get( 'type', None ) + if req_name==env_var_name and req_type==tool_dependency_type: + can_generate_dependency_metadata = True + break + if requirements and not can_generate_dependency_metadata: + # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement> + # tag for any tool in the repository. + break + return can_generate_dependency_metadata def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ): """ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make @@ -93,6 +142,34 @@ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name ) invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) ) return invalid_files_and_errors_tups +def clean_repository_clone_url( repository_clone_url ): + if repository_clone_url.find( '@' ) > 0: + # We have an url that includes an authenticated user, something like: + # http://test@bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '@' ) + tmp_url = items[ 1 ] + elif repository_clone_url.find( '//' ) > 0: + # We have an url that includes only a protocol, something like: + # http://bx.psu.edu:9009/repos/some_username/column + items = repository_clone_url.split( '//' ) + tmp_url = items[ 1 ] + else: + tmp_url = repository_clone_url + return tmp_url +def clean_repository_metadata( trans, id, changeset_revisions ): + # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions. + # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older + # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later.. + changeset_revisions_checked = [] + for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \ + .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \ + .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision, + trans.model.RepositoryMetadata.table.c.update_time.desc() ): + changeset_revision = repository_metadata.changeset_revision + can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions + if can_delete: + trans.sa_session.delete( repository_metadata ) + trans.sa_session.flush() def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ): """Clone the repository up to the specified changeset_revision. 
No subsequent revisions will be present in the cloned repository.""" try: @@ -107,6 +184,91 @@ error_message = 'Error cloning repository: %s' % str( e ) log.debug( error_message ) return False, error_message +def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ): + # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of + # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only + # when this method returns the string 'not equal and not subset'. + ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] ) + ancestor_tools = ancestor_metadata_dict.get( 'tools', [] ) + ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ] + ancestor_guids.sort() + ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] ) + ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] ) + current_datatypes = current_metadata_dict.get( 'datatypes', [] ) + current_tools = current_metadata_dict.get( 'tools', [] ) + current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ] + current_guids.sort() + current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] ) + current_workflows = current_metadata_dict.get( 'workflows', [] ) + # Handle case where no metadata exists for either changeset. + if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes: + return 'no metadata' + workflow_comparison = compare_workflows( ancestor_workflows, current_workflows ) + datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes ) + # Handle case where all metadata is the same. + if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal': + return 'equal' + if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]: + is_subset = True + for guid in ancestor_guids: + if guid not in current_guids: + is_subset = False + break + if is_subset: + return 'subset' + return 'not equal and not subset' +def compare_datatypes( ancestor_datatypes, current_datatypes ): + # Determine if ancestor_datatypes is the same as current_datatypes + # or if ancestor_datatypes is a subset of current_datatypes. Each + # datatype dict looks something like: + # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} + if len( ancestor_datatypes ) <= len( current_datatypes ): + for ancestor_datatype in ancestor_datatypes: + # Currently the only way to differentiate datatypes is by name. 
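
# Illustrative sketch, not part of this changeset: the three possible results of compare_datatypes(),
# using the datatype dict layout shown in the comment above (the BED entry is a hypothetical example).
from galaxy.util.shed_util_common import compare_datatypes

ancestor_datatypes = [ { 'dtype': 'galaxy.datatypes.images:Image', 'extension': 'pdf', 'mimetype': 'application/pdf' } ]
current_datatypes = ancestor_datatypes + [ { 'dtype': 'galaxy.datatypes.interval:Bed', 'extension': 'bed', 'mimetype': None } ]
print compare_datatypes( ancestor_datatypes, ancestor_datatypes )   # 'equal'
print compare_datatypes( ancestor_datatypes, current_datatypes )    # 'subset'
print compare_datatypes( current_datatypes, ancestor_datatypes )    # 'not equal and not subset'
# compare_changeset_revisions() combines this result with the tool guid and workflow comparisons
# to decide whether a new repository_metadata record is needed.
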
+ ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ] + ancestor_datatype_extension = ancestor_datatype[ 'extension' ] + ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None ) + found_in_current = False + for current_datatype in current_datatypes: + if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \ + current_datatype[ 'extension' ] == ancestor_datatype_extension and \ + current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype: + found_in_current = True + break + if not found_in_current: + return 'not equal and not subset' + if len( ancestor_datatypes ) == len( current_datatypes ): + return 'equal' + else: + return 'subset' + return 'not equal and not subset' +def compare_workflows( ancestor_workflows, current_workflows ): + # Determine if ancestor_workflows is the same as current_workflows + # or if ancestor_workflows is a subset of current_workflows. + if len( ancestor_workflows ) <= len( current_workflows ): + for ancestor_workflow_tup in ancestor_workflows: + # ancestor_workflows is a list of tuples where each contained tuple is + # [ <relative path to the .ga file in the repository>, <exported workflow dict> ] + ancestor_workflow_dict = ancestor_workflow_tup[1] + # Currently the only way to differentiate workflows is by name. + ancestor_workflow_name = ancestor_workflow_dict[ 'name' ] + num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] ) + found_in_current = False + for current_workflow_tup in current_workflows: + current_workflow_dict = current_workflow_tup[1] + # Assume that if the name and number of steps are euqal, + # then the workflows are the same. Of course, this may + # not be true... + if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps: + found_in_current = True + break + if not found_in_current: + return 'not equal and not subset' + if len( ancestor_workflows ) == len( current_workflows ): + return 'equal' + else: + return 'subset' + return 'not equal and not subset' def concat_messages( msg1, msg2 ): if msg1: if msg2: @@ -165,6 +327,60 @@ return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name ) else: return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name ) +def generate_datatypes_metadata( datatypes_config, metadata_dict ): + """Update the received metadata_dict with information from the parsed datatypes_config.""" + tree = ElementTree.parse( datatypes_config ) + root = tree.getroot() + ElementInclude.include( root ) + repository_datatype_code_files = [] + datatype_files = root.find( 'datatype_files' ) + if datatype_files: + for elem in datatype_files.findall( 'datatype_file' ): + name = elem.get( 'name', None ) + repository_datatype_code_files.append( name ) + metadata_dict[ 'datatype_files' ] = repository_datatype_code_files + datatypes = [] + registration = root.find( 'registration' ) + if registration: + for elem in registration.findall( 'datatype' ): + datatypes_dict = {} + display_in_upload = elem.get( 'display_in_upload', None ) + if display_in_upload: + datatypes_dict[ 'display_in_upload' ] = display_in_upload + dtype = elem.get( 'type', None ) + if dtype: + datatypes_dict[ 'dtype' ] = dtype + extension = elem.get( 'extension', None ) + if extension: + datatypes_dict[ 'extension' ] = extension + max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None ) + if max_optional_metadata_filesize: + datatypes_dict[ 
'max_optional_metadata_filesize' ] = max_optional_metadata_filesize + mimetype = elem.get( 'mimetype', None ) + if mimetype: + datatypes_dict[ 'mimetype' ] = mimetype + subclass = elem.get( 'subclass', None ) + if subclass: + datatypes_dict[ 'subclass' ] = subclass + if datatypes_dict: + datatypes.append( datatypes_dict ) + if datatypes: + metadata_dict[ 'datatypes' ] = datatypes + return metadata_dict +def generate_environment_dependency_metadata( elem, tool_dependencies_dict ): + """The value of env_var_name must match the value of the "set_environment" type in the tool config's <requirements> tag set.""" + requirements_dict = {} + for env_elem in elem: + env_name = env_elem.get( 'name', None ) + if env_name: + requirements_dict [ 'name' ] = env_name + requirements_dict [ 'type' ] = 'environment variable' + if requirements_dict: + if 'set_environment' in tool_dependencies_dict: + tool_dependencies_dict[ 'set_environment' ].append( requirements_dict ) + else: + tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ] + return tool_dependencies_dict def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ): if as_html: new_line = '<br/>' @@ -201,7 +417,7 @@ correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end ) message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line ) return message -def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None, +def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict=None, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ): """ Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip, @@ -212,6 +428,8 @@ The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config. """ + if shed_config_dict is None: + shed_config_dict = {} if updating_installed_repository: # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which # we have pulled updates. @@ -270,18 +488,23 @@ if '.hg' in dirs: dirs.remove( '.hg' ) for name in files: + # See if we have a repository dependencies defined. + if name == 'repository_dependencies.xml': + relative_path_to_repository_dependencies = get_relative_path_to_repository_file( root, + name, + relative_install_dir, + work_dir, + shed_config_dict, + resetting_all_metadata_on_repository ) + metadata_dict = generate_repository_dependency_metadata( relative_path_to_repository_dependencies, metadata_dict ) # See if we have a READ_ME file. 
- if name.lower() in readme_file_names: - if resetting_all_metadata_on_repository: - full_path_to_readme = os.path.join( root, name ) - stripped_path_to_readme = full_path_to_readme.replace( work_dir, '' ) - if stripped_path_to_readme.startswith( '/' ): - stripped_path_to_readme = stripped_path_to_readme[ 1: ] - relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme ) - else: - relative_path_to_readme = os.path.join( root, name ) - if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ): - relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ] + elif name.lower() in readme_file_names: + relative_path_to_readme = get_relative_path_to_repository_file( root, + name, + relative_install_dir, + work_dir, + shed_config_dict, + resetting_all_metadata_on_repository ) metadata_dict[ 'readme' ] = relative_path_to_readme # See if we have a tool config. elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ): @@ -311,16 +534,14 @@ invalid_tool_configs.append( name ) break if can_set_metadata: - if resetting_all_metadata_on_repository: - full_path_to_tool_config = os.path.join( root, name ) - stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' ) - if stripped_path_to_tool_config.startswith( '/' ): - stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ] - relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config ) - else: - relative_path_to_tool_config = os.path.join( root, name ) - if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ): - relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ] + relative_path_to_tool_config = get_relative_path_to_repository_file( root, + name, + relative_install_dir, + work_dir, + shed_config_dict, + resetting_all_metadata_on_repository ) + + metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict ) else: for tup in invalid_files_and_errors_tups: @@ -350,6 +571,131 @@ app.config.tool_data_path = original_tool_data_path app.config.tool_data_table_config_path = original_tool_data_table_config_path return metadata_dict, invalid_file_tups +def generate_package_dependency_metadata( elem, tool_dependencies_dict ): + """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set.""" + requirements_dict = {} + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + dependency_key = '%s/%s' % ( package_name, package_version ) + requirements_dict [ 'name' ] = package_name + requirements_dict [ 'version' ] = package_version + requirements_dict [ 'type' ] = 'package' + for sub_elem in elem: + if sub_elem.tag == 'readme': + requirements_dict[ 'readme' ] = sub_elem.text + if requirements_dict: + tool_dependencies_dict[ dependency_key ] = requirements_dict + return tool_dependencies_dict +def generate_repository_dependency_metadata( repository_dependencies_config, metadata_dict ): + repository_dependencies_tups = [] + try: + # Make sure we're looking at a valid repository_dependencies.xml file. 
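
# Illustrative sketch, not part of this changeset: the layout this function expects in a
# repository_dependencies.xml file and the tuples it is intended to collect from it. The tool shed
# URL, repository name, owner and changeset hash below are hypothetical examples.
from xml.etree import ElementTree

repository_dependencies_xml = '''<repositories>
    <repository toolshed="http://toolshed.g2.bx.psu.edu" name="emboss_datatypes" owner="devteam" changeset_revision="a89163f31369"/>
</repositories>'''
root = ElementTree.fromstring( repository_dependencies_xml )
print [ ( r.attrib[ 'toolshed' ], r.attrib[ 'name' ], r.attrib[ 'owner' ], r.attrib[ 'changeset_revision' ] )
        for r in root.findall( 'repository' ) ]
# -> [('http://toolshed.g2.bx.psu.edu', 'emboss_datatypes', 'devteam', 'a89163f31369')]
# These tuples are stored under metadata_dict[ 'repository_dependencies' ] and rendered by the new
# table added to templates/webapps/community/repository/common.mako later in this changeset.
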
+ tree = util.parse_xml( repository_dependencies_config ) + root = element_tree.getroot() + is_valid = element_tree_root.tag == 'repositories' + except Exception, e: + log.debug( "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) ) ) + is_valid = False + if is_valid: + for repository_elem in root.findall( 'repository' ): + repository_dependencies_tups.append( ( repository_elem.attrib[ 'toolshed' ], + repository_elem.attrib[ 'name' ], + repository_elem.attrib[ 'owner'], + repository_elem.attrib[ 'changeset_revision' ] ) ) + if repository_dependencies_tups: + metadata_dict[ 'repository_dependencies' ] = repository_dependencies_tups + return metadata_dict +def generate_tool_dependency_metadata( app, repository, tool_dependencies_config, metadata_dict, original_repository_metadata=None ): + """ + If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository, + then update the received metadata_dict with information from the parsed tool_dependencies_config. + """ + if original_repository_metadata: + # Keep a copy of the original tool dependencies dictionary in the metadata. + original_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None ) + else: + original_tool_dependencies_dict = None + try: + tree = ElementTree.parse( tool_dependencies_config ) + except Exception, e: + log.debug( "Exception attempting to parse tool_dependencies.xml: %s" %str( e ) ) + return metadata_dict + root = tree.getroot() + ElementInclude.include( root ) + tool_dependencies_dict = {} + if can_generate_tool_dependency_metadata( root, metadata_dict ): + for elem in root: + if elem.tag == 'package': + tool_dependencies_dict = generate_package_dependency_metadata( elem, tool_dependencies_dict ) + elif elem.tag == 'set_environment': + tool_dependencies_dict = generate_environment_dependency_metadata( elem, tool_dependencies_dict ) + # Handle tool dependency installation via other means here (future). + if tool_dependencies_dict: + metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict + else: + log.debug( "Name, version and type from the <requirement> tag does not match the information in the tool_dependencies.xml file. Tool dependencies will be ignored." ) + if tool_dependencies_dict: + if original_tool_dependencies_dict: + # We're generating metadata on an update pulled to a tool shed repository installed into a Galaxy instance, so handle changes to + # tool dependencies appropriately. + handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_tool_dependencies_dict, tool_dependencies_dict ) + metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict + return metadata_dict +def generate_tool_guid( repository_clone_url, tool ): + """ + Generate a guid for the installed tool. It is critical that this guid matches the guid for + the tool in the Galaxy tool shed from which it is being installed. The form of the guid is + <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version> + """ + tmp_url = clean_repository_clone_url( repository_clone_url ) + return '%s/%s/%s' % ( tmp_url, tool.id, tool.version ) +def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ): + """Update the received metadata_dict with changes that have been applied to the received tool.""" + # Generate the guid + guid = generate_tool_guid( repository_clone_url, tool ) + # Handle tool.requirements. 
+ tool_requirements = [] + for tr in tool.requirements: + requirement_dict = dict( name=tr.name, + type=tr.type, + version=tr.version ) + tool_requirements.append( requirement_dict ) + # Handle tool.tests. + tool_tests = [] + if tool.tests: + for ttb in tool.tests: + required_files = [] + for required_file in ttb.required_files: + value, extra = required_file + required_files.append( ( value ) ) + inputs = [] + for input in ttb.inputs: + name, value, extra = input + inputs.append( ( name, value ) ) + outputs = [] + for output in ttb.outputs: + name, file_name, extra = output + outputs.append( ( name, strip_path( file_name ) if file_name else None ) ) + test_dict = dict( name=ttb.name, + required_files=required_files, + inputs=inputs, + outputs=outputs ) + tool_tests.append( test_dict ) + tool_dict = dict( id=tool.id, + guid=guid, + name=tool.name, + version=tool.version, + description=tool.description, + version_string_cmd = tool.version_string_cmd, + tool_config=tool_config, + requirements=tool_requirements, + tests=tool_tests ) + if 'tools' in metadata_dict: + metadata_dict[ 'tools' ].append( tool_dict ) + else: + metadata_dict[ 'tools' ] = [ tool_dict ] + return metadata_dict def get_changectx_for_changeset( repo, changeset_revision, **kwd ): """Retrieve a specified changectx from a repository""" for changeset in repo.changelog: @@ -393,6 +739,13 @@ if deleted: return 'DELETED' return None +def get_readme_file_names( repository_name ): + readme_files = [ 'readme', 'read_me', 'install' ] + valid_filenames = [ r for r in readme_files ] + for r in readme_files: + valid_filenames.append( '%s.txt' % r ) + valid_filenames.append( '%s.txt' % repository_name ) + return valid_filenames def get_repository_file_contents( file_path ): if is_gzip( file_path ): to_html = to_html_str( '\ngzip compressed file\n' ) @@ -454,6 +807,34 @@ fh.close() return tmp_filename return None +def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ): + if resetting_all_metadata_on_repository: + # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata. 
+ work_dir = repository_files_dir + sample_file_metadata_paths = [] + sample_file_copy_paths = [] + for root, dirs, files in os.walk( repository_files_dir ): + if root.find( '.hg' ) < 0: + for name in files: + if name.endswith( '.sample' ): + if resetting_all_metadata_on_repository: + full_path_to_sample_file = os.path.join( root, name ) + stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' ) + if stripped_path_to_sample_file.startswith( '/' ): + stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ] + relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file ) + if os.path.exists( relative_path_to_sample_file ): + sample_file_copy_paths.append( relative_path_to_sample_file ) + else: + sample_file_copy_paths.append( full_path_to_sample_file ) + else: + relative_path_to_sample_file = os.path.join( root, name ) + sample_file_copy_paths.append( relative_path_to_sample_file ) + if tool_path and relative_install_dir: + if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ): + relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :] + sample_file_metadata_paths.append( relative_path_to_sample_file ) + return sample_file_metadata_paths, sample_file_copy_paths def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ): parent_id = None # Compare from most recent to oldest. @@ -472,6 +853,20 @@ if parent_id is None: # The tool did not change through all of the changeset revisions. return old_id +def get_relative_path_to_repository_file( root, name, relative_install_dir, work_dir, shed_config_dict, resetting_all_metadata_on_repository ): + if resetting_all_metadata_on_repository: + full_path_to_file = os.path.join( root, name ) + stripped_path_to_file = full_path_to_file.replace( work_dir, '' ) + if stripped_path_to_file.startswith( '/' ): + stripped_path_to_file = stripped_path_to_file[ 1: ] + relative_path_to_file = os.path.join( relative_install_dir, stripped_path_to_file ) + else: + relative_path_to_file = os.path.join( root, name ) + if relative_install_dir and \ + shed_config_dict.get( 'tool_path' ) and \ + relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ): + relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ] + return relative_path_to_file def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ): # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories. 
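
# Illustrative sketch, not part of this changeset: how the new get_relative_path_to_repository_file()
# helper defined above behaves. Every path value here is a hypothetical example.
from galaxy.util.shed_util_common import get_relative_path_to_repository_file

# Normal case: a configured tool_path prefix is stripped so the stored path stays relative.
print get_relative_path_to_repository_file( root='../shed_tools/toolshed/repos/devteam/filter/abc123/filter',
                                            name='filtering.xml',
                                            relative_install_dir='toolshed/repos/devteam/filter/abc123/filter',
                                            work_dir=None,
                                            shed_config_dict={ 'tool_path': '../shed_tools' },
                                            resetting_all_metadata_on_repository=False )
# -> toolshed/repos/devteam/filter/abc123/filter/filtering.xml
# When resetting_all_metadata_on_repository is True, the temporary clone directory passed as
# work_dir is stripped instead and the remainder is joined onto relative_install_dir.
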
message = '' @@ -566,6 +961,34 @@ shutil.rmtree( dir ) except: pass +def reset_all_metadata_on_installed_repository( trans, id ): + """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" + repository = get_installed_tool_shed_repository( trans, id ) + tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) + repository_clone_url = generate_clone_url_for_installed_repository( trans, repository ) + tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app ) + if relative_install_dir: + original_metadata_dict = repository.metadata + metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app, + repository=repository, + repository_clone_url=repository_clone_url, + shed_config_dict = repository.get_shed_config_dict( trans.app ), + relative_install_dir=relative_install_dir, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False ) + repository.metadata = metadata_dict + if metadata_dict != original_metadata_dict: + update_in_shed_tool_config( trans.app, repository ) + trans.sa_session.add( repository ) + trans.sa_session.flush() + log.debug( 'Metadata has been reset on repository %s.' % repository.name ) + else: + log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name ) + else: + log.debug( 'Error locating installation directory for repository %s.' % repository.name ) + return invalid_file_tups, metadata_dict def reset_all_metadata_on_repository_in_tool_shed( trans, id ): """Reset all metadata on a single repository in a tool shed.""" def reset_all_tool_versions( trans, id, repo ): diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/common.mako --- a/templates/webapps/community/repository/common.mako +++ b/templates/webapps/community/repository/common.mako @@ -83,7 +83,7 @@ hg clone <a href="${clone_str}">${clone_str}</a></%def> -<%def name="render_repository_items( repository_metadata_id, metadata, can_set_metadata=False )"> +<%def name="render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=False )"><% from galaxy.tool_shed.encoding_util import tool_shed_encode %> %if metadata or can_set_metadata: <p/> @@ -91,6 +91,28 @@ <div class="toolFormTitle">Preview tools and inspect metadata by tool version</div><div class="toolFormBody"> %if metadata: + %if 'repository_dependencies' in metadata: + <div class="form-row"> + <table class="grid"> + <tr> + <td><b>tool shed</b></td> + <td><b>name</b></td> + <td><b>version</b></td> + <td><b>type</b></td> + </tr> + %for repository_dependency_tup in metadata[ 'repository_dependencies' ]: + <% toolshed, name, owner, changeset_revision = repository_dependency_tup %> + <tr> + <td>${toolshed | h}</td> + <td>${name | h}</td> + <td>${owner | h}</td> + <td>${changeset_revision | h}</td> + </tr> + %endfor + </table> + </div> + <div style="clear: both"></div> + %endif %if 'tool_dependencies' in metadata: <% # See if tool dependencies are packages, environment settings or both. 
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -204,7 +204,7 @@
 </form>
 </div>
 </div>
-${render_repository_items( repository_metadata_id, metadata, can_set_metadata=True )}
+${render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=True )}
 <p/>
 <div class="toolForm">
 <div class="toolFormTitle">Manage categories</div>
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/preview_tools_in_changeset.mako
--- a/templates/webapps/community/repository/preview_tools_in_changeset.mako
+++ b/templates/webapps/community/repository/preview_tools_in_changeset.mako
@@ -79,4 +79,4 @@
 </div>
 </div>
 <p/>
-${render_repository_items( repository_metadata_id, metadata )}
+${render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=False )}
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -181,7 +181,7 @@
 %endif
 </div>
 </div>
-${render_repository_items( repository_metadata_id, metadata )}
+${render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=False )}
 %if repository.categories:
 <p/>
 <div class="toolForm">

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--
This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.