commit/galaxy-central: greg: Partial framework support for enhanced tool dependency definitions that enable installation of binaries into specified architectures. More coming soon...
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/f4a839856cfb/ Changeset: f4a839856cfb User: greg Date: 2013-09-06 15:38:12 Summary: Partial framework support for enhanced tool dependency definitions that enable installation of binaries into specified architectures. More coming soon... Affected #: 8 files diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -252,7 +252,7 @@ tool_shed_repository.uninstalled = True # Remove all installed tool dependencies, but don't touch any repository dependencies.. for tool_dependency in tool_shed_repository.installed_tool_dependencies: - uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency ) + uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency ) if error_message: errors = '%s %s' % ( errors, error_message ) tool_shed_repository.deleted = True @@ -1559,7 +1559,7 @@ if tool_dependency.can_uninstall: tool_dependencies_for_uninstallation.append( tool_dependency ) for tool_dependency in tool_dependencies_for_uninstallation: - uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans, tool_dependency ) + uninstalled, error_message = tool_dependency_util.remove_tool_dependency( trans.app, tool_dependency ) if error_message: errors = True message = '%s %s' % ( message, error_message ) diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py @@ -281,8 +281,6 @@ source_dir=os.path.join( action_dict[ 'source_directory' ] ), 
destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) ) elif action_type == 'move_file': - # TODO: Remove this hack that resets current_dir so that the pre-compiled bwa binary can be found. - # current_dir = '/Users/gvk/workspaces_2008/bwa/bwa-0.5.9' td_common_util.move_file( current_dir=current_dir, source=os.path.join( action_dict[ 'source' ] ), destination_dir=os.path.join( action_dict[ 'destination' ] ) ) diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py @@ -12,6 +12,7 @@ from tool_shed.util import encoding_util from tool_shed.util import tool_dependency_util from tool_shed.util import xml_util +from tool_shed.galaxy_install.tool_dependencies import td_common_util from galaxy.model.orm import and_ from galaxy.web import url_for from galaxy.util import asbool @@ -106,6 +107,92 @@ text = common_util.tool_shed_get( app, tool_shed_url, url ) return text +def handle_complex_repository_dependency_for_package( app, elem, package_name, package_version, tool_shed_repository ): + tool_dependency = None + tool_shed = elem.attrib[ 'toolshed' ] + required_repository_name = elem.attrib[ 'name' ] + required_repository_owner = elem.attrib[ 'owner' ] + default_required_repository_changeset_revision = elem.attrib[ 'changeset_revision' ] + required_repository = get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, + tool_shed, + required_repository_name, + required_repository_owner, + default_required_repository_changeset_revision ) + tmp_filename = None + if required_repository: + required_repository_changeset_revision = required_repository.installed_changeset_revision + # Define the installation directory for the required tool dependency package in the required repository. 
+ required_repository_package_install_dir = \ + get_tool_dependency_install_dir( app=app, + repository_name=required_repository_name, + repository_owner=required_repository_owner, + repository_changeset_revision=required_repository_changeset_revision, + tool_dependency_type='package', + tool_dependency_name=package_name, + tool_dependency_version=package_version ) + # Define the this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the + # required repository's installed tool dependency package. + dependent_install_dir = get_tool_dependency_install_dir( app=app, + repository_name=tool_shed_repository.name, + repository_owner=tool_shed_repository.owner, + repository_changeset_revision=tool_shed_repository.installed_changeset_revision, + tool_dependency_type='package', + tool_dependency_name=package_name, + tool_dependency_version=package_version ) + # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package. + # We can get everything we need from the discovered installed required_repository. + if required_repository.status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED, + app.model.ToolShedRepository.installation_status.INSTALLED ]: + if not os.path.exists( required_repository_package_install_dir ): + print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir ) + repo_files_dir = required_repository.repo_files_directory( app ) + tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' ) + if tool_dependencies_config: + config_to_use = tool_dependencies_config + else: + message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." 
% \ + ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) ) + raise Exception( message ) + else: + # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision + # should be updated if it's not current. + text = get_updated_changeset_revisions_from_tool_shed( app=app, + tool_shed_url=tool_shed, + name=required_repository_name, + owner=required_repository_owner, + changeset_revision=required_repository_changeset_revision ) + if text: + updated_changeset_revisions = listify( text ) + # The list of changeset revisions is in reverse order, so the newest will be first. + required_repository_changeset_revision = updated_changeset_revisions[ 0 ] + # Make a call to the tool shed to get the required repository's tool_dependencies.xml file. + tmp_filename = create_temporary_tool_dependencies_config( app, + tool_shed, + required_repository_name, + required_repository_owner, + required_repository_changeset_revision ) + config_to_use = tmp_filename + tool_dependency, actions_dict = populate_actions_dict( app=app, + dependent_install_dir=dependent_install_dir, + required_install_dir=required_repository_package_install_dir, + tool_shed_repository=tool_shed_repository, + required_repository=required_repository, + package_name=package_name, + package_version=package_version, + tool_dependencies_config=config_to_use ) + if tmp_filename: + try: + os.remove( tmp_filename ) + except: + pass + # Install and build the package via fabric. + install_and_build_package_via_fabric( app, tool_dependency, actions_dict ) + else: + message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." 
% \ + ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) ) + raise Exception( message ) + return tool_dependency + def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem, required_repository ): """ Populate a list of actions for creating an env.sh file for a dependent repository. The received elem is the <package> tag set associated @@ -222,88 +309,9 @@ for package_elem in elem: if package_elem.tag == 'repository': # We have a complex repository dependency definition. - tool_shed = package_elem.attrib[ 'toolshed' ] - required_repository_name = package_elem.attrib[ 'name' ] - required_repository_owner = package_elem.attrib[ 'owner' ] - default_required_repository_changeset_revision = package_elem.attrib[ 'changeset_revision' ] - required_repository = get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, - tool_shed, - required_repository_name, - required_repository_owner, - default_required_repository_changeset_revision ) - tmp_filename = None - if required_repository: - required_repository_changeset_revision = required_repository.installed_changeset_revision - # Define the installation directory for the required tool dependency package in the required repository. - required_repository_package_install_dir = \ - get_tool_dependency_install_dir( app=app, - repository_name=required_repository_name, - repository_owner=required_repository_owner, - repository_changeset_revision=required_repository_changeset_revision, - tool_dependency_type='package', - tool_dependency_name=package_name, - tool_dependency_version=package_version ) - # Define the this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the - # required repository's installed tool dependency package. 
- dependent_install_dir = get_tool_dependency_install_dir( app=app, - repository_name=tool_shed_repository.name, - repository_owner=tool_shed_repository.owner, - repository_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dependency_type='package', - tool_dependency_name=package_name, - tool_dependency_version=package_version ) - # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package. - # We can get everything we need from the discovered installed required_repository. - if required_repository.status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED, - app.model.ToolShedRepository.installation_status.INSTALLED ]: - if not os.path.exists( required_repository_package_install_dir ): - print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir ) - repo_files_dir = required_repository.repo_files_directory( app ) - tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' ) - if tool_dependencies_config: - config_to_use = tool_dependencies_config - else: - message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \ - ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) ) - raise Exception( message ) - else: - # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision - # should be updated if it's not current. 
- text = get_updated_changeset_revisions_from_tool_shed( app=app, - tool_shed_url=tool_shed, - name=required_repository_name, - owner=required_repository_owner, - changeset_revision=required_repository_changeset_revision ) - if text: - updated_changeset_revisions = listify( text ) - # The list of changeset revisions is in reverse order, so the newest will be first. - required_repository_changeset_revision = updated_changeset_revisions[ 0 ] - # Make a call to the tool shed to get the required repository's tool_dependencies.xml file. - tmp_filename = create_temporary_tool_dependencies_config( app, - tool_shed, - required_repository_name, - required_repository_owner, - required_repository_changeset_revision ) - config_to_use = tmp_filename - tool_dependency, actions_dict = populate_actions_dict( app=app, - dependent_install_dir=dependent_install_dir, - required_install_dir=required_repository_package_install_dir, - tool_shed_repository=tool_shed_repository, - required_repository=required_repository, - package_name=package_name, - package_version=package_version, - tool_dependencies_config=config_to_use ) - if tmp_filename: - try: - os.remove( tmp_filename ) - except: - pass - # Install and build the package via fabric. - install_and_build_package_via_fabric( app, tool_dependency, actions_dict ) - else: - message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." 
% \ - ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) ) - raise Exception( message ) + rd_tool_dependency = handle_complex_repository_dependency_for_package( app, package_elem, package_name, package_version, tool_shed_repository ) + if rd_tool_dependency and rd_tool_dependency.status == app.model.ToolDependency.installation_status.ERROR: + print "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message ) elif package_elem.tag == 'install': # <install version="1.0"> # Get the installation directory for tool dependencies that will be installed for the received tool_shed_repository. @@ -333,114 +341,32 @@ type='package', status=app.model.ToolDependency.installation_status.INSTALLING, set_status=True ) - # Get the information that defines the current platform. + # Get the information about the current platform in case the tool dependency definition includes tag sets for installing + # compiled binaries. platform_info_dict = tool_dependency_util.get_platform_info_dict() if package_install_version == '1.0': - # Handle tool dependency installation using a fabric method included in the Galaxy framework. The first thing we do - # is check the installation architecture to see if we have a precompiled binary that works on the target system. + # Handle tool dependency installation using a fabric method included in the Galaxy framework. + actions_elem_tuples = td_common_util.parse_package_elem( package_elem, + platform_info_dict=platform_info_dict, + include_after_install_actions=True ) + # At this point we have a list of <actions> elems that are either defined within an <actions_group> tag set with <actions> + # sub-elements that contains os and architecture attributes filtered by the platform into which the appropriate compiled + # binary will be installed, or not defined within an <actions_group> tag set and not filtered. 
binary_installed = False - actions_elem_tuples = [] - # Build a list of grouped and ungrouped <actions> tagsets to be processed in the order they are defined in the - # tool_dependencies.xml file. - for elem in package_elem: - # Default to not treating actions as grouped. - grouped = False - # Skip any element that is not <actions> or <actions_group>. This will also skip comments and <readme> tags. - if elem.tag not in [ 'actions', 'actions_group' ]: - continue - if elem.tag == 'actions': - # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system. - grouped = False - actions_elem_tuples.append( ( grouped, elem ) ) - else: - # Record the number of <actions> elements, in order to filter out any <action> elements that precede <actions> - # elements. - actions_elem_count = len( elem.findall( 'actions' ) ) - # Record the number of <actions> elements that have architecture and os specified, in order to filter out any - # platform-independent <actions> elements that come before platform-specific <actions> elements. This call to - # elem.findall is filtered by tags that have both the os and architecture specified. - # For more details, see http://docs.python.org/2/library/xml.etree.elementtree.html Section 19.7.2.1. - platform_actions_element_count = len( elem.findall( 'actions[@architecture][@os]' ) ) - platform_actions_elements_processed = 0 - actions_elems_processed = 0 - # We have an actions_group element, and its child <actions> elements should therefore be compared with the current - # operating system and processor architecture. - grouped = True - # The tagsets that will go into the actions_elem_list are those that install a precompiled binary if the - # architecture and operating system match its defined attributes. 
If precompiled binary is not installed - # the first <actions> tag following those that have the os and architecture attributes will be processed - # in order to install and compile the source. - actions_elem_list = [] - # The tagsets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These - # will only be processed if they are at the end of the <actions_group> tagset. See below for details. - after_install_actions = [] - platform_independent_actions = [] - # Loop through the <actions_group> element and build the actions_elem_list and the after_install_actions list. - for child_element in elem: - if child_element.tag == 'actions': - actions_elems_processed += 1 - system = child_element.get( 'os' ) - architecture = child_element.get( 'architecture' ) - # Skip <actions> tags that have only one of architecture or os specified, in order for the count in - # platform_actions_elements_processed to remain accurate. - if ( system and not architecture ) or ( architecture and not system ): - log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' ) - continue - # Since we are inside an <actions_group> tagset, compare it with our current platform information and filter - # the <actions> tagsets that don't match. Require both the os and architecture attributes to be defined in - # order to find a match. - if system and architecture: - platform_actions_elements_processed += 1 - # If either the os or architecture do not match the platform, this <actions> tag will not be considered - # a match. Skip it and proceed with checking the next one. - if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture: - continue - else: - # <actions> tags without both os and architecture attributes are only allowed to be specified after - # platform-specific <actions> tags. 
If we find a platform-independent <actions> tag before all - # platform-specific <actions> tags have been processed, log a message stating this and skip to the - # next <actions> tag. - if platform_actions_elements_processed < platform_actions_element_count: - message = 'Error: <actions> tags without os and architecture attributes are only allowed ' - message += 'after <actions> tags with os and architecture attributes specified. Skipping ' - message += 'current <actions> tag.' - log.debug( message ) - continue - # If we reach this point, it means one of two things: 1) The system and architecture attributes are not - # defined in this <actions> tag, or 2) The system and architecture attributes are defined, and they are - # an exact match for the current platform. Append the child element to the list of elements to process. - actions_elem_list.append( child_element ) - elif child_element.tag == 'action': - # Any <action> tags within an <actions_group> tagset must come after all <actions> tags. - if actions_elems_processed == actions_elem_count: - # If all <actions> elements have been processed, then this <action> element can be appended to the - # list of actions to execute within this group. - after_install_actions.append( child_element ) - else: - # If any <actions> elements remain to be processed, then log a message stating that <action> - # elements are not allowed to precede any <actions> elements within an <actions_group> tagset. - message = 'Error: <action> tags are only allowed at the end of an <actions_group> ' - message += 'tagset, after all <actions> tags. ' - message += 'Skipping <%s> element with type %s.' 
% ( child_element.tag, child_element.get( 'type' ) ) - log.debug( message ) - continue - if after_install_actions: - actions_elem_list.extend( after_install_actions ) - actions_elem_tuples.append( ( grouped, actions_elem_list ) ) - # At this point we have a list of <actions> elems that are either defined within an <actions_group> tagset, and filtered by - # the current platform, or not defined within an <actions_group> tagset, and not filtered. - for grouped, actions_elems in actions_elem_tuples: - if grouped: - # Platform matching is only performed inside <actions_group> tagsets, os and architecture attributes are otherwise ignored. + for in_actions_group, actions_elems in actions_elem_tuples: + if in_actions_group: + # Platform matching is only performed inside <actions_group> tag sets, os and architecture attributes are otherwise + # ignored. for actions_elem in actions_elems: system = actions_elem.get( 'os' ) architecture = actions_elem.get( 'architecture' ) - # If this <actions> element has the os and architecture attributes defined, then we only want to process - # until a successful installation is achieved. + # If this <actions> element has the os and architecture attributes defined, then we only want to process until a + # successful installation is achieved. if system and architecture: - # If an <actions> tag has been defined that matches our current platform, and the recipe specified - # within that <actions> tag has been successfully processed, skip any remaining platform-specific - # <actions> tags. + # If an <actions> tag has been defined that matches our current platform, and the recipe specified within + # that <actions> tag has been successfully processed, skip any remaining platform-specific <actions> tags. + # We cannot break out of the look here because there may be <action> tags at the end of the <actions_group> + # tag set that must be processed. 
if binary_installed: continue # No platform-specific <actions> recipe has yet resulted in a successful installation. @@ -451,33 +377,32 @@ actions_elem=actions_elem, action_elem=None ) sa_session.refresh( tool_dependency ) - if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR: + if tool_dependency.status == app.model.ToolDependency.installation_status.INSTALLED: # If an <actions> tag was found that matches the current platform, and the install_via_fabric method # did not result in an error state, set binary_installed to True in order to skip any remaining # platform-specific <actions> tags. - if not binary_installed: - binary_installed = True + binary_installed = True else: - # Otherwise, move on to the next matching <actions> tag, or any defined <actions> tags that do not - # contain platform-dependent recipes. - if binary_installed: - binary_installed = False - print 'Encountered an error downloading binary for %s version %s: %s' % \ - ( package_name, package_version, tool_dependency.error_message ) + # Process the next matching <actions> tag, or any defined <actions> tags that do not contain platform + # dependent recipes. + print 'Error downloading binary for %s version %s: %s' % \ + ( package_name, package_version, tool_dependency.error_message ) else: # If no <actions> tags have been defined that match our current platform, or none of the matching # <actions> tags resulted in a successful tool dependency status, proceed with one and only one # <actions> tag that is not defined to be platform-specific. if not binary_installed: - log.debug( 'Platform-specific recipe failed or not found. Proceeding with platform-independent install recipe.' ) + print 'Binary installation did not occur, so proceeding with install and compile recipe.' + # Make sure to reset for installation if attempt at binary installation resulted in an error. 
+ if tool_dependency.status != app.model.ToolDependency.installation_status.NEW: + removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency ) install_via_fabric( app, tool_dependency, install_dir, package_name=package_name, actions_elem=actions_elem, action_elem=None ) - break - # Perform any final actions that have been defined within the actions_group tagset, but outside of + # Perform any final actions that have been defined within the actions_group tag set, but outside of # an <actions> tag, such as a set_environment entry, or a download_file or download_by_url command to # retrieve extra data for this tool dependency. Only do this if the tool dependency is not in an error # state, otherwise skip this action. @@ -490,7 +415,7 @@ action_elem=actions_elem ) else: # <actions> tags outside of an <actions_group> tag shall not check os or architecture, and if the attributes are - # defined, they will be ignored. All <actions> tags outside of an <actions_group> tagset shall always be processed. + # defined, they will be ignored. All <actions> tags outside of an <actions_group> tag set shall always be processed. # This is the default and original behavior of the install_package method. 
install_via_fabric( app, tool_dependency, @@ -519,12 +444,12 @@ def install_via_fabric( app, tool_dependency, install_dir, package_name=None, proprietary_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ): """Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric.""" - sa_session = app.model.context.current def evaluate_template( text ): """ Substitute variables defined in XML blocks from dependencies file.""" return Template( text ).safe_substitute( td_common_util.get_env_var_values( install_dir ) ) + sa_session = app.model.context.current if not os.path.exists( install_dir ): os.makedirs( install_dir ) actions_dict = dict( install_dir=install_dir ) @@ -535,7 +460,7 @@ env_var_dicts = [] if actions_elem is not None: elems = actions_elem - if elems.get( 'architecture' ) is not None: + if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None: is_binary_download = True else: is_binary_download = False @@ -750,7 +675,6 @@ new_value = new_value.split( ';' )[ 0 ] return new_value - def populate_actions_dict( app, dependent_install_dir, required_install_dir, tool_shed_repository, required_repository, package_name, package_version, tool_dependencies_config ): """ Populate an actions dictionary that can be sent to fabric_util.install_and_build_package. 
This method handles the scenario where a tool_dependencies.xml diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py @@ -239,6 +239,105 @@ os.makedirs( destination_directory ) shutil.move( source_file, destination_directory ) +def parse_package_elem( package_elem, platform_info_dict=None, include_after_install_actions=True ): + """ + Parse a <package> element within a tool dependency definition and return a list of action tuples. This method is called when setting + metadata on a repository that includes a tool_dependencies.xml file or when installing a repository that includes a tool_dependencies.xml + file. If installing, platform_info_dict must be a valid dictionary and include_after_install_actions must be True. + """ + # The actions_elem_tuples list contains <actions> tag sets (possibly inside of an <actions_group> tag set) to be processed in the order + # they are defined in the tool_dependencies.xml file. + actions_elem_tuples = [] + # The tag sets that will go into the actions_elem_list are those that install a compiled binary if the architecture and operating system + # match it's defined attributes. If compiled binary is not installed, the first <actions> tag set [following those that have the os and + # architecture attributes] that does not have os or architecture attributes will be processed. This tag set must contain the recipe for + # downloading and compiling source. + actions_elem_list = [] + for elem in package_elem: + if elem.tag == 'actions': + # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system. 
+ in_actions_group = False + actions_elem_tuples.append( ( in_actions_group, elem ) ) + elif elem.tag == 'actions_group': + # We have an actions_group element, and its child <actions> elements should therefore be compared with the current operating system + # and processor architecture. + in_actions_group = True + # Record the number of <actions> elements so we can filter out any <action> elements that precede <actions> elements. + actions_elem_count = len( elem.findall( 'actions' ) ) + # Record the number of <actions> elements that have architecture and os specified, in order to filter out any platform-independent + # <actions> elements that come before platform-specific <actions> elements. This call to elem.findall is filtered by tags that have + # both the os and architecture specified. For more details, see http://docs.python.org/2/library/xml.etree.elementtree.html Section + # 19.7.2.1. + platform_actions_element_count = len( elem.findall( 'actions[@architecture][@os]' ) ) + platform_actions_elements_processed = 0 + actions_elems_processed = 0 + # The tag sets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These will be processed + # only if they are at the very end of the <actions_group> tag set (after all <actions> tag sets). See below for details. + after_install_actions = [] + # Inspect the <actions_group> element and build the actions_elem_list and the after_install_actions list. + for child_element in elem: + if child_element.tag == 'actions': + actions_elems_processed += 1 + system = child_element.get( 'os' ) + architecture = child_element.get( 'architecture' ) + # Skip <actions> tags that have only one of architecture or os specified, in order for the count in + # platform_actions_elements_processed to remain accurate. + if ( system and not architecture ) or ( architecture and not system ): + log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' 
) + continue + # Since we are inside an <actions_group> tag set, compare it with our current platform information and filter the <actions> + # tag sets that don't match. Require both the os and architecture attributes to be defined in order to find a match. + if system and architecture: + platform_actions_elements_processed += 1 + # If either the os or architecture do not match the platform, this <actions> tag will not be considered a match. Skip + # it and proceed with checking the next one. + if platform_info_dict: + if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture: + continue + else: + # We must not be installing a repository into Galaxy, so determining if we can install a binary is not necessary. + continue + else: + # <actions> tags without both os and architecture attributes are only allowed to be specified after platform-specific + # <actions> tags. If we find a platform-independent <actions> tag before all platform-specific <actions> tags have been + # processed. + if platform_actions_elements_processed < platform_actions_element_count: + message = 'Error: <actions> tags without os and architecture attributes are only allowed after all <actions> tags with ' + message += 'os and architecture attributes have been defined. Skipping the <actions> tag set with no os or architecture ' + message += 'attributes that has been defined between two <actions> tag sets that have these attributes defined. ' + log.debug( message ) + continue + # If we reach this point, it means one of two things: 1) The system and architecture attributes are not defined in this + # <actions> tag, or 2) The system and architecture attributes are defined, and they are an exact match for the current + # platform. Append the child element to the list of elements to process. + actions_elem_list.append( child_element ) + elif child_element.tag == 'action': + # Any <action> tags within an <actions_group> tag set must come after all <actions> tags. 
+ if actions_elems_processed == actions_elem_count: + # If all <actions> elements have been processed, then this <action> element can be appended to the list of actions to + # execute within this group. + after_install_actions.append( child_element ) + else: + # If any <actions> elements remain to be processed, then log a message stating that <action> elements are not allowed + # to precede any <actions> elements within an <actions_group> tag set. + message = 'Error: <action> tags are only allowed at the end of an <actions_group> tag set after all <actions> tags. ' + message += 'Skipping <%s> element with type %s.' % ( child_element.tag, child_element.get( 'type' ) ) + log.debug( message ) + continue + if platform_info_dict is None and not include_after_install_actions: + # We must be setting metadata on a repository. + actions_elem_tuples.append( ( in_actions_group, actions_elem_list[ 0 ] ) ) + elif platform_info_dict is not None and include_after_install_actions: + # We must be installing a repository. + if after_install_actions: + actions_elem_list.extend( after_install_actions ) + actions_elem_tuples.append( ( in_actions_group, actions_elem_list ) ) + else: + # Skip any element that is not <actions> or <actions_group> - this will skip comments, <repository> tags and <readme> tags. 
+ in_actions_group = False + continue + return actions_elem_tuples + def tar_extraction_directory( file_path, file_name ): """Try to return the correct extraction directory.""" file_name = file_name.strip() diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/commit_util.py --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -133,6 +133,24 @@ bzipped_file.close() shutil.move( uncompressed, uploaded_file_name ) +def handle_complex_repository_dependency_elem( trans, elem, sub_elem_index, sub_elem, sub_elem_altered, altered, unpopulate=False ): + """ + Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag that defines a complex repository + dependency. + """ + # The received sub_elem looks something like the following: + # <repository name="package_eigen_2_0" owner="test" prior_installation_required="True" /> + revised, repository_elem, error_message = handle_repository_dependency_elem( trans, sub_elem, unpopulate=unpopulate ) + if error_message: + exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message + raise Exception( exception_message ) + if revised: + elem[ sub_elem_index ] = repository_elem + sub_elem_altered = True + if not altered: + altered = True + return altered, sub_elem_altered, elem + def handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ): repo_dir = repository.repo_path( trans.app ) @@ -300,7 +318,30 @@ error_message = 'Unable to locate repository with name %s and owner %s. 
' % ( str( name ), str( owner ) ) return revised, elem, error_message +def handle_set_environment_for_install( trans, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ): + # <action type="set_environment_for_install"> + # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0"> + # <package name="eigen" version="2.0.17" /> + # </repository> + # </action> + for repo_index, repo_elem in enumerate( action_elem ): + revised, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate ) + if error_message: + exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message + raise Exception( exception_message ) + if revised: + action_elem[ repo_index ] = repository_elem + package_altered = True + if not altered: + altered = True + if package_altered: + actions_elem[ action_index ] = action_elem + return package_altered, altered, actions_elem + def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ): + """ + Populate or unpopulate the toolshed and changeset_revision attributes of each <repository> tag defined within a tool_dependencies.xml file. + """ altered = False # Make sure we're looking at a valid tool_dependencies.xml file. 
tree, error_message = xml_util.parse_xml( tool_dependencies_config ) @@ -308,51 +349,88 @@ return False, None root = tree.getroot() if root.tag == 'tool_dependency': + package_altered = False for root_index, root_elem in enumerate( root ): # <package name="eigen" version="2.0.17"> + package_altered = False if root_elem.tag == 'package': - package_altered = False for package_index, package_elem in enumerate( root_elem ): if package_elem.tag == 'repository': - # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" /> - revised, repository_elem, error_message = handle_repository_dependency_elem( trans, package_elem, unpopulate=unpopulate ) - if error_message: - exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message - raise Exception( exception_message ) - if revised: - root_elem[ package_index ] = repository_elem - package_altered = True - if not altered: - altered = True + # We have a complex repository dependency. 
+ altered, package_altered, root_elem = handle_complex_repository_dependency_elem( trans, + root_elem, + package_index, + package_elem, + package_altered, + altered, + unpopulate=unpopulate ) elif package_elem.tag == 'install': # <install version="1.0"> for actions_index, actions_elem in enumerate( package_elem ): - for action_index, action_elem in enumerate( actions_elem ): - action_type = action_elem.get( 'type' ) - if action_type == 'set_environment_for_install': - # <action type="set_environment_for_install"> - # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0"> - # <package name="eigen" version="2.0.17" /> - # </repository> - # </action> - for repo_index, repo_elem in enumerate( action_elem ): - revised, repository_elem, error_message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate ) - if error_message: - exception_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message - raise Exception( exception_message ) - if revised: - action_elem[ repo_index ] = repository_elem - package_altered = True - if not altered: - altered = True - if package_altered: - actions_elem[ action_index ] = action_elem + if actions_elem.tag == 'actions_group': + # Inspect all entries in the <actions_group> tag set, skipping <actions> tag sets that define os and architecture + # attributes. We want to inspect only the last <actions> tag set contained within the <actions_group> tag set to + # see if a complex repository dependency is defined. + for actions_group_index, actions_group_elem in enumerate( actions_elem ): + if actions_group_elem.tag == 'actions': + # Skip all actions tags that include os or architecture attributes. + system = actions_group_elem.get( 'os' ) + architecture = actions_group_elem.get( 'architecture' ) + if system or architecture: + continue + # ... 
+ # <actions> + # <package name="libgtextutils" version="0.6"> + # <repository name="package_libgtextutils_0_6" owner="test" prior_installation_required="True" /> + # </package> + # ... + for last_actions_index, last_actions_elem in enumerate( actions_group_elem ): + last_actions_package_altered = False + if last_actions_elem.tag == 'package': + for last_actions_elem_package_index, last_actions_elem_package_elem in enumerate( last_actions_elem ): + if last_actions_elem_package_elem.tag == 'repository': + # We have a complex repository dependency. + altered, last_actions_package_altered, last_actions_elem = \ + handle_complex_repository_dependency_elem( trans, + last_actions_elem, + last_actions_elem_package_index, + last_actions_elem_package_elem, + last_actions_package_altered, + altered, + unpopulate=unpopulate ) + if last_actions_package_altered: + last_actions_elem[ last_actions_elem_package_index ] = last_actions_elem_package_elem + actions_group_elem[ last_actions_index ] = last_actions_elem + else: + last_actions_elem_action_type = last_actions_elem.get( 'type' ) + if last_actions_elem_action_type == 'set_environment_for_install': + last_actions_package_altered, altered, last_actions_elem = \ + handle_set_environment_for_install( trans, + last_actions_package_altered, + altered, + actions_group_elem, + last_actions_index, + last_actions_elem, + unpopulate=unpopulate ) + elif actions_elem.tag == 'actions': + # We are not in an <actions_group> tag set, so we must be in an <actions> tag set. 
+ for action_index, action_elem in enumerate( actions_elem ): + + action_type = action_elem.get( 'type' ) + if action_type == 'set_environment_for_install': + package_altered, altered, actions_elem = handle_set_environment_for_install( trans, + package_altered, + altered, + actions_elem, + action_index, + action_elem, + unpopulate=unpopulate ) if package_altered: package_elem[ actions_index ] = actions_elem - if package_altered: - root_elem[ package_index ] = package_elem - if package_altered: - root[ root_index ] = root_elem + if package_altered: + root_elem[ package_index ] = package_elem + if package_altered: + root[ root_index ] = root_elem return altered, root return False, None diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/common_install_util.py --- a/lib/tool_shed/util/common_install_util.py +++ b/lib/tool_shed/util/common_install_util.py @@ -242,9 +242,8 @@ def get_installed_and_missing_tool_dependencies_for_new_install( trans, all_tool_dependencies ): """Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy.""" - # FIXME: this method currently populates and returns only missing tool dependencies since tool dependencies defined for complex repository dependency - # relationships is not currently supported. This method should be enhanced to search for installed tool dependencies defined as complex repository - # dependency relationships when that feature is implemented. + # FIXME: confirm that this method currently populates and returns only missing tool dependencies. If so, this method should be enhanced to search for + # installed tool dependencies defined as complex repository dependency relationships. 
if all_tool_dependencies: tool_dependencies = {} missing_tool_dependencies = {} diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -751,6 +751,32 @@ # dependency definition will be set as invalid. This is currently the only case where a tool dependency definition is # considered invalid. repository_dependency_tup, repository_dependency_is_valid, error_message = handle_repository_elem( app=app, repository_elem=sub_elem ) + elif sub_elem.tag == 'install': + package_install_version = sub_elem.get( 'version', '1.0' ) + if package_install_version == '1.0': + # Complex repository dependencies can be defined within the last <actions> tag set contained in an <actions_group> tag set. + # Comments, <repository> tag sets and <readme> tag sets will be skipped in td_common_util.parse_package_elem(). + actions_elem_tuples = td_common_util.parse_package_elem( sub_elem, platform_info_dict=None, include_after_install_actions=False ) + if actions_elem_tuples: + # We now have a list of a single tuple that looks something like: [(True, <Element 'actions' at 0x104017850>)] + actions_elem_tuple = actions_elem_tuples[ 0 ] + in_actions_group, actions_elem = actions_elem_tuple + if in_actions_group: + # Since we're inside an <actions_group> tag set, inspect the actions_elem to see if a complex repository dependency + # is defined. 
+ for action_elem in actions_elem: + if action_elem.tag == 'package': + # <package name="libgtextutils" version="0.6"> + # <repository name="package_libgtextutils_0_6" owner="test" prior_installation_required="True" /> + # </package> + ae_package_name = action_elem.get( 'name', None ) + ae_package_version = action_elem.get( 'version', None ) + if ae_package_name and ae_package_version: + for sub_action_elem in action_elem: + if sub_action_elem.tag == 'repository': + # We have a complex repository dependency. + repository_dependency_tup, repository_dependency_is_valid, error_message = \ + handle_repository_elem( app=app, repository_elem=sub_action_elem ) if requirements_dict: dependency_key = '%s/%s' % ( package_name, package_version ) if repository_dependency_is_valid: diff -r 53f51406718dbe21dc1deec10749e9dadd0ee77c -r f4a839856cfb06c01845cdd96f2893f4b70250bb lib/tool_shed/util/tool_dependency_util.py --- a/lib/tool_shed/util/tool_dependency_util.py +++ b/lib/tool_shed/util/tool_dependency_util.py @@ -365,14 +365,15 @@ missing_tool_dependencies = repository_missing_tool_dependencies return installed_tool_dependencies, missing_tool_dependencies -def remove_tool_dependency( trans, tool_dependency ): - dependency_install_dir = tool_dependency.installation_directory( trans.app ) +def remove_tool_dependency( app, tool_dependency ): + sa_session = app.model.context.current + dependency_install_dir = tool_dependency.installation_directory( app ) removed, error_message = remove_tool_dependency_installation_directory( dependency_install_dir ) if removed: - tool_dependency.status = trans.model.ToolDependency.installation_status.UNINSTALLED + tool_dependency.status = app.model.ToolDependency.installation_status.UNINSTALLED tool_dependency.error_message = None - trans.sa_session.add( tool_dependency ) - trans.sa_session.flush() + sa_session.add( tool_dependency ) + sa_session.flush() return removed, error_message def remove_tool_dependency_installation_directory( 
dependency_install_dir ): Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
participants (1)
-
commits-noreply@bitbucket.org