2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4a4a84d35bc5/
Changeset:   4a4a84d35bc5
User:        Dave Bouvier
Date:        2013-09-03 18:20:11
Summary:     Update functional tests to reflect changes in how text is sanitized.
Affected #:  2 files
diff -r 6e291fa55e10aa235196ac1c9f4c4dcb0afeaf6d -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 test/tool_shed/functional/test_0000_basic_repository_features.py --- a/test/tool_shed/functional/test_0000_basic_repository_features.py +++ b/test/tool_shed/functional/test_0000_basic_repository_features.py @@ -151,7 +151,7 @@ commit_message="Uploaded filtering.txt", strings_displayed=[], strings_not_displayed=[] ) - self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] ) + self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0055_upload_filtering_test_data( self ): '''Upload filtering test data.''' @@ -219,19 +219,19 @@ commit_message="Uploaded readme.txt", strings_displayed=[], strings_not_displayed=[] ) - self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] ) + self.display_manage_repository_page( repository, strings_displayed=[ 'This is a readme file.' ] ) # Verify that there is a different readme file for each metadata revision. metadata_revisions = self.get_repository_metadata_revisions( repository ) self.display_manage_repository_page( repository, - strings_displayed=[ 'Readme file for filtering 1.1.0', - 'This is a readme file.' ] ) + strings_displayed=[ 'Readme file for filtering 1.1.0', + 'This is a readme file.' ] )
def test_0075_delete_readme_txt_file( self ): '''Delete the readme.txt file.''' repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name ) self.delete_files_from_repository( repository, filenames=[ 'readme.txt' ] ) self.check_count_of_metadata_revisions_associated_with_repository( repository, metadata_count=2 ) - self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] ) + self.display_manage_repository_page( repository, strings_displayed=[ 'Readme file for filtering 1.1.0' ] )
def test_0080_search_for_valid_filter_tool( self ): '''Search for the filtering tool by tool ID, name, and version.''' @@ -335,7 +335,7 @@ commit_message="Uploaded new readme.txt with invalid ascii characters.", strings_displayed=[], strings_not_displayed=[] ) - self.display_manage_repository_page( repository, strings_displayed=[ 'These characters should not' ] ) + self.display_manage_repository_page( repository, strings_displayed=[ 'These characters should not' ] )
def test_0130_verify_handling_of_invalid_characters( self ): '''Load the above changeset in the change log and confirm that there is no server error displayed.''' @@ -353,5 +353,5 @@ # Check for the changeset revision, repository name, owner username, 'repos' in the clone url, and the captured # unicode decoding error message. strings_displayed = [ 'Changeset %d:%s' % ( revision_number, revision_hash ), 'filtering_0000', 'user1', 'repos', 'added:', - '+These characters should not' ] + '+These characters should not' ] self.load_changeset_in_tool_shed( repository_id, changeset_revision, strings_displayed=strings_displayed ) \ No newline at end of file
diff -r 6e291fa55e10aa235196ac1c9f4c4dcb0afeaf6d -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 test/tool_shed/functional/test_0410_repository_component_review_access_control.py --- a/test/tool_shed/functional/test_0410_repository_component_review_access_control.py +++ b/test/tool_shed/functional/test_0410_repository_component_review_access_control.py @@ -186,10 +186,10 @@ self.login( email=common.test_user_3_email, username=common.test_user_3_name ) repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name ) user = test_db_util.get_user( common.test_user_2_email ) - strings_displayed = [ 'A good set of functional tests.', - 'Clear and concise readme file', - 'a true pleasure to read.', - 'Excellent tool, easy to use.' ] + strings_displayed = [ 'A good set of functional tests.', + 'Clear and concise readme file', + 'a true pleasure to read.', + 'Excellent tool, easy to use.' ] changeset_revision = self.get_repository_tip( repository ) review = test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision ) self.browse_component_review( review, strings_displayed=strings_displayed )
https://bitbucket.org/galaxy/galaxy-central/commits/b9da077a79c3/
Changeset:   b9da077a79c3
User:        Dave Bouvier
Date:        2013-09-03 18:22:29
Summary:     Tool dependency definitions: Add <actions_group> tag and support for filtering contained <actions> tags by architecture and operating system.
Affected #:  4 files
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py @@ -217,13 +217,25 @@ # Eliminate the download_by_url action so remaining actions can be processed correctly. filtered_actions = actions[ 1: ] url = action_dict[ 'url' ] + is_binary = action_dict.get( 'is_binary', False ) + log.debug( 'Attempting to download via url: %s', url ) if 'target_filename' in action_dict: - # Sometimes compressed archives extracts their content to a folder other than the default defined file name. Using this + # Sometimes compressed archives extract their content to a folder other than the default defined file name. Using this # attribute will ensure that the file name is set appropriately and can be located after download, decompression and extraction. downloaded_filename = action_dict[ 'target_filename' ] else: downloaded_filename = os.path.split( url )[ -1 ] dir = td_common_util.url_download( work_dir, downloaded_filename, url, extract=True ) + if is_binary: + log_file = os.path.join( install_dir, INSTALLATION_LOG ) + log.debug( 'log_file: %s' % log_file ) + if os.path.exists( log_file ): + logfile = open( log_file, 'ab' ) + else: + logfile = open( log_file, 'wb' ) + logfile.write( 'Successfully downloaded from url: %s\n' % action_dict[ 'url' ] ) + logfile.close() + log.debug( 'Successfully downloaded from url: %s' % action_dict[ 'url' ] ) elif action_type == 'shell_command': # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action> # Eliminate the shell_command clone action so remaining actions can be processed correctly.
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py @@ -333,13 +333,174 @@ type='package', status=app.model.ToolDependency.installation_status.INSTALLING, set_status=True ) + # Get the information that defines the current platform. + platform_info_dict = tool_dependency_util.get_platform_info_dict() if package_install_version == '1.0': - # Handle tool dependency installation using a fabric method included in the Galaxy framework. - for actions_elem in package_elem: - install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=package_name ) - sa_session.refresh( tool_dependency ) - if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR: - print package_name, 'version', package_version, 'installed in', install_dir + # Handle tool dependency installation using a fabric method included in the Galaxy framework. The first thing we do + # is check the installation architecture to see if we have a precompiled binary that works on the target system. + binary_installed = False + actions_elem_tuples = [] + # Build a list of grouped and ungrouped <actions> tagsets to be processed in the order they are defined in the + # tool_dependencies.xml file. + for elem in package_elem: + # Default to not treating actions as grouped. + grouped = False + # Skip any element that is not <actions> or <actions_group>. This will also skip comments and <readme> tags. + if elem.tag not in [ 'actions', 'actions_group' ]: + continue + if elem.tag == 'actions': + # We have an <actions> tag that should not be matched against a specific combination of architecture and operating system. + grouped = False + actions_elem_tuples.append( ( grouped, elem ) ) + else: + # Record the number of <actions> elements, in order to filter out any <action> elements that precede <actions> + # elements. + actions_elem_count = len( elem.findall( 'actions' ) ) + # Record the number of <actions> elements that have architecture and os specified, in order to filter out any + # platform-independent <actions> elements that come before platform-specific <actions> elements. This call to + # elem.findall is filtered by tags that have both the os and architecture specified. + # For more details, see http://docs.python.org/2/library/xml.etree.elementtree.html Section 19.7.2.1. + platform_actions_element_count = len( elem.findall( 'actions[@architecture][@os]' ) ) + platform_actions_elements_processed = 0 + actions_elems_processed = 0 + # We have an actions_group element, and its child <actions> elements should therefore be compared with the current + # operating system and processor architecture. + grouped = True + # The tagsets that will go into the actions_elem_list are those that install a precompiled binary if the + # architecture and operating system match its defined attributes. If precompiled binary is not installed + # the first <actions> tag following those that have the os and architecture attributes will be processed + # in order to install and compile the source. + actions_elem_list = [] + # The tagsets that will go into the after_install_actions list are <action> tags instead of <actions> tags. These + # will only be processed if they are at the end of the <actions_group> tagset. See below for details. 
+ after_install_actions = [] + platform_independent_actions = [] + # Loop through the <actions_group> element and build the actions_elem_list and the after_install_actions list. + for child_element in elem: + if child_element.tag == 'actions': + actions_elems_processed += 1 + system = child_element.get( 'os' ) + architecture = child_element.get( 'architecture' ) + # Skip <actions> tags that have only one of architecture or os specified, in order for the count in + # platform_actions_elements_processed to remain accurate. + if ( system and not architecture ) or ( architecture and not system ): + log.debug( 'Error: Both architecture and os attributes must be specified in an <actions> tag.' ) + continue + # Since we are inside an <actions_group> tagset, compare it with our current platform information and filter + # the <actions> tagsets that don't match. Require both the os and architecture attributes to be defined in + # order to find a match. + if system and architecture: + platform_actions_elements_processed += 1 + # If either the os or architecture do not match the platform, this <actions> tag will not be considered + # a match. Skip it and proceed with checking the next one. + if platform_info_dict[ 'os' ] != system or platform_info_dict[ 'architecture' ] != architecture: + continue + else: + # <actions> tags without both os and architecture attributes are only allowed to be specified after + # platform-specific <actions> tags. If we find a platform-independent <actions> tag before all + # platform-specific <actions> tags have been processed, log a message stating this and skip to the + # next <actions> tag. + if platform_actions_elements_processed < platform_actions_element_count: + message = 'Error: <actions> tags without os and architecture attributes are only allowed ' + message += 'after <actions> tags with os and architecture attributes specified. Skipping ' + message += 'current <actions> tag.' + log.debug( message ) + continue + # If we reach this point, it means one of two things: 1) The system and architecture attributes are not + # defined in this <actions> tag, or 2) The system and architecture attributes are defined, and they are + # an exact match for the current platform. Append the child element to the list of elements to process. + actions_elem_list.append( child_element ) + elif child_element.tag == 'action': + # Any <action> tags within an <actions_group> tagset must come after all <actions> tags. + if actions_elems_processed == actions_elem_count: + # If all <actions> elements have been processed, then this <action> element can be appended to the + # list of actions to execute within this group. + after_install_actions.append( child_element ) + else: + # If any <actions> elements remain to be processed, then log a message stating that <action> + # elements are not allowed to precede any <actions> elements within an <actions_group> tagset. + message = 'Error: <action> tags are only allowed at the end of an <actions_group> ' + message += 'tagset, after all <actions> tags. ' + message += 'Skipping <%s> element with type %s.' % ( child_element.tag, child_element.get( 'type' ) ) + log.debug( message ) + continue + if after_install_actions: + actions_elem_list.extend( after_install_actions ) + actions_elem_tuples.append( ( grouped, actions_elem_list ) ) + # At this point we have a list of <actions> elems that are either defined within an <actions_group> tagset, and filtered by + # the current platform, or not defined within an <actions_group> tagset, and not filtered. 
+ for grouped, actions_elems in actions_elem_tuples: + if grouped: + # Platform matching is only performed inside <actions_group> tagsets, os and architecture attributes are otherwise ignored. + for actions_elem in actions_elems: + system = actions_elem.get( 'os' ) + architecture = actions_elem.get( 'architecture' ) + # If this <actions> element has the os and architecture attributes defined, then we only want to process + # until a successful installation is achieved. + if system and architecture: + # If an <actions> tag has been defined that matches our current platform, and the recipe specified + # within that <actions> tag has been successfully processed, skip any remaining platform-specific + # <actions> tags. + if binary_installed: + continue + # No platform-specific <actions> recipe has yet resulted in a successful installation. + install_via_fabric( app, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=actions_elem, + action_elem=None ) + sa_session.refresh( tool_dependency ) + if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR: + # If an <actions> tag was found that matches the current platform, and the install_via_fabric method + # did not result in an error state, set binary_installed to True in order to skip any remaining + # platform-specific <actions> tags. + if not binary_installed: + binary_installed = True + else: + # Otherwise, move on to the next matching <actions> tag, or any defined <actions> tags that do not + # contain platform-dependent recipes. + if binary_installed: + binary_installed = False + print 'Encountered an error downloading binary for %s version %s: %s' % \ + ( package_name, package_version, tool_dependency.error_message ) + else: + # If no <actions> tags have been defined that match our current platform, or none of the matching + # <actions> tags resulted in a successful tool dependency status, proceed with one and only one + # <actions> tag that is not defined to be platform-specific. + if not binary_installed: + log.debug( 'Platform-specific recipe failed or not found. Proceeding with platform-independent install recipe.' ) + install_via_fabric( app, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=actions_elem, + action_elem=None ) + break + # Perform any final actions that have been defined within the actions_group tagset, but outside of + # an <actions> tag, such as a set_environment entry, or a download_file or download_by_url command to + # retrieve extra data for this tool dependency. Only do this if the tool dependency is not in an error + # state, otherwise skip this action. + if actions_elem.tag == 'action' and tool_dependency.status != app.model.ToolDependency.installation_status.ERROR: + install_via_fabric( app, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=None, + action_elem=actions_elem ) + else: + # <actions> tags outside of an <actions_group> tag shall not check os or architecture, and if the attributes are + # defined, they will be ignored. All <actions> tags outside of an <actions_group> tagset shall always be processed. + # This is the default and original behavior of the install_package method. 
+ install_via_fabric( app, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=actions_elems, + action_elem=None ) + sa_session.refresh( tool_dependency ) + if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR: + print package_name, 'version', package_version, 'installed in', install_dir else: raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' ) elif package_elem.tag == 'readme': @@ -356,7 +517,7 @@ # print 'Installing tool dependencies via fabric script ', proprietary_fabfile_path return tool_dependency
-def install_via_fabric( app, tool_dependency, actions_elem, install_dir, package_name=None, proprietary_fabfile_path=None, **kwd ): +def install_via_fabric( app, tool_dependency, install_dir, package_name=None, proprietary_fabfile_path=None, actions_elem=None, action_elem=None, **kwd ): """Parse a tool_dependency.xml file's <actions> tag set to gather information for the installation via fabric.""" sa_session = app.model.context.current
@@ -372,8 +533,21 @@ actions = [] all_env_shell_file_paths = [] env_var_dicts = [] - # Make sure to skip all comments, since they are now included in the XML tree. - for action_elem in actions_elem.findall( 'action' ): + if actions_elem is not None: + elems = actions_elem + if elems.get( 'architecture' ) is not None: + is_binary_download = True + else: + is_binary_download = False + elif action_elem: + # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded. + elems = [ action_elem ] + else: + elems = [] + for action_elem in elems: + # Make sure to skip all comments, since they are now included in the XML tree. + if action_elem.tag != 'action': + continue action_dict = {} action_type = action_elem.get( 'type', 'shell_command' ) if action_type == 'download_binary': @@ -420,6 +594,8 @@ raise Exception( "Unsupported template language '%s' in tool dependency definition." % str( language ) ) elif action_type == 'download_by_url': # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1....</action> + if is_binary_download: + action_dict[ 'is_binary' ] = True if action_elem.text: action_dict[ 'url' ] = action_elem.text target_filename = action_elem.get( 'target_filename', None )
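For context on the new tagset that the install_package changes above parse, here is a minimal, self-contained sketch. Everything in it is illustrative: the tool_dependencies.xml fragment, package name, URLs, and the helpers (a stand-in get_platform_info_dict and a made-up select_actions) are assumptions for the example, not Galaxy code. It only demonstrates the idea implemented above: within an <actions_group>, <actions> tags carrying both os and architecture are kept only when they match the current platform, attribute-free <actions> tags serve as a source-compile fallback, and trailing <action> tags run after whichever recipe succeeds.

    # Illustrative sketch only -- not the Galaxy implementation.
    import platform
    import xml.etree.ElementTree as ET

    TOOL_DEPENDENCY_XML = """
    <package name="example_package" version="1.0">
        <install version="1.0">
            <actions_group>
                <actions os="linux" architecture="x86_64">
                    <action type="download_by_url">http://example.org/example-linux-x86_64.tar.gz</action>
                </actions>
                <actions os="darwin" architecture="x86_64">
                    <action type="download_by_url">http://example.org/example-darwin-x86_64.tar.gz</action>
                </actions>
                <actions>
                    <action type="download_by_url">http://example.org/example-src.tar.gz</action>
                    <action type="shell_command">make</action>
                </actions>
                <action type="set_environment">
                    <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
                </action>
            </actions_group>
        </install>
    </package>
    """

    def get_platform_info_dict():
        # Stand-in for a platform lookup, e.g. {'os': 'linux', 'architecture': 'x86_64'}.
        return dict( os=platform.system().lower(), architecture=platform.machine() )

    def select_actions( actions_group, platform_info ):
        """Split an <actions_group> into platform-specific recipes, fallback recipes, and trailing actions."""
        platform_specific = []
        fallback = []
        trailing = []
        for child in actions_group:
            if child.tag == 'actions':
                system = child.get( 'os' )
                architecture = child.get( 'architecture' )
                if system and architecture:
                    # Both attributes must be present; keep only exact platform matches.
                    if system == platform_info[ 'os' ] and architecture == platform_info[ 'architecture' ]:
                        platform_specific.append( child )
                elif not system and not architecture:
                    # Platform-independent recipe, used only if no binary install succeeds.
                    fallback.append( child )
            elif child.tag == 'action':
                # <action> tags at the end of the group run after the selected recipe.
                trailing.append( child )
        return platform_specific, fallback, trailing

    if __name__ == '__main__':
        root = ET.fromstring( TOOL_DEPENDENCY_XML )
        group = root.find( 'install/actions_group' )
        matched, fallback, trailing = select_actions( group, get_platform_info_dict() )
        print( 'platform-specific recipes matched: %d' % len( matched ) )
        print( 'fallback recipes: %d, trailing actions: %d' % ( len( fallback ), len( trailing ) ) )

Note that this sketch ignores the ordering rules enforced by install_package above (platform-independent <actions> tags must follow platform-specific ones, and <action> tags must come last); it only illustrates the selection by os and architecture.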
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/util/common_install_util.py --- a/lib/tool_shed/util/common_install_util.py +++ b/lib/tool_shed/util/common_install_util.py @@ -370,7 +370,7 @@ tool_dependency = install_package( app, elem, tool_shed_repository, tool_dependencies=tool_dependencies ) except Exception, e: error_message = "Error installing tool dependency %s version %s: %s" % ( str( package_name ), str( package_version ), str( e ) ) - log.debug( error_message ) + log.exception( error_message ) if tool_dependency: tool_dependency.status = app.model.ToolDependency.installation_status.ERROR tool_dependency.error_message = error_message
diff -r 4a4a84d35bc56b45152b6efbed7496de68424fe8 -r b9da077a79c3ce0323bd8f8e5aed3e311d1fd8ae lib/tool_shed/util/tool_dependency_util.py --- a/lib/tool_shed/util/tool_dependency_util.py +++ b/lib/tool_shed/util/tool_dependency_util.py @@ -160,10 +160,10 @@
def get_download_url_for_platform( url_templates, platform_info_dict ): ''' - Compare the dict returned by get_platform_info() with the values specified in the base_url element. Return + Compare the dict returned by get_platform_info() with the values specified in the url_template element. Return true if and only if all defined attributes match the corresponding dict entries. If an entry is not - defined in the base_url element, it is assumed to be irrelevant at this stage. For example, - <base_url os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.$%7Barchitecture%7D/faToTwoB...</base_url> + defined in the url_template element, it is assumed to be irrelevant at this stage. For example, + <url_template os="darwin">http://hgdownload.cse.ucsc.edu/admin/exe/macOSX.$%7Barchitecture%7D/faToTwoB...</url_template> where the OS must be 'darwin', but the architecture is filled in later using string.Template. ''' os_ok = False
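A quick sketch of the matching rule described in that docstring may help: an attribute missing from a <url_template> element is treated as "don't care", while any attribute that is present must equal the corresponding platform value. The helper names below (get_platform_info, url_template_matches) are assumptions made for this example; the real get_download_url_for_platform also builds the final download URL, which is omitted here.

    # Illustrative sketch only -- not the Galaxy implementation.
    import platform
    import xml.etree.ElementTree as ET

    def get_platform_info():
        # For example {'os': 'darwin', 'architecture': 'x86_64'} on a 64-bit Mac.
        return dict( os=platform.system().lower(), architecture=platform.machine() )

    def url_template_matches( url_template_elem, platform_info ):
        # Attributes absent from the element are irrelevant; present ones must match exactly.
        for attribute in ( 'os', 'architecture' ):
            required_value = url_template_elem.get( attribute )
            if required_value is not None and required_value != platform_info[ attribute ]:
                return False
        return True

    if __name__ == '__main__':
        # Only the os is constrained here; the architecture is filled in later via string.Template.
        elem = ET.fromstring( '<url_template os="darwin">http://example.org/${architecture}/faToTwoBit</url_template>' )
        print( url_template_matches( elem, get_platform_info() ) )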
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving this email because the commit notification service is enabled for this repository and it is addressed to you as a configured recipient.
galaxy-commits@lists.galaxyproject.org