1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/6fecbad49afd/ Changeset: 6fecbad49afd User: greg Date: 2014-05-14 20:55:31 Summary: Phase 2 of the tool dependency package installation framework rewrite: 1) rename the RecipeManager class to be the StepManager class and add a new TagManager class, both of which are contained in the recipe_manager.py module. 2) Add appropriate new classes for handling recipe tag sets to a new tag_handler.py module. Add a new InstallManager class with functions for installing tool dependencies. Eliminate the use of fabric_util.py and install_util.py. Affected #: 18 files diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -23,8 +23,11 @@ from tool_shed.util import tool_dependency_util from tool_shed.util import tool_util from tool_shed.util import workflow_util +from tool_shed.util import xml_util from tool_shed.galaxy_install import repository_util import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids +from tool_shed.galaxy_install.install_manager import InstallManager +from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager import pkg_resources eggs.require( 'mercurial' ) @@ -444,20 +447,74 @@ @web.expose @web.require_admin def initiate_tool_dependency_installation( self, trans, tool_dependencies, **kwd ): - """Install specified dependencies for repository tools.""" + """ + Install specified dependencies for repository tools. The received list of tool_dependencies + are the database records for those dependencies defined in the tool_dependencies.xml file + (contained in the repository) that should be installed. This allows for filtering out dependencies + that have not been checked for installation on the 'Manage tool dependencies' page for an installed + tool shed repository. + """ # Get the tool_shed_repository from one of the tool_dependencies. message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) err_msg = '' + attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ] tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = suc.get_config_from_disk( suc.TOOL_DEPENDENCY_DEFINITION_FILENAME, tool_shed_repository.repo_path( trans.app ) ) - installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_dependencies, - from_tool_migration_manager=False ) + # Parse the tool_dependencies.xml config. + tree, error_message = xml_util.parse_xml( tool_dependencies_config ) + installed_tool_dependencies = [] + install_manager = InstallManager() + tag_manager = TagManager() + root = tree.getroot() + for elem in root: + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + # elem is a package tag set. 
+ attr_tup = ( package_name, package_version, 'package' ) + try: + index = attr_tups_of_dependencies_for_install.index( attr_tup ) + except Exception, e: + index = None + if index is not None: + tool_dependency = tool_dependencies[ index ] + tool_dependency, proceed_with_install, action_elem_tuples = \ + tag_manager.process_tag_set( trans.app, + tool_shed_repository, + tool_dependency, + elem, + package_name, + package_version, + from_tool_migration_manager=False, + tool_dependency_db_records=tool_dependencies ) + if proceed_with_install: + try: + tool_dependency = install_manager.install_package( trans.app, + elem, + tool_shed_repository, + tool_dependencies=tool_dependencies, + from_tool_migration_manager=False ) + except Exception, e: + error_message = "Error installing tool dependency package %s version %s: %s" % \ + ( str( package_name ), str( package_version ), str( e ) ) + log.exception( error_message ) + if tool_dependency: + # Since there was an installation error, update the tool dependency status to Error. The + # remove_installation_path option must be left False here. + tool_dependency = \ + tool_dependency_util.handle_tool_dependency_installation_error( trans.app, + tool_dependency, + error_message, + remove_installation_path=False ) + if tool_dependency and tool_dependency.status in [ trans.app.install_model.ToolDependency.installation_status.INSTALLED, + trans.app.install_model.ToolDependency.installation_status.ERROR ]: + installed_tool_dependencies.append( tool_dependency ) + if trans.app.config.manage_dependency_relationships: + # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager. + trans.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency ) for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR: text = util.unicodify( installed_tool_dependency.error_message ) diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py --- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py +++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py @@ -185,8 +185,8 @@ operation='install latest revision' ) ), grids.GridOperation( label="Install", condition=( lambda item: \ - not item.deleted and \ - item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ), + not item.deleted and \ + item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ), allow_multiple=False, url_args=dict( controller='admin_toolshed', action='manage_repository', @@ -196,7 +196,7 @@ not item.deleted and \ item.status not in \ [ tool_shed_install.ToolShedRepository.installation_status.ERROR, - tool_shed_install.ToolShedRepository.installation_status.NEW ] ), + tool_shed_install.ToolShedRepository.installation_status.NEW ] ), allow_multiple=False, url_args=dict( controller='admin_toolshed', action='browse_repositories', diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -1,8 +1,318 @@ import logging import os +from galaxy import eggs + +eggs.require( 'paramiko' ) +eggs.require( 'ssh' ) +eggs.require( 'Fabric' ) + +from fabric.api import lcd + +from tool_shed.util import 
tool_dependency_util + +from tool_shed.galaxy_install.tool_dependencies import td_common_util +from tool_shed.galaxy_install.tool_dependencies.recipe.env_file_builder import EnvFileBuilder +from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment +from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager +from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager + log = logging.getLogger( __name__ ) +INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet', + 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ] + class InstallManager( object ): - pass \ No newline at end of file + + def get_tool_shed_repository_install_dir( self, app, tool_shed_repository ): + return os.path.abspath( tool_shed_repository.repo_files_directory( app ) ) + + def install_and_build_package( self, app, tool_shed_repository, tool_dependency, actions_dict ): + """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command.""" + tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( app, tool_shed_repository ) + install_dir = actions_dict[ 'install_dir' ] + package_name = actions_dict[ 'package_name' ] + actions = actions_dict.get( 'actions', None ) + filtered_actions = [] + env_file_builder = EnvFileBuilder( install_dir ) + install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir, + install_dir=install_dir ) + step_manager = StepManager() + if actions: + with install_environment.make_tmp_dir() as work_dir: + with lcd( work_dir ): + # The first action in the list of actions will be the one that defines the initial download process. + # There are currently three supported actions; download_binary, download_by_url and clone via a + # shell_command action type. The recipe steps will be filtered at this stage in the process, with + # the filtered actions being used in the next stage below. The installation directory (i.e., dir) + # is also defined in this stage and is used in the next stage below when defining current_dir. + action_type, action_dict = actions[ 0 ] + if action_type in INSTALL_ACTIONS: + # Some of the parameters passed here are needed only by a subset of the step handler classes, + # but to allow for a standard method signature we'll pass them along. We don't check the + # tool_dependency status in this stage because it should not have been changed based on a + # download. + tool_dependency, filtered_actions, dir = \ + step_manager.execute_step( app=app, + tool_dependency=tool_dependency, + package_name=package_name, + actions=actions, + action_type=action_type, + action_dict=action_dict, + filtered_actions=filtered_actions, + env_file_builder=env_file_builder, + install_environment=install_environment, + work_dir=work_dir, + current_dir=None, + initial_download=True ) + else: + # We're handling a complex repository dependency where we only have a set_environment tag set. + # <action type="set_environment"> + # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable> + # </action> + filtered_actions = [ a for a in actions ] + dir = install_dir + # We're in stage 2 of the installation process. The package has been down-loaded, so we can + # now perform all of the actions defined for building it. 
+ for action_tup in filtered_actions: + current_dir = os.path.abspath( os.path.join( work_dir, dir ) ) + with lcd( current_dir ): + action_type, action_dict = action_tup + tool_dependency, tmp_filtered_actions, tmp_dir = \ + step_manager.execute_step( app=app, + tool_dependency=tool_dependency, + package_name=package_name, + actions=actions, + action_type=action_type, + action_dict=action_dict, + filtered_actions=filtered_actions, + env_file_builder=env_file_builder, + install_environment=install_environment, + work_dir=work_dir, + current_dir=current_dir, + initial_download=False ) + if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.ERROR ]: + # If the tool_dependency status is in an error state, return it with no additional + # processing. + return tool_dependency + # Make sure to handle the special case where the value of dir is reset (this happens when + # the action_type is change_directiory). In all other action types, dir will be returned as + # None. + if tmp_dir is not None: + dir = tmp_dir + return tool_dependency + + def install_and_build_package_via_fabric( self, app, tool_shed_repository, tool_dependency, actions_dict ): + sa_session = app.install_model.context + try: + # There is currently only one fabric method. + tool_dependency = self.install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict ) + except Exception, e: + log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) ) + # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must + # be left False here. + error_message = '%s\n%s' % ( td_common_util.format_traceback(), str( e ) ) + tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app, + tool_dependency, + error_message, + remove_installation_path=False ) + tool_dependency = tool_dependency_util.mark_tool_dependency_installed( app, tool_dependency ) + return tool_dependency + + def install_via_fabric( self, app, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None, + actions_elem=None, action_elem=None, **kwd ): + """ + Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using + self.install_and_build_package(). The use of fabric is being eliminated, so some of these functions + may need to be renamed at some point. + """ + sa_session = app.install_model.context + if not os.path.exists( install_dir ): + os.makedirs( install_dir ) + actions_dict = dict( install_dir=install_dir ) + if package_name: + actions_dict[ 'package_name' ] = package_name + actions = [] + is_binary_download = False + if actions_elem is not None: + elems = actions_elem + if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None: + is_binary_download = True + elif action_elem is not None: + # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded. + elems = [ action_elem ] + else: + elems = [] + step_manager = StepManager() + tool_shed_repository_install_dir = self.get_tool_shed_repository_install_dir( app, tool_shed_repository ) + install_environment = InstallEnvironment( tool_shed_repository_install_dir, install_dir ) + for action_elem in elems: + # Make sure to skip all comments, since they are now included in the XML tree. 
+ if action_elem.tag != 'action': + continue + action_dict = {} + action_type = action_elem.get( 'type', None ) + if action_type is not None: + action_dict = step_manager.prepare_step( app=app, + tool_dependency=tool_dependency, + action_type=action_type, + action_elem=action_elem, + action_dict=action_dict, + install_environment=install_environment, + is_binary_download=is_binary_download ) + action_tuple = ( action_type, action_dict ) + if action_type == 'set_environment': + if action_tuple not in actions: + actions.append( action_tuple ) + else: + actions.append( action_tuple ) + if actions: + actions_dict[ 'actions' ] = actions + if custom_fabfile_path is not None: + # TODO: this is not yet supported or functional, but when it is handle it using the fabric api. + raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' ) + else: + tool_dependency = self.install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict ) + return tool_dependency + + def install_package( self, app, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ): + """ + Install a tool dependency package defined by the XML element elem. The value of tool_dependencies is + a partial or full list of ToolDependency records associated with the tool_shed_repository. + """ + tag_manager = TagManager() + sa_session = app.install_model.context + # The value of package_name should match the value of the "package" type in the tool config's + # <requirements> tag set, but it's not required. + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if tool_dependencies and package_name and package_version: + tool_dependency = None + for tool_dependency in tool_dependencies: + if package_name == str( tool_dependency.name ) and package_version == str( tool_dependency.version ): + break + if tool_dependency is not None: + for package_elem in elem: + tool_dependency, proceed_with_install, actions_elem_tuples = \ + tag_manager.process_tag_set( app, + tool_shed_repository, + tool_dependency, + package_elem, + package_name, + package_version, + from_tool_migration_manager=from_tool_migration_manager, + tool_dependency_db_records=None ) + if proceed_with_install and actions_elem_tuples: + # Get the installation directory for tool dependencies that will be installed for the received + # tool_shed_repository. + install_dir = \ + tool_dependency_util.get_tool_dependency_install_dir( app=app, + repository_name=tool_shed_repository.name, + repository_owner=tool_shed_repository.owner, + repository_changeset_revision=tool_shed_repository.installed_changeset_revision, + tool_dependency_type='package', + tool_dependency_name=package_name, + tool_dependency_version=package_version ) + # At this point we have a list of <actions> elems that are either defined within an <actions_group> + # tag set with <actions> sub-elements that contains os and architecture attributes filtered by the + # platform into which the appropriate compiled binary will be installed, or not defined within an + # <actions_group> tag set and not filtered. Here is an example actions_elem_tuple. + # [(True, [<Element 'actions' at 0x109293d10>)] + binary_installed = False + for actions_elem_tuple in actions_elem_tuples: + in_actions_group, actions_elems = actions_elem_tuple + if in_actions_group: + # Platform matching is only performed inside <actions_group> tag sets, os and architecture + # attributes are otherwise ignored. 
+ can_install_from_source = False + for actions_elem in actions_elems: + system = actions_elem.get( 'os' ) + architecture = actions_elem.get( 'architecture' ) + # If this <actions> element has the os and architecture attributes defined, then we only + # want to process until a successful installation is achieved. + if system and architecture: + # If an <actions> tag has been defined that matches our current platform, and the + # recipe specified within that <actions> tag has been successfully processed, skip + # any remaining platform-specific <actions> tags. We cannot break out of the loop + # here because there may be <action> tags at the end of the <actions_group> tag set + # that must be processed. + if binary_installed: + continue + # No platform-specific <actions> recipe has yet resulted in a successful installation. + tool_dependency = self.install_via_fabric( app, + tool_shed_repository, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=actions_elem, + action_elem=None ) + if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLED: + # If an <actions> tag was found that matches the current platform, and + # self.install_via_fabric() did not result in an error state, set binary_installed + # to True in order to skip any remaining platform-specific <actions> tags. + binary_installed = True + else: + # Process the next matching <actions> tag, or any defined <actions> tags that do not + # contain platform dependent recipes. + log.debug( 'Error downloading binary for tool dependency %s version %s: %s' % \ + ( str( package_name ), str( package_version ), str( tool_dependency.error_message ) ) ) + else: + if actions_elem.tag == 'actions': + # We've reached an <actions> tag that defines the recipe for installing and compiling from + # source. If binary installation failed, we proceed with the recipe. + if not binary_installed: + installation_directory = tool_dependency.installation_directory( app ) + if os.path.exists( installation_directory ): + # Delete contents of installation directory if attempt at binary installation failed. + installation_directory_contents = os.listdir( installation_directory ) + if installation_directory_contents: + removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency ) + if removed: + can_install_from_source = True + else: + log.debug( 'Error removing old files from installation directory %s: %s' % \ + ( str( tool_dependency.installation_directory( app ), str( error_message ) ) ) ) + else: + can_install_from_source = True + else: + can_install_from_source = True + if can_install_from_source: + # We now know that binary installation was not successful, so proceed with the <actions> + # tag set that defines the recipe to install and compile from source. + log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' % \ + str( tool_dependency.name ) ) + tool_dependency = self.install_via_fabric( app, + tool_shed_repository, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=actions_elem, + action_elem=None ) + if actions_elem.tag == 'action' and \ + tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR: + # If the tool dependency is not in an error state, perform any final actions that have been + # defined within the actions_group tag set, but outside of an <actions> tag, which defines + # the recipe for installing and compiling from source. 
+ tool_dependency = self.install_via_fabric( app, + tool_shed_repository, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=None, + action_elem=actions_elem ) + else: + # Checks for "os" and "architecture" attributes are not made for any <actions> tag sets outside of + # an <actions_group> tag set. If the attributes are defined, they will be ignored. All <actions> tags + # outside of an <actions_group> tag set will always be processed. + tool_dependency = self.install_via_fabric( app, + tool_shed_repository, + tool_dependency, + install_dir, + package_name=package_name, + actions_elem=actions_elems, + action_elem=None ) + if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR: + log.debug( 'Tool dependency %s version %s has been installed in %s.' % \ + ( str( package_name ), str( package_version ), str( install_dir ) ) ) + return tool_dependency diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/repository_util.py --- a/lib/tool_shed/galaxy_install/repository_util.py +++ b/lib/tool_shed/galaxy_install/repository_util.py @@ -20,7 +20,10 @@ from tool_shed.util import metadata_util from tool_shed.util import tool_dependency_util from tool_shed.util import tool_util -from xml.etree import ElementTree as XmlET +from tool_shed.util import xml_util + +from tool_shed.galaxy_install.install_manager import InstallManager +from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager from galaxy import eggs eggs.require( 'mercurial' ) @@ -621,11 +624,58 @@ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir ) - installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app, - tool_shed_repository=tool_shed_repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=tool_shed_repository.tool_dependencies, - from_tool_migration_manager=False ) + # Parse the tool_dependencies.xml config. + tree, error_message = xml_util.parse_xml( tool_dependencies_config ) + install_manager = InstallManager() + tag_manager = TagManager() + root = tree.getroot() + for elem in root: + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + repository_tool_dependencies = util.listify( tool_shed_repository.tool_dependencies ) + # elem is a package tag set. 
+ attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in repository_tool_dependencies ] + attr_tup = ( package_name, package_version, 'package' ) + try: + index = attr_tups_of_dependencies_for_install.index( attr_tup ) + except Exception, e: + index = None + if index is not None: + tool_dependency = repository_tool_dependencies[ index ] + tool_dependency, proceed_with_install, action_elem_tuples = \ + tag_manager.process_tag_set( trans.app, + tool_shed_repository, + tool_dependency, + elem, + package_name, + package_version, + from_tool_migration_manager=False, + tool_dependency_db_records=repository_tool_dependencies ) + if proceed_with_install: + try: + tool_dependency = install_manager.install_package( trans.app, + elem, + tool_shed_repository, + tool_dependencies=repository_tool_dependencies, + from_tool_migration_manager=False ) + except Exception, e: + error_message = "Error installing tool dependency package %s version %s: %s" % \ + ( str( package_name ), str( package_version ), str( e ) ) + log.exception( error_message ) + if tool_dependency: + # Since there was an installation error, update the tool dependency status to Error. The + # remove_installation_path option must be left False here. + tool_dependency = \ + tool_dependency_util.handle_tool_dependency_installation_error( trans.app, + tool_dependency, + error_message, + remove_installation_path=False ) + if tool_dependency and tool_dependency.status in [ trans.app.install_model.ToolDependency.installation_status.INSTALLED, + trans.app.install_model.ToolDependency.installation_status.ERROR ]: + if trans.app.config.manage_dependency_relationships: + # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager. + trans.app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency ) suc.remove_dir( work_dir ) suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, @@ -877,11 +927,61 @@ trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES ) # Get the tool_dependencies.xml file from the repository. tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', repository.repo_path( trans.app ) ) - installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app, - tool_shed_repository=repository, - tool_dependencies_config=tool_dependencies_config, - tool_dependencies=repository.tool_dependencies, - from_tool_migration_manager=False ) + installed_tool_dependencies = [] + # Parse the tool_dependencies.xml config. + tree, error_message = xml_util.parse_xml( tool_dependencies_config ) + if tree is None: + return installed_tool_dependencies + install_manager = InstallManager() + tag_manager = TagManager() + root = tree.getroot() + for elem in root: + package_name = elem.get( 'name', None ) + package_version = elem.get( 'version', None ) + if package_name and package_version: + # elem is a package tag set. 
+ attr_tups_of_dependencies_for_install = [ ( td.name, td.version, td.type ) for td in tool_dependencies ] + attr_tup = ( package_name, package_version, 'package' ) + try: + index = attr_tups_of_dependencies_for_install.index( attr_tup ) + except Exception, e: + index = None + if index is not None: + tool_dependency = tool_dependency_db_records[ index ] + tool_dependency, proceed_with_install, action_elem_tuples = \ + tag_manager.process_tag_set( trans.app, + tool_shed_repository, + tool_dependency, + elem, + package_name, + package_version, + from_tool_migration_manager=False, + tool_dependency_db_records=tool_dependencies ) + if proceed_with_install: + try: + tool_dependency = install_manager.install_package( trans.app, + elem, + tool_shed_repository, + tool_dependencies=tool_dependencies, + from_tool_migration_manager=False ) + except Exception, e: + error_message = "Error installing tool dependency package %s version %s: %s" % \ + ( str( package_name ), str( package_version ), str( e ) ) + log.exception( error_message ) + if tool_dependency: + # Since there was an installation error, update the tool dependency status to Error. The + # remove_installation_path option must be left False here. + tool_dependency = \ + tool_dependency_util.handle_tool_dependency_installation_error( trans.app, + tool_dependency, + error_message, + remove_installation_path=False ) + if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED, + app.install_model.ToolDependency.installation_status.ERROR ]: + installed_tool_dependencies.append( tool_dependency ) + if app.config.manage_dependency_relationships: + # Add the tool_dependency to the in-memory dictionaries in the installed_repository_manager. + app.installed_repository_manager.handle_tool_dependency_install( tool_shed_repository, tool_dependency ) for installed_tool_dependency in installed_tool_dependencies: if installed_tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.ERROR ]: repair_dict = add_repair_dict_entry( repository.name, installed_tool_dependency.error_message ) diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py +++ /dev/null @@ -1,103 +0,0 @@ -import logging -import os - -from galaxy import eggs - -eggs.require( 'paramiko' ) -eggs.require( 'ssh' ) -eggs.require( 'Fabric' ) - -from fabric.api import env -from fabric.api import lcd - -from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder -from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import InstallEnvironment -from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import RecipeManager - -log = logging.getLogger( __name__ ) - -INSTALL_ACTIONS = [ 'download_binary', 'download_by_url', 'download_file', 'setup_perl_environmnet', - 'setup_r_environmnet', 'setup_ruby_environmnet', 'shell_command' ] - -def check_fabric_version(): - version = env.version - if int( version.split( "." )[ 0 ] ) < 1: - raise NotImplementedError( "Install Fabric version 1.0 or later." 
) - -def get_tool_shed_repository_install_dir( app, tool_shed_repository ): - return os.path.abspath( tool_shed_repository.repo_files_directory( app ) ) - -def install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict ): - """Install a Galaxy tool dependency package either via a url or a mercurial or git clone command.""" - tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository ) - install_dir = actions_dict[ 'install_dir' ] - package_name = actions_dict[ 'package_name' ] - actions = actions_dict.get( 'actions', None ) - filtered_actions = [] - env_file_builder = EnvFileBuilder( install_dir ) - install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir, - install_dir=install_dir ) - recipe_manager = RecipeManager() - if actions: - with install_environment.make_tmp_dir() as work_dir: - with lcd( work_dir ): - # The first action in the list of actions will be the one that defines the initial download process. - # There are currently three supported actions; download_binary, download_by_url and clone via a - # shell_command action type. The recipe steps will be filtered at this stage in the process, with - # the filtered actions being used in the next stage below. The installation directory (i.e., dir) - # is also defined in this stage and is used in the next stage below when defining current_dir. - action_type, action_dict = actions[ 0 ] - if action_type in INSTALL_ACTIONS: - # Some of the parameters passed here are needed only by a subset of the step handler classes, - # but to allow for a standard method signature we'll pass them along. We don't check the - # tool_dependency status in this stage because it should not have been changed based on a - # download. - tool_dependency, filtered_actions, dir = \ - recipe_manager.execute_step( app=app, - tool_dependency=tool_dependency, - package_name=package_name, - actions=actions, - action_type=action_type, - action_dict=action_dict, - filtered_actions=filtered_actions, - env_file_builder=env_file_builder, - install_environment=install_environment, - work_dir=work_dir, - current_dir=None, - initial_download=True ) - else: - # We're handling a complex repository dependency where we only have a set_environment tag set. - # <action type="set_environment"> - # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable> - # </action> - filtered_actions = [ a for a in actions ] - dir = install_dir - # We're in stage 2 of the installation process. The package has been down-loaded, so we can - # now perform all of the actions defined for building it. - for action_tup in filtered_actions: - current_dir = os.path.abspath( os.path.join( work_dir, dir ) ) - with lcd( current_dir ): - action_type, action_dict = action_tup - tool_dependency, tmp_filtered_actions, tmp_dir = \ - recipe_manager.execute_step( app=app, - tool_dependency=tool_dependency, - package_name=package_name, - actions=actions, - action_type=action_type, - action_dict=action_dict, - filtered_actions=filtered_actions, - env_file_builder=env_file_builder, - install_environment=install_environment, - work_dir=work_dir, - current_dir=current_dir, - initial_download=False ) - if tool_dependency.status in [ app.install_model.ToolDependency.installation_status.ERROR ]: - # If the tool_dependency status is in an error state, return it with no additional - # processing. 
- return tool_dependency - # Make sure to handle the special case where the value of dir is reset (this happens when - # the action_type is change_directiory). In all other action types, dir will be returned as - # None. - if tmp_dir is not None: - dir = tmp_dir - return tool_dependency diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ /dev/null @@ -1,734 +0,0 @@ -import logging -import os -import shutil -import stat -import subprocess -import sys -import tempfile -import fabric_util -import td_common_util -from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import EnvFileBuilder -from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import InstallEnvironment -from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import RecipeManager -import tool_shed.util.shed_util_common as suc -from tool_shed.util import common_util -from tool_shed.util import encoding_util -from tool_shed.util import tool_dependency_util -from tool_shed.util import xml_util -from tool_shed.galaxy_install.tool_dependencies import td_common_util -from galaxy.model.orm import and_ -from galaxy.util import asbool -from galaxy.util import listify - -log = logging.getLogger( __name__ ) - -def create_temporary_tool_dependencies_config( app, tool_shed_url, name, owner, changeset_revision ): - """Make a call to the tool shed to get the required repository's tool_dependencies.xml file.""" - tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) - params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) - url = common_util.url_join( tool_shed_url, - 'repository/get_tool_dependencies_config_contents%s' % params ) - text = common_util.tool_shed_get( app, tool_shed_url, url ) - if text: - # Write the contents to a temporary file on disk so it can be reloaded and parsed. - fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cttdc" ) - tmp_filename = fh.name - fh.close() - fh = open( tmp_filename, 'wb' ) - fh.write( text ) - fh.close() - return tmp_filename - else: - message = "Unable to retrieve required tool_dependencies.xml file from the tool shed for revision " - message += "%s of installed repository %s owned by %s." % ( str( changeset_revision ), str( name ), str( owner ) ) - raise Exception( message ) - return None - -def create_tool_dependency_with_initialized_env_sh_file( app, dependent_install_dir, tool_shed_repository, required_repository, package_name, - package_version, tool_dependencies_config ): - """ - Create or get a tool_dependency record that is defined by the received package_name and package_version. An env.sh file will be - created for the tool_dependency in the received dependent_install_dir. - """ - #The received required_repository refers to a tool_shed_repository record that is defined as a complex repository dependency for this - # tool_dependency. The required_repository may or may not be currently installed (it doesn't matter). If it is installed, it is - # associated with a tool_dependency that has an env.sh file that this new tool_dependency must be able to locate and "source". If it - # is not installed, we can still determine where that env.sh file will be, so we'll initialize this new tool_dependency's env.sh file - # in either case. 
If the require repository end up with an installation error, this new tool dependency will still be fine because its - # containing repository will be defined as missing dependencies. - tool_dependencies = [] - if not os.path.exists( dependent_install_dir ): - os.makedirs( dependent_install_dir ) - required_tool_dependency_env_file_path = None - if tool_dependencies_config: - required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config ) - if required_td_tree: - required_td_root = required_td_tree.getroot() - for required_td_elem in required_td_root: - # Find the appropriate package name and version. - if required_td_elem.tag == 'package': - # <package name="bwa" version="0.5.9"> - required_td_package_name = required_td_elem.get( 'name', None ) - required_td_package_version = required_td_elem.get( 'version', None ) - # Check the database to see if we have a record for the required tool dependency (we may not which is ok). If we - # find a record, we need to see if it is in an error state and if so handle it appropriately. - required_tool_dependency = \ - tool_dependency_util.get_tool_dependency_by_name_version_type_repository( app, - required_repository, - required_td_package_name, - required_td_package_version, - 'package' ) - if required_td_package_name == package_name and required_td_package_version == package_version: - # Get or create a database tool_dependency record with which the installed package on disk will be associated. - tool_dependency = \ - tool_dependency_util.create_or_update_tool_dependency( app=app, - tool_shed_repository=tool_shed_repository, - name=package_name, - version=package_version, - type='package', - status=app.install_model.ToolDependency.installation_status.NEVER_INSTALLED, - set_status=True ) - # Create an env.sh file for the tool_dependency whose first line will source the env.sh file located in - # the path defined by required_tool_dependency_env_file_path. It doesn't matter if the required env.sh - # file currently exists.. - required_tool_dependency_env_file_path = \ - tool_dependency_util.get_required_repository_package_env_sh_path( app, - package_name, - package_version, - required_repository ) - env_file_builder = EnvFileBuilder( tool_dependency.installation_directory( app ) ) - env_file_builder.append_line( action="source", value=required_tool_dependency_env_file_path ) - return_code = env_file_builder.return_code - if return_code: - error_message = 'Error defining env.sh file for package %s, return_code: %s' % \ - ( str( package_name ), str( return_code ) ) - tool_dependency = \ - tool_dependency_util.handle_tool_dependency_installation_error( app, - tool_dependency, - error_message, - remove_installation_path=False ) - elif required_tool_dependency is not None and required_tool_dependency.in_error_state: - error_message = "This tool dependency's required tool dependency %s version %s has status %s." 
% \ - ( str( required_tool_dependency.name ), str( required_tool_dependency.version ), str( required_tool_dependency.status ) ) - tool_dependency = \ - tool_dependency_util.handle_tool_dependency_installation_error( app, - tool_dependency, - error_message, - remove_installation_path=False ) - else: - tool_dependency = \ - tool_dependency_util.set_tool_dependency_attributes( app, - tool_dependency=tool_dependency, - status=app.install_model.ToolDependency.installation_status.INSTALLED ) - tool_dependencies.append( tool_dependency ) - return tool_dependencies - -def get_absolute_path_to_file_in_repository( repo_files_dir, file_name ): - """Return the absolute path to a specified disk file contained in a repository.""" - stripped_file_name = strip_path( file_name ) - file_path = None - for root, dirs, files in os.walk( repo_files_dir ): - if root.find( '.hg' ) < 0: - for name in files: - if name == stripped_file_name: - return os.path.abspath( os.path.join( root, name ) ) - return file_path - -def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ): - sa_session = app.install_model.context - # The protocol is not stored, but the port is if it exists. - tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed_url ) - tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \ - .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.install_model.ToolShedRepository.table.c.name == name, - app.install_model.ToolShedRepository.table.c.owner == owner, - app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ - .first() - if tool_shed_repository: - return tool_shed_repository - # The tool_shed_repository must have been updated to a newer changeset revision than the one defined in the repository_dependencies.xml file, - # so call the tool shed to get all appropriate newer changeset revisions. - text = get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision ) - if text: - changeset_revisions = listify( text ) - for changeset_revision in changeset_revisions: - tool_shed_repository = sa_session.query( app.install_model.ToolShedRepository ) \ - .filter( and_( app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.install_model.ToolShedRepository.table.c.name == name, - app.install_model.ToolShedRepository.table.c.owner == owner, - app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ - .first() - if tool_shed_repository: - return tool_shed_repository - return None - -def get_updated_changeset_revisions_from_tool_shed( app, tool_shed_url, name, owner, changeset_revision ): - """ - Get all appropriate newer changeset revisions for the repository defined by - the received tool_shed_url / name / owner combination. 
- """ - tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url ) - params = '?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) - url = common_util.url_join( tool_shed_url, - 'repository/updated_changeset_revisions%s' % params ) - text = common_util.tool_shed_get( app, tool_shed_url, url ) - return text - - -def handle_complex_repository_dependency_for_package( app, elem, package_name, package_version, tool_shed_repository, from_tool_migration_manager=False ): - """ - Inspect the repository defined by a complex repository dependency definition and take certain steps to enable installation - of the received package name and version to proceed. The received elem is the <repository> tag set which defines the complex - repository dependency. The received tool_shed_repository is the installed tool shed repository for which the tool dependency - defined by the received package_name and package_version is being installed. - """ - handled_tool_dependencies = [] - tool_shed = elem.attrib[ 'toolshed' ] - # The protocol is not stored, but the port is if it exists. - tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed ) - required_repository_name = elem.attrib[ 'name' ] - required_repository_owner = elem.attrib[ 'owner' ] - default_required_repository_changeset_revision = elem.attrib[ 'changeset_revision' ] - required_repository = get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, - tool_shed, - required_repository_name, - required_repository_owner, - default_required_repository_changeset_revision ) - tmp_filename = None - if required_repository: - required_repository_changeset_revision = required_repository.installed_changeset_revision - # Define the installation directory for the required tool dependency package in the required repository. - required_repository_package_install_dir = \ - tool_dependency_util.get_tool_dependency_install_dir( app=app, - repository_name=required_repository_name, - repository_owner=required_repository_owner, - repository_changeset_revision=required_repository_changeset_revision, - tool_dependency_type='package', - tool_dependency_name=package_name, - tool_dependency_version=package_version ) - # Define this dependent repository's tool dependency installation directory that will contain the env.sh file with a path to the - # required repository's installed tool dependency package. - dependent_install_dir = \ - tool_dependency_util.get_tool_dependency_install_dir( app=app, - repository_name=tool_shed_repository.name, - repository_owner=tool_shed_repository.owner, - repository_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dependency_type='package', - tool_dependency_name=package_name, - tool_dependency_version=package_version ) - if os.path.exists( dependent_install_dir ): - # The install manager handles tool migration stages and the sync_database_with_file_system() - # method handles two scenarios: (1) where a Galaxy file system environment related to installed - # Tool Shed repositories and tool dependencies has somehow gotten out of sync with the Galaxy - # database tables associated with these installed items, and (2) the Tool Shed's install and test - # framework which installs repositories in 2 stages, those of type tool_dependency_definition - # followed by those containing valid tools and tool functional test components. Neither of these - # scenarios apply when the install manager is running. 
- if from_tool_migration_manager: - can_install_tool_dependency = True - else: - # Notice that we'll throw away the following tool_dependency if it can be installed. - tool_dependency, can_install_tool_dependency = \ - tool_dependency_util.sync_database_with_file_system( app, - tool_shed_repository, - package_name, - package_version, - dependent_install_dir, - tool_dependency_type='package' ) - if not can_install_tool_dependency: - log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), " % \ - ( str( tool_dependency.name, str( tool_dependency.version ) ) ) ) - log.debug( "so appending it to the list of handled tool dependencies." ) - handled_tool_dependencies.append( tool_dependency ) - else: - can_install_tool_dependency = True - if can_install_tool_dependency: - # Set this dependent repository's tool dependency env.sh file with a path to the required repository's installed tool dependency package. - # We can get everything we need from the discovered installed required_repository. - if required_repository.is_deactivated_or_installed: - if not os.path.exists( required_repository_package_install_dir ): - print 'Missing required tool dependency directory %s' % str( required_repository_package_install_dir ) - repo_files_dir = required_repository.repo_files_directory( app ) - tool_dependencies_config = get_absolute_path_to_file_in_repository( repo_files_dir, 'tool_dependencies.xml' ) - if tool_dependencies_config: - config_to_use = tool_dependencies_config - else: - message = "Unable to locate required tool_dependencies.xml file for revision %s of installed repository %s owned by %s." % \ - ( str( required_repository.changeset_revision ), str( required_repository.name ), str( required_repository.owner ) ) - raise Exception( message ) - else: - # Make a call to the tool shed to get the changeset revision to which the current value of required_repository_changeset_revision - # should be updated if it's not current. - text = get_updated_changeset_revisions_from_tool_shed( app=app, - tool_shed_url=tool_shed, - name=required_repository_name, - owner=required_repository_owner, - changeset_revision=required_repository_changeset_revision ) - if text: - updated_changeset_revisions = listify( text ) - # The list of changeset revisions is in reverse order, so the newest will be first. - required_repository_changeset_revision = updated_changeset_revisions[ 0 ] - # Make a call to the tool shed to get the required repository's tool_dependencies.xml file. - tmp_filename = create_temporary_tool_dependencies_config( app, - tool_shed, - required_repository_name, - required_repository_owner, - required_repository_changeset_revision ) - config_to_use = tmp_filename - handled_tool_dependencies = create_tool_dependency_with_initialized_env_sh_file( app=app, - dependent_install_dir=dependent_install_dir, - tool_shed_repository=tool_shed_repository, - required_repository=required_repository, - package_name=package_name, - package_version=package_version, - tool_dependencies_config=config_to_use ) - suc.remove_file( tmp_filename ) - else: - message = "Unable to locate required tool shed repository named %s owned by %s with revision %s." 
% \ - ( str( required_repository_name ), str( required_repository_owner ), str( default_required_repository_changeset_revision ) ) - raise Exception( message ) - return handled_tool_dependencies - -def install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict ): - sa_session = app.install_model.context - try: - # There is currently only one fabric method. - tool_dependency = fabric_util.install_and_build_package( app, tool_shed_repository, tool_dependency, actions_dict ) - except Exception, e: - log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) ) - # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must - # be left False here. - error_message = '%s\n%s' % ( td_common_util.format_traceback(), str( e ) ) - tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app, - tool_dependency, - error_message, - remove_installation_path=False ) - tool_dependency = tool_dependency_util.mark_tool_dependency_installed( app, tool_dependency ) - return tool_dependency - -def install_package( app, elem, tool_shed_repository, tool_dependencies=None, from_tool_migration_manager=False ): - """ - Install a tool dependency package defined by the XML element elem. The value of tool_dependencies is - a partial or full list of ToolDependency records associated with the tool_shed_repository. - """ - sa_session = app.install_model.context - tool_dependency = None - # The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required. - package_name = elem.get( 'name', None ) - package_version = elem.get( 'version', None ) - if tool_dependencies and package_name and package_version: - for package_elem in elem: - if package_elem.tag == 'repository': - # We have a complex repository dependency definition. - rd_tool_dependencies = handle_complex_repository_dependency_for_package( app, - package_elem, - package_name, - package_version, - tool_shed_repository, - from_tool_migration_manager=from_tool_migration_manager ) - for rd_tool_dependency in rd_tool_dependencies: - if rd_tool_dependency.status == app.install_model.ToolDependency.installation_status.ERROR: - # We'll log the error here, but continue installing packages since some may not require this dependency. - print "Error installing tool dependency for required repository: %s" % str( rd_tool_dependency.error_message ) - elif package_elem.tag == 'install': - # <install version="1.0"> - # Get the installation directory for tool dependencies that will be installed for the received tool_shed_repository. 
- install_dir = tool_dependency_util.get_tool_dependency_install_dir( app=app, - repository_name=tool_shed_repository.name, - repository_owner=tool_shed_repository.owner, - repository_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dependency_type='package', - tool_dependency_name=package_name, - tool_dependency_version=package_version ) - if os.path.exists( install_dir ): - # The install manager handles tool migration stages and the sync_database_with_file_system() - # method handles two scenarios: (1) where a Galaxy file system environment related to installed - # Tool Shed repositories and tool dependencies has somehow gotten out of sync with the Galaxy - # database tables associated with these installed items, and (2) the Tool Shed's install and test - # framework which installs repositories in 2 stages, those of type tool_dependency_definition - # followed by those containing valid tools and tool functional test components. Neither of these - # scenarios apply when the install manager is running. - if from_tool_migration_manager: - can_install_tool_dependency = True - else: - # Notice that we'll throw away the following tool_dependency if it can be installed. - tool_dependency, can_install_tool_dependency = \ - tool_dependency_util.sync_database_with_file_system( app, - tool_shed_repository, - package_name, - package_version, - install_dir, - tool_dependency_type='package' ) - if not can_install_tool_dependency: - log.debug( "Tool dependency %s version %s cannot be installed (it was probably previously installed), so returning it." % \ - ( str( tool_dependency.name ), str( tool_dependency.version ) ) ) - return tool_dependency - else: - can_install_tool_dependency = True - if can_install_tool_dependency: - package_install_version = package_elem.get( 'version', '1.0' ) - status = app.install_model.ToolDependency.installation_status.INSTALLING - tool_dependency = \ - tool_dependency_util.create_or_update_tool_dependency( app=app, - tool_shed_repository=tool_shed_repository, - name=package_name, - version=package_version, - type='package', - status=status, - set_status=True ) - # Get the information about the current platform in case the tool dependency definition includes tag sets - # for installing compiled binaries. - platform_info_dict = tool_dependency_util.get_platform_info_dict() - if package_install_version == '1.0': - # Handle tool dependency installation using a fabric method included in the Galaxy framework. - actions_elem_tuples = td_common_util.parse_package_elem( package_elem, - platform_info_dict=platform_info_dict, - include_after_install_actions=True ) - if actions_elem_tuples: - # At this point we have a list of <actions> elems that are either defined within an <actions_group> - # tag set with <actions> sub-elements that contains os and architecture attributes filtered by the - # platform into which the appropriate compiled binary will be installed, or not defined within an - # <actions_group> tag set and not filtered. Here is an example actions_elem_tuple. - # [(True, [<Element 'actions' at 0x109293d10>)] - binary_installed = False - for actions_elem_tuple in actions_elem_tuples: - in_actions_group, actions_elems = actions_elem_tuple - if in_actions_group: - # Platform matching is only performed inside <actions_group> tag sets, os and architecture - # attributes are otherwise ignored. 
- can_install_from_source = False - for actions_elem in actions_elems: - system = actions_elem.get( 'os' ) - architecture = actions_elem.get( 'architecture' ) - # If this <actions> element has the os and architecture attributes defined, then we only - # want to process until a successful installation is achieved. - if system and architecture: - # If an <actions> tag has been defined that matches our current platform, and the - # recipe specified within that <actions> tag has been successfully processed, skip - # any remaining platform-specific <actions> tags. We cannot break out of the loop - # here because there may be <action> tags at the end of the <actions_group> tag set - # that must be processed. - if binary_installed: - continue - # No platform-specific <actions> recipe has yet resulted in a successful installation. - tool_dependency = install_via_fabric( app, - tool_shed_repository, - tool_dependency, - install_dir, - package_name=package_name, - actions_elem=actions_elem, - action_elem=None ) - if tool_dependency.status == app.install_model.ToolDependency.installation_status.INSTALLED: - # If an <actions> tag was found that matches the current platform, and the - # install_via_fabric method did not result in an error state, set binary_installed - # to True in order to skip any remaining platform-specific <actions> tags. - binary_installed = True - else: - # Process the next matching <actions> tag, or any defined <actions> tags that do not - # contain platform dependent recipes. - log.debug( 'Error downloading binary for tool dependency %s version %s: %s' % \ - ( str( package_name ), str( package_version ), str( tool_dependency.error_message ) ) ) - else: - if actions_elem.tag == 'actions': - # We've reached an <actions> tag that defines the recipe for installing and compiling from - # source. If binary installation failed, we proceed with the recipe. - if not binary_installed: - installation_directory = tool_dependency.installation_directory( app ) - if os.path.exists( installation_directory ): - # Delete contents of installation directory if attempt at binary installation failed. - installation_directory_contents = os.listdir( installation_directory ) - if installation_directory_contents: - removed, error_message = tool_dependency_util.remove_tool_dependency( app, tool_dependency ) - if removed: - can_install_from_source = True - else: - log.debug( 'Error removing old files from installation directory %s: %s' % \ - ( str( tool_dependency.installation_directory( app ), str( error_message ) ) ) ) - else: - can_install_from_source = True - else: - can_install_from_source = True - if can_install_from_source: - # We now know that binary installation was not successful, so proceed with the <actions> - # tag set that defines the recipe to install and compile from source. - log.debug( 'Proceeding with install and compile recipe for tool dependency %s.' % \ - str( tool_dependency.name ) ) - tool_dependency = install_via_fabric( app, - tool_shed_repository, - tool_dependency, - install_dir, - package_name=package_name, - actions_elem=actions_elem, - action_elem=None ) - if actions_elem.tag == 'action' and tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR: - # If the tool dependency is not in an error state, perform any final actions that have been - # defined within the actions_group tag set, but outside of an <actions> tag, which defines - # the recipe for installing and compiling from source. 
- tool_dependency = install_via_fabric( app, - tool_shed_repository, - tool_dependency, - install_dir, - package_name=package_name, - actions_elem=None, - action_elem=actions_elem ) - else: - # Checks for "os" and "architecture" attributes are not made for any <actions> tag sets outside of - # an <actions_group> tag set. If the attributes are defined, they will be ignored. All <actions> tags - # outside of an <actions_group> tag set will always be processed. - tool_dependency = install_via_fabric( app, - tool_shed_repository, - tool_dependency, - install_dir, - package_name=package_name, - actions_elem=actions_elems, - action_elem=None ) - if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR: - log.debug( 'Tool dependency %s version %s has been installed in %s.' % \ - ( str( package_name ), str( package_version ), str( install_dir ) ) ) - else: - error_message = 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) ) - error_message += 'the recipe for installing the package is missing either an <actions> tag set or an <actions_group> ' - error_message += 'tag set.' - # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must - # be left False here. - tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app, - tool_dependency, - error_message, - remove_installation_path=False ) - return tool_dependency - else: - raise NotImplementedError( 'Only install version 1.0 is currently supported (i.e., change your tag to be <install version="1.0">).' ) - elif package_elem.tag == 'readme': - # Nothing to be done. - continue - #elif package_elem.tag == 'custom_fabfile': - # # TODO: This is not yet supported or functionally correct... - # # Handle tool dependency installation where the repository includes one or more custom fabric scripts. - # if not fabric_version_checked: - # check_fabric_version() - # fabric_version_checked = True - # fabfile_name = package_elem.get( 'name', None ) - # custom_fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) ) - # print 'Installing tool dependencies via fabric script ', custom_fabfile_path - return tool_dependency - -def install_via_fabric( app, tool_shed_repository, tool_dependency, install_dir, package_name=None, custom_fabfile_path=None, - actions_elem=None, action_elem=None, **kwd ): - """ - Parse a tool_dependency.xml file's <actions> tag set to gather information for installation using the - fabric_util.install_and_build_package() method. The use of fabric is being eliminated, so some of these - functions may need to be renamed at some point. - """ - sa_session = app.install_model.context - if not os.path.exists( install_dir ): - os.makedirs( install_dir ) - actions_dict = dict( install_dir=install_dir ) - if package_name: - actions_dict[ 'package_name' ] = package_name - actions = [] - is_binary_download = False - if actions_elem is not None: - elems = actions_elem - if elems.get( 'os' ) is not None and elems.get( 'architecture' ) is not None: - is_binary_download = True - elif action_elem is not None: - # We were provided with a single <action> element to perform certain actions after a platform-specific tarball was downloaded. 
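# A minimal sketch of the action-collection step that the loop following this point
# performs inside install_via_fabric: every <action> child with a type attribute becomes
# a ( action_type, action_dict ) tuple, and duplicate set_environment tuples are dropped.
# The trivial action_dict built here is an invented stand-in; the real work is done by
# RecipeManager/StepManager.prepare_step().
def collect_actions( actions_elem ):
    actions = []
    for action_elem in actions_elem:
        # The surrounding code parses XML with comments preserved, hence the explicit
        # tag check; it is harmless to keep it here as well.
        if action_elem.tag != 'action':
            continue
        action_type = action_elem.get( 'type', None )
        if action_type is None:
            continue
        # Stand-in for prepare_step(): keep only the element text.
        action_dict = dict( text=( action_elem.text or '' ).strip() )
        action_tuple = ( action_type, action_dict )
        if action_type == 'set_environment':
            if action_tuple not in actions:
                actions.append( action_tuple )
        else:
            actions.append( action_tuple )
    return actions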
- elems = [ action_elem ] - else: - elems = [] - recipe_manager = RecipeManager() - tool_shed_repository_install_dir = fabric_util.get_tool_shed_repository_install_dir( app, tool_shed_repository ) - install_environment = InstallEnvironment( tool_shed_repository_install_dir, install_dir ) - for action_elem in elems: - # Make sure to skip all comments, since they are now included in the XML tree. - if action_elem.tag != 'action': - continue - action_dict = {} - action_type = action_elem.get( 'type', None ) - if action_type is not None: - action_dict = recipe_manager.prepare_step( app=app, - tool_dependency=tool_dependency, - action_type=action_type, - action_elem=action_elem, - action_dict=action_dict, - install_environment=install_environment, - is_binary_download=is_binary_download ) - action_tuple = ( action_type, action_dict ) - if action_type == 'set_environment': - if action_tuple not in actions: - actions.append( action_tuple ) - else: - actions.append( action_tuple ) - if actions: - actions_dict[ 'actions' ] = actions - if custom_fabfile_path is not None: - # TODO: this is not yet supported or functional, but when it is handle it using the fabric api. - # execute_custom_fabric_script( app, elem, custom_fabfile_path, install_dir, package_name=package_name ) - raise Exception( 'Tool dependency installation using proprietary fabric scripts is not yet supported.' ) - else: - tool_dependency = install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict ) - return tool_dependency - -def execute_custom_fabric_script( app, elem, custom_fabfile_path, install_dir, package_name=None, **kwd ): - """ - TODO: Handle this using the fabric api. - Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method. - """ - if not os.path.exists( install_dir ): - os.makedirs( install_dir ) - # Default value for env_dependency_path. - env_dependency_path = install_dir - method_name = elem.get( 'name', None ) - params_str = '' - actions = [] - for param_elem in elem: - param_name = param_elem.get( 'name' ) - if param_name: - if param_name == 'actions': - for action_elem in param_elem: - actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) ) - if actions: - params_str += 'actions=%s,' % encoding_util.tool_shed_encode( encoding_util.encoding_sep.join( actions ) ) - else: - if param_elem.text: - param_value = encoding_util.tool_shed_encode( param_elem.text ) - params_str += '%s=%s,' % ( param_name, param_value ) - if package_name: - params_str += 'package_name=%s' % package_name - else: - params_str = params_str.rstrip( ',' ) - try: - cmd = 'fab -f %s %s:%s' % ( custom_fabfile_path, method_name, params_str ) - returncode, message = run_subprocess( app, cmd ) - except Exception, e: - return "Exception executing fabric script %s: %s. 
" % ( str( custom_fabfile_path ), str( e ) ) - if returncode: - return message - handle_environment_settings( app, tool_dependency, install_dir, cmd ) - -def run_subprocess( app, cmd ): - env = os.environ - PYTHONPATH = env.get( 'PYTHONPATH', '' ) - if PYTHONPATH: - env[ 'PYTHONPATH' ] = '%s:%s' % ( os.path.abspath( os.path.join( app.config.root, 'lib' ) ), PYTHONPATH ) - else: - env[ 'PYTHONPATH' ] = os.path.abspath( os.path.join( app.config.root, 'lib' ) ) - message = '' - tmp_name = tempfile.NamedTemporaryFile( prefix="tmp-toolshed-rs" ).name - tmp_stderr = open( tmp_name, 'wb' ) - proc = subprocess.Popen( cmd, shell=True, env=env, stderr=tmp_stderr.fileno() ) - returncode = proc.wait() - tmp_stderr.close() - if returncode: - tmp_stderr = open( tmp_name, 'rb' ) - message = '%s\n' % str( tmp_stderr.read() ) - tmp_stderr.close() - suc.remove_file( tmp_name ) - return returncode, message - -def set_environment( app, elem, tool_shed_repository, attr_tups_of_dependencies_for_install ): - """ - Create a ToolDependency to set an environment variable. This is different from the process used to - set an environment variable that is associated with a package. An example entry in a tool_dependencies.xml - file is:: - - <set_environment version="1.0"> - <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable> - </set_environment> - """ - # TODO: Add support for a repository dependency definition within this tool dependency type's tag set. This should look something like - # the following. See the implementation of support for this in the tool dependency package type's method above. - # This function is only called for set environment actions as defined below, not within an <install version="1.0"> tool - # dependency type. Here is an example of the tag set this function does handle: - # <action type="set_environment"> - # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR</environment_variable> - # </action> - # Here is an example of the tag set this function does not handle: - # <set_environment version="1.0"> - # <repository toolshed="<tool shed>" name="<repository name>" owner="<repository owner>" changeset_revision="<changeset revision>" /> - # </set_environment> - sa_session = app.install_model.context - tool_dependencies = [] - env_var_version = elem.get( 'version', '1.0' ) - tool_shed_repository_install_dir = fabric_util.get_tool_shed_repository_install_dir( app, tool_shed_repository ) - for env_var_elem in elem: - # Althoug we're in a loop here, this method will always return only a single ToolDependency or None. - env_var_name = env_var_elem.get( 'name', None ) - # The value of env_var_name must match the text value of at least 1 <requirement> tag in the tool config's <requirements> tag set whose - # "type" attribute is "set_environment" (e.g., <requirement type="set_environment">R_SCRIPT_PATH</requirement>). - env_var_action = env_var_elem.get( 'action', None ) - if env_var_name and env_var_action: - # Tool dependencies of type "set_environmnet" always have the version attribute set to None. 
- attr_tup = ( env_var_name, None, 'set_environment' ) - if attr_tup in attr_tups_of_dependencies_for_install: - install_dir = \ - tool_dependency_util.get_tool_dependency_install_dir( app=app, - repository_name=tool_shed_repository.name, - repository_owner=tool_shed_repository.owner, - repository_changeset_revision=tool_shed_repository.installed_changeset_revision, - tool_dependency_type='set_environment', - tool_dependency_name=env_var_name, - tool_dependency_version=None ) - install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir, - install_dir=install_dir ) - env_var_dict = td_common_util.create_env_var_dict( elem=env_var_elem, - install_environment=install_environment ) - if env_var_dict: - if not os.path.exists( install_dir ): - os.makedirs( install_dir ) - status = app.install_model.ToolDependency.installation_status.INSTALLING - tool_dependency = \ - tool_dependency_util.create_or_update_tool_dependency( app=app, - tool_shed_repository=tool_shed_repository, - name=env_var_name, - version=None, - type='set_environment', - status=status, - set_status=True ) - if env_var_version == '1.0': - # Create this tool dependency's env.sh file. - env_file_builder = EnvFileBuilder( install_dir ) - return_code = env_file_builder.append_line( make_executable=True, **env_var_dict ) - if return_code: - error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \ - ( str( tool_dependency.name ), str( return_code ) ) - log.debug( error_message ) - status = app.install_model.ToolDependency.installation_status.ERROR - tool_dependency = \ - tool_dependency_util.set_tool_dependency_attributes( app, - tool_dependency=tool_dependency, - status=status, - error_message=error_message, - remove_from_disk=False ) - else: - if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.ERROR, - app.install_model.ToolDependency.installation_status.INSTALLED ]: - status = app.install_model.ToolDependency.installation_status.INSTALLED - tool_dependency = \ - tool_dependency_util.set_tool_dependency_attributes( app, - tool_dependency=tool_dependency, - status=status, - error_message=None, - remove_from_disk=False ) - log.debug( 'Environment variable %s set in %s for tool dependency %s.' % \ - ( str( env_var_name ), str( install_dir ), str( tool_dependency.name ) ) ) - else: - error_message = 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).' - status = app.install_model.ToolDependency.installation_status.ERROR - tool_dependency = \ - tool_dependency_util.set_tool_dependency_attributes( app, - tool_dependency=tool_dependency, - status=status, - error_message=error_message, - remove_from_disk=False ) - tool_dependencies.append( tool_dependency ) - return tool_dependencies - -def strip_path( fpath ): - if not fpath: - return fpath - try: - file_path, file_name = os.path.split( fpath ) - except: - file_name = fpath - return file_name diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py --- /dev/null +++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/asynchronous_reader.py @@ -0,0 +1,32 @@ +import logging +import os +import threading + +log = logging.getLogger( __name__ ) + + +class AsynchronousReader( threading.Thread ): + """ + A helper class to implement asynchronous reading of a stream in a separate thread. 
Read lines are pushed + onto a queue to be consumed in another thread. + """ + + def __init__( self, fd, queue ): + threading.Thread.__init__( self ) + self._fd = fd + self._queue = queue + self.lines = [] + + def run( self ): + """Read lines and put them on the queue.""" + thread_lock = threading.Lock() + thread_lock.acquire() + for line in iter( self._fd.readline, '' ): + stripped_line = line.rstrip() + self.lines.append( stripped_line ) + self._queue.put( stripped_line ) + thread_lock.release() + + def installation_complete( self ): + """Make sure there is more installation and compilation logging content expected.""" + return not self.is_alive() and self._queue.empty() diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py --- /dev/null +++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/env_file_builder.py @@ -0,0 +1,95 @@ +import logging +import os +import stat + +log = logging.getLogger( __name__ ) + + +class EnvFileBuilder( object ): + + def __init__( self, install_dir ): + self.install_dir = install_dir + self.return_code = 0 + + def append_line( self, make_executable=True, **kwd ): + env_var_dict = dict( **kwd ) + env_entry, env_file = self.create_or_update_env_shell_file( self.install_dir, env_var_dict ) + return_code = self.file_append( env_entry, env_file, make_executable=make_executable ) + self.return_code = self.return_code or return_code + return self.return_code + + @staticmethod + def create_or_update_env_shell_file( install_dir, env_var_dict ): + env_var_action = env_var_dict[ 'action' ] + env_var_value = env_var_dict[ 'value' ] + if env_var_action in [ 'prepend_to', 'set_to', 'append_to' ]: + env_var_name = env_var_dict[ 'name' ] + if env_var_action == 'prepend_to': + changed_value = '%s:$%s' % ( env_var_value, env_var_name ) + elif env_var_action == 'set_to': + changed_value = '%s' % env_var_value + elif env_var_action == 'append_to': + changed_value = '$%s:%s' % ( env_var_name, env_var_value ) + line = "%s=%s; export %s" % ( env_var_name, changed_value, env_var_name ) + elif env_var_action == "source": + line = "if [ -f %s ] ; then . %s ; fi" % ( env_var_value, env_var_value ) + else: + raise Exception( "Unknown shell file action %s" % env_var_action ) + env_shell_file_path = os.path.join( install_dir, 'env.sh' ) + return line, env_shell_file_path + + def file_append( self, text, file_path, make_executable=True ): + """ + Append a line to a file unless the line already exists in the file. This method creates the file if + it doesn't exist. If make_executable is True, the permissions on the file are set to executable by + the owner. + """ + file_dir = os.path.dirname( file_path ) + if not os.path.exists( file_dir ): + try: + os.makedirs( file_dir ) + except Exception, e: + log.exception( str( e ) ) + return 1 + if os.path.exists( file_path ): + try: + new_env_file_contents = [] + env_file_contents = file( file_path, 'r' ).readlines() + # Clean out blank lines from the env.sh file. + for line in env_file_contents: + line = line.rstrip() + if line: + new_env_file_contents.append( line ) + env_file_contents = new_env_file_contents + except Exception, e: + log.exception( str( e ) ) + return 1 + else: + env_file_handle = open( file_path, 'w' ) + env_file_handle.close() + env_file_contents = [] + if make_executable: + # Explicitly set the file's executable bits. 
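# For reference, the action-to-shell-line mapping implemented by
# create_or_update_env_shell_file() above, reduced to a single function.  This sketch
# only restates what the method already does so the resulting env.sh entries are easy
# to see:
#   prepend_to  ->  NAME=value:$NAME; export NAME
#   append_to   ->  NAME=$NAME:value; export NAME
#   set_to      ->  NAME=value; export NAME
#   source      ->  if [ -f value ] ; then . value ; fi
def env_shell_line( name, action, value ):
    if action == 'prepend_to':
        return '%s=%s:$%s; export %s' % ( name, value, name, name )
    if action == 'append_to':
        return '%s=$%s:%s; export %s' % ( name, name, value, name )
    if action == 'set_to':
        return '%s=%s; export %s' % ( name, value, name )
    if action == 'source':
        return 'if [ -f %s ] ; then . %s ; fi' % ( value, value )
    raise Exception( 'Unknown shell file action %s' % action )

# Example: env_shell_line( 'PATH', 'prepend_to', '$INSTALL_DIR/bin' )
# returns 'PATH=$INSTALL_DIR/bin:$PATH; export PATH'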
+ try: + os.chmod( file_path, int( '111', base=8 ) | os.stat( file_path )[ stat.ST_MODE ] ) + except Exception, e: + log.exception( str( e ) ) + return 1 + # Convert the received text to a list, in order to support adding one or more lines to the file. + if isinstance( text, basestring ): + text = [ text ] + for line in text: + line = line.rstrip() + if line and line not in env_file_contents: + env_file_contents.append( line ) + try: + file( file_path, 'w' ).write( '\n'.join( env_file_contents ) ) + except Exception, e: + log.exception( str( e ) ) + return 1 + return 0 + + def handle_action_shell_file_paths( self, action_dict ): + shell_file_paths = action_dict.get( 'action_shell_file_paths', [] ) + for shell_file_path in shell_file_paths: + self.append_line( action="source", value=shell_file_path ) diff -r 23bb24573f1370b4f6322651d471d11ff5352447 -r 6fecbad49afd69beebb55fb8a9b83ecae29af723 lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py --- /dev/null +++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/install_environment.py @@ -0,0 +1,273 @@ +import logging +import os +import Queue +import shutil +import subprocess +import tempfile +import threading +import time +from contextlib import contextmanager + +# TODO: eliminate the use of fabric here. +from galaxy import eggs + +eggs.require( 'paramiko' ) +eggs.require( 'ssh' ) +eggs.require( 'Fabric' ) + +from fabric.operations import _AttributeString +from fabric import state +from fabric.api import prefix + +from galaxy.util import DATABASE_MAX_STRING_SIZE +from galaxy.util import DATABASE_MAX_STRING_SIZE_PRETTY +from galaxy.util import shrink_string_by_size +from galaxy.util import unicodify + +from tool_shed.galaxy_install.tool_dependencies import td_common_util +from tool_shed.galaxy_install.tool_dependencies.recipe import asynchronous_reader + +log = logging.getLogger( __name__ ) + +class InstallEnvironment( object ): + """Object describing the environment built up as part of the process of building and installing a package.""" + + + def __init__( self, tool_shed_repository_install_dir, install_dir ): + """ + The value of the received tool_shed_repository_install_dir is the root installation directory + of the repository containing the tool dependency, and the value of the received install_dir is + the root installation directory of the tool dependency. + """ + self.env_shell_file_paths = [] + self.install_dir = install_dir + self.tool_shed_repository_install_dir = tool_shed_repository_install_dir + + def __call__( self ): + with settings( warn_only=True, **td_common_util.get_env_var_values( self ) ): + with prefix( self.__setup_environment() ): + yield + + def add_env_shell_file_paths( self, paths ): + for path in paths: + self.env_shell_file_paths.append( str( path ) ) + + def build_command( self, command, action_type='shell_command' ): + """ + Build command line for execution from simple command, but + configuring environment described by this object. + """ + env_cmds = self.environment_commands( action_type ) + return '\n'.join( env_cmds + [ command ] ) + + def close_file_descriptor( self, fd ): + """Attempt to close a file descriptor.""" + start_timer = time.time() + error = '' + while True: + try: + fd.close() + break + except IOError, e: + # Undoubtedly close() was called during a concurrent operation on the same file object. 
+ log.debug( 'Error closing file descriptor: %s' % str( e ) ) + time.sleep( .5 ) + current_wait_time = time.time() - start_timer + if current_wait_time >= 600: + error = 'Error closing file descriptor: %s' % str( e ) + break + return error + + def enqueue_output( self, stdout, stdout_queue, stderr, stderr_queue ): + """ + This method places streamed stdout and stderr into a threaded IPC queue target. Received data + is printed and saved to that thread's queue. The calling thread can then retrieve the data using + thread.stdout and thread.stderr. + """ + stdout_logger = logging.getLogger( 'install_environment.STDOUT' ) + stderr_logger = logging.getLogger( 'install_environment.STDERR' ) + for line in iter( stdout.readline, '' ): + output = line.rstrip() + stdout_logger.debug( output ) + stdout_queue.put( output ) + stdout_queue.put( None ) + for line in iter( stderr.readline, '' ): + output = line.rstrip() + stderr_logger.debug( output ) + stderr_queue.put( output ) + stderr_queue.put( None ) + + def environment_commands( self, action_type ): + """Build a list of commands used to construct the environment described by this object.""" + cmds = [] + for env_shell_file_path in self.env_shell_file_paths: + if os.path.exists( env_shell_file_path ): + for env_setting in open( env_shell_file_path ): + cmds.append( env_setting.strip( '\n' ) ) + else: + log.debug( 'Invalid file %s specified, ignoring %s action.' % ( str( env_shell_file_path ), str( action_type ) ) ) + return cmds + + def environment_dict( self, action_type='template_command' ): + env_vars = dict() + for env_shell_file_path in self.env_shell_file_paths: + if os.path.exists( env_shell_file_path ): + for env_setting in open( env_shell_file_path ): + env_string = env_setting.split( ';' )[ 0 ] + env_name, env_path = env_string.split( '=' ) + env_vars[ env_name ] = env_path + else: + log.debug( 'Invalid file %s specified, ignoring template_command action.' % str( env_shell_file_path ) ) + return env_vars + + def handle_command( self, app, tool_dependency, cmd, return_output=False ): + """Handle a command and log the results.""" + context = app.install_model.context + command = str( cmd ) + output = self.handle_complex_command( command ) + self.log_results( cmd, output, os.path.join( self.install_dir, td_common_util.INSTALLATION_LOG ) ) + stdout = output.stdout + stderr = output.stderr + if len( stdout ) > DATABASE_MAX_STRING_SIZE: + print "Length of stdout > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY ) + stdout = shrink_string_by_size( stdout, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True ) + if len( stderr ) > DATABASE_MAX_STRING_SIZE: + print "Length of stderr > %s, so only a portion will be saved in the database." % str( DATABASE_MAX_STRING_SIZE_PRETTY ) + stderr = shrink_string_by_size( stderr, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True ) + if output.return_code not in [ 0 ]: + tool_dependency.status = app.install_model.ToolDependency.installation_status.ERROR + if stderr: + tool_dependency.error_message = unicodify( stderr ) + elif stdout: + tool_dependency.error_message = unicodify( stdout ) + else: + # We have a problem if there was no stdout and no stderr. 
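# A small sketch of the env.sh parsing performed by environment_dict() above, which turns
# previously written lines of the form "NAME=value; export NAME" back into name/value
# pairs.  partition() is used here instead of the original split( '=' ) so that values
# containing '=' are tolerated; otherwise the behavior is the same.
def parse_env_shell_line( env_setting ):
    env_string = env_setting.split( ';' )[ 0 ]
    env_name, _, env_path = env_string.partition( '=' )
    return env_name.strip(), env_path.strip()

# parse_env_shell_line( 'R_SCRIPT_PATH=/deps/R/bin; export R_SCRIPT_PATH' )
# -> ( 'R_SCRIPT_PATH', '/deps/R/bin' )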
+ tool_dependency.error_message = "Unknown error occurred executing shell command %s, return_code: %s" % \ + ( str( cmd ), str( output.return_code ) ) + context.add( tool_dependency ) + context.flush() + if return_output: + return output + return output.return_code + + def handle_complex_command( self, command ): + """ + Wrap subprocess.Popen in such a way that the stderr and stdout from running a shell command will + be captured and logged in nearly real time. This is similar to fabric.local, but allows us to + retain control over the process. This method is named "complex" because it uses queues and + threads to execute a command while capturing and displaying the output. + """ + # Launch the command as subprocess. A bufsize of 1 means line buffered. + process_handle = subprocess.Popen( str( command ), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1, + close_fds=False, + shell=True, + cwd=state.env[ 'lcwd' ] ) + pid = process_handle.pid + # Launch the asynchronous readers of the process' stdout and stderr. + stdout_queue = Queue.Queue() + stdout_reader = asynchronous_reader.AsynchronousReader( process_handle.stdout, stdout_queue ) + stdout_reader.start() + stderr_queue = Queue.Queue() + stderr_reader = asynchronous_reader.AsynchronousReader( process_handle.stderr, stderr_queue ) + stderr_reader.start() + # Place streamed stdout and stderr into a threaded IPC queue target so it can + # be printed and stored for later retrieval when generating the INSTALLATION.log. + stdio_thread = threading.Thread( target=self.enqueue_output, + args=( process_handle.stdout, + stdout_queue, + process_handle.stderr, + stderr_queue ) ) + thread_lock = threading.Lock() + thread_lock.acquire() + stdio_thread.start() + # Check the queues for output until there is nothing more to get. + start_timer = time.time() + while not stdout_reader.installation_complete() or not stderr_reader.installation_complete(): + # Show what we received from standard output. + while not stdout_queue.empty(): + try: + line = stdout_queue.get() + except Queue.Empty: + line = None + break + if line: + print line + start_timer = time.time() + else: + break + # Show what we received from standard error. + while not stderr_queue.empty(): + try: + line = stderr_queue.get() + except Queue.Empty: + line = None + break + if line: + print line + start_timer = time.time() + else: + stderr_queue.task_done() + break + # Sleep a bit before asking the readers again. + time.sleep( .1 ) + current_wait_time = time.time() - start_timer + if stdout_queue.empty() and stderr_queue.empty() and current_wait_time > td_common_util.NO_OUTPUT_TIMEOUT: + err_msg = "\nShutting down process id %s because it generated no output for the defined timeout period of %.1f seconds.\n" % \ + ( pid, td_common_util.NO_OUTPUT_TIMEOUT ) + stderr_reader.lines.append( err_msg ) + process_handle.kill() + break + thread_lock.release() + # Wait until each of the threads we've started terminate. The following calls will block each thread + # until it terminates either normally, through an unhandled exception, or until the timeout occurs. + stdio_thread.join( td_common_util.NO_OUTPUT_TIMEOUT ) + stdout_reader.join( td_common_util.NO_OUTPUT_TIMEOUT ) + stderr_reader.join( td_common_util.NO_OUTPUT_TIMEOUT ) + # Close subprocess' file descriptors. 
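# A reduced sketch of the streaming pattern handle_complex_command (above) builds around
# subprocess.Popen: reader threads push each output line onto a queue so the installation
# log can be echoed as it is produced, and a process that stays silent longer than the
# timeout is killed.  Stdout and stderr are merged here for brevity, and the timeout
# default is an arbitrary placeholder for td_common_util.NO_OUTPUT_TIMEOUT.
import subprocess
import threading
import time
try:
    import Queue as queue_module    # Python 2, as used by the code above
except ImportError:
    import queue as queue_module    # Python 3

def stream_command( command, no_output_timeout=600.0 ):
    proc = subprocess.Popen( command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
    lines = queue_module.Queue()

    def reader():
        for line in iter( proc.stdout.readline, b'' ):
            lines.put( line.rstrip() )

    reader_thread = threading.Thread( target=reader )
    reader_thread.daemon = True
    reader_thread.start()
    collected = []
    last_output = time.time()
    while reader_thread.is_alive() or not lines.empty():
        try:
            collected.append( lines.get( timeout=0.1 ) )
            last_output = time.time()
        except queue_module.Empty:
            if time.time() - last_output > no_output_timeout:
                # Mirror the no-output shutdown in handle_complex_command.
                proc.kill()
                break
    proc.wait()
    return proc.returncode, b'\n'.join( collected )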
+ error = self.close_file_descriptor( process_handle.stdout ) + error = self.close_file_descriptor( process_handle.stderr ) + stdout = '\n'.join( stdout_reader.lines ) + stderr = '\n'.join( stderr_reader.lines ) + # Handle error condition (deal with stdout being None, too) + output = _AttributeString( stdout.strip() if stdout else "" ) + errors = _AttributeString( stderr.strip() if stderr else "" ) + # Make sure the process has finished. + process_handle.poll() + output.return_code = process_handle.returncode + output.stderr = errors + return output + + def log_results( self, command, fabric_AttributeString, file_path ): + """Write attributes of fabric.operations._AttributeString to a specified log file.""" + if os.path.exists( file_path ): + logfile = open( file_path, 'ab' ) + else: + logfile = open( file_path, 'wb' ) + logfile.write( "\n#############################################\n" ) + logfile.write( '%s\nSTDOUT\n' % command ) + logfile.write( str( fabric_AttributeString.stdout ) ) + logfile.write( "\n#############################################\n" ) + logfile.write( "\n#############################################\n" ) + logfile.write( '%s\nSTDERR\n' % command ) + logfile.write( str( fabric_AttributeString.stderr ) ) + logfile.write( "\n#############################################\n" ) + logfile.close() + + @contextmanager + def make_tmp_dir( self ): + work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-mtd" ) + yield work_dir + if os.path.exists( work_dir ): + try: + shutil.rmtree( work_dir ) + except Exception, e: + log.exception( str( e ) ) + + def __setup_environment( self ): + return "&&".join( [ ". %s" % file for file in self.__valid_env_shell_file_paths() ] ) + + def __valid_env_shell_file_paths( self ): + return [ file for file in self.env_shell_file_paths if os.path.exists( file ) ] This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.