1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/f5791b418b4e/ Changeset: f5791b418b4e User: greg Date: 2013-08-26 20:09:11 Summary: Rename ~/tool_shed/galaxy_install/tool_dependencies/common_util.py to be td_common_util.py Affected #: 4 files diff -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 -r f5791b418b4e1d0be9d94895f0642083f795e61f lib/tool_shed/galaxy_install/tool_dependencies/common_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/common_util.py +++ /dev/null @@ -1,314 +0,0 @@ -import logging -import os -import shutil -import sys -import tarfile -import traceback -import urllib2 -import zipfile -import tool_shed.util.shed_util_common as suc -from galaxy.datatypes import checkers -from urllib2 import HTTPError - -log = logging.getLogger( __name__ ) - -def clean_tool_shed_url( base_url ): - if base_url: - protocol, base = base_url.split( '://' ) - return base.rstrip( '/' ) - return base_url - -def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ): - env_var_name = elem.get( 'name', 'PATH' ) - env_var_action = elem.get( 'action', 'prepend_to' ) - env_var_text = None - if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0: - if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1: - env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - else: - env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_dependency_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - if elem.text and elem.text.find( 'INSTALL_DIR' ) >= 0: - if tool_dependency_install_dir: - env_var_text = elem.text.replace( '$INSTALL_DIR', tool_dependency_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - else: - env_var_text = elem.text.replace( '$INSTALL_DIR', tool_shed_repository_install_dir ) - return dict( name=env_var_name, action=env_var_action, value=env_var_text ) - if elem.text: - # Allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR since there may be command line - # parameters that are tuned for a Galaxy instance. Allowing them to be set in one location rather than being hard coded into - # each tool config is the best approach. 
For example: - # <environment_variable name="GATK2_SITE_OPTIONS" action="set_to"> - # "--num_threads 4 --num_cpu_threads_per_data_thread 3 --phone_home STANDARD" - # </environment_variable> - return dict( name=env_var_name, action=env_var_action, value=elem.text) - return None - -def create_or_update_env_shell_file( install_dir, env_var_dict ): - env_var_name = env_var_dict[ 'name' ] - env_var_action = env_var_dict[ 'action' ] - env_var_value = env_var_dict[ 'value' ] - if env_var_action == 'prepend_to': - changed_value = '%s:$%s' % ( env_var_value, env_var_name ) - elif env_var_action == 'set_to': - changed_value = '%s' % env_var_value - elif env_var_action == 'append_to': - changed_value = '$%s:%s' % ( env_var_name, env_var_value ) - line = "%s=%s; export %s" % (env_var_name, changed_value, env_var_name) - return create_or_update_env_shell_file_with_command(install_dir, line) - - -def create_or_update_env_shell_file_with_command( install_dir, command ): - """ - Return a shell expression which when executed will create or update - a Galaxy env.sh dependency file in the specified install_dir containing - the supplied command. - """ - env_shell_file_path = '%s/env.sh' % install_dir - if os.path.exists( env_shell_file_path ): - write_action = '>>' - else: - write_action = '>' - cmd = "echo %s %s %s;chmod +x %s" % ( __shellquote(command), - write_action, - __shellquote(env_shell_file_path), - __shellquote(env_shell_file_path)) - return cmd - -def download_binary( url, work_dir ): - ''' - Download a pre-compiled binary from the specified URL. - ''' - downloaded_filename = os.path.split( url )[ -1 ] - dir = url_download( work_dir, downloaded_filename, url, extract=False ) - return downloaded_filename - -def extract_tar( file_name, file_path ): - if isgzip( file_name ) or isbz2( file_name ): - # Open for reading with transparent compression. - tar = tarfile.open( file_name, 'r:*', errorlevel=0 ) - else: - tar = tarfile.open( file_name, errorlevel=0 ) - tar.extractall( path=file_path ) - tar.close() - -def extract_zip( archive_path, extraction_path ): - # TODO: change this method to use zipfile.Zipfile.extractall() when we stop supporting Python 2.5. - if not zipfile_ok( archive_path ): - return False - zip_archive = zipfile.ZipFile( archive_path, 'r' ) - for name in zip_archive.namelist(): - uncompressed_path = os.path.join( extraction_path, name ) - if uncompressed_path.endswith( '/' ): - if not os.path.isdir( uncompressed_path ): - os.makedirs( uncompressed_path ) - else: - file( uncompressed_path, 'wb' ).write( zip_archive.read( name ) ) - zip_archive.close() - return True - -def format_traceback(): - ex_type, ex, tb = sys.exc_info() - return ''.join( traceback.format_tb( tb ) ) - -def get_env_shell_file_path( installation_directory ): - env_shell_file_name = 'env.sh' - default_location = os.path.abspath( os.path.join( installation_directory, env_shell_file_name ) ) - if os.path.exists( default_location ): - return default_location - for root, dirs, files in os.walk( installation_directory ): - for name in files: - if name == env_shell_file_name: - return os.path.abspath( os.path.join( root, name ) ) - return None - -def get_env_shell_file_paths( app, elem ): - # Currently only the following tag set is supported. 
- # <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056"> - # <package name="numpy" version="1.7.1" /> - # </repository> - env_shell_file_paths = [] - toolshed = elem.get( 'toolshed', None ) - repository_name = elem.get( 'name', None ) - repository_owner = elem.get( 'owner', None ) - changeset_revision = elem.get( 'changeset_revision', None ) - if toolshed and repository_name and repository_owner and changeset_revision: - toolshed = clean_tool_shed_url( toolshed ) - repository = suc.get_repository_for_dependency_relationship( app, toolshed, repository_name, repository_owner, changeset_revision ) - if repository: - for sub_elem in elem: - tool_dependency_type = sub_elem.tag - tool_dependency_name = sub_elem.get( 'name' ) - tool_dependency_version = sub_elem.get( 'version' ) - if tool_dependency_type and tool_dependency_name and tool_dependency_version: - # Get the tool_dependency so we can get it's installation directory. - tool_dependency = None - for tool_dependency in repository.tool_dependencies: - if tool_dependency.type == tool_dependency_type and tool_dependency.name == tool_dependency_name and tool_dependency.version == tool_dependency_version: - break - if tool_dependency: - tool_dependency_key = '%s/%s' % ( tool_dependency_name, tool_dependency_version ) - installation_directory = tool_dependency.installation_directory( app ) - env_shell_file_path = get_env_shell_file_path( installation_directory ) - if env_shell_file_path: - env_shell_file_paths.append( env_shell_file_path ) - else: - error_message = "Skipping tool dependency definition because unable to locate env.sh file for tool dependency " - error_message += "type %s, name %s, version %s for repository %s" % \ - ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) ) - log.debug( error_message ) - continue - else: - error_message = "Skipping tool dependency definition because unable to locate tool dependency " - error_message += "type %s, name %s, version %s for repository %s" % \ - ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) ) - log.debug( error_message ) - continue - else: - error_message = "Skipping invalid tool dependency definition: type %s, name %s, version %s." % \ - ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ) ) - log.debug( error_message ) - continue - else: - error_message = "Skipping set_environment_for_install definition because unable to locate required installed tool shed repository: " - error_message += "toolshed %s, name %s, owner %s, changeset_revision %s." % \ - ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) ) - log.debug( error_message ) - else: - error_message = "Skipping invalid set_environment_for_install definition: toolshed %s, name %s, owner %s, changeset_revision %s." % \ - ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) ) - log.debug( error_message ) - return env_shell_file_paths - -def get_env_var_values( install_dir ): - env_var_dict = {} - env_var_dict[ 'INSTALL_DIR' ] = install_dir - env_var_dict[ 'system_install' ] = install_dir - # If the Python interpreter is 64bit then we can safely assume that the underlying system is also 64bit. 
- env_var_dict[ '__is64bit__' ] = sys.maxsize > 2**32 - return env_var_dict - -def isbz2( file_path ): - return checkers.is_bz2( file_path ) - -def isgzip( file_path ): - return checkers.is_gzip( file_path ) - -def isjar( file_path ): - return iszip( file_path ) and file_path.endswith( '.jar' ) - -def istar( file_path ): - return tarfile.is_tarfile( file_path ) - -def iszip( file_path ): - return checkers.check_zip( file_path ) - -def is_compressed( file_path ): - if isjar( file_path ): - return False - else: - return iszip( file_path ) or isgzip( file_path ) or istar( file_path ) or isbz2( file_path ) - -def make_directory( full_path ): - if not os.path.exists( full_path ): - os.makedirs( full_path ) - -def move_directory_files( current_dir, source_dir, destination_dir ): - source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) ) - destination_directory = os.path.join( destination_dir ) - if not os.path.isdir( destination_directory ): - os.makedirs( destination_directory ) - for file_name in os.listdir( source_directory ): - source_file = os.path.join( source_directory, file_name ) - destination_file = os.path.join( destination_directory, file_name ) - shutil.move( source_file, destination_file ) - -def move_file( current_dir, source, destination_dir ): - source_file = os.path.abspath( os.path.join( current_dir, source ) ) - destination_directory = os.path.join( destination_dir ) - if not os.path.isdir( destination_directory ): - os.makedirs( destination_directory ) - shutil.move( source_file, destination_directory ) - -def tar_extraction_directory( file_path, file_name ): - """Try to return the correct extraction directory.""" - file_name = file_name.strip() - extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.tar', '.zip' ] - for extension in extensions: - if file_name.find( extension ) > 0: - dir_name = file_name[ :-len( extension ) ] - if os.path.exists( os.path.abspath( os.path.join( file_path, dir_name ) ) ): - return dir_name - if os.path.exists( os.path.abspath( os.path.join( file_path, file_name ) ) ): - return os.path.abspath( file_path ) - raise ValueError( 'Could not find path to file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) ) - -def url_download( install_dir, downloaded_file_name, download_url, extract=True ): - file_path = os.path.join( install_dir, downloaded_file_name ) - src = None - dst = None - try: - src = urllib2.urlopen( download_url ) - dst = open( file_path, 'wb' ) - while True: - chunk = src.read( suc.CHUNK_SIZE ) - if chunk: - dst.write( chunk ) - else: - break - except: - raise - finally: - if src: - src.close() - if dst: - dst.close() - if extract: - if istar( file_path ): - # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2</action> - extract_tar( file_path, install_dir ) - dir = tar_extraction_directory( install_dir, downloaded_file_name ) - elif isjar( file_path ): - dir = os.path.curdir - elif iszip( file_path ): - # <action type="download_by_url">http://downloads.sourceforge.net/project/picard/picard-tools/1.56/picard-tools-1.56.zip</action> - zip_archive_extracted = extract_zip( file_path, install_dir ) - dir = zip_extraction_directory( install_dir, downloaded_file_name ) - else: - dir = os.path.abspath( install_dir ) - else: - dir = os.path.abspath( install_dir ) - return dir - -def zip_extraction_directory( file_path, file_name ): - """Try to return the correct extraction directory.""" - files = [ filename for filename in os.listdir( file_path ) 
if not filename.endswith( '.zip' ) ] - if len( files ) > 1: - return os.path.abspath( file_path ) - elif len( files ) == 1: - # If there is only on file it should be a directory. - if os.path.isdir( os.path.join( file_path, files[ 0 ] ) ): - return os.path.abspath( os.path.join( file_path, files[ 0 ] ) ) - raise ValueError( 'Could not find directory for the extracted file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) ) - -def zipfile_ok( path_to_archive ): - """ - This function is a bit pedantic and not functionally necessary. It checks whether there is no file pointing outside of the extraction, - because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details. - """ - basename = os.path.realpath( os.path.dirname( path_to_archive ) ) - zip_archive = zipfile.ZipFile( path_to_archive ) - for member in zip_archive.namelist(): - member_path = os.path.realpath( os.path.join( basename, member ) ) - if not member_path.startswith( basename ): - return False - return True - - -def __shellquote(s): - """ - Quote and escape the supplied string for use in shell expressions. - """ - return "'" + s.replace("'", "'\\''") + "'" diff -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 -r f5791b418b4e1d0be9d94895f0642083f795e61f lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py @@ -1,12 +1,12 @@ # For Python 2.5 from __future__ import with_statement -import common_util import logging import os import shutil import tempfile import shutil +import td_common_util from contextlib import contextmanager from galaxy.util.template import fill_template from galaxy import eggs @@ -155,10 +155,10 @@ if not os.path.exists( venv_dir ): with make_tmp_dir() as work_dir: downloaded_filename = VIRTUALENV_URL.rsplit('/', 1)[-1] - downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, VIRTUALENV_URL ) - if common_util.istar( downloaded_file_path ): - common_util.extract_tar( downloaded_file_path, work_dir ) - dir = common_util.tar_extraction_directory( work_dir, downloaded_filename ) + downloaded_file_path = td_common_util.url_download( work_dir, downloaded_filename, VIRTUALENV_URL ) + if td_common_util.istar( downloaded_file_path ): + td_common_util.extract_tar( downloaded_file_path, work_dir ) + dir = td_common_util.tar_extraction_directory( work_dir, downloaded_filename ) else: log.error( "Failed to download virtualenv: Downloaded file '%s' is not a tar file", downloaded_filename ) return False @@ -188,7 +188,7 @@ log.debug( 'Attempting to download from %s to %s', url, str( target_directory ) ) downloaded_filename = None try: - downloaded_filename = common_util.download_binary( url, work_dir ) + downloaded_filename = td_common_util.download_binary( url, work_dir ) # Filter out any actions that are not download_binary, chmod, or set_environment. filtered_actions = filter_actions_after_binary_installation( actions[ 1: ] ) # Set actions to the same, so that the current download_binary doesn't get re-run in the filtered actions below. 
@@ -210,9 +210,9 @@ full_path_to_dir = os.path.abspath( install_dir ) else: full_path_to_dir = os.path.abspath( install_dir ) - common_util.move_file( current_dir=work_dir, - source=downloaded_filename, - destination_dir=full_path_to_dir ) + td_common_util.move_file( current_dir=work_dir, + source=downloaded_filename, + destination_dir=full_path_to_dir ) if action_type == 'download_by_url': # Eliminate the download_by_url action so remaining actions can be processed correctly. filtered_actions = actions[ 1: ] @@ -223,7 +223,7 @@ downloaded_filename = action_dict[ 'target_filename' ] else: downloaded_filename = os.path.split( url )[ -1 ] - dir = common_util.url_download( work_dir, downloaded_filename, url, extract=True ) + dir = td_common_util.url_download( work_dir, downloaded_filename, url, extract=True ) elif action_type == 'shell_command': # <action type="shell_command">git clone --recursive git://github.com/ekg/freebayes.git</action> # Eliminate the shell_command clone action so remaining actions can be processed correctly. @@ -243,7 +243,7 @@ filename = action_dict[ 'target_filename' ] else: filename = url.split( '/' )[ -1 ] - common_util.url_download( work_dir, filename, url ) + td_common_util.url_download( work_dir, filename, url ) dir = os.path.curdir else: # We're handling a complex repository dependency where we only have a set_environment tag set. @@ -263,17 +263,17 @@ with lcd( current_dir ): action_type, action_dict = action_tup if action_type == 'make_directory': - common_util.make_directory( full_path=action_dict[ 'full_path' ] ) + td_common_util.make_directory( full_path=action_dict[ 'full_path' ] ) elif action_type == 'move_directory_files': - common_util.move_directory_files( current_dir=current_dir, - source_dir=os.path.join( action_dict[ 'source_directory' ] ), - destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) ) + td_common_util.move_directory_files( current_dir=current_dir, + source_dir=os.path.join( action_dict[ 'source_directory' ] ), + destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) ) elif action_type == 'move_file': # TODO: Remove this hack that resets current_dir so that the pre-compiled bwa binary can be found. # current_dir = '/Users/gvk/workspaces_2008/bwa/bwa-0.5.9' - common_util.move_file( current_dir=current_dir, - source=os.path.join( action_dict[ 'source' ] ), - destination_dir=os.path.join( action_dict[ 'destination' ] ) ) + td_common_util.move_file( current_dir=current_dir, + source=os.path.join( action_dict[ 'source' ] ), + destination_dir=os.path.join( action_dict[ 'destination' ] ) ) elif action_type == 'set_environment': # Currently the only action supported in this category is "environment_variable". # Build a command line from the prior_installation_required, in case an environment variable is referenced @@ -289,7 +289,7 @@ for env_var_dict in env_var_dicts: # Check for the presence of the $ENV[] key string and populate it if possible. 
env_var_dict = handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, cmds ) - env_command = common_util.create_or_update_env_shell_file( install_dir, env_var_dict ) + env_command = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict ) return_code = handle_command( app, tool_dependency, install_dir, env_command ) if return_code: return @@ -331,11 +331,13 @@ if not os.path.exists( output.stdout ): log.error( "virtualenv's site-packages directory '%s' does not exist", output.stdout ) return - modify_env_command = common_util.create_or_update_env_shell_file( install_dir, dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) ) + modify_env_command_dict = dict( name="PYTHONPATH", action="prepend_to", value=output.stdout ) + modify_env_command = td_common_util.create_or_update_env_shell_file( install_dir, modify_env_command_dict ) return_code = handle_command( app, tool_dependency, install_dir, modify_env_command ) if return_code: return - modify_env_command = common_util.create_or_update_env_shell_file( install_dir, dict( name="PATH", action="prepend_to", value=os.path.join( venv_directory, "bin" ) ) ) + modify_env_command_dict = dict( name="PATH", action="prepend_to", value=os.path.join( venv_directory, "bin" ) ) + modify_env_command = td_common_util.create_or_update_env_shell_file( install_dir, modify_env_command_dict ) return_code = handle_command( app, tool_dependency, install_dir, modify_env_command ) if return_code: return @@ -362,7 +364,7 @@ env_vars[ env_name ] = env_path else: log.debug( 'Invalid file %s specified, ignoring template_command action.', env_shell_file_path ) - env_vars.update( common_util.get_env_var_values( install_dir ) ) + env_vars.update( td_common_util.get_env_var_values( install_dir ) ) language = action_dict[ 'language' ] with settings( warn_only=True, **env_vars ): if language == 'cheetah': @@ -379,7 +381,7 @@ else: filename = url.split( '/' )[ -1 ] extract = action_dict.get( 'extract', False ) - common_util.url_download( current_dir, filename, url, extract=extract ) + td_common_util.url_download( current_dir, filename, url, extract=extract ) elif action_type == 'change_directory': target_directory = os.path.realpath( os.path.normpath( os.path.join( current_dir, action_dict[ 'directory' ] ) ) ) if target_directory.startswith( os.path.realpath( current_dir ) ) and os.path.exists( target_directory ): @@ -398,7 +400,7 @@ url = action_dict[ 'url' ] target_directory = action_dict.get( 'target_directory', None ) try: - downloaded_filename = common_util.download_binary( url, work_dir ) + downloaded_filename = td_common_util.download_binary( url, work_dir ) except Exception, e: log.exception( str( e ) ) # If the downloaded file exists, move it to $INSTALL_DIR. 
Put this outside the try/catch above so that @@ -413,9 +415,9 @@ full_path_to_dir = os.path.abspath( install_dir ) else: full_path_to_dir = os.path.abspath( install_dir ) - common_util.move_file( current_dir=work_dir, - source=downloaded_filename, - destination_dir=full_path_to_dir ) + td_common_util.move_file( current_dir=work_dir, + source=downloaded_filename, + destination_dir=full_path_to_dir ) def log_results( command, fabric_AttributeString, file_path ): """ diff -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 -r f5791b418b4e1d0be9d94895f0642083f795e61f lib/tool_shed/galaxy_install/tool_dependencies/install_util.py --- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py +++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py @@ -5,10 +5,10 @@ import subprocess import tempfile from string import Template -import common_util import fabric_util +import td_common_util import tool_shed.util.shed_util_common as suc -import tool_shed.util.common_util as cu +from tool_shed.util import common_util from tool_shed.util import encoding_util from tool_shed.util import tool_dependency_util from tool_shed.util import xml_util @@ -22,7 +22,7 @@ """Make a call to the tool shed to get the required repository's tool_dependencies.xml file.""" url = url_join( tool_shed_url, 'repository/get_tool_dependencies_config_contents?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) - text = cu.tool_shed_get( app, tool_shed_url, url ) + text = common_util.tool_shed_get( app, tool_shed_url, url ) if text: # Write the contents to a temporary file on disk so it can be reloaded and parsed. fh = tempfile.NamedTemporaryFile( 'wb', prefix="tmp-toolshed-cttdc" ) @@ -51,7 +51,7 @@ def get_tool_shed_repository_by_tool_shed_name_owner_changeset_revision( app, tool_shed_url, name, owner, changeset_revision ): sa_session = app.model.context.current - tool_shed = common_util.clean_tool_shed_url( tool_shed_url ) + tool_shed = td_common_util.clean_tool_shed_url( tool_shed_url ) tool_shed_repository = sa_session.query( app.model.ToolShedRepository ) \ .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, app.model.ToolShedRepository.table.c.name == name, @@ -103,7 +103,7 @@ """ url = suc.url_join( tool_shed_url, 'repository/updated_changeset_revisions?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) ) - text = cu.tool_shed_get( app, tool_shed_url, url ) + text = common_util.tool_shed_get( app, tool_shed_url, url ) return text def handle_set_environment_entry_for_package( app, install_dir, tool_shed_repository, package_name, package_version, elem, required_repository ): @@ -139,7 +139,7 @@ # </action> for env_elem in action_elem: if env_elem.tag == 'environment_variable': - env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir ) + env_var_dict = td_common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir ) if env_var_dict: if env_var_dict not in env_var_dicts: env_var_dicts.append( env_var_dict ) @@ -203,7 +203,7 @@ except Exception, e: log.exception( 'Error installing tool dependency %s version %s.', str( tool_dependency.name ), str( tool_dependency.version ) ) tool_dependency.status = app.model.ToolDependency.installation_status.ERROR - tool_dependency.error_message = '%s\n%s' % ( common_util.format_traceback(), str( e ) ) + tool_dependency.error_message = '%s\n%s' % ( td_common_util.format_traceback(), str( e ) ) sa_session.add( tool_dependency ) 
sa_session.flush() if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR: @@ -362,7 +362,7 @@ def evaluate_template( text ): """ Substitute variables defined in XML blocks from dependencies file.""" - return Template( text ).safe_substitute( common_util.get_env_var_values( install_dir ) ) + return Template( text ).safe_substitute( td_common_util.get_env_var_values( install_dir ) ) if not os.path.exists( install_dir ): os.makedirs( install_dir ) @@ -469,7 +469,7 @@ # </action> for env_elem in action_elem: if env_elem.tag == 'environment_variable': - env_var_dict = common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir ) + env_var_dict = td_common_util.create_env_var_dict( env_elem, tool_dependency_install_dir=install_dir ) if env_var_dict: env_var_dicts.append( env_var_dict ) if env_var_dicts: @@ -489,7 +489,7 @@ # tool dependency package. See the package_matplotlib_1_2 repository in the test tool shed for a real-world example. for env_elem in action_elem: if env_elem.tag == 'repository': - env_shell_file_paths = common_util.get_env_shell_file_paths( app, env_elem ) + env_shell_file_paths = td_common_util.get_env_shell_file_paths( app, env_elem ) if env_shell_file_paths: all_env_shell_file_paths.extend( env_shell_file_paths ) if all_env_shell_file_paths: @@ -705,7 +705,7 @@ tool_dependency_name=env_var_name, tool_dependency_version=None ) tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository ) - env_var_dict = common_util.create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir ) + env_var_dict = td_common_util.create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir ) if env_var_dict: if not os.path.exists( install_dir ): os.makedirs( install_dir ) @@ -716,7 +716,7 @@ type='set_environment', status=app.model.ToolDependency.installation_status.INSTALLING, set_status=True ) - cmd = common_util.create_or_update_env_shell_file( install_dir, env_var_dict ) + cmd = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict ) if env_var_version == '1.0': # Handle setting environment variables using a fabric method. 
fabric_util.handle_command( app, tool_dependency, install_dir, cmd ) diff -r 1520189c271f3a09a11f28b13548d7f0fce92fa1 -r f5791b418b4e1d0be9d94895f0642083f795e61f lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py --- /dev/null +++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py @@ -0,0 +1,311 @@ +import logging +import os +import shutil +import sys +import tarfile +import traceback +import urllib2 +import zipfile +import tool_shed.util.shed_util_common as suc +from galaxy.datatypes import checkers +from urllib2 import HTTPError + +log = logging.getLogger( __name__ ) + +def clean_tool_shed_url( base_url ): + if base_url: + protocol, base = base_url.split( '://' ) + return base.rstrip( '/' ) + return base_url + +def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ): + env_var_name = elem.get( 'name', 'PATH' ) + env_var_action = elem.get( 'action', 'prepend_to' ) + env_var_text = None + if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0: + if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1: + env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir ) + return dict( name=env_var_name, action=env_var_action, value=env_var_text ) + else: + env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_dependency_install_dir ) + return dict( name=env_var_name, action=env_var_action, value=env_var_text ) + if elem.text and elem.text.find( 'INSTALL_DIR' ) >= 0: + if tool_dependency_install_dir: + env_var_text = elem.text.replace( '$INSTALL_DIR', tool_dependency_install_dir ) + return dict( name=env_var_name, action=env_var_action, value=env_var_text ) + else: + env_var_text = elem.text.replace( '$INSTALL_DIR', tool_shed_repository_install_dir ) + return dict( name=env_var_name, action=env_var_action, value=env_var_text ) + if elem.text: + # Allow for environment variables that contain neither REPOSITORY_INSTALL_DIR nor INSTALL_DIR since there may be command line + # parameters that are tuned for a Galaxy instance. Allowing them to be set in one location rather than being hard coded into + # each tool config is the best approach. For example: + # <environment_variable name="GATK2_SITE_OPTIONS" action="set_to"> + # "--num_threads 4 --num_cpu_threads_per_data_thread 3 --phone_home STANDARD" + # </environment_variable> + return dict( name=env_var_name, action=env_var_action, value=elem.text) + return None + +def create_or_update_env_shell_file( install_dir, env_var_dict ): + env_var_name = env_var_dict[ 'name' ] + env_var_action = env_var_dict[ 'action' ] + env_var_value = env_var_dict[ 'value' ] + if env_var_action == 'prepend_to': + changed_value = '%s:$%s' % ( env_var_value, env_var_name ) + elif env_var_action == 'set_to': + changed_value = '%s' % env_var_value + elif env_var_action == 'append_to': + changed_value = '$%s:%s' % ( env_var_name, env_var_value ) + line = "%s=%s; export %s" % (env_var_name, changed_value, env_var_name) + return create_or_update_env_shell_file_with_command(install_dir, line) + + +def create_or_update_env_shell_file_with_command( install_dir, command ): + """ + Return a shell expression which when executed will create or update + a Galaxy env.sh dependency file in the specified install_dir containing + the supplied command. 
+ """ + env_shell_file_path = '%s/env.sh' % install_dir + if os.path.exists( env_shell_file_path ): + write_action = '>>' + else: + write_action = '>' + cmd = "echo %s %s %s;chmod +x %s" % ( __shellquote(command), + write_action, + __shellquote(env_shell_file_path), + __shellquote(env_shell_file_path)) + return cmd + +def download_binary( url, work_dir ): + ''' + Download a pre-compiled binary from the specified URL. + ''' + downloaded_filename = os.path.split( url )[ -1 ] + dir = url_download( work_dir, downloaded_filename, url, extract=False ) + return downloaded_filename + +def extract_tar( file_name, file_path ): + if isgzip( file_name ) or isbz2( file_name ): + # Open for reading with transparent compression. + tar = tarfile.open( file_name, 'r:*', errorlevel=0 ) + else: + tar = tarfile.open( file_name, errorlevel=0 ) + tar.extractall( path=file_path ) + tar.close() + +def extract_zip( archive_path, extraction_path ): + # TODO: change this method to use zipfile.Zipfile.extractall() when we stop supporting Python 2.5. + if not zipfile_ok( archive_path ): + return False + zip_archive = zipfile.ZipFile( archive_path, 'r' ) + for name in zip_archive.namelist(): + uncompressed_path = os.path.join( extraction_path, name ) + if uncompressed_path.endswith( '/' ): + if not os.path.isdir( uncompressed_path ): + os.makedirs( uncompressed_path ) + else: + file( uncompressed_path, 'wb' ).write( zip_archive.read( name ) ) + zip_archive.close() + return True + +def format_traceback(): + ex_type, ex, tb = sys.exc_info() + return ''.join( traceback.format_tb( tb ) ) + +def get_env_shell_file_path( installation_directory ): + env_shell_file_name = 'env.sh' + default_location = os.path.abspath( os.path.join( installation_directory, env_shell_file_name ) ) + if os.path.exists( default_location ): + return default_location + for root, dirs, files in os.walk( installation_directory ): + for name in files: + if name == env_shell_file_name: + return os.path.abspath( os.path.join( root, name ) ) + return None + +def get_env_shell_file_paths( app, elem ): + # Currently only the following tag set is supported. + # <repository toolshed="http://localhost:9009/" name="package_numpy_1_7" owner="test" changeset_revision="c84c6a8be056"> + # <package name="numpy" version="1.7.1" /> + # </repository> + env_shell_file_paths = [] + toolshed = elem.get( 'toolshed', None ) + repository_name = elem.get( 'name', None ) + repository_owner = elem.get( 'owner', None ) + changeset_revision = elem.get( 'changeset_revision', None ) + if toolshed and repository_name and repository_owner and changeset_revision: + toolshed = clean_tool_shed_url( toolshed ) + repository = suc.get_repository_for_dependency_relationship( app, toolshed, repository_name, repository_owner, changeset_revision ) + if repository: + for sub_elem in elem: + tool_dependency_type = sub_elem.tag + tool_dependency_name = sub_elem.get( 'name' ) + tool_dependency_version = sub_elem.get( 'version' ) + if tool_dependency_type and tool_dependency_name and tool_dependency_version: + # Get the tool_dependency so we can get it's installation directory. 
+ tool_dependency = None + for tool_dependency in repository.tool_dependencies: + if tool_dependency.type == tool_dependency_type and tool_dependency.name == tool_dependency_name and tool_dependency.version == tool_dependency_version: + break + if tool_dependency: + tool_dependency_key = '%s/%s' % ( tool_dependency_name, tool_dependency_version ) + installation_directory = tool_dependency.installation_directory( app ) + env_shell_file_path = get_env_shell_file_path( installation_directory ) + if env_shell_file_path: + env_shell_file_paths.append( env_shell_file_path ) + else: + error_message = "Skipping tool dependency definition because unable to locate env.sh file for tool dependency " + error_message += "type %s, name %s, version %s for repository %s" % \ + ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) ) + log.debug( error_message ) + continue + else: + error_message = "Skipping tool dependency definition because unable to locate tool dependency " + error_message += "type %s, name %s, version %s for repository %s" % \ + ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ), str( repository.name ) ) + log.debug( error_message ) + continue + else: + error_message = "Skipping invalid tool dependency definition: type %s, name %s, version %s." % \ + ( str( tool_dependency_type ), str( tool_dependency_name ), str( tool_dependency_version ) ) + log.debug( error_message ) + continue + else: + error_message = "Skipping set_environment_for_install definition because unable to locate required installed tool shed repository: " + error_message += "toolshed %s, name %s, owner %s, changeset_revision %s." % \ + ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) ) + log.debug( error_message ) + else: + error_message = "Skipping invalid set_environment_for_install definition: toolshed %s, name %s, owner %s, changeset_revision %s." % \ + ( str( toolshed ), str( repository_name ), str( repository_owner ), str( changeset_revision ) ) + log.debug( error_message ) + return env_shell_file_paths + +def get_env_var_values( install_dir ): + env_var_dict = {} + env_var_dict[ 'INSTALL_DIR' ] = install_dir + env_var_dict[ 'system_install' ] = install_dir + # If the Python interpreter is 64bit then we can safely assume that the underlying system is also 64bit. 
+ env_var_dict[ '__is64bit__' ] = sys.maxsize > 2**32 + return env_var_dict + +def isbz2( file_path ): + return checkers.is_bz2( file_path ) + +def isgzip( file_path ): + return checkers.is_gzip( file_path ) + +def isjar( file_path ): + return iszip( file_path ) and file_path.endswith( '.jar' ) + +def istar( file_path ): + return tarfile.is_tarfile( file_path ) + +def iszip( file_path ): + return checkers.check_zip( file_path ) + +def is_compressed( file_path ): + if isjar( file_path ): + return False + else: + return iszip( file_path ) or isgzip( file_path ) or istar( file_path ) or isbz2( file_path ) + +def make_directory( full_path ): + if not os.path.exists( full_path ): + os.makedirs( full_path ) + +def move_directory_files( current_dir, source_dir, destination_dir ): + source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) ) + destination_directory = os.path.join( destination_dir ) + if not os.path.isdir( destination_directory ): + os.makedirs( destination_directory ) + for file_name in os.listdir( source_directory ): + source_file = os.path.join( source_directory, file_name ) + destination_file = os.path.join( destination_directory, file_name ) + shutil.move( source_file, destination_file ) + +def move_file( current_dir, source, destination_dir ): + source_file = os.path.abspath( os.path.join( current_dir, source ) ) + destination_directory = os.path.join( destination_dir ) + if not os.path.isdir( destination_directory ): + os.makedirs( destination_directory ) + shutil.move( source_file, destination_directory ) + +def tar_extraction_directory( file_path, file_name ): + """Try to return the correct extraction directory.""" + file_name = file_name.strip() + extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.tar', '.zip' ] + for extension in extensions: + if file_name.find( extension ) > 0: + dir_name = file_name[ :-len( extension ) ] + if os.path.exists( os.path.abspath( os.path.join( file_path, dir_name ) ) ): + return dir_name + if os.path.exists( os.path.abspath( os.path.join( file_path, file_name ) ) ): + return os.path.abspath( file_path ) + raise ValueError( 'Could not find path to file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) ) + +def url_download( install_dir, downloaded_file_name, download_url, extract=True ): + file_path = os.path.join( install_dir, downloaded_file_name ) + src = None + dst = None + try: + src = urllib2.urlopen( download_url ) + dst = open( file_path, 'wb' ) + while True: + chunk = src.read( suc.CHUNK_SIZE ) + if chunk: + dst.write( chunk ) + else: + break + except: + raise + finally: + if src: + src.close() + if dst: + dst.close() + if extract: + if istar( file_path ): + # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1.18.tar.bz2</action> + extract_tar( file_path, install_dir ) + dir = tar_extraction_directory( install_dir, downloaded_file_name ) + elif isjar( file_path ): + dir = os.path.curdir + elif iszip( file_path ): + # <action type="download_by_url">http://downloads.sourceforge.net/project/picard/picard-tools/1.56/picard-tools-1.56.zip</action> + zip_archive_extracted = extract_zip( file_path, install_dir ) + dir = zip_extraction_directory( install_dir, downloaded_file_name ) + else: + dir = os.path.abspath( install_dir ) + else: + dir = os.path.abspath( install_dir ) + return dir + +def zip_extraction_directory( file_path, file_name ): + """Try to return the correct extraction directory.""" + files = [ filename for filename in os.listdir( file_path ) 
if not filename.endswith( '.zip' ) ] + if len( files ) > 1: + return os.path.abspath( file_path ) + elif len( files ) == 1: + # If there is only on file it should be a directory. + if os.path.isdir( os.path.join( file_path, files[ 0 ] ) ): + return os.path.abspath( os.path.join( file_path, files[ 0 ] ) ) + raise ValueError( 'Could not find directory for the extracted file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) ) + +def zipfile_ok( path_to_archive ): + """ + This function is a bit pedantic and not functionally necessary. It checks whether there is no file pointing outside of the extraction, + because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details. + """ + basename = os.path.realpath( os.path.dirname( path_to_archive ) ) + zip_archive = zipfile.ZipFile( path_to_archive ) + for member in zip_archive.namelist(): + member_path = os.path.realpath( os.path.join( basename, member ) ) + if not member_path.startswith( basename ): + return False + return True + +def __shellquote(s): + """Quote and escape the supplied string for use in shell expressions.""" + return "'" + s.replace( "'", "'\\''" ) + "'" Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
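Editor's note: for readers skimming the diff, here is a minimal usage sketch (not part of the changeset) of why the rename matters and of the env.sh helper that install_util.py and fabric_util.py now reach through td_common_util before calling handle_command(). The install directory and PATH value are hypothetical, and the snippet assumes it runs inside a Galaxy checkout with lib/ on sys.path; it only calls functions that appear verbatim in the diff above.

    import os

    # After this changeset the tool-dependency helpers no longer shadow the tool
    # shed's web helpers, so both modules can be imported side by side:
    from tool_shed.galaxy_install.tool_dependencies import td_common_util   # was common_util.py
    from tool_shed.util import common_util                                  # previously imported as "cu"

    # Build the shell expression that install_util.py / fabric_util.py hand to
    # fabric_util.handle_command() to record an env.sh entry for a dependency.
    install_dir = '/opt/galaxy/tool_deps/samtools/0.1.18'                    # hypothetical path
    env_var_dict = dict( name='PATH', action='prepend_to', value=os.path.join( install_dir, 'bin' ) )
    cmd = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
    print( cmd )
    # -> echo 'PATH=/opt/galaxy/tool_deps/samtools/0.1.18/bin:$PATH; export PATH' > '/opt/galaxy/tool_deps/samtools/0.1.18/env.sh';chmod +x '/opt/galaxy/tool_deps/samtools/0.1.18/env.sh'
    #    (">>" replaces ">" once env.sh already exists)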
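Editor's note: the zipfile_ok() helper retained in td_common_util.py guards extract_zip() against archive members that resolve outside the extraction directory. A small self-contained sketch of that realpath check follows; the crafted archive and temp paths are purely illustrative and exist only to trigger a False result.

    import os
    import tempfile
    import zipfile

    # Create a throwaway archive whose single member tries to escape via "..".
    work_dir = tempfile.mkdtemp()
    archive_path = os.path.join( work_dir, 'crafted.zip' )
    zf = zipfile.ZipFile( archive_path, 'w' )
    zf.writestr( '../../etc/evil.txt', 'payload' )
    zf.close()

    # Same check zipfile_ok() performs: every member, once resolved, must stay
    # under the directory containing the archive.
    basename = os.path.realpath( os.path.dirname( archive_path ) )
    for member in zipfile.ZipFile( archive_path ).namelist():
        member_path = os.path.realpath( os.path.join( basename, member ) )
        print( '%s -> %s' % ( member, member_path.startswith( basename ) ) )
    # -> ../../etc/evil.txt -> False   (so extract_zip() would refuse to unpack it)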