1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/changeset/b2eabe39a70f/

changeset: b2eabe39a70f
user: greg
date: 2012-06-12 21:15:08
summary: Enhance the tool's DependencyManager to handle information stored in tool_dependency database records when finding tool dependencies installed with tool shed repositories. Many miscellaneous tool shed dependency fixes are included with this change set. Several fixes for uninstalling and reinstalling tool shed repositories and associated tool dependencies are also included.
affected #: 9 files

diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/install_manager.py --- a/lib/galaxy/tool_shed/install_manager.py +++ b/lib/galaxy/tool_shed/install_manager.py @@ -120,7 +120,7 @@ if not is_displayed: is_displayed = True return is_displayed, tool_sections - def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision, + def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, installed_changeset_revision, ctx_rev, install_dependencies ): # Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is # updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk. @@ -141,11 +141,11 @@ # Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked # deleted, undelete it. It is critical that this happens before the call to add_to_tool_panel() below because # tools will not be properly loaded if the repository is marked deleted. - print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name + print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name tool_shed_repository = create_or_update_tool_shed_repository( self.app, repository_name, description, - changeset_revision, + installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, @@ -158,7 +158,7 @@ # Handle missing data table entries for tool parameters that are dynamically generated select lists. repository_tools_tups = handle_missing_data_table_entry( self.app, tool_shed_repository, - changeset_revision, + installed_changeset_revision, self.tool_path, repository_tools_tups, work_dir ) @@ -171,12 +171,12 @@ tool_dependencies_config = get_config_from_repository( self.app, 'tool_dependencies.xml', tool_shed_repository, - changeset_revision, + installed_changeset_revision, work_dir ) # Install tool dependencies.
status, message = handle_tool_dependencies( app=self.app, tool_shed_repository=tool_shed_repository, - installed_changeset_revision=changeset_revision, + installed_changeset_revision=installed_changeset_revision, tool_dependencies_config=tool_dependencies_config ) if status != 'ok' and message: print 'The following error occurred from the InstallManager while installing tool dependencies:' @@ -184,7 +184,7 @@ add_to_tool_panel( self.app, repository_name, repository_clone_url, - changeset_revision, + installed_changeset_revision, repository_tools_tups, self.repository_owner, self.migrated_tools_config, @@ -199,7 +199,7 @@ datatypes_config = get_config_from_repository( self.app, 'datatypes_conf.xml', tool_shed_repository, - changeset_revision, + installed_changeset_revision, work_dir ) # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started # after this installation completes. @@ -209,7 +209,7 @@ repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed, name=repository_name, owner=self.repository_owner, - installed_changeset_revision=changeset_revision, + installed_changeset_revision=installed_changeset_revision, tool_dicts=metadata_dict.get( 'tools', [] ), converter_path=converter_path, display_path=display_path ) @@ -228,29 +228,29 @@ # Install a single repository, loading contained tools into the tool panel. name = repository_elem.get( 'name' ) description = repository_elem.get( 'description' ) - changeset_revision = repository_elem.get( 'changeset_revision' ) + installed_changeset_revision = repository_elem.get( 'changeset_revision' ) # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision> - clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision ) + clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision ) if self.__isinstalled( clone_dir ): print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir else: tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed ) repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name ) relative_install_dir = os.path.join( clone_dir, name ) - ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, changeset_revision ) + ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision ) clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev ) tool_shed_repository, metadata_dict = self.handle_repository_contents( repository_clone_url, relative_install_dir, repository_elem, name, description, - changeset_revision, + installed_changeset_revision, ctx_rev, install_dependencies ) if 'tools' in metadata_dict: # Get the tool_versions from the tool shed for each tool in the installed change set. 
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \ - ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision ) + ( tool_shed_url, tool_shed_repository.name, self.repository_owner, installed_changeset_revision ) response = urllib2.urlopen( url ) text = response.read() response.close() diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/tool_dependencies/common_util.py --- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py +++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py @@ -1,6 +1,10 @@ -import os, tarfile, urllib2 +import os, shutil, tarfile, urllib2 from galaxy.datatypes.checkers import * +DIRECTORY_BUILD_COMMAND_NAMES = [ 'change_directory' ] +MOVE_BUILD_COMMAND_NAMES = [ 'move_directory_files', 'move_file' ] +ALL_BUILD_COMMAND_NAMES = DIRECTORY_BUILD_COMMAND_NAMES + MOVE_BUILD_COMMAND_NAMES + def extract_tar( file_name, file_path ): if isgzip( file_name ) or isbz2( file_name ): # Open for reading with transparent compression. @@ -17,6 +21,21 @@ return tarfile.is_tarfile( file_path ) def iszip( file_path ): return check_zip( file_path ) +def move_directory_files( current_dir, source_dir, destination_dir ): + source_directory = os.path.abspath( os.path.join( current_dir, source_dir ) ) + destination_directory = os.path.join( destination_dir ) + if not os.path.isdir( destination_directory ): + os.makedirs( destination_directory ) + for file_name in os.listdir( source_directory ): + source_file = os.path.join( source_directory, file_name ) + destination_file = os.path.join( destination_directory, file_name ) + shutil.move( source_file, destination_file ) +def move_file( current_dir, source, destination_dir ): + source_file = os.path.abspath( os.path.join( current_dir, source ) ) + destination_directory = os.path.join( destination_dir ) + if not os.path.isdir( destination_directory ): + os.makedirs( destination_directory ) + shutil.move( source_file, destination_directory ) def tar_extraction_directory( file_path, file_name ): file_name = file_name.strip() extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.zip' ] diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/tool_dependencies/fabric_util.py --- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py +++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py @@ -1,7 +1,7 @@ # For Python 2.5 from __future__ import with_statement -import os, shutil +import os, shutil, tempfile from contextlib import contextmanager import common_util @@ -13,10 +13,6 @@ from fabric.api import env, lcd, local, settings -DIRECTORY_BUILD_COMMAND_NAMES = [ 'change_directory' ] -MOVE_BUILD_COMMAND_NAMES = [ 'move_directory_files', 'move_file' ] -ALL_BUILD_COMMAND_NAMES = DIRECTORY_BUILD_COMMAND_NAMES + MOVE_BUILD_COMMAND_NAMES - def check_fabric_version(): version = env.version if int( version.split( "." 
)[ 0 ] ) < 1: @@ -32,18 +28,12 @@ return env @contextmanager def make_tmp_dir(): - tmp_dir = local( 'echo $TMPDIR' ).strip() - if not tmp_dir: - home_dir = local( 'echo $HOME' ) - tmp_dir = os.path.join( home_dir, 'tmp' ) - work_dir = os.path.join( tmp_dir, 'deploy_tmp' ) - if not os.path.exists( work_dir ): - local( 'mkdir -p %s' % work_dir ) + work_dir = tempfile.mkdtemp() yield work_dir if os.path.exists( work_dir ): local( 'rm -rf %s' % work_dir ) def handle_post_build_processing( tool_dependency_dir, install_dir, package_name=None ): - cmd = "echo 'PATH=%s/bin:$PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir ) + cmd = "echo 'PATH=%s/bin:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir ) message = '' output = local( cmd, capture=True ) log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) ) @@ -94,7 +84,7 @@ build_command_items = build_command_key.split( 'v^v^v' ) build_command_name = build_command_items[ 0 ] build_command = build_command_items[ 1 ] - elif build_command_key in ALL_BUILD_COMMAND_NAMES: + elif build_command_key in common_util.ALL_BUILD_COMMAND_NAMES: build_command_name = build_command_key else: build_command_name = None @@ -103,16 +93,13 @@ current_dir = os.path.join( current_dir, build_command ) lcd( current_dir ) elif build_command_name == 'move_directory_files': - source_directory = os.path.abspath( os.path.join( current_dir, build_command_dict[ 'source_directory' ] ) ) - destination_directory = build_command_dict[ 'destination_directory' ] - for file_name in os.listdir( source_directory ): - source_file = os.path.join( source_directory, file_name ) - destination_file = os.path.join( destination_directory, file_name ) - shutil.move( source_file, destination_file ) + common_util.move_directory_files( current_dir=current_dir, + source_dir=os.path.join( build_command_dict[ 'source_directory' ] ), + destination_dir=os.path.join( build_command_dict[ 'destination_directory' ] ) ) elif build_command_name == 'move_file': - source_file = os.path.abspath( os.path.join( current_dir, build_command_dict[ 'source' ] ) ) - destination = build_command_dict[ 'destination' ] - shutil.move( source_file, destination ) + common_util.move_file( current_dir=current_dir, + source=os.path.join( build_command_dict[ 'source' ] ), + destination_dir=os.path.join( build_command_dict[ 'destination' ] ) ) else: build_command = build_command_key with settings( warn_only=True ): diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tool_shed/tool_dependencies/install_util.py --- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py +++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py @@ -1,4 +1,5 @@ import sys, os, subprocess, tempfile +from common_util import * from fabric_util import * from galaxy.tool_shed.encoding_util import * from galaxy.model.orm import * @@ -11,16 +12,32 @@ from elementtree.ElementTree import Element, SubElement def create_or_update_tool_dependency( app, tool_shed_repository, changeset_revision, name, version, type ): + """ + This method is called from Galaxy (never the tool shed) when a new tool_shed_repository is being installed or when an uninstalled repository is + being reinstalled. + """ + # First see if a tool_dependency record exists for the received changeset_revision.
sa_session = app.model.context.current tool_dependency = get_tool_dependency_by_shed_changeset_revision( app, tool_shed_repository, name, version, type, changeset_revision ) if tool_dependency: tool_dependency.uninstalled = False else: - tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id, - installed_changeset_revision=changeset_revision, - name=name, - version=version, - type=type ) + # Check the tool_shed_repository's set of tool_dependency records for any that are marked uninstalled. If one is found, set uninstalled to + # False and update the value of installed_changeset_revision. + found = False + for tool_dependency in tool_shed_repository.tool_dependencies: + if tool_dependency.name == name and tool_dependency.version == version and tool_dependency.type == type and tool_dependency.uninstalled: + found = True + tool_dependency.uninstalled = False + tool_dependency.installed_changeset_revision = changeset_revision + break + if not found: + # Create a new tool_dependency record for the tool_shed_repository. + tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id, + installed_changeset_revision=changeset_revision, + name=name, + version=version, + type=type ) sa_session.add( tool_dependency ) sa_session.flush() return tool_dependency diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -794,6 +794,20 @@ if tool_version: return tool_version.get_version_ids( self.app ) return [] + @property + def installed_tool_dependencies( self ): + # If this tool is included in an installed tool shed repository and tool dependencies were installed along with the + # tool shed repository, then this method will return the repository's ToolDependency records. + if self.app.config.use_tool_dependencies: + if self.tool_shed: + tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app, + self.tool_shed, + self.repository_name, + self.repository_owner, + self.installed_changeset_revision ) + if tool_shed_repository: + return tool_shed_repository.tool_dependencies + return None def __get_job_run_config( self, run_configs, key, job_params=None ): # Look through runners/handlers to find one with matching parameters. available_configs = [] @@ -1113,7 +1127,7 @@ for stdio_elem in ( root.findall( 'stdio' ) ): self.parse_stdio_exit_codes( stdio_elem ) self.parse_stdio_regexes( stdio_elem ) - except Exception as e: + except Exception, e: log.error( "Exception in parse_stdio! " + str(sys.exc_info()) ) def parse_stdio_exit_codes( self, stdio_elem ): @@ -1185,7 +1199,7 @@ log.warning( "Tool exit_code range %s will match on " + "all exit codes" % code_range ) self.stdio_exit_codes.append( exit_code ) - except Exception as e: + except Exception, e: log.error( "Exception in parse_stdio_exit_codes! " + str(sys.exc_info()) ) trace = sys.exc_info()[2] @@ -1244,7 +1258,7 @@ regex.stdout_match = True regex.stderr_match = True self.stdio_regexes.append( regex ) - except Exception as e: + except Exception, e: log.error( "Exception in parse_stdio_exit_codes!
" + str(sys.exc_info()) ) trace = sys.exc_info()[2] @@ -1270,7 +1284,7 @@ return_level = "warning" elif ( re.search( "fatal", err_level, re.IGNORECASE ) ): return_level = "fatal" - except Exception as e: + except Exception, e: log.error( "Exception in parse_error_level " + str(sys.exc_info() ) ) trace = sys.exc_info()[2] @@ -2323,9 +2337,12 @@ # TODO: currently only supporting requirements of type package, # need to implement some mechanism for mapping other types # back to packages - log.debug( "Dependency %s", requirement.name ) + log.debug( "Building dependency shell command for dependency '%s'", requirement.name ) if requirement.type == 'package': - script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( requirement.name, requirement.version ) + script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name, + version=requirement.version, + type=requirement.type, + installed_tool_dependencies=self.installed_tool_dependencies ) if script_file is None and base_path is None: log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name ) elif script_file is None: diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/tools/deps/__init__.py --- a/lib/galaxy/tools/deps/__init__.py +++ b/lib/galaxy/tools/deps/__init__.py @@ -30,7 +30,7 @@ if not os.path.isdir( base_path ): log.warn( "Path '%s' is not directory, ignoring", base_path ) self.base_paths.append( os.path.abspath( base_path ) ) - def find_dep( self, name, version=None ): + def find_dep( self, name, version=None, type='package', installed_tool_dependencies=None ): """ Attempt to find a dependency named `name` at version `version`. If version is None, return the "default" version as determined using a @@ -40,10 +40,24 @@ if version is None: return self._find_dep_default( name ) else: - return self._find_dep_versioned( name, version ) - def _find_dep_versioned( self, name, version ): + return self._find_dep_versioned( name, version, installed_tool_dependencies=installed_tool_dependencies ) + def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ): + installed_dependency = None + if installed_tool_dependencies: + for installed_dependency in installed_tool_dependencies: + if not installed_dependency.uninstalled: + if installed_dependency.name == name and installed_dependency.version == version and installed_dependency.type == type: + break for base_path in self.base_paths: - path = os.path.join( base_path, name, version ) + if installed_dependency: + tool_shed_repository = installed_dependency.tool_shed_repository + path = os.path.join( base_path, + name, version, + tool_shed_repository.owner, + tool_shed_repository.name, + installed_dependency.installed_changeset_revision ) + else: + path = os.path.join( base_path, name, version ) script = os.path.join( path, 'env.sh' ) if os.path.exists( script ): return script, path, version @@ -51,7 +65,7 @@ return None, path, version else: return None, None, None - def _find_dep_default( self, name ): + def _find_dep_default( self, name, type='package' ): version = None for base_path in self.base_paths: path = os.path.join( base_path, name, 'default' ) diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/util/shed_util.py --- a/lib/galaxy/util/shed_util.py +++ b/lib/galaxy/util/shed_util.py @@ -7,6 +7,7 @@ from galaxy.util.json import * from galaxy.tools.search import ToolBoxSearch 
from galaxy.tool_shed.tool_dependencies.install_util import install_package +from galaxy.tool_shed.encoding_util import * from galaxy.model.orm import * from galaxy import eggs @@ -310,20 +311,24 @@ tool_dicts=tool_dicts, converter_path=converter_path, display_path=display_path ) -def create_or_update_tool_shed_repository( app, name, description, changeset_revision, ctx_rev, repository_clone_url, metadata_dict, - owner='', dist_to_shed=False ): +def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, + current_changeset_revision=None, owner='', dist_to_shed=False ): # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used # to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed. - sa_session = app.model.context.current - tmp_url = clean_repository_clone_url( repository_clone_url ) - tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) + if current_changeset_revision is None: + # The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository + # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior to + # it being uninstalled. + current_changeset_revision = installed_changeset_revision + sa_session = app.model.context.current + tool_shed = get_tool_shed_from_clone_url( repository_clone_url ) if not owner: - owner = get_repository_owner( tmp_url ) + owner = get_repository_owner_from_clone_url( repository_clone_url ) includes_datatypes = 'datatypes' in metadata_dict - tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ) + tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ) if tool_shed_repository: tool_shed_repository.description = description - tool_shed_repository.changeset_revision = changeset_revision + tool_shed_repository.changeset_revision = current_changeset_revision tool_shed_repository.ctx_rev = ctx_rev tool_shed_repository.metadata = metadata_dict tool_shed_repository.includes_datatypes = includes_datatypes @@ -335,7 +340,7 @@ description=description, owner=owner, installed_changeset_revision=changeset_revision, - changeset_revision=changeset_revision, + changeset_revision=current_changeset_revision, ctx_rev=ctx_rev, metadata=metadata_dict, includes_datatypes=includes_datatypes, @@ -876,23 +881,16 @@ fh.close() return tmp_filename return None -def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): - sa_session = app.model.context.current - if tool_shed.find( '//' ) > 0: - tool_shed = tool_shed.split( '//' )[1] - tool_shed = tool_shed.rstrip( '/' ) - return sa_session.query( app.model.ToolShedRepository ) \ - .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.model.ToolShedRepository.table.c.name == name, - app.model.ToolShedRepository.table.c.owner == owner, - app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ - .first() def get_repository_owner( cleaned_repository_url ): items = cleaned_repository_url.split( 'repos' ) repo_path = items[ 1 ] if repo_path.startswith( '/' ): repo_path = repo_path.replace( '/', '', 1 ) return 
repo_path.lstrip( '/' ).split( '/' )[ 0 ] +def get_repository_owner_from_clone_url( repository_clone_url ): + tmp_url = clean_repository_clone_url( repository_clone_url ) + tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) + return get_repository_owner( tmp_url ) def get_repository_tools_tups( app, metadata_dict ): repository_tools_tups = [] if 'tools' in metadata_dict: @@ -988,6 +986,33 @@ relative_install_dir = os.path.join( tool_path, partial_install_dir ) return tool_path, relative_install_dir return None, None +def get_tool_shed_from_clone_url( repository_clone_url ): + tmp_url = clean_repository_clone_url( repository_clone_url ) + return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' ) +def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ): + # This method is used only in Galaxy, not the tool shed. + sa_session = app.model.context.current + if tool_shed.find( '//' ) > 0: + tool_shed = tool_shed.split( '//' )[1] + tool_shed = tool_shed.rstrip( '/' ) + return sa_session.query( app.model.ToolShedRepository ) \ + .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + app.model.ToolShedRepository.table.c.name == name, + app.model.ToolShedRepository.table.c.owner == owner, + app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \ + .first() +def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ): + # This method is used only in Galaxy, not the tool shed. + sa_session = app.model.context.current + if tool_shed.find( '//' ) > 0: + tool_shed = tool_shed.split( '//' )[1] + tool_shed = tool_shed.rstrip( '/' ) + return sa_session.query( app.model.ToolShedRepository ) \ + .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed, + app.model.ToolShedRepository.table.c.name == name, + app.model.ToolShedRepository.table.c.owner == owner, + app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \ + .first() def get_tool_version( app, tool_id ): sa_session = app.model.context.current return sa_session.query( app.model.ToolVersion ) \ @@ -1000,6 +1025,24 @@ .filter( and_( app.model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id, app.model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \ .first() +def get_update_to_changeset_revision_and_ctx_rev( trans, repository ): + """Return the changeset revision hash to which the repository can be updated.""" + tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository ) + url = '%s/repository/get_changeset_revision_and_ctx_rev?name=%s&owner=%s&changeset_revision=%s&no_reset=true' % \ + ( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision ) + try: + response = urllib2.urlopen( url ) + encoded_update_dict = response.read() + if encoded_update_dict: + update_dict = tool_shed_decode( encoded_update_dict ) + changeset_revision = update_dict[ 'changeset_revision' ] + ctx_rev = update_dict[ 'ctx_rev' ] + response.close() + except Exception, e: + log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) ) + changeset_revision = None + ctx_rev = None + return changeset_revision, ctx_rev def get_url_from_repository_tool_shed( app, repository ): """ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. 
We need the URL to this tool shed, which is @@ -1015,7 +1058,8 @@ def handle_missing_data_table_entry( app, repository, changeset_revision, tool_path, repository_tools_tups, dir ): """ Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the - tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed. + tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed + or reinstalled. """ missing_data_table_entry = False for index, repository_tools_tup in enumerate( repository_tools_tups ): @@ -1079,10 +1123,10 @@ return error, message def handle_tool_dependencies( app, tool_shed_repository, installed_changeset_revision, tool_dependencies_config ): """ - Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can refer to installation - methods in Galaxy's tool_dependencies module or to proprietary fabric scripts contained in the repository. Future enhancements - to handling tool dependencies may provide installation processes in addition to fabric based processes. The dependencies will be - installed in: + Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation + methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported. + Future enhancements to handling tool dependencies may provide installation processes in addition to fabric based processes. The dependencies + will be installed in: ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repository_owner>/<repository_name>/<installed_changeset_revision> """ status = 'ok' @@ -1160,8 +1204,9 @@ def load_installed_display_applications( installed_repository_dict, deactivate=False ): # Load or deactivate proprietary datatype display applications app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate ) -def load_repository_contents( trans, repository_name, description, owner, changeset_revision, ctx_rev, tool_path, repository_clone_url, - relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None, install_tool_dependencies=False ): +def load_repository_contents( trans, repository_name, description, owner, installed_changeset_revision, current_changeset_revision, ctx_rev, + tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None, + install_tool_dependencies=False ): """ Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed) when an admin is installing a new repository or reinstalling an uninstalled repository. 
@@ -1174,10 +1219,12 @@ tool_shed_repository = create_or_update_tool_shed_repository( trans.app, repository_name, description, - changeset_revision, + installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict, + current_changeset_revision=current_changeset_revision, + owner='', dist_to_shed=False ) if 'tools' in metadata_dict: tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section ) @@ -1187,7 +1234,7 @@ work_dir = make_tmp_directory() repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_shed_repository, - changeset_revision, + current_changeset_revision, tool_path, repository_tools_tups, work_dir ) @@ -1201,12 +1248,14 @@ tool_dependencies_config = get_config_from_repository( trans.app, 'tool_dependencies.xml', tool_shed_repository, - changeset_revision, + current_changeset_revision, work_dir ) - # Install dependencies for repository tools. + # Install dependencies for repository tools. The tool_dependency.installed_changeset_revision value will be the value of + # tool_shed_repository.changeset_revision (this method's current_changeset_revision). This approach will allow for different + # versions of the same tool_dependency to be installed for associated versions of tools included in the installed repository. status, message = handle_tool_dependencies( app=trans.app, tool_shed_repository=tool_shed_repository, - installed_changeset_revision=changeset_revision, + installed_changeset_revision=current_changeset_revision, tool_dependencies_config=tool_dependencies_config ) if status != 'ok' and message: print 'The following error occurred from load_repository_contents while installing tool dependencies:' @@ -1214,7 +1263,7 @@ add_to_tool_panel( app=trans.app, repository_name=repository_name, repository_clone_url=repository_clone_url, - changeset_revision=changeset_revision, + changeset_revision=current_changeset_revision, repository_tools_tups=repository_tools_tups, owner=owner, shed_tool_conf=shed_tool_conf, @@ -1229,7 +1278,7 @@ datatypes_config = get_config_from_repository( trans.app, 'datatypes_conf.xml', tool_shed_repository, - changeset_revision, + current_changeset_revision, work_dir ) # Load data types required by tools. 
converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=False ) @@ -1238,7 +1287,7 @@ repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed, name=repository_name, owner=owner, - installed_changeset_revision=changeset_revision, + installed_changeset_revision=installed_changeset_revision, tool_dicts=metadata_dict.get( 'tools', [] ), converter_path=converter_path, display_path=display_path ) diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/web/controllers/admin_toolshed.py --- a/lib/galaxy/web/controllers/admin_toolshed.py +++ b/lib/galaxy/web/controllers/admin_toolshed.py @@ -469,7 +469,8 @@ repository_name=name, description=description, owner=owner, - changeset_revision=changeset_revision, + installed_changeset_revision=changeset_revision, + current_changeset_revision=changeset_revision, ctx_rev=ctx_rev, tool_path=tool_path, repository_clone_url=repository_clone_url, @@ -606,7 +607,8 @@ def reinstall_repository( self, trans, **kwd ): message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) - repository = get_repository( trans, kwd[ 'id' ] ) + repository_id = kwd[ 'id' ] + repository = get_repository( trans, repository_id ) no_changes = kwd.get( 'no_changes', '' ) no_changes_checked = CheckboxField.is_checked( no_changes ) install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) ) @@ -621,6 +623,12 @@ else: ctx_rev = repository.ctx_rev clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev ) + # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated. + current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, repository ) + if current_ctx_rev != ctx_rev: + repo = hg.repository( get_configured_ui(), path=os.path.abspath( relative_install_dir ) ) + pull_repository( repo, repository_clone_url, current_changeset_revision ) + update_repository( repo, ctx_rev=current_ctx_rev ) tool_section = None if repository.includes_tools: # Get the location in the tool panel in which each tool was originally loaded. @@ -681,7 +689,8 @@ repository_name=repository.name, description=repository.description, owner=repository.owner, - changeset_revision=repository.installed_changeset_revision, + installed_changeset_revision=repository.installed_changeset_revision, + current_changeset_revision=current_changeset_revision, ctx_rev=ctx_rev, tool_path=tool_path, repository_clone_url=repository_clone_url, @@ -691,6 +700,7 @@ shed_tool_conf=shed_tool_conf, install_tool_dependencies=install_tool_dependencies ) if error_message: + # We'll only have an error_message if there was a problem installing tool dependencies.
message += error_message status = 'error' repository.uninstalled = False @@ -787,7 +797,7 @@ changeset_revision = params.get( 'changeset_revision', None ) latest_changeset_revision = params.get( 'latest_changeset_revision', None ) latest_ctx_rev = params.get( 'latest_ctx_rev', None ) - repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision ) + repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision ) if changeset_revision and latest_changeset_revision and latest_ctx_rev: if changeset_revision == latest_changeset_revision: message = "The installed repository named '%s' is current, there are no updates available. " % name diff -r b762062399b3cab0a4139dc3fcd33f30945e49ac -r b2eabe39a70f676b8cb3b90a656501804547fd87 lib/galaxy/webapps/community/controllers/repository.py --- a/lib/galaxy/webapps/community/controllers/repository.py +++ b/lib/galaxy/webapps/community/controllers/repository.py @@ -648,6 +648,10 @@ elif not update_to_changeset_hash and changeset_hash == changeset_revision: # We've found the changeset in the changelog for which we need to get the next update. update_to_changeset_hash = changeset_hash + if from_update_manager: + if latest_changeset_revision == changeset_revision: + return no_update + return update url += str( latest_changeset_revision ) url += '&latest_ctx_rev=%s' % str( update_to_ctx.rev() ) return trans.response.send_redirect( url ) @@ -1111,6 +1115,57 @@ return to_json_string( tool_version_dicts ) return '' @web.expose + def get_changeset_revision_and_ctx_rev( self, trans, **kwd ): + """Handle a request from a local Galaxy instance to retrieve the changeset revision hash to which an installed repository can be updated.""" + params = util.Params( kwd ) + message = util.restore_text( params.get( 'message', '' ) ) + status = params.get( 'status', 'done' ) + galaxy_url = kwd.get( 'galaxy_url', '' ) + name = params.get( 'name', None ) + owner = params.get( 'owner', None ) + changeset_revision = params.get( 'changeset_revision', None ) + repository = get_repository_by_name_and_owner( trans, name, owner ) + repo_dir = repository.repo_path + repo = hg.repository( get_configured_ui(), repo_dir ) + # Default to the received changeset revision and ctx_rev. + update_to_ctx = get_changectx_for_changeset( repo, changeset_revision ) + latest_changeset_revision = changeset_revision + update_dict = dict( changeset_revision=update_to_ctx, ctx_rev=str( update_to_ctx.rev() ) ) + if changeset_revision == repository.tip: + # If changeset_revision is the repository tip, there are no additional updates. + return tool_shed_encode( update_dict ) + else: + repository_metadata = get_repository_metadata_by_changeset_revision( trans, + trans.security.encode_id( repository.id ), + changeset_revision ) + if repository_metadata: + # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates. + return tool_shed_encode( update_dict ) + else: + # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the + # repository was installed. We need to find the changeset_revision to which we need to update. 
+ update_to_changeset_hash = None + for changeset in repo.changelog: + changeset_hash = str( repo.changectx( changeset ) ) + ctx = get_changectx_for_changeset( repo, changeset_hash ) + if update_to_changeset_hash: + if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ): + # We found a RepositoryMetadata record. + if changeset_hash == repository.tip: + # The current ctx is the repository tip, so use it. + update_to_ctx = get_changectx_for_changeset( repo, changeset_hash ) + latest_changeset_revision = changeset_hash + else: + update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash ) + latest_changeset_revision = update_to_changeset_hash + break + elif not update_to_changeset_hash and changeset_hash == changeset_revision: + # We've found the changeset in the changelog for which we need to get the next update. + update_to_changeset_hash = changeset_hash + update_dict[ 'changeset_revision' ] = str( latest_changeset_revision ) + update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() ) + return tool_shed_encode( update_dict ) + @web.expose def help( self, trans, **kwd ): params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
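
As background for the DependencyManager enhancement summarized above, here is a minimal sketch of the lookup order the change describes: prefer the per-repository install path recorded in a tool_dependency record ( <tool_dependency_dir>/<name>/<version>/<owner>/<repository>/<installed_changeset_revision> ) and fall back to the plain <tool_dependency_dir>/<name>/<version> layout. The InstalledDependency tuple and find_dep_versioned() function are illustrative placeholders, not Galaxy's actual classes or API.

import os
from collections import namedtuple

# Illustrative stand-in for a tool_dependency database record installed with a tool shed repository
# (not Galaxy's ToolDependency model).
InstalledDependency = namedtuple( 'InstalledDependency',
                                  [ 'name', 'version', 'type', 'uninstalled',
                                    'repository_owner', 'repository_name', 'installed_changeset_revision' ] )

def find_dep_versioned( base_paths, name, version, type='package', installed_tool_dependencies=None ):
    """Return ( env_script, install_path, version ) for a dependency, or ( None, None, None ) if nothing is found."""
    installed_dependency = None
    for candidate in installed_tool_dependencies or []:
        # Prefer a matching tool_dependency record that has not been uninstalled.
        if not candidate.uninstalled and ( candidate.name, candidate.version, candidate.type ) == ( name, version, type ):
            installed_dependency = candidate
            break
    for base_path in base_paths:
        if installed_dependency:
            # Layout used for dependencies installed along with a tool shed repository.
            path = os.path.join( base_path, name, version,
                                 installed_dependency.repository_owner,
                                 installed_dependency.repository_name,
                                 installed_dependency.installed_changeset_revision )
        else:
            # Layout used for manually managed dependencies.
            path = os.path.join( base_path, name, version )
        script = os.path.join( path, 'env.sh' )
        if os.path.exists( script ):
            return script, path, version
        if os.path.isdir( path ):
            return None, path, version
    return None, None, None

A caller building a tool's dependency shell commands would pass the installed repository's tool dependency records so that env.sh files installed under the tool shed layout take precedence over a manually managed <name>/<version> directory.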