1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/33fdaf7a0dcb/
Changeset: 33fdaf7a0dcb
User: greg
Date: 2014-06-19 04:08:00
Summary: Eliminate the use of the common_install_util module for Galaxy installs from the Tool Shed.
Affected #: 10 files
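
The pattern throughout the changeset is uniform: module-level functions in
tool_shed.util.common_install_util that received app (or trans) as an argument
become methods on the InstalledRepositoryManager instance that Galaxy attaches
to app. A minimal sketch of the call-site migration, assuming a repository
object is already in hand (variable names are illustrative):

    # Before: stateless helper, with the app passed explicitly.
    from tool_shed.util import common_install_util
    common_install_util.activate_repository( trans.app, repository )

    # After: method on the app-scoped manager; app is bound at construction.
    trans.app.installed_repository_manager.activate_repository( repository )
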
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -16,7 +16,6 @@
import tool_shed.repository_types.util as rt_util
from tool_shed.util import common_util
-from tool_shed.util import common_install_util
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import encoding_util
@@ -49,7 +48,7 @@
repository_id = kwd[ 'id' ]
repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
try:
- common_install_util.activate_repository( trans.app, repository )
+ trans.app.installed_repository_manager.activate_repository( repository )
except Exception, e:
error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.exception( error_message )
@@ -1051,7 +1050,7 @@
includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) )
repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
- irm = install_manager.InstallRepositoryManager( trans.app )
+ install_repository_manager = install_manager.InstallRepositoryManager( trans.app )
if ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or \
( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ):
if updating:
@@ -1080,7 +1079,7 @@
tool_path=tool_path,
tool_shed_url=tool_shed_url )
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
- irm.handle_tool_shed_repositories( installation_dict, using_api=False )
+ install_repository_manager.handle_tool_shed_repositories( installation_dict, using_api=False )
if created_or_updated_tool_shed_repositories:
installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories,
filtered_repo_info_dicts=filtered_repo_info_dicts,
@@ -1099,7 +1098,7 @@
tool_path=tool_path,
tool_shed_url=tool_shed_url )
encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
- irm.initiate_repository_installation( installation_dict )
+ install_repository_manager.initiate_repository_installation( installation_dict )
return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
encoded_kwd=encoded_kwd,
query=query,
@@ -1119,11 +1118,11 @@
# If we're installing or updating a single repository, see if it contains a readme or
# dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
- dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
- tool_shed_url,
- repo_info_dict,
- includes_tool_dependencies,
- updating=updating )
+ dependencies_for_repository_dict = \
+ trans.app.installed_repository_manager.get_dependencies_for_repository( tool_shed_url,
+ repo_info_dict,
+ includes_tool_dependencies,
+ updating=updating )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
if not has_repository_dependencies:
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
@@ -1151,23 +1150,24 @@
# defined repository (and possibly tool) dependencies. In this case, merging will result in newly defined
# dependencies being lost. We pass the updating parameter to make sure merging occurs only when appropriate.
containers_dict = \
- irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies,
- updating=updating )
+ install_repository_manager.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=updating )
else:
# We're installing a list of repositories, each of which may have tool dependencies or repository dependencies.
containers_dicts = []
+ installed_repository_manager = trans.app.installed_repository_manager
for repo_info_dict in repo_info_dicts:
- dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
- tool_shed_url,
- repo_info_dict,
- includes_tool_dependencies,
- updating=updating )
+ dependencies_for_repository_dict = \
+ installed_repository_manager.get_dependencies_for_repository( tool_shed_url,
+ repo_info_dict,
+ includes_tool_dependencies,
+ updating=updating )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
if not has_repository_dependencies:
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
@@ -1185,17 +1185,17 @@
name = dependencies_for_repository_dict.get( 'name', None )
repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None )
containers_dict = \
- irm.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=None,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies,
- updating=updating )
+ install_repository_manager.populate_containers_dict_for_new_install( tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=None,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=updating )
containers_dicts.append( containers_dict )
# Merge all containers into a single container.
- containers_dict = irm.merge_containers_dicts_for_new_install( containers_dicts )
+ containers_dict = install_repository_manager.merge_containers_dicts_for_new_install( containers_dicts )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
if includes_tool_dependencies:
@@ -1592,11 +1592,11 @@
repository_metadata=None,
tool_dependencies=tool_dependencies,
repository_dependencies=repository_dependencies )
- dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
- tool_shed_url,
- repo_info_dict,
- includes_tool_dependencies,
- updating=False )
+ irm = trans.app.installed_repository_manager
+ dependencies_for_repository_dict = irm.get_dependencies_for_repository( tool_shed_url,
+ repo_info_dict,
+ includes_tool_dependencies,
+ updating=False )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False )
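
For reference, the new get_dependencies_for_repository() method (added to the
manager in the next file) returns a single dictionary carrying boolean flags
plus the four dependency containers. A sketch of how a caller unpacks it, with
key names taken from the diff and irm standing in for
trans.app.installed_repository_manager:

    deps_dict = irm.get_dependencies_for_repository( tool_shed_url,
                                                     repo_info_dict,
                                                     includes_tool_dependencies,
                                                     updating=False )
    changeset_revision = deps_dict.get( 'changeset_revision', None )
    installed_rd = deps_dict.get( 'installed_repository_dependencies', {} )
    missing_rd = deps_dict.get( 'missing_repository_dependencies', {} )
    installed_td = deps_dict.get( 'installed_tool_dependencies', {} )
    missing_td = deps_dict.get( 'missing_tool_dependencies', {} )
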
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -1,12 +1,18 @@
"""
-Class encapsulating the management of repositories installed from Galaxy tool sheds.
+Class encapsulating the management of repositories installed into Galaxy from the Tool Shed.
"""
+import copy
import logging
import os
+from galaxy import util
from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import repository_dependency_util
+from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
from tool_shed.util import xml_util
from galaxy.model.orm import and_
@@ -66,18 +72,68 @@
# Load defined dependency relationships for installed tool shed repositories and their contents.
self.load_dependency_relationships()
+ def activate_repository( self, repository ):
+ """Activate an installed tool shed repository that has been marked as deactivated."""
+ repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( self.app, repository )
+ repository.deleted = False
+ repository.status = self.install_model.ToolShedRepository.installation_status.INSTALLED
+ if repository.includes_tools_for_display_in_tool_panel:
+ metadata = repository.metadata
+ repository_tools_tups = suc.get_repository_tools_tups( self.app, metadata )
+ # Reload tools into the appropriate tool panel section.
+ tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
+ tool_util.add_to_tool_panel( self.app,
+ repository.name,
+ repository_clone_url,
+ repository.installed_changeset_revision,
+ repository_tools_tups,
+ repository.owner,
+ shed_tool_conf,
+ tool_panel_dict,
+ new_install=False )
+ if repository.includes_data_managers:
+ tp, data_manager_relative_install_dir = repository.get_tool_relative_path( self.app )
+ # Hack to add repository.name here, which is actually the root of the installed repository
+ data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name )
+ new_data_managers = data_manager_util.install_data_managers( self.app,
+ self.app.config.shed_data_manager_config_file,
+ metadata,
+ repository.get_shed_config_dict( self.app ),
+ data_manager_relative_install_dir,
+ repository,
+ repository_tools_tups )
+ self.install_model.context.add( repository )
+ self.install_model.context.flush()
+ if repository.includes_datatypes:
+ if tool_path:
+ repository_install_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir ) )
+ else:
+ repository_install_dir = os.path.abspath( relative_install_dir )
+ # Activate proprietary datatypes.
+ installed_repository_dict = datatype_util.load_installed_datatypes( self.app,
+ repository,
+ repository_install_dir,
+ deactivate=False )
+ if installed_repository_dict:
+ converter_path = installed_repository_dict.get( 'converter_path' )
+ if converter_path is not None:
+ datatype_util.load_installed_datatype_converters( self.app, installed_repository_dict, deactivate=False )
+ display_path = installed_repository_dict.get( 'display_path' )
+ if display_path is not None:
+ datatype_util.load_installed_display_applications( self.app, installed_repository_dict, deactivate=False )
+
def add_entry_to_installed_repository_dependencies_of_installed_repositories( self, repository ):
"""
Add an entry to self.installed_repository_dependencies_of_installed_repositories. A side-effect of this method
is the population of self.installed_dependent_repositories_of_installed_repositories. Since this method discovers
all repositories required by the received repository, it can use the list to add entries to the reverse dictionary.
"""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
tool_shed, name, owner, installed_changeset_revision = repository_tup
# Get the list of repository dependencies for this repository.
status = self.install_model.ToolShedRepository.installation_status.INSTALLED
- repository_dependency_tups = \
- repository_dependency_util.get_repository_dependency_tups_for_installed_repository( self.app, repository, status=status )
+ repository_dependency_tups = self.get_repository_dependency_tups_for_installed_repository( repository, status=status )
# Add an entry to self.installed_repository_dependencies_of_installed_repositories.
if repository_tup not in self.installed_repository_dependencies_of_installed_repositories:
debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
@@ -97,21 +153,21 @@
def add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
"""Add an entry to self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."""
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
if tool_dependency_tup not in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies:
tool_shed_repository_id, name, version, type = tool_dependency_tup
debug_msg = "Adding an entry for version %s of %s %s " % ( version, type, name )
debug_msg += "to installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."
log.debug( debug_msg )
status = self.install_model.ToolDependency.installation_status.INSTALLED
- installed_runtime_dependent_tool_dependency_tups = \
- tool_dependency_util.get_runtime_dependent_tool_dependency_tuples( self.app, tool_dependency, status=status )
+ installed_runtime_dependent_tool_dependency_tups = self.get_runtime_dependent_tool_dependency_tuples( tool_dependency,
+ status=status )
self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ] = \
installed_runtime_dependent_tool_dependency_tups
def add_entry_to_installed_tool_dependencies_of_installed_repositories( self, repository ):
"""Add an entry to self.installed_tool_dependencies_of_installed_repositories."""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
if repository_tup not in self.installed_tool_dependencies_of_installed_repositories:
tool_shed, name, owner, installed_changeset_revision = repository_tup
debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
@@ -120,38 +176,37 @@
installed_tool_dependency_tups = []
for tool_dependency in repository.tool_dependencies:
if tool_dependency.status == self.app.install_model.ToolDependency.installation_status.INSTALLED:
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
installed_tool_dependency_tups.append( tool_dependency_tup )
self.installed_tool_dependencies_of_installed_repositories[ repository_tup ] = installed_tool_dependency_tups
def add_entry_to_repository_dependencies_of_installed_repositories( self, repository ):
"""Add an entry to self.repository_dependencies_of_installed_repositories."""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
if repository_tup not in self.repository_dependencies_of_installed_repositories:
tool_shed, name, owner, installed_changeset_revision = repository_tup
debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
debug_msg += "to repository_dependencies_of_installed_repositories."
log.debug( debug_msg )
- repository_dependency_tups = \
- repository_dependency_util.get_repository_dependency_tups_for_installed_repository( self.app, repository, status=None )
+ repository_dependency_tups = self.get_repository_dependency_tups_for_installed_repository( repository, status=None )
self.repository_dependencies_of_installed_repositories[ repository_tup ] = repository_dependency_tups
def add_entry_to_runtime_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
"""Add an entry to self.runtime_tool_dependencies_of_installed_tool_dependencies."""
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
if tool_dependency_tup not in self.runtime_tool_dependencies_of_installed_tool_dependencies:
tool_shed_repository_id, name, version, type = tool_dependency_tup
debug_msg = "Adding an entry for version %s of %s %s " % ( version, type, name )
debug_msg += "to runtime_tool_dependencies_of_installed_tool_dependencies."
log.debug( debug_msg )
- runtime_dependent_tool_dependency_tups = \
- tool_dependency_util.get_runtime_dependent_tool_dependency_tuples( self.app, tool_dependency, status=None )
+ runtime_dependent_tool_dependency_tups = self.get_runtime_dependent_tool_dependency_tuples( tool_dependency,
+ status=None )
self.runtime_tool_dependencies_of_installed_tool_dependencies[ tool_dependency_tup ] = \
runtime_dependent_tool_dependency_tups
def add_entry_to_tool_dependencies_of_installed_repositories( self, repository ):
"""Add an entry to self.tool_dependencies_of_installed_repositories."""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
if repository_tup not in self.tool_dependencies_of_installed_repositories:
tool_shed, name, owner, installed_changeset_revision = repository_tup
debug_msg = "Adding an entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
@@ -159,14 +214,357 @@
log.debug( debug_msg )
tool_dependency_tups = []
for tool_dependency in repository.tool_dependencies:
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
tool_dependency_tups.append( tool_dependency_tup )
self.tool_dependencies_of_installed_repositories[ repository_tup ] = tool_dependency_tups
def get_containing_repository_for_tool_dependency( self, tool_dependency_tup ):
tool_shed_repository_id, name, version, type = tool_dependency_tup
return self.app.install_model.context.query( self.app.install_model.ToolShedRepository ).get( tool_shed_repository_id )
-
+
+ def get_dependencies_for_repository( self, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ):
+ """
+ Return dictionaries containing the sets of installed and missing tool dependencies and repository
+ dependencies associated with the repository defined by the received repo_info_dict.
+ """
+ repository = None
+ installed_rd = {}
+ installed_td = {}
+ missing_rd = {}
+ missing_td = {}
+ name = repo_info_dict.keys()[ 0 ]
+ repo_info_tuple = repo_info_dict[ name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if tool_dependencies:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies.
+ # We don't add to installed_td and missing_td here because at this point they are empty.
+ installed_td, missing_td = self.get_installed_and_missing_tool_dependencies_for_repository( tool_dependencies )
+ # In cases where a repository dependency is required only for compiling a dependent repository's
+ # tool dependency, the value of repository_dependencies will be an empty dictionary here.
+ if repository_dependencies:
+ # We have a repository with one or more defined repository dependencies.
+ if not repository:
+ repository = suc.get_repository_for_dependency_relationship( self.app,
+ tool_shed_url,
+ name,
+ repository_owner,
+ changeset_revision )
+ if not updating and repository and repository.metadata:
+ installed_rd, missing_rd = self.get_installed_and_missing_repository_dependencies( repository )
+ else:
+ installed_rd, missing_rd = \
+ self.get_installed_and_missing_repository_dependencies_for_new_or_updated_install( repo_info_tuple )
+ # Discover all repository dependencies and retrieve information for installing them.
+ all_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( self.app,
+ tool_shed_url,
+ util.listify( repo_info_dict ) )
+ has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+ has_repository_dependencies_only_if_compiling_contained_td = \
+ all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
+ includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
+ includes_tools = all_repo_info_dict.get( 'includes_tools', False )
+ required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
+ # Display tool dependencies defined for each of the repository dependencies.
+ if required_repo_info_dicts:
+ required_tool_dependencies = {}
+ for rid in required_repo_info_dicts:
+ for name, repo_info_tuple in rid.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, \
+ repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if rid_tool_dependencies:
+ for td_key, td_dict in rid_tool_dependencies.items():
+ if td_key not in required_tool_dependencies:
+ required_tool_dependencies[ td_key ] = td_dict
+ if required_tool_dependencies:
+ # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
+ required_installed_td, required_missing_td = \
+ self.get_installed_and_missing_tool_dependencies_for_repository( required_tool_dependencies )
+ if required_installed_td:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_installed_td.items():
+ if td_key not in installed_td:
+ installed_td[ td_key ] = td_dict
+ if required_missing_td:
+ if not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ for td_key, td_dict in required_missing_td.items():
+ if td_key not in missing_td:
+ missing_td[ td_key ] = td_dict
+ else:
+ # We have a single repository with (possibly) no defined repository dependencies.
+ all_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( self.app,
+ tool_shed_url,
+ util.listify( repo_info_dict ) )
+ has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+ has_repository_dependencies_only_if_compiling_contained_td = \
+ all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
+ includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
+ includes_tools = all_repo_info_dict.get( 'includes_tools', False )
+ required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
+ dependencies_for_repository_dict = \
+ dict( changeset_revision=changeset_revision,
+ has_repository_dependencies=has_repository_dependencies,
+ has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
+ includes_tool_dependencies=includes_tool_dependencies,
+ includes_tools=includes_tools,
+ includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+ installed_repository_dependencies=installed_rd,
+ installed_tool_dependencies=installed_td,
+ missing_repository_dependencies=missing_rd,
+ missing_tool_dependencies=missing_td,
+ name=name,
+ repository_owner=repository_owner )
+ return dependencies_for_repository_dict
+
+ def get_installed_and_missing_repository_dependencies( self, repository ):
+ """
+ Return the installed and missing repository dependencies for a tool shed repository that has a record
+ in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are
+ associated with the repository in the database. Do not include a repository dependency if it is required
+ only to compile a tool dependency defined for the dependent repository since these special kinds of repository
+ dependencies are really a dependency of the dependent repository's contained tool dependency, and only
+ if that tool dependency requires compilation.
+ """
+ missing_repository_dependencies = {}
+ installed_repository_dependencies = {}
+ has_repository_dependencies = repository.has_repository_dependencies
+ if has_repository_dependencies:
+ # The repository dependencies container will include only the immediate repository
+ # dependencies of this repository, so the container will be only a single level in depth.
+ metadata = repository.metadata
+ installed_rd_tups = []
+ missing_rd_tups = []
+ for tsr in repository.repository_dependencies:
+ prior_installation_required = suc.set_prior_installation_required( self.app, repository, tsr )
+ only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr )
+ rd_tup = [ tsr.tool_shed,
+ tsr.name,
+ tsr.owner,
+ tsr.changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ tsr.id,
+ tsr.status ]
+ if tsr.status == self.app.install_model.ToolShedRepository.installation_status.INSTALLED:
+ installed_rd_tups.append( rd_tup )
+ else:
+ # We'll only add the rd_tup to the missing_rd_tups list if the received repository
+ # has tool dependencies that are not correctly installed. This may prove to be a
+ # weak check since the repository in question may not have anything to do with
+ # compiling the missing tool dependencies. If we discover that this is a problem,
+ # more granular checking will be necessary here.
+ if repository.missing_tool_dependencies:
+ if not self.repository_dependency_needed_only_for_compiling_tool_dependency( repository, tsr ):
+ missing_rd_tups.append( rd_tup )
+ else:
+ missing_rd_tups.append( rd_tup )
+ if installed_rd_tups or missing_rd_tups:
+ # Get the description from the metadata in case it has a value.
+ repository_dependencies = metadata.get( 'repository_dependencies', {} )
+ description = repository_dependencies.get( 'description', None )
+ # We need to add a root_key entry to one or both of the installed_repository_dependencies and
+ # missing_repository_dependencies dictionaries for proper display parsing.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td )
+ if installed_rd_tups:
+ installed_repository_dependencies[ 'root_key' ] = root_key
+ installed_repository_dependencies[ root_key ] = installed_rd_tups
+ installed_repository_dependencies[ 'description' ] = description
+ if missing_rd_tups:
+ missing_repository_dependencies[ 'root_key' ] = root_key
+ missing_repository_dependencies[ root_key ] = missing_rd_tups
+ missing_repository_dependencies[ 'description' ] = description
+ return installed_repository_dependencies, missing_repository_dependencies
+
+ def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( self, repo_info_tuple ):
+ """
+ Parse the received repository_dependencies dictionary that is associated with a repository being
+ installed into Galaxy for the first time and attempt to determine repository dependencies that are
+ already installed and those that are not.
+ """
+ missing_repository_dependencies = {}
+ installed_repository_dependencies = {}
+ missing_rd_tups = []
+ installed_rd_tups = []
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if repository_dependencies:
+ description = repository_dependencies[ 'description' ]
+ root_key = repository_dependencies[ 'root_key' ]
+ # The repository dependencies container will include only the immediate repository dependencies of
+ # this repository, so the container will be only a single level in depth.
+ for key, rd_tups in repository_dependencies.items():
+ if key in [ 'description', 'root_key' ]:
+ continue
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ # Updates to installed repository revisions may have occurred, so make sure to locate the
+ # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple
+ # that includes the correct repository owner which we get from the current rd_tup. The current
+ # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner,
+ # repository_dependencies, installed_td )
+ tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( self.app, rd_tup )
+ tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
+ repository, installed_changeset_revision = suc.repository_was_previously_installed( self.app,
+ tool_shed,
+ name,
+ tmp_repo_info_tuple,
+ from_tip=False )
+ if repository:
+ new_rd_tup = [ tool_shed,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ repository.id,
+ repository.status ]
+ if repository.status == self.install_model.ToolShedRepository.installation_status.INSTALLED:
+ if new_rd_tup not in installed_rd_tups:
+ installed_rd_tups.append( new_rd_tup )
+ else:
+ # A repository dependency that is not installed will not be considered missing if its value
+ # for only_if_compiling_contained_td is True. This is because this type of repository dependency
+ # will only be considered at the time that the specified tool dependency is being installed, and
+ # even then only if the compiled binary of the tool dependency could not be installed due to the
+ # unsupported installation environment.
+ if not util.asbool( only_if_compiling_contained_td ):
+ if new_rd_tup not in missing_rd_tups:
+ missing_rd_tups.append( new_rd_tup )
+ else:
+ new_rd_tup = [ tool_shed,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ None,
+ 'Never installed' ]
+ if not util.asbool( only_if_compiling_contained_td ):
+ # A repository dependency that is not installed will not be considered missing if its value for
+ # only_if_compiling_contained_td is True - see above...
+ if new_rd_tup not in missing_rd_tups:
+ missing_rd_tups.append( new_rd_tup )
+ if installed_rd_tups:
+ installed_repository_dependencies[ 'root_key' ] = root_key
+ installed_repository_dependencies[ root_key ] = installed_rd_tups
+ installed_repository_dependencies[ 'description' ] = description
+ if missing_rd_tups:
+ missing_repository_dependencies[ 'root_key' ] = root_key
+ missing_repository_dependencies[ root_key ] = missing_rd_tups
+ missing_repository_dependencies[ 'description' ] = description
+ return installed_repository_dependencies, missing_repository_dependencies
+
+ def get_installed_and_missing_tool_dependencies_for_repository( self, tool_dependencies_dict ):
+ """
+ Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories
+ being installed into Galaxy.
+ """
+ # FIXME: This implementation breaks when updates to a repository contain dependencies that result in
+ # multiple entries for a specific tool dependency. A scenario where this can happen is where 2 repositories
+ # define the same dependency internally (not using the complex repository dependency definition to a separate
+ # package repository approach). If 2 repositories contain the same tool_dependencies.xml file, one dependency
+ # will be lost since the values in these returned dictionaries are not lists. All tool dependency dictionaries
+ # should have lists as values. These scenarios are probably extreme corner cases, but still should be handled.
+ installed_tool_dependencies = {}
+ missing_tool_dependencies = {}
+ if tool_dependencies_dict:
+ # Make sure not to change anything in the received tool_dependencies_dict as that would be a bad side-effect!
+ tmp_tool_dependencies_dict = copy.deepcopy( tool_dependencies_dict )
+ for td_key, val in tmp_tool_dependencies_dict.items():
+ # Default the status to NEVER_INSTALLED.
+ tool_dependency_status = self.install_model.ToolDependency.installation_status.NEVER_INSTALLED
+ # set_environment tool dependencies are stored as a list of requirement dictionaries.
+ if td_key == 'set_environment':
+ new_val = []
+ for requirement_dict in val:
+ # {'repository_name': 'xx',
+ # 'name': 'bwa',
+ # 'version': '0.5.9',
+ # 'repository_owner': 'yy',
+ # 'changeset_revision': 'zz',
+ # 'type': 'package'}
+ tool_dependency = \
+ tool_dependency_util.get_tool_dependency_by_name_version_type( self.app,
+ requirement_dict.get( 'name', None ),
+ requirement_dict.get( 'version', None ),
+ requirement_dict.get( 'type', 'package' ) )
+ if tool_dependency:
+ tool_dependency_status = tool_dependency.status
+ requirement_dict[ 'status' ] = tool_dependency_status
+ new_val.append( requirement_dict )
+ if tool_dependency_status in [ self.install_model.ToolDependency.installation_status.INSTALLED ]:
+ if td_key in installed_tool_dependencies:
+ installed_tool_dependencies[ td_key ].extend( new_val )
+ else:
+ installed_tool_dependencies[ td_key ] = new_val
+ else:
+ if td_key in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ].extend( new_val )
+ else:
+ missing_tool_dependencies[ td_key ] = new_val
+ else:
+ # The val dictionary looks something like this:
+ # {'repository_name': 'xx',
+ # 'name': 'bwa',
+ # 'version': '0.5.9',
+ # 'repository_owner': 'yy',
+ # 'changeset_revision': 'zz',
+ # 'type': 'package'}
+ tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( self.app,
+ val.get( 'name', None ),
+ val.get( 'version', None ),
+ val.get( 'type', 'package' ) )
+ if tool_dependency:
+ tool_dependency_status = tool_dependency.status
+ val[ 'status' ] = tool_dependency_status
+ if tool_dependency_status in [ self.install_model.ToolDependency.installation_status.INSTALLED ]:
+ installed_tool_dependencies[ td_key ] = val
+ else:
+ missing_tool_dependencies[ td_key ] = val
+ return installed_tool_dependencies, missing_tool_dependencies
+
+ def get_repository_dependency_tups_for_installed_repository( self, repository, dependency_tups=None, status=None ):
+ """
+ Return a list of tuples defining tool_shed_repository objects (whose status can be anything) required by the
+ received repository. The returned list defines the entire repository dependency tree. This method is called
+ only from Galaxy.
+ """
+ if dependency_tups is None:
+ dependency_tups = []
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+ for rrda in repository.required_repositories:
+ repository_dependency = rrda.repository_dependency
+ required_repository = repository_dependency.repository
+ if status is None or required_repository.status == status:
+ required_repository_tup = self.get_repository_tuple_for_installed_repository_manager( required_repository )
+ if required_repository_tup == repository_tup:
+ # We have a circular repository dependency relationship, skip this entry.
+ continue
+ if required_repository_tup not in dependency_tups:
+ dependency_tups.append( required_repository_tup )
+ dependency_tups = self.get_repository_dependency_tups_for_installed_repository( required_repository,
+ dependency_tups=dependency_tups )
+ return dependency_tups
+
+ def get_repository_tuple_for_installed_repository_manager( self, repository ):
+ return ( str( repository.tool_shed ),
+ str( repository.name ),
+ str( repository.owner ),
+ str( repository.installed_changeset_revision ) )
+
def get_repository_install_dir( self, tool_shed_repository ):
for tool_config in self.tool_configs:
tree, error_message = xml_util.parse_xml( tool_config )
@@ -186,6 +584,38 @@
return relative_path
return None
+ def get_runtime_dependent_tool_dependency_tuples( self, tool_dependency, status=None ):
+ """
+ Return the list of tool dependency objects that require the received tool dependency at run time. The returned
+ list will be filtered by the received status if it is not None. This method is called only from Galaxy.
+ """
+ runtime_dependent_tool_dependency_tups = []
+ required_env_shell_file_path = tool_dependency.get_env_shell_file_path( self.app )
+ if required_env_shell_file_path:
+ required_env_shell_file_path = os.path.abspath( required_env_shell_file_path )
+ if required_env_shell_file_path is not None:
+ for td in self.app.install_model.context.query( self.app.install_model.ToolDependency ):
+ if status is None or td.status == status:
+ env_shell_file_path = td.get_env_shell_file_path( self.app )
+ if env_shell_file_path is not None:
+ try:
+ contents = open( env_shell_file_path, 'r' ).read()
+ except Exception, e:
+ contents = None
+ log.debug( 'Error reading file %s, so cannot determine if package %s requires package %s at run time: %s' % \
+ ( str( env_shell_file_path ), str( td.name ), str( tool_dependency.name ), str( e ) ) )
+ if contents is not None and contents.find( required_env_shell_file_path ) >= 0:
+ td_tuple = self.get_tool_dependency_tuple_for_installed_repository_manager( td )
+ runtime_dependent_tool_dependency_tups.append( td_tuple )
+ return runtime_dependent_tool_dependency_tups
+
+ def get_tool_dependency_tuple_for_installed_repository_manager( self, tool_dependency ):
+ if tool_dependency.type is None:
+ type = None
+ else:
+ type = str( tool_dependency.type )
+ return ( tool_dependency.tool_shed_repository_id, str( tool_dependency.name ), str( tool_dependency.version ), type )
+
def handle_repository_install( self, repository ):
"""Load the dependency relationships for a repository that was just installed or reinstalled."""
# Populate self.repository_dependencies_of_installed_repositories.
@@ -205,7 +635,7 @@
def handle_repository_uninstall( self, repository ):
"""Remove the dependency relationships for a repository that was just uninstalled."""
for tool_dependency in repository.tool_dependencies:
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
# Remove this tool_dependency from all values in
# self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies
altered_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies = {}
@@ -241,8 +671,8 @@
# Populate self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies.
self.add_entry_to_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( tool_dependency )
# Populate self.installed_tool_dependencies_of_installed_repositories.
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
if repository_tup in self.installed_tool_dependencies_of_installed_repositories:
self.installed_tool_dependencies_of_installed_repositories[ repository_tup ].append( tool_dependency_tup )
else:
@@ -406,7 +836,7 @@
is removal of appropriate value items from self.installed_dependent_repositories_of_installed_repositories.
"""
# Remove tuples defining this repository from value lists in self.installed_dependent_repositories_of_installed_repositories.
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
tool_shed, name, owner, installed_changeset_revision = repository_tup
altered_installed_dependent_repositories_of_installed_repositories = {}
for r_tup, v_tups in self.installed_dependent_repositories_of_installed_repositories.items():
@@ -431,7 +861,7 @@
def remove_entry_from_installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
"""Remove an entry from self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies."""
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
if tool_dependency_tup in self.installed_runtime_dependent_tool_dependencies_of_installed_tool_dependencies:
tool_shed_repository_id, name, version, type = tool_dependency_tup
debug_msg = "Removing entry for version %s of %s %s " % ( version, type, name )
@@ -441,7 +871,7 @@
def remove_entry_from_installed_tool_dependencies_of_installed_repositories( self, repository ):
"""Remove an entry from self.installed_tool_dependencies_of_installed_repositories."""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
if repository_tup in self.installed_tool_dependencies_of_installed_repositories:
tool_shed, name, owner, installed_changeset_revision = repository_tup
debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
@@ -451,7 +881,7 @@
def remove_entry_from_repository_dependencies_of_installed_repositories( self, repository ):
"""Remove an entry from self.repository_dependencies_of_installed_repositories."""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
if repository_tup in self.repository_dependencies_of_installed_repositories:
tool_shed, name, owner, installed_changeset_revision = repository_tup
debug_msg = "Removing entry for revision %s of repository %s owned by %s " % ( installed_changeset_revision, name, owner )
@@ -461,7 +891,7 @@
def remove_entry_from_runtime_tool_dependencies_of_installed_tool_dependencies( self, tool_dependency ):
"""Remove an entry from self.runtime_tool_dependencies_of_installed_tool_dependencies."""
- tool_dependency_tup = tool_dependency_util.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
+ tool_dependency_tup = self.get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency )
if tool_dependency_tup in self.runtime_tool_dependencies_of_installed_tool_dependencies:
tool_shed_repository_id, name, version, type = tool_dependency_tup
debug_msg = "Removing entry for version %s of %s %s from runtime_tool_dependencies_of_installed_tool_dependencies." % \
@@ -471,10 +901,26 @@
def remove_entry_from_tool_dependencies_of_installed_repositories( self, repository ):
"""Remove an entry from self.tool_dependencies_of_installed_repositories."""
- repository_tup = repository_dependency_util.get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = self.get_repository_tuple_for_installed_repository_manager( repository )
if repository_tup in self.tool_dependencies_of_installed_repositories:
tool_shed, name, owner, installed_changeset_revision = repository_tup
debug_msg = "Removing entry for revision %s of repository %s owned by %s from tool_dependencies_of_installed_repositories." % \
( installed_changeset_revision, name, owner )
log.debug( debug_msg )
del self.tool_dependencies_of_installed_repositories[ repository_tup ]
+
+ def repository_dependency_needed_only_for_compiling_tool_dependency( self, repository, repository_dependency ):
+ for rd_tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
+ # TODO: we may discover that we need to check more than just installed_changeset_revision and changeset_revision here, in which
+ # case we'll need to contact the tool shed to get the list of all possible changeset_revisions.
+ cleaned_tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( tool_shed )
+ cleaned_repository_dependency_tool_shed = \
+ common_util.remove_protocol_and_port_from_tool_shed_url( str( repository_dependency.tool_shed ) )
+ if cleaned_repository_dependency_tool_shed == cleaned_tool_shed and \
+ repository_dependency.name == name and \
+ repository_dependency.owner == owner and \
+ ( repository_dependency.installed_changeset_revision == changeset_revision or \
+ repository_dependency.changeset_revision == changeset_revision ):
+ return True
+ return False
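
The manager keys its in-memory dependency dictionaries on the two tuple shapes
produced by the helper methods moved in above. A sketch with illustrative
values (the bwa/0.5.9 pair mirrors the sample dictionaries in the comments):

    # get_repository_tuple_for_installed_repository_manager() returns
    # ( tool_shed, name, owner, installed_changeset_revision ).
    repository_tup = ( 'toolshed.g2.bx.psu.edu', 'bwa', 'yy', 'zz' )

    # get_tool_dependency_tuple_for_installed_repository_manager() returns
    # ( tool_shed_repository_id, name, version, type ); 42 is a made-up id.
    tool_dependency_tup = ( 42, 'bwa', '0.5.9', 'package' )
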
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/galaxy_install/repair_repository_manager.py
--- a/lib/tool_shed/galaxy_install/repair_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py
@@ -5,7 +5,6 @@
from tool_shed.galaxy_install import install_manager
-from tool_shed.util import common_install_util
from tool_shed.util import common_util
from tool_shed.util import container_util
from tool_shed.util import shed_util_common as suc
@@ -148,7 +147,7 @@
repair_dict = {}
if repository.status in [ self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
try:
- common_install_util.activate_repository( self.app, repository )
+ self.app.installed_repository_manager.activate_repository( repository )
except Exception, e:
error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
log.debug( error_message )
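
Both call sites (the admin_toolshed controller above and the repair manager
here) now route activation through the single manager method. A minimal sketch
of the shared guarded-activation pattern, assuming app is the Galaxy
application object:

    if repository.status == app.install_model.ToolShedRepository.installation_status.DEACTIVATED:
        try:
            app.installed_repository_manager.activate_repository( repository )
        except Exception, e:
            log.debug( "Error activating repository %s: %s" % ( repository.name, str( e ) ) )
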
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ /dev/null
@@ -1,499 +0,0 @@
-import copy
-import json
-import logging
-import os
-import urllib
-import urllib2
-from galaxy import util
-from galaxy import web
-import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_util
-from tool_shed.util import container_util
-from tool_shed.util import encoding_util
-from tool_shed.util import data_manager_util
-from tool_shed.util import datatype_util
-from tool_shed.util import tool_dependency_util
-from tool_shed.util import tool_util
-
-log = logging.getLogger( __name__ )
-
-def activate_repository( app, repository ):
- """Activate an installed tool shed repository that has been marked as deactivated."""
- install_model = app.install_model
- repository_clone_url = common_util.generate_clone_url_for_installed_repository( app, repository )
- shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( app, repository )
- repository.deleted = False
- repository.status = install_model.ToolShedRepository.installation_status.INSTALLED
- if repository.includes_tools_for_display_in_tool_panel:
- metadata = repository.metadata
- repository_tools_tups = suc.get_repository_tools_tups( app, metadata )
- # Reload tools into the appropriate tool panel section.
- tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
- tool_util.add_to_tool_panel( app,
- repository.name,
- repository_clone_url,
- repository.installed_changeset_revision,
- repository_tools_tups,
- repository.owner,
- shed_tool_conf,
- tool_panel_dict,
- new_install=False )
- if repository.includes_data_managers:
- tp, data_manager_relative_install_dir = repository.get_tool_relative_path( app )
- # Hack to add repository.name here, which is actually the root of the installed repository
- data_manager_relative_install_dir = os.path.join( data_manager_relative_install_dir, repository.name )
- new_data_managers = data_manager_util.install_data_managers( app,
- app.config.shed_data_manager_config_file,
- metadata,
- repository.get_shed_config_dict( app ),
- data_manager_relative_install_dir,
- repository,
- repository_tools_tups )
- install_model.context.add( repository )
- install_model.context.flush()
- if repository.includes_datatypes:
- if tool_path:
- repository_install_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir ) )
- else:
- repository_install_dir = os.path.abspath( relative_install_dir )
- # Activate proprietary datatypes.
- installed_repository_dict = datatype_util.load_installed_datatypes( app, repository, repository_install_dir, deactivate=False )
- if installed_repository_dict:
- converter_path = installed_repository_dict.get( 'converter_path' )
- if converter_path is not None:
- datatype_util.load_installed_datatype_converters( app, installed_repository_dict, deactivate=False )
- display_path = installed_repository_dict.get( 'display_path' )
- if display_path is not None:
- datatype_util.load_installed_display_applications( app, installed_repository_dict, deactivate=False )
-
-def get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ):
- """
- Return dictionaries containing the sets of installed and missing tool dependencies and repository
- dependencies associated with the repository defined by the received repo_info_dict.
- """
- repository = None
- installed_rd = {}
- installed_td = {}
- missing_rd = {}
- missing_td = {}
- name = repo_info_dict.keys()[ 0 ]
- repo_info_tuple = repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if tool_dependencies:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- # Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies.
- # We don't add to installed_td and missing_td here because at this point they are empty.
- installed_td, missing_td = \
- get_installed_and_missing_tool_dependencies_for_repository( trans, tool_dependencies )
- # In cases where a repository dependency is required only for compiling a dependent repository's
- # tool dependency, the value of repository_dependencies will be an empty dictionary here.
- if repository_dependencies:
- # We have a repository with one or more defined repository dependencies.
- if not repository:
- repository = suc.get_repository_for_dependency_relationship( trans.app,
- tool_shed_url,
- name,
- repository_owner,
- changeset_revision )
- if not updating and repository and repository.metadata:
- installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository )
- else:
- installed_rd, missing_rd = \
- get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple )
- # Discover all repository dependencies and retrieve information for installing them.
- all_repo_info_dict = get_required_repo_info_dicts( trans.app, tool_shed_url, util.listify( repo_info_dict ) )
- has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
- has_repository_dependencies_only_if_compiling_contained_td = \
- all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
- includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
- includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
- includes_tools = all_repo_info_dict.get( 'includes_tools', False )
- required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
- # Display tool dependencies defined for each of the repository dependencies.
- if required_repo_info_dicts:
- required_tool_dependencies = {}
- for rid in required_repo_info_dicts:
- for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, \
- repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if rid_tool_dependencies:
- for td_key, td_dict in rid_tool_dependencies.items():
- if td_key not in required_tool_dependencies:
- required_tool_dependencies[ td_key ] = td_dict
- if required_tool_dependencies:
- # Discover and categorize all tool dependencies defined for this repository's repository dependencies.
- required_installed_td, required_missing_td = \
- get_installed_and_missing_tool_dependencies_for_repository( trans, required_tool_dependencies )
- if required_installed_td:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_installed_td.items():
- if td_key not in installed_td:
- installed_td[ td_key ] = td_dict
- if required_missing_td:
- if not includes_tool_dependencies:
- includes_tool_dependencies = True
- for td_key, td_dict in required_missing_td.items():
- if td_key not in missing_td:
- missing_td[ td_key ] = td_dict
- else:
- # We have a single repository with (possibly) no defined repository dependencies.
- all_repo_info_dict = get_required_repo_info_dicts( trans.app, tool_shed_url, util.listify( repo_info_dict ) )
- has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
- has_repository_dependencies_only_if_compiling_contained_td = \
- all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
- includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
- includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
- includes_tools = all_repo_info_dict.get( 'includes_tools', False )
- required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
- dependencies_for_repository_dict = \
- dict( changeset_revision=changeset_revision,
- has_repository_dependencies=has_repository_dependencies,
- has_repository_dependencies_only_if_compiling_contained_td=has_repository_dependencies_only_if_compiling_contained_td,
- includes_tool_dependencies=includes_tool_dependencies,
- includes_tools=includes_tools,
- includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
- installed_repository_dependencies=installed_rd,
- installed_tool_dependencies=installed_td,
- missing_repository_dependencies=missing_rd,
- missing_tool_dependencies=missing_td,
- name=name,
- repository_owner=repository_owner )
- return dependencies_for_repository_dict
-
-def get_installed_and_missing_repository_dependencies( trans, repository ):
- """
- Return the installed and missing repository dependencies for a tool shed repository that has a record
- in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are
- associated with the repository in the database. Do not include a repository dependency if it is required
- only to compile a tool dependency defined for the dependent repository since these special kinds of repository
- dependencies are really a dependency of the dependent repository's contained tool dependency, and only
- if that tool dependency requires compilation.
- """
- missing_repository_dependencies = {}
- installed_repository_dependencies = {}
- has_repository_dependencies = repository.has_repository_dependencies
- if has_repository_dependencies:
- # The repository dependencies container will include only the immediate repository dependencies of this repository, so the container
- # will be only a single level in depth.
- metadata = repository.metadata
- installed_rd_tups = []
- missing_rd_tups = []
- for tsr in repository.repository_dependencies:
- prior_installation_required = suc.set_prior_installation_required( trans.app, repository, tsr )
- only_if_compiling_contained_td = suc.set_only_if_compiling_contained_td( repository, tsr )
- rd_tup = [ tsr.tool_shed,
- tsr.name,
- tsr.owner,
- tsr.changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- tsr.id,
- tsr.status ]
- if tsr.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
- installed_rd_tups.append( rd_tup )
- else:
- # We'll only add the rd_tup to the missing_rd_tups list if the received repository has tool dependencies that are not
- # correctly installed. This may prove to be a weak check since the repository in question may not have anything to do
- # with compiling the missing tool dependencies. If we discover that this is a problem, more granular checking will be
- # necessary here.
- if repository.missing_tool_dependencies:
- if not repository_dependency_needed_only_for_compiling_tool_dependency( repository, tsr ):
- missing_rd_tups.append( rd_tup )
- else:
- missing_rd_tups.append( rd_tup )
- if installed_rd_tups or missing_rd_tups:
- # Get the description from the metadata in case it has a value.
- repository_dependencies = metadata.get( 'repository_dependencies', {} )
- description = repository_dependencies.get( 'description', None )
- # We need to add a root_key entry to one or both of installed_repository_dependencies dictionary and the
- # missing_repository_dependencies dictionaries for proper display parsing.
- root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
- repository.name,
- repository.owner,
- repository.installed_changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td )
- if installed_rd_tups:
- installed_repository_dependencies[ 'root_key' ] = root_key
- installed_repository_dependencies[ root_key ] = installed_rd_tups
- installed_repository_dependencies[ 'description' ] = description
- if missing_rd_tups:
- missing_repository_dependencies[ 'root_key' ] = root_key
- missing_repository_dependencies[ root_key ] = missing_rd_tups
- missing_repository_dependencies[ 'description' ] = description
- return installed_repository_dependencies, missing_repository_dependencies
-
-def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple ):
- """
- Parse the received repository_dependencies dictionary that is associated with a repository being
- installed into Galaxy for the first time and attempt to determine repository dependencies that are
- already installed and those that are not.
- """
- missing_repository_dependencies = {}
- installed_repository_dependencies = {}
- missing_rd_tups = []
- installed_rd_tups = []
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if repository_dependencies:
- description = repository_dependencies[ 'description' ]
- root_key = repository_dependencies[ 'root_key' ]
- # The repository dependencies container will include only the immediate repository dependencies of
- # this repository, so the container will be only a single level in depth.
- for key, rd_tups in repository_dependencies.items():
- if key in [ 'description', 'root_key' ]:
- continue
- for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( rd_tup )
- # Updates to installed repository revisions may have occurred, so make sure to locate the
- # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple
- # that includes the correct repository owner which we get from the current rd_tup. The current
- # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner,
- # repository_dependencies, installed_td )
- tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( trans, rd_tup )
- tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
- repository, installed_changeset_revision = suc.repository_was_previously_installed( trans.app,
- tool_shed,
- name,
- tmp_repo_info_tuple,
- from_tip=False )
- if repository:
- new_rd_tup = [ tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- repository.id,
- repository.status ]
- if repository.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
- if new_rd_tup not in installed_rd_tups:
- installed_rd_tups.append( new_rd_tup )
- else:
- # A repository dependency that is not installed will not be considered missing if its value
-                            # for only_if_compiling_contained_td is True. This is because this type of repository dependency
-                            # will only be considered at the time that the specified tool dependency is being installed, and
-                            # even then only if the compiled binary of the tool dependency could not be installed due to the
-                            # unsupported installation environment.
- if not util.asbool( only_if_compiling_contained_td ):
- if new_rd_tup not in missing_rd_tups:
- missing_rd_tups.append( new_rd_tup )
- else:
- new_rd_tup = [ tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- None,
- 'Never installed' ]
- if not util.asbool( only_if_compiling_contained_td ):
- # A repository dependency that is not installed will not be considered missing if its value for
- # only_if_compiling_contained_td is True - see above...
- if new_rd_tup not in missing_rd_tups:
- missing_rd_tups.append( new_rd_tup )
- if installed_rd_tups:
- installed_repository_dependencies[ 'root_key' ] = root_key
- installed_repository_dependencies[ root_key ] = installed_rd_tups
- installed_repository_dependencies[ 'description' ] = description
- if missing_rd_tups:
- missing_repository_dependencies[ 'root_key' ] = root_key
- missing_repository_dependencies[ root_key ] = missing_rd_tups
- missing_repository_dependencies[ 'description' ] = description
- return installed_repository_dependencies, missing_repository_dependencies
-
-def get_installed_and_missing_tool_dependencies_for_repository( trans, tool_dependencies_dict ):
- """
- Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories
- being installed into Galaxy.
- """
- # FIXME: This implementation breaks when updates to a repository contain dependencies that result in
- # multiple entries for a specific tool dependency. A scenario where this can happen is where 2 repositories
- # define the same dependency internally (not using the complex repository dependency definition to a separate
- # package repository approach). If 2 repositories contain the same tool_dependencies.xml file, one dependency
- # will be lost since the values in these returned dictionaries are not lists. All tool dependency dictionaries
- # should have lists as values. These scenarios are probably extreme corner cases, but still should be handled.
- installed_tool_dependencies = {}
- missing_tool_dependencies = {}
- if tool_dependencies_dict:
- # Make sure not to change anything in the received tool_dependencies_dict as that would be a bad side-effect!
- tmp_tool_dependencies_dict = copy.deepcopy( tool_dependencies_dict )
- for td_key, val in tmp_tool_dependencies_dict.items():
- # Default the status to NEVER_INSTALLED.
- tool_dependency_status = trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED
- # Set environment tool dependencies are a list.
- if td_key == 'set_environment':
- new_val = []
- for requirement_dict in val:
- # {'repository_name': 'xx',
- # 'name': 'bwa',
- # 'version': '0.5.9',
- # 'repository_owner': 'yy',
- # 'changeset_revision': 'zz',
- # 'type': 'package'}
- tool_dependency = \
- tool_dependency_util.get_tool_dependency_by_name_version_type( trans.app,
- requirement_dict.get( 'name', None ),
- requirement_dict.get( 'version', None ),
- requirement_dict.get( 'type', 'package' ) )
- if tool_dependency:
- tool_dependency_status = tool_dependency.status
- requirement_dict[ 'status' ] = tool_dependency_status
- new_val.append( requirement_dict )
- if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
- if td_key in installed_tool_dependencies:
- installed_tool_dependencies[ td_key ].extend( new_val )
- else:
- installed_tool_dependencies[ td_key ] = new_val
- else:
- if td_key in missing_tool_dependencies:
- missing_tool_dependencies[ td_key ].extend( new_val )
- else:
- missing_tool_dependencies[ td_key ] = new_val
- else:
- # The val dictionary looks something like this:
- # {'repository_name': 'xx',
- # 'name': 'bwa',
- # 'version': '0.5.9',
- # 'repository_owner': 'yy',
- # 'changeset_revision': 'zz',
- # 'type': 'package'}
- tool_dependency = tool_dependency_util.get_tool_dependency_by_name_version_type( trans.app,
- val.get( 'name', None ),
- val.get( 'version', None ),
- val.get( 'type', 'package' ) )
- if tool_dependency:
- tool_dependency_status = tool_dependency.status
- val[ 'status' ] = tool_dependency_status
- if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
- installed_tool_dependencies[ td_key ] = val
- else:
- missing_tool_dependencies[ td_key ] = val
- return installed_tool_dependencies, missing_tool_dependencies
-
-def get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ):
- """
- Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
- them to the list. All repository_dependencies entries in each of the received repo_info_dicts includes
- all required repositories, so only one pass through this method is required to retrieve all repository
- dependencies.
- """
- all_required_repo_info_dict = {}
- all_repo_info_dicts = []
- if repo_info_dicts:
- # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
- # shed to discover repository ids.
- required_repository_tups = []
- for repo_info_dict in repo_info_dicts:
- if repo_info_dict not in all_repo_info_dicts:
- all_repo_info_dicts.append( repo_info_dict )
- for repository_name, repo_info_tup in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tup )
- if repository_dependencies:
- for key, val in repository_dependencies.items():
- if key in [ 'root_key', 'description' ]:
- continue
- repository_components_tuple = container_util.get_components_from_key( key )
- components_list = suc.extract_components_from_tuple( repository_components_tuple )
- # Skip listing a repository dependency if it is required only to compile a tool dependency
- # defined for the dependent repository since in this case, the repository dependency is really
- # a dependency of the dependent repository's contained tool dependency, and only if that
- # tool dependency requires compilation.
- # For backward compatibility to the 12/20/12 Galaxy release.
- prior_installation_required = 'False'
- only_if_compiling_contained_td = 'False'
- if len( components_list ) == 4:
- prior_installation_required = 'False'
- only_if_compiling_contained_td = 'False'
- elif len( components_list ) == 5:
- prior_installation_required = components_list[ 4 ]
- only_if_compiling_contained_td = 'False'
- if not util.asbool( only_if_compiling_contained_td ):
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- for components_list in val:
- try:
- only_if_compiling_contained_td = components_list[ 5 ]
- except:
- only_if_compiling_contained_td = 'False'
- # Skip listing a repository dependency if it is required only to compile a tool dependency
- # defined for the dependent repository (see above comment).
- if not util.asbool( only_if_compiling_contained_td ):
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- else:
- # We have a single repository with no dependencies.
- components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
- required_repository_tups.append( components_list )
- if required_repository_tups:
- # The value of required_repository_tups is a list of tuples, so we need to encode it.
- encoded_required_repository_tups = []
- for required_repository_tup in required_repository_tups:
- # Convert every item in required_repository_tup to a string.
- required_repository_tup = [ str( item ) for item in required_repository_tup ]
- encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
- encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
- encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
- if suc.is_tool_shed_client( app ):
- # Handle secure / insecure Tool Shed URL protocol changes and port changes.
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
- url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
- # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
- url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
- request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
- response = urllib2.urlopen( request ).read()
- if response:
- try:
- required_repo_info_dict = json.loads( response )
- except Exception, e:
- log.exception( e )
- return all_repo_info_dicts
- required_repo_info_dicts = []
- for k, v in required_repo_info_dict.items():
- if k == 'repo_info_dicts':
- encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
- for encoded_dict_str in encoded_dict_strings:
- decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
- required_repo_info_dicts.append( decoded_dict )
- else:
- if k not in all_required_repo_info_dict:
- all_required_repo_info_dict[ k ] = v
- else:
- if v and not all_required_repo_info_dict[ k ]:
- all_required_repo_info_dict[ k ] = v
- if required_repo_info_dicts:
- for required_repo_info_dict in required_repo_info_dicts:
- # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
- # of dictionaries, each of which has a single entry. We'll check keys here rather than
- # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
- # lists of discovered repository dependencies, but these lists will be empty in the
- # required_repo_info_dict since dependency discovery has not yet been performed for these
- # dictionaries.
- required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
- all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
- if required_repo_info_dict_key not in all_repo_info_dicts_keys:
- all_repo_info_dicts.append( required_repo_info_dict )
- all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
- return all_required_repo_info_dict
-
-def repository_dependency_needed_only_for_compiling_tool_dependency( repository, repository_dependency ):
- for rd_tup in repository.tuples_of_repository_dependencies_needed_for_compiling_td:
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_tup
- # TODO: we may discover that we need to check more than just installed_changeset_revision and changeset_revision here, in which
- # case we'll need to contact the tool shed to get the list of all possible changeset_revisions.
- cleaned_tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url( tool_shed )
- cleaned_repository_dependency_tool_shed = \
- common_util.remove_protocol_and_port_from_tool_shed_url( str( repository_dependency.tool_shed ) )
- if cleaned_repository_dependency_tool_shed == cleaned_tool_shed and \
- repository_dependency.name == name and \
- repository_dependency.owner == owner and \
- ( repository_dependency.installed_changeset_revision == changeset_revision or \
- repository_dependency.changeset_revision == changeset_revision ):
- return True
- return False
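The helpers deleted above are not gone: as the hunks below show, get_required_repo_info_dicts moves into repository_dependency_util, while the dependency and activation helpers become methods on the installed repository manager hung off the Galaxy application object. A minimal sketch of the resulting Galaxy-side call pattern, assuming an installed tool shed `repository` object is in hand:

    irm = trans.app.installed_repository_manager
    irm.activate_repository( repository )
    installed_rd, missing_rd = irm.get_installed_and_missing_repository_dependencies( repository )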
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/common_util.py
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -120,12 +120,12 @@
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
-def generate_clone_url_from_repo_info_tup( trans, repo_info_tup ):
+def generate_clone_url_from_repo_info_tup( app, repo_info_tup ):
"""Generate the URL for cloning a repository given a tuple of toolshed, name, owner, changeset_revision."""
# Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False]
toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
parse_repository_dependency_tuple( repo_info_tup )
- tool_shed_url = get_tool_shed_url_from_tool_shed_registry( trans.app, toolshed )
+ tool_shed_url = get_tool_shed_url_from_tool_shed_registry( app, toolshed )
# Don't include the changeset_revision in clone urls.
return url_join( tool_shed_url, 'repos', owner, name )
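With the transaction dependency removed, callers of generate_clone_url_from_repo_info_tup now pass the application object through directly. A hedged sketch of an updated call site, with `rd_tup` being a repository dependency tuple like the example in the docstring above:

    # pass the app (e.g. trans.app) where the trans object was passed before
    tmp_clone_url = common_util.generate_clone_url_from_repo_info_tup( trans.app, rd_tup )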
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/export_util.py
--- a/lib/tool_shed/util/export_util.py
+++ b/lib/tool_shed/util/export_util.py
@@ -13,7 +13,6 @@
from galaxy.util.odict import odict
from tool_shed.util import basic_util
from tool_shed.util import commit_util
-from tool_shed.util import common_install_util
from tool_shed.util import common_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
@@ -262,7 +261,9 @@
str( repository.user.username ),
repository_dependencies,
None )
- all_required_repo_info_dict = common_install_util.get_required_repo_info_dicts( trans.app, tool_shed_url, [ repo_info_dict ] )
+ all_required_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( trans.app,
+ tool_shed_url,
+ [ repo_info_dict ] )
all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
return all_repo_info_dicts
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -15,7 +15,6 @@
from tool_shed.repository_types.metadata import TipOnly
from tool_shed.util import basic_util
from tool_shed.util import common_util
-from tool_shed.util import common_install_util
from tool_shed.util import container_util
from tool_shed.util import hg_util
from tool_shed.util import readme_util
@@ -1679,7 +1678,7 @@
readme_files_dict = None
# Handle repository dependencies.
installed_repository_dependencies, missing_repository_dependencies = \
- common_install_util.get_installed_and_missing_repository_dependencies( trans, repository )
+ trans.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository )
# Handle the current repository's tool dependencies.
repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
# Make sure to display missing tool dependencies as well.
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -1,13 +1,14 @@
+import json
import logging
import os
+import urllib
+import urllib2
from galaxy.util import asbool
-from galaxy.util import json
from galaxy.util import listify
import tool_shed.util.shed_util_common as suc
from tool_shed.util import common_util
-from tool_shed.util import common_install_util
from tool_shed.util import container_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
@@ -127,7 +128,7 @@
# Discover all repository dependencies and retrieve information for installing them. Even if the user elected
# to not install repository dependencies we have to make sure all repository dependency objects exist so that
# the appropriate repository dependency relationships can be built.
- all_required_repo_info_dict = common_install_util.get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts )
+ all_required_repo_info_dict = get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts )
all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
if not all_repo_info_dicts:
# No repository dependencies were discovered so process the received repositories.
@@ -171,7 +172,7 @@
elif repository_db_record.status in [ install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
# The current tool shed repository is deactivated, so updating its database record is not necessary - just activate it.
log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
- common_install_util.activate_repository( app, repository_db_record )
+ app.installed_repository_manager.activate_repository( repository_db_record )
# No additional updates to the database record are necessary.
can_update_db_record = False
elif repository_db_record.status not in [ install_model.ToolShedRepository.installation_status.NEW ]:
@@ -368,7 +369,7 @@
print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
return ''
if len( raw_text ) > 2:
- encoded_text = json.from_json_string( raw_text )
+ encoded_text = json.loads( raw_text )
text = encoding_util.tool_shed_decode( encoded_text )
else:
text = ''
@@ -486,36 +487,118 @@
( name, owner ) )
return dependency_tups
-def get_repository_dependency_tups_for_installed_repository( app, repository, dependency_tups=None, status=None ):
+def get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ):
"""
- Return a list of of tuples defining tool_shed_repository objects (whose status can be anything) required by the
- received repository. The returned list defines the entire repository dependency tree. This method is called
- only from Galaxy.
+ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
+    them to the list. All repository_dependencies entries in each of the received repo_info_dicts include
+ all required repositories, so only one pass through this method is required to retrieve all repository
+ dependencies.
"""
- if dependency_tups is None:
- dependency_tups = []
- repository_tup = get_repository_tuple_for_installed_repository_manager( repository )
- for rrda in repository.required_repositories:
- repository_dependency = rrda.repository_dependency
- required_repository = repository_dependency.repository
- if status is None or required_repository.status == status:
- required_repository_tup = get_repository_tuple_for_installed_repository_manager( required_repository )
- if required_repository_tup == repository_tup:
- # We have a circular repository dependency relationship, skip this entry.
- continue
- if required_repository_tup not in dependency_tups:
- dependency_tups.append( required_repository_tup )
- return get_repository_dependency_tups_for_installed_repository( app,
- required_repository,
- dependency_tups=dependency_tups )
- return dependency_tups
-
-def get_repository_tuple_for_installed_repository_manager( repository ):
- return ( str( repository.tool_shed ),
- str( repository.name ),
- str( repository.owner ),
- str( repository.installed_changeset_revision ) )
-
+ all_required_repo_info_dict = {}
+ all_repo_info_dicts = []
+ if repo_info_dicts:
+ # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
+ # shed to discover repository ids.
+ required_repository_tups = []
+ for repo_info_dict in repo_info_dicts:
+ if repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( repo_info_dict )
+ for repository_name, repo_info_tup in repo_info_dict.items():
+ description, \
+ repository_clone_url, \
+ changeset_revision, \
+ ctx_rev, \
+ repository_owner, \
+ repository_dependencies, \
+ tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tup )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ repository_components_tuple = container_util.get_components_from_key( key )
+ components_list = suc.extract_components_from_tuple( repository_components_tuple )
+ # Skip listing a repository dependency if it is required only to compile a tool dependency
+ # defined for the dependent repository since in this case, the repository dependency is really
+ # a dependency of the dependent repository's contained tool dependency, and only if that
+ # tool dependency requires compilation.
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ prior_installation_required = 'False'
+ only_if_compiling_contained_td = 'False'
+ if len( components_list ) == 4:
+ prior_installation_required = 'False'
+ only_if_compiling_contained_td = 'False'
+ elif len( components_list ) == 5:
+ prior_installation_required = components_list[ 4 ]
+ only_if_compiling_contained_td = 'False'
+ if not asbool( only_if_compiling_contained_td ):
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ for components_list in val:
+ try:
+ only_if_compiling_contained_td = components_list[ 5 ]
+ except:
+ only_if_compiling_contained_td = 'False'
+ # Skip listing a repository dependency if it is required only to compile a tool dependency
+ # defined for the dependent repository (see above comment).
+ if not asbool( only_if_compiling_contained_td ):
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ else:
+ # We have a single repository with no dependencies.
+ components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
+ required_repository_tups.append( components_list )
+ if required_repository_tups:
+ # The value of required_repository_tups is a list of tuples, so we need to encode it.
+ encoded_required_repository_tups = []
+ for required_repository_tup in required_repository_tups:
+ # Convert every item in required_repository_tup to a string.
+ required_repository_tup = [ str( item ) for item in required_repository_tup ]
+ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+ encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+ encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+ if suc.is_tool_shed_client( app ):
+ # Handle secure / insecure Tool Shed URL protocol changes and port changes.
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
+ url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
+ # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
+ url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
+ request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
+ response = urllib2.urlopen( request ).read()
+ if response:
+ try:
+ required_repo_info_dict = json.loads( response )
+ except Exception, e:
+ log.exception( e )
+ return all_repo_info_dicts
+ required_repo_info_dicts = []
+ for k, v in required_repo_info_dict.items():
+ if k == 'repo_info_dicts':
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ else:
+ if k not in all_required_repo_info_dict:
+ all_required_repo_info_dict[ k ] = v
+ else:
+ if v and not all_required_repo_info_dict[ k ]:
+ all_required_repo_info_dict[ k ] = v
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
+ # of dictionaries, each of which has a single entry. We'll check keys here rather than
+ # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
+ # lists of discovered repository dependencies, but these lists will be empty in the
+ # required_repo_info_dict since dependency discovery has not yet been performed for these
+ # dictionaries.
+ required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
+ all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
+ if required_repo_info_dict_key not in all_repo_info_dicts_keys:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
+ return all_required_repo_info_dict
+
def get_updated_changeset_revisions_for_repository_dependencies( app, key_rd_dicts ):
updated_key_rd_dicts = []
for key_rd_dict in key_rd_dicts:
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -300,31 +300,6 @@
env_sh_file_path = os.path.join( env_sh_file_dir, 'env.sh' )
return env_sh_file_path
-def get_runtime_dependent_tool_dependency_tuples( app, tool_dependency, status=None ):
- """
- Return the list of tool dependency objects that require the received tool dependency at run time. The returned
- list will be filtered by the received status if it is not None. This method is called only from Galaxy.
- """
- runtime_dependent_tool_dependency_tups = []
- required_env_shell_file_path = tool_dependency.get_env_shell_file_path( app )
- if required_env_shell_file_path:
- required_env_shell_file_path = os.path.abspath( required_env_shell_file_path )
- if required_env_shell_file_path is not None:
- for td in app.install_model.context.query( app.install_model.ToolDependency ):
- if status is None or td.status == status:
- env_shell_file_path = td.get_env_shell_file_path( app )
- if env_shell_file_path is not None:
- try:
- contents = open( env_shell_file_path, 'r' ).read()
- except Exception, e:
- contents = None
- log.debug( 'Error reading file %s, so cannot determine if package %s requires package %s at run time: %s' % \
- ( str( env_shell_file_path ), str( td.name ), str( tool_dependency.name ), str( e ) ) )
- if contents is not None and contents.find( required_env_shell_file_path ) >= 0:
- td_tuple = get_tool_dependency_tuple_for_installed_repository_manager( td )
- runtime_dependent_tool_dependency_tups.append( td_tuple )
- return runtime_dependent_tool_dependency_tups
-
def get_tool_dependency( trans, id ):
"""Get a tool_dependency from the database via id"""
return trans.install_model.context.query( trans.install_model.ToolDependency ).get( trans.security.decode_id( id ) )
@@ -389,13 +364,6 @@
repository_name,
repository_changeset_revision ) )
-def get_tool_dependency_tuple_for_installed_repository_manager( tool_dependency ):
- if tool_dependency.type is None:
- type = None
- else:
- type = str( tool_dependency.type )
- return ( tool_dependency.tool_shed_repository_id, str( tool_dependency.name ), str( tool_dependency.version ), type )
-
def handle_tool_dependency_installation_error( app, tool_dependency, error_message, remove_installation_path=False ):
# Since there was an installation error, remove the installation directory because the install_package method uses
# this: "if os.path.exists( install_dir ):". Setting remove_installation_path to True should rarely occur. It is
diff -r 404fcf47260db528f04b2a6ce75ede9015f4228b -r 33fdaf7a0dcb7b6149dd5f1e70d2e687d43721a6 templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
--- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
+++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
@@ -108,12 +108,9 @@
        </div>
        <div style="clear: both"></div>
        <br/>
- <%
- from tool_shed.util.repository_dependency_util import get_repository_tuple_for_installed_repository_manager
- from tool_shed.util.tool_dependency_util import get_tool_dependency_tuple_for_installed_repository_manager
-
+ <%
irm = trans.app.installed_repository_manager
- repository_tup = get_repository_tuple_for_installed_repository_manager( repository )
+ repository_tup = irm.get_repository_tuple_for_installed_repository_manager( repository )
# Get installed repositories that this repository requires.
installed_dependent_repositories = []
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7b209e06ddb9/
Changeset: 7b209e06ddb9
User: natefoo
Date: 2014-06-18 21:55:32
Summary: Add some new features to job execution:
- Runner states - Allow runner plugins to provide job finish/failure
  conditions back to Galaxy so that actions can be taken when specific
  conditions occur. Currently only the WALLTIME_REACHED state is defined.
  This can be set on the (Asynchronous)JobState's `runner_state` attribute.
  Only the slurm runner currently does this.
- Runner state handlers - Pluggable interface for defining actions to take
  when runner states occur. Any Python file in
  galaxy.jobs.runners.state_handlers whose name does not start with an
  underscore will be loaded, but handler function names should match the
  step in the job lifecycle where they should be used (a minimal handler
  sketch follows this summary). Only the 'failure' method is currently
  implemented, but adding more would be trivial. Clever parameterization
  a la the dynamic runner would be a nice improvement here.
- Destination resubmission - Destinations in the job config can specify a new
destination that jobs should be resubmitted to under certain conditions
(currently the only condition implemented is walltime_reached on the
original destination...)
- Resubmit on walltime reached state handler plugin - The actual resubmission
implementation.
- RESUBMITTED Job state - Allows resubmitted jobs to bypass the normal ready
to run checks and begin execution immediately.
- RESUBMITTED DatasetInstance state - This was the best method Carl and I
could come up with for persisting the resubmitted state so that it would be
visible in the UI. It's not perfect but I didn't want to alter the schema
and the only place it could go (job table) is not eagerloaded on history
  status updates. The resubmission code will not actually set this state yet
(it is commented out) until the UI can cope with it. Bonus: once this is
done we can pretty easily add a "job concurrency limit reached" to give
users a visual cue on jobs waiting for that reason.
This has been tested pretty extensively with job recovery, concurrency limits
and multiprocess setups, which is to say that it will surely fail miserably in
production.
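For illustration, here is a minimal sketch of a drop-in state handler module as the loader would discover it. The module name (notify.py) and its log-only behavior are hypothetical; the failure entry point, its (app, job_runner, job_state) signature, and the runner_state / runner_state_handled attributes come from the resubmit handler added in this changeset:

    # lib/galaxy/jobs/runners/state_handlers/notify.py (hypothetical example)
    import logging

    __all__ = [ 'failure' ]

    log = logging.getLogger( __name__ )

    def failure( app, job_runner, job_state ):
        # React only to the single runner state defined so far.
        if getattr( job_state, 'runner_state', None ) != job_state.runner_states.WALLTIME_REACHED:
            return
        log.info( '(%s) job hit its walltime on destination %s',
                  job_state.job_id, job_state.job_wrapper.job_destination.id )
        # Leaving job_state.runner_state_handled False means later handlers
        # (e.g. resubmit) still run and the normal failure path still applies.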
Affected #: 10 files
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -323,6 +323,30 @@
             -->
             <param id="request_cpus">8</param>
        </destination>
+
+ <!-- Jobs that hit the walltime on one destination can be automatically
+ resubmitted to another destination. Walltime detection is
+ currently only implemented in the slurm runner.
+
+ Multiple resubmit tags can be defined, the first resubmit matching
+ the terminal condition of a job will be used.
+
+ The 'condition' attribute is optional, if not present, the
+ resubmit destination will be used for all conditions. Currently,
+ only the "walltime_reached" condition is implemented.
+
+ The 'handler' tag is optional, if not present, the job's original
+ handler will be reused for the resubmitted job.
+ -->
+ <destination id="short_fast" runner="slurm">
+ <param id="nativeSpecification">--time=00:05:00 --nodes=1</param>
+ <resubmit condition="walltime_reached" destination="long_slow" handler="sge_handler" />
+ </destination>
+ <destination id="long_slow" runner="sge">
+ <!-- The destination that you resubmit jobs to can be any runner type -->
+ <param id="nativeSpecification">-l h_rt=96:00:00</param>
+ </destination>
+
    </destinations>
    <resources default="default">
        <!-- Group different parameters defined in job_resource_params_conf.xml
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -57,6 +57,8 @@
self['runner'] = None
self['legacy'] = False
self['converted'] = False
+ self['env'] = []
+ self['resubmit'] = []
# dict is appropriate (rather than a bunch) since keys may not be valid as attributes
self['params'] = dict()
super(JobDestination, self).__init__(**kwds)
@@ -187,6 +189,7 @@
job_destination = JobDestination(**dict(destination.items()))
job_destination['params'] = self.__get_params(destination)
job_destination['env'] = self.__get_envs(destination)
+ job_destination['resubmit'] = self.__get_resubmits(destination)
self.destinations[id] = (job_destination,)
if job_destination.tags is not None:
for tag in job_destination.tags:
@@ -420,7 +423,7 @@
def __get_envs(self, parent):
"""Parses any child <env> tags in to a dictionary suitable for persistence.
- :param parent: Parent element in which to find child <param> tags.
+ :param parent: Parent element in which to find child <env> tags.
:type parent: ``xml.etree.ElementTree.Element``
:returns: dict
@@ -436,6 +439,23 @@
) )
return rval
+ def __get_resubmits(self, parent):
+ """Parses any child <resubmit> tags in to a dictionary suitable for persistence.
+
+ :param parent: Parent element in which to find child <resubmit> tags.
+ :type parent: ``xml.etree.ElementTree.Element``
+
+ :returns: dict
+ """
+ rval = []
+ for resubmit in parent.findall('resubmit'):
+ rval.append( dict(
+ condition=resubmit.get('condition'),
+ destination=resubmit.get('destination'),
+ handler=resubmit.get('handler')
+ ) )
+ return rval
+
@property
def default_job_tool_configuration(self):
"""The default JobToolConfiguration, used if a tool does not have an explicit defintion in the configuration. It consists of a reference to the default handler and default destination.
@@ -909,6 +929,17 @@
job.state = job.states.PAUSED
self.sa_session.add( job )
+ def mark_as_resubmitted( self ):
+ job = self.get_job()
+ self.sa_session.refresh( job )
+ # TODO: Enable this code once a UI for resubmitted datasets exists
+ #for dataset in [ dataset_assoc.dataset for dataset_assoc in job.output_datasets + job.output_library_datasets ]:
+ # dataset._state = model.Dataset.states.RESUBMITTED
+ # self.sa_session.add( dataset )
+ job.state = model.Job.states.RESUBMITTED
+ self.sa_session.add( job )
+ self.sa_session.flush()
+
def change_state( self, state, info=False ):
job = self.get_job()
self.sa_session.refresh( job )
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -128,7 +128,6 @@
self.queue.put( ( job.id, job.tool_id ) )
elif job.job_runner_name is not None and job.job_runner_external_id is not None and job.destination_id is None:
# This is the first start after upgrading from URLs to destinations, convert the URL to a destination and persist
- # TODO: test me extensively
job_wrapper = self.job_wrapper( job )
job_destination = self.dispatcher.url_to_destination(job.job_runner_name)
if job_destination.id is None:
@@ -146,7 +145,17 @@
else:
# Already dispatched and running
job_wrapper = self.job_wrapper( job )
- job_wrapper.job_runner_mapper.cached_job_destination = JobDestination(id=job.destination_id, runner=job.job_runner_name, params=job.destination_params)
+ # Use the persisted destination as its params may differ from
+ # what's in the job_conf xml
+ job_destination = JobDestination(id=job.destination_id, runner=job.job_runner_name, params=job.destination_params)
+ # resubmits are not persisted (it's a good thing) so they
+ # should be added back to the in-memory destination on startup
+ try:
+ config_job_destination = self.app.job_config.get_destination( job.destination_id )
+ job_destination.resubmit = config_job_destination.resubmit
+ except KeyError:
+ log.warning( '(%s) Recovered destination id (%s) does not exist in job config (but this may be normal in the case of a dynamically generated destination)', job.id, job.destination_id )
+ job_wrapper.job_runner_mapper.cached_job_destination = job_destination
self.dispatcher.recover( job, job_wrapper )
if self.sa_session.dirty:
self.sa_session.flush()
@@ -177,6 +186,7 @@
"""
# Pull all new jobs from the queue at once
jobs_to_check = []
+ resubmit_jobs = []
if self.track_jobs_in_database:
# Clear the session so we get fresh states for job and all datasets
self.sa_session.expunge_all()
@@ -215,6 +225,11 @@
~model.Job.table.c.id.in_(hda_not_ready),
~model.Job.table.c.id.in_(ldda_not_ready))) \
.order_by(model.Job.id).all()
+ # Fetch all "resubmit" jobs
+ resubmit_jobs = self.sa_session.query(model.Job).enable_eagerloads(False) \
+ .filter(and_((model.Job.state == model.Job.states.RESUBMITTED),
+ (model.Job.handler == self.app.config.server_name))) \
+ .order_by(model.Job.id).all()
else:
# Get job objects and append to watch queue for any which were
# previously waiting
@@ -233,6 +248,14 @@
pass
# Ensure that we get new job counts on each iteration
self.__clear_job_count()
+ # Check resubmit jobs first so that limits of new jobs will still be enforced
+ for job in resubmit_jobs:
+ log.debug( '(%s) Job was resubmitted and is being dispatched immediately', job.id )
+ # Reassemble resubmit job destination from persisted value
+ jw = self.job_wrapper( job )
+ jw.job_runner_mapper.cached_job_destination = JobDestination( id=job.destination_id, runner=job.job_runner_name, params=job.destination_params )
+ self.increase_running_job_count(job.user_id, jw.job_destination.id)
+ self.dispatcher.put( jw )
# Iterate over new and waiting jobs and look for any that are
# ready to run
new_waiting_jobs = []
@@ -358,7 +381,11 @@
# This could have been incremented by a previous job dispatched on this iteration, even if we're not caching
rval = self.user_job_count.get(user_id, 0)
if not self.app.config.cache_user_job_count:
- result = self.sa_session.execute(select([func.count(model.Job.table.c.id)]).where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING)), (model.Job.table.c.user_id == user_id))))
+ result = self.sa_session.execute(select([func.count(model.Job.table.c.id)]) \
+ .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED,
+ model.Job.states.RUNNING,
+ model.Job.states.RESUBMITTED)),
+ (model.Job.table.c.user_id == user_id))))
for row in result:
# there should only be one row
rval += row[0]
@@ -369,8 +396,11 @@
if self.user_job_count is None and self.app.config.cache_user_job_count:
self.user_job_count = {}
query = self.sa_session.execute(select([model.Job.table.c.user_id, func.count(model.Job.table.c.user_id)]) \
- .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING)), (model.Job.table.c.user_id is not None))) \
- .group_by(model.Job.table.c.user_id))
+ .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED,
+ model.Job.states.RUNNING,
+ model.Job.states.RESUBMITTED)),
+ (model.Job.table.c.user_id is not None))) \
+ .group_by(model.Job.table.c.user_id))
for row in query:
self.user_job_count[row[0]] = row[1]
elif self.user_job_count is None:
@@ -428,6 +458,9 @@
self.total_job_count_per_destination[destination_id] = self.total_job_count_per_destination.get(destination_id, 0) + 1
def __check_user_jobs( self, job, job_wrapper ):
+ # TODO: Update output datasets' _state = LIMITED or some such new
+ # state, so the UI can reflect what jobs are waiting due to concurrency
+ # limits
if job.user:
# Check the hard limit first
if self.app.job_config.limits.registered_user_concurrent_jobs:
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/mapper.py
--- a/lib/galaxy/jobs/mapper.py
+++ b/lib/galaxy/jobs/mapper.py
@@ -164,8 +164,9 @@
else:
raise Exception( "Unhandled dynamic job runner type specified - %s" % expand_type )
- def __cache_job_destination( self, params ):
- raw_job_destination = self.job_wrapper.tool.get_job_destination( params )
+ def __cache_job_destination( self, params, raw_job_destination=None ):
+ if raw_job_destination is None:
+ raw_job_destination = self.job_wrapper.tool.get_job_destination( params )
#raw_job_destination_id_or_tag = self.job_wrapper.tool.get_job_destination_id_or_tag( params )
if raw_job_destination.runner == DYNAMIC_RUNNER_NAME:
job_destination = self.__handle_dynamic_job_destination( raw_job_destination )
@@ -183,7 +184,6 @@
self.__cache_job_destination( params )
return self.cached_job_destination
- #def get_job_destination_id_or_tag( self, params ):
- # if not hasattr( self, 'cached_job_destination_id_or_tag' ):
- # self.__cache_job_destination( params )
- # return self.cached_job_destination_id_or_tag
+ def cache_job_destination( self, raw_job_destination ):
+ self.__cache_job_destination( None, raw_job_destination=raw_job_destination )
+ return self.cached_job_destination
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -17,9 +17,12 @@
from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size
from galaxy.util import in_directory
from galaxy.util import ParamsWithSpecs
+from galaxy.util.bunch import Bunch
from galaxy.jobs.runners.util.job_script import job_script
from galaxy.jobs.runners.util.env import env_to_statement
+from .state_handler_factory import build_state_handlers
+
log = logging.getLogger( __name__ )
STOP_SIGNAL = object()
@@ -57,6 +60,7 @@
if kwargs:
log.debug( 'Loading %s with params: %s', self.runner_name, kwargs )
self.runner_params = RunnerParams( specs=runner_param_specs, params=kwargs )
+ self.runner_state_handlers = build_state_handlers()
def _init_worker_threads(self):
"""Start ``nworkers`` worker threads.
@@ -310,11 +314,31 @@
job_info
)
+ def _handle_runner_state( self, runner_state, job_state ):
+ try:
+ for handler in self.runner_state_handlers.get(runner_state, []):
+ handler(self.app, self, job_state)
+ if job_state.runner_state_handled:
+ break
+ except:
+ log.exception('Caught exception in runner state handler:')
+
+ def mark_as_resubmitted( self, job_state ):
+ job_state.job_wrapper.mark_as_resubmitted()
+ if not self.app.config.track_jobs_in_database:
+ job_state.job_wrapper.change_state( model.Job.states.QUEUED )
+ self.app.job_manager.job_handler.dispatcher.put( job_state.job_wrapper )
+
class JobState( object ):
"""
Encapsulate state of jobs.
"""
+ runner_states = Bunch(
+ WALLTIME_REACHED = 'walltime_reached'
+ )
+ def __init__( self ):
+ self.runner_state_handled = False
def set_defaults( self, files_dir ):
if self.job_wrapper is not None:
@@ -348,6 +372,7 @@
"""
def __init__( self, files_dir=None, job_wrapper=None, job_id=None, job_file=None, output_file=None, error_file=None, exit_code_file=None, job_name=None, job_destination=None ):
+ super( AsynchronousJobState, self ).__init__()
self.old_state = None
self.running = False
self.check_count = 0
@@ -513,9 +538,13 @@
def fail_job( self, job_state ):
if getattr( job_state, 'stop_job', True ):
self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )
- job_state.job_wrapper.fail( getattr( job_state, 'fail_message', 'Job failed' ) )
- if self.app.config.cleanup_job == "always":
- job_state.cleanup()
+ self._handle_runner_state( 'failure', job_state )
+        # Not convinced this is the best way to indicate this state, but
+        # something is necessary
+ if not job_state.runner_state_handled:
+ job_state.job_wrapper.fail( getattr( job_state, 'fail_message', 'Job failed' ) )
+ if self.app.config.cleanup_job == "always":
+ job_state.cleanup()
def mark_as_finished(self, job_state):
self.work_queue.put( ( self.finish_job, job_state ) )
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/runners/slurm.py
--- a/lib/galaxy/jobs/runners/slurm.py
+++ b/lib/galaxy/jobs/runners/slurm.py
@@ -43,7 +43,9 @@
log.warning( '(%s/%s) Job not found, assuming job check exceeded MinJobAge and completing as successful', ajs.job_wrapper.get_id_tag(), ajs.job_id )
drmaa_state = self.drmaa_job_states.DONE
elif job_info['JobState'] == 'TIMEOUT':
+ log.info( '(%s/%s) Job hit walltime', ajs.job_wrapper.get_id_tag(), ajs.job_id )
ajs.fail_message = "This job was terminated because it ran longer than the maximum allowed job run time."
+ ajs.runner_state = ajs.runner_states.WALLTIME_REACHED
elif job_info['JobState'] == 'NODE_FAIL':
log.warning( '(%s/%s) Job failed due to node failure, attempting resubmission', ajs.job_wrapper.get_id_tag(), ajs.job_id )
ajs.job_wrapper.change_state( model.Job.states.QUEUED, info = 'Job was resubmitted due to node failure' )
@@ -53,13 +55,16 @@
except:
ajs.fail_message = "This job failed due to a cluster node failure, and an attempt to resubmit the job failed."
elif job_info['JobState'] == 'CANCELLED':
+ log.info( '(%s/%s) Job was cancelled via slurm (e.g. with scancel(1))', ajs.job_wrapper.get_id_tag(), ajs.job_id )
ajs.fail_message = "This job failed because it was cancelled by an administrator."
else:
+ log.warning( '(%s/%s) Job failed due to unknown reasons, JobState was: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, job_info['JobState'] )
ajs.fail_message = "This job failed for reasons that could not be determined."
if drmaa_state == self.drmaa_job_states.FAILED:
ajs.fail_message += '\nPlease click the bug icon to report this problem if you need help.'
ajs.stop_job = False
self.work_queue.put( ( self.fail_job, ajs ) )
+ return
except Exception, e:
log.exception( '(%s/%s) Unable to inspect failed slurm job using scontrol, job will be unconditionally failed: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, e )
super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state )
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/runners/state_handler_factory.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/state_handler_factory.py
@@ -0,0 +1,54 @@
+# Shamelessly stolen from the LWR.
+
+import os
+import logging
+
+import galaxy.jobs.runners.state_handlers
+
+
+log = logging.getLogger(__name__)
+
+def build_state_handlers():
+ return _get_state_handlers_dict()
+
+def _get_modules():
+ """
+ >>> 'galaxy.jobs.runners.state_handlers.resubmit' in _get_modules()
+ True
+ """
+ state_handlers_dir = galaxy.jobs.runners.state_handlers.__path__[0]
+ module_names = []
+ for fname in os.listdir(state_handlers_dir):
+ if not(fname.startswith("_")) and fname.endswith(".py"):
+ module_name = "galaxy.jobs.runners.state_handlers.%s" % fname[:-len(".py")]
+ module_names.append(module_name)
+ log.debug('module_names: %s', module_names)
+ return module_names
+
+def _load_modules():
+ modules = []
+ for module_name in _get_modules():
+ try:
+ log.debug('Importing %s', module_name)
+ module = __import__(module_name)
+ for comp in module_name.split(".")[1:]:
+ module = getattr(module, comp)
+ modules.append(module)
+ except BaseException as exception:
+ exception_str = str(exception)
+ message = "%s module could not be loaded: %s" % (module_name, exception_str)
+ log.warn(message)
+ continue
+
+ return modules
+
+def _get_state_handlers_dict():
+ state_handlers = {}
+ for module in _load_modules():
+ for func in module.__all__:
+ if func not in state_handlers:
+ state_handlers[func] = []
+ state_handlers[func].append(getattr(module, func))
+ log.debug("Loaded '%s' state handler from module %s", func, module.__name__)
+ return state_handlers
+
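As a usage sketch: the factory returns a dict keyed by lifecycle step name, which is exactly what _handle_runner_state in the base runner iterates over. The app / job_runner / job_state names below are placeholders for the objects the runner supplies:

    from galaxy.jobs.runners.state_handler_factory import build_state_handlers

    handlers = build_state_handlers()
    # e.g. { 'failure': [ <function failure from state_handlers.resubmit> ] }
    for handler in handlers.get( 'failure', [] ):
        handler( app, job_runner, job_state )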
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/jobs/runners/state_handlers/resubmit.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/state_handlers/resubmit.py
@@ -0,0 +1,39 @@
+import logging
+from galaxy import model
+
+
+__all__ = ['failure']
+
+log = logging.getLogger(__name__)
+
+
+def failure(app, job_runner, job_state):
+ if getattr( job_state, 'runner_state', None ) and job_state.runner_state == job_state.runner_states.WALLTIME_REACHED:
+ # Intercept jobs that hit the walltime and have a walltime or nonspecific resubmit destination configured
+ for resubmit in job_state.job_destination.get('resubmit'):
+ if resubmit.get('condition', None) and resubmit['condition'] != 'walltime_reached':
+ continue # There is a resubmit defined for the destination but its condition is not for walltime_reached
+ log.info("(%s/%s) Job will be resubmitted to '%s' because it reached the walltime at the '%s' destination", job_state.job_wrapper.job_id, job_state.job_id, resubmit['destination'], job_state.job_wrapper.job_destination.id )
+ # fetch JobDestination for the id or tag
+ new_destination = app.job_config.get_destination(resubmit['destination'])
+ # Resolve dynamic if necessary
+ new_destination = job_state.job_wrapper.job_runner_mapper.cache_job_destination( new_destination )
+ # Reset job state
+ job = job_state.job_wrapper.get_job()
+ if resubmit.get('handler', None):
+ log.debug('(%s/%s) Job reassigned to handler %s', job_state.job_wrapper.job_id, job_state.job_id, resubmit['handler'])
+ job.set_handler(resubmit['handler'])
+ job_runner.sa_session.add( job )
+ # Is this safe to do here?
+ job_runner.sa_session.flush()
+ # Cache the destination to prevent rerunning dynamic after resubmit
+ job_state.job_wrapper.job_runner_mapper.cached_job_destination = new_destination
+ job_state.job_wrapper.set_job_destination(new_destination)
+ # Clear external ID (state change below flushes the change)
+ job.job_runner_external_id = None
+ # Allow the UI to query for resubmitted state
+ if job.params is None:
+ job.params = {}
+ job_state.runner_state_handled = True
+ job_runner.mark_as_resubmitted( job_state )
+ return
diff -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e -r 7b209e06ddb944e953d340754439f4e3e5dc339d lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -295,6 +295,7 @@
_text_metric = JobMetricText
states = Bunch( NEW = 'new',
+ RESUBMITTED = 'resubmitted',
UPLOAD = 'upload',
WAITING = 'waiting',
QUEUED = 'queued',
@@ -1299,7 +1300,9 @@
DISCARDED = 'discarded',
PAUSED = 'paused',
SETTING_METADATA = 'setting_metadata',
- FAILED_METADATA = 'failed_metadata' )
+ FAILED_METADATA = 'failed_metadata',
+ RESUBMITTED = 'resubmitted' )
+ # failed_metadata and resubmitted are only valid as DatasetInstance states currently
conversion_messages = Bunch( PENDING = "pending",
NO_DATA = "no data",
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/49b11be8c5c4/
Changeset: 49b11be8c5c4
User: greg
Date: 2014-06-18 21:28:20
Summary: Eliminate the use of the Galaxy web transaction object in Tool Shed utility functions related to importing a repository capsule into a Tool Shed, and move some hg-related functions from the commit_util module to the hg_util module.
Affected #: 15 files
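The shape of the refactoring is visible in the API hunks below; as a hedged before/after sketch of the narrowed interfaces:

    # before: utility functions received the entire web transaction object
    capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
    # after: they receive only the pieces of state they actually use
    capsule_dict = import_util.extract_capsule_files( **capsule_dict )
    repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans.app,
                                                                                     trans.user,
                                                                                     trans.user_is_admin(),
                                                                                     repository_info_dicts )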
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -419,7 +419,7 @@
try:
invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans.app, repository_id )
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans,
+ message = tool_util.generate_message_for_invalid_tools( trans.app,
invalid_file_tups,
repository,
None,
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -250,8 +250,8 @@
return {}
capsule_dict[ 'tar_archive' ] = tar_archive
capsule_dict[ 'capsule_file_name' ] = capsule_file_name
- capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
- capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
+ capsule_dict = import_util.extract_capsule_files( **capsule_dict )
+ capsule_dict = import_util.validate_capsule( **capsule_dict )
status = capsule_dict.get( 'status', 'error' )
if status == 'error':
log.debug( 'The capsule contents are invalid and cannot be imported:<br/>%s' % \
@@ -268,7 +268,10 @@
# The manifest.xml file has already been validated, so no error_message should be returned here.
repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path )
# Determine the status for each exported repository archive contained within the capsule.
- repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts )
+ repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans.app,
+ trans.user,
+ trans.user_is_admin(),
+ repository_info_dicts )
# Generate a list of repository name / import results message tuples for display after the capsule is imported.
import_results_tups = []
# Only create repositories that do not yet exist and that the current user is authorized to create. The
@@ -277,11 +280,12 @@
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
- import_results_tups = \
- repository_maintenance_util.create_repository_and_import_archive( trans,
- repository_status_info_dict,
- import_results_tups )
- import_util.check_status_and_reset_downloadable( trans, import_results_tups )
+ import_results_tups = import_util.create_repository_and_import_archive( trans.app,
+ trans.request.host,
+ trans.user,
+ repository_status_info_dict,
+ import_results_tups )
+ import_util.check_status_and_reset_downloadable( trans.app, import_results_tups )
basic_util.remove_dir( file_path )
# NOTE: the order of installation is defined in import_results_tups, but order will be lost
# when transferred to return_dict.
@@ -413,15 +417,22 @@
log.debug( "Resetting metadata on repository %s" % str( repository.name ) )
repository_id = trans.security.encode_id( repository.id )
try:
- invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ invalid_file_tups, metadata_dict = \
+ metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ message = tool_util.generate_message_for_invalid_tools( trans.app,
+ invalid_file_tups,
+ repository,
+ None,
+ as_html=False )
results[ 'unsuccessful_count' ] += 1
else:
- message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.user.username ) )
+ message = "Successfully reset metadata on repository %s owned by %s" % \
+ ( str( repository.name ), str( repository.user.username ) )
results[ 'successful_count' ] += 1
except Exception, e:
- message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.user.username ), str( e ) )
+ message = "Error resetting metadata on repository %s owned by %s: %s" % \
+ ( str( repository.name ), str( repository.user.username ), str( e ) )
results[ 'unsuccessful_count' ] += 1
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
@@ -486,14 +497,21 @@
results = dict( start_time=start_time,
repository_status=[] )
try:
- invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans,
- trans.security.encode_id( repository.id ) )
+ invalid_file_tups, metadata_dict = \
+ metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans,
+ trans.security.encode_id( repository.id ) )
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ message = tool_util.generate_message_for_invalid_tools( trans.app,
+ invalid_file_tups,
+ repository,
+ None,
+ as_html=False )
else:
- message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.user.username ) )
+ message = "Successfully reset metadata on repository %s owned by %s" % \
+ ( str( repository.name ), str( repository.user.username ) )
except Exception, e:
- message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.user.username ), str( e ) )
+ message = "Error resetting metadata on repository %s owned by %s: %s" % \
+ ( str( repository.name ), str( repository.user.username ), str( e ) )
status = '%s : %s' % ( str( repository.name ), message )
results[ 'repository_status' ].append( status )
return results
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/admin.py
--- a/lib/galaxy/webapps/tool_shed/controllers/admin.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -315,11 +315,14 @@
def manage_role_associations( self, trans, **kwd ):
"""Manage users, groups and repositories associated with a role."""
role_id = kwd.get( 'id', None )
- role = repository_maintenance_util.get_role_by_id( trans, role_id )
+ role = repository_maintenance_util.get_role_by_id( trans.app, role_id )
# We currently only have a single role associated with a repository, the repository admin role.
repository_role_association = role.repositories[ 0 ]
repository = repository_role_association.repository
- associations_dict = repository_maintenance_util.handle_role_associations( trans, role, repository, **kwd )
+ associations_dict = repository_maintenance_util.handle_role_associations( trans.app,
+ role,
+ repository,
+ **kwd )
in_users = associations_dict.get( 'in_users', [] )
out_users = associations_dict.get( 'out_users', [] )
in_groups = associations_dict.get( 'in_groups', [] )
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/hg.py
--- a/lib/galaxy/webapps/tool_shed/controllers/hg.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py
@@ -45,7 +45,7 @@
repo = hg.repository( ui.ui(), repository.repo_path( trans.app ) )
update_repository( repo, ctx_rev=None )
# Set metadata using the repository files on disk.
- error_message, status = set_repository_metadata( trans, repository )
+ error_message, status = set_repository_metadata( trans.app, trans.request.host, trans.user, repository )
if status == 'ok' and error_message:
log.debug( "Successfully reset metadata on repository %s owned by %s, but encountered problem: %s" % \
( str( repository.name ), str( repository.user.username ), error_message ) )
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -41,7 +41,6 @@
from galaxy import eggs
eggs.require( 'mercurial' )
-from mercurial import commands
from mercurial import mdiff
from mercurial import patch
@@ -849,7 +848,7 @@
operation = kwd[ 'operation' ].lower()
if operation == "preview_tools_in_changeset":
repository = suc.get_repository_in_tool_shed( trans.app, repository_id )
- repository_metadata = metadata_util.get_latest_repository_metadata( trans, repository.id, downloadable=True )
+ repository_metadata = metadata_util.get_latest_repository_metadata( trans.app, repository.id, downloadable=True )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -1017,7 +1016,7 @@
if error:
status = 'error'
else:
- repository, message = repository_maintenance_util.create_repository( trans,
+ repository, message = repository_maintenance_util.create_repository( trans.app,
name,
repository_type,
description,
@@ -1974,7 +1973,10 @@
# The manifest.xml file has already been validated, so no error_message should be returned here.
repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path )
# Determine the status for each exported repository archive contained within the capsule.
- repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts )
+ repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans.app,
+ trans.user,
+ trans.user_is_admin(),
+ repository_info_dicts )
if 'import_capsule_button' in kwd:
# Generate a list of repository name / import results message tuples for display after the capsule is imported.
import_results_tups = []
@@ -1984,11 +1986,12 @@
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
- import_results_tups = \
- repository_maintenance_util.create_repository_and_import_archive( trans,
- repository_status_info_dict,
- import_results_tups )
- import_util.check_status_and_reset_downloadable( trans, import_results_tups )
+ import_results_tups = import_util.create_repository_and_import_archive( trans.app,
+ trans.request.host,
+ trans.user,
+ repository_status_info_dict,
+ import_results_tups )
+ import_util.check_status_and_reset_downloadable( trans.app, import_results_tups )
basic_util.remove_dir( file_path )
return trans.fill_template( '/webapps/tool_shed/repository/import_capsule_results.mako',
export_info_dict=export_info_dict,
@@ -2099,7 +2102,12 @@
tool,
[] )
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, {}, as_html=True, displaying_invalid_tool=True )
+ message = tool_util.generate_message_for_invalid_tools( trans.app,
+ invalid_file_tups,
+ repository,
+ {},
+ as_html=True,
+ displaying_invalid_tool=True )
elif error_message:
message = error_message
try:
@@ -2459,7 +2467,10 @@
if repository_metadata:
metadata = repository_metadata.metadata
role = repository.admin_role
- associations_dict = repository_maintenance_util.handle_role_associations( trans, role, repository, **kwd )
+ associations_dict = repository_maintenance_util.handle_role_associations( trans.app,
+ role,
+ repository,
+ **kwd )
in_users = associations_dict.get( 'in_users', [] )
out_users = associations_dict.get( 'out_users', [] )
in_groups = associations_dict.get( 'in_groups', [] )
@@ -2693,7 +2704,7 @@
invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
if invalid_file_tups:
repository = suc.get_repository_in_tool_shed( trans.app, id )
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, metadata_dict )
status = 'error'
else:
message = "All repository metadata has been reset. "
@@ -2733,7 +2744,7 @@
tip = repository.tip( trans.app )
for selected_file in selected_files_to_delete:
try:
- commands.remove( repo.ui, repo, selected_file, force=True )
+ hg_util.remove_file( repo.ui, repo, selected_file, force=True )
except Exception, e:
log.debug( "Error removing the following file using the mercurial API:\n %s" % str( selected_file ) )
log.debug( "The error was: %s" % str( e ))
@@ -2759,19 +2770,27 @@
# Commit the change set.
if not commit_message:
commit_message = 'Deleted selected files'
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
- suc.handle_email_alerts( trans, repository )
+ hg_util.commit_changeset( repo.ui,
+ repo,
+ full_path_to_changeset=repo_dir,
+ username=trans.user.username,
+ message=commit_message )
+ suc.handle_email_alerts( trans.app, trans.request.host, repository )
# Update the repository files for browsing.
hg_util.update_repository( repo )
# Get the new repository tip.
if tip == repository.tip( trans.app ):
message += 'No changes to repository. '
- kwd[ 'message' ] = message
-
else:
- message += 'The selected files were deleted from the repository. '
- kwd[ 'message' ] = message
- metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, **kwd )
+ status, error_message = metadata_util.set_repository_metadata_due_to_new_tip( trans.app,
+ trans.request.host,
+ trans.user,
+ repository,
+ **kwd )
+ if error_message:
+ message = error_message
+ else:
+ message += 'The selected files were deleted from the repository. '
else:
message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
status = "error"
@@ -3061,13 +3080,13 @@
status = kwd.get( 'status', 'done' )
url = kwd.get( 'url', '' )
if 'upload_capsule_button' in kwd:
- capsule_dict = import_util.upload_capsule( trans, **kwd )
+ capsule_dict = import_util.upload_capsule( **kwd )
status = capsule_dict.get( 'status', 'error' )
if status == 'error':
message = capsule_dict.get( 'error_message', '' )
else:
- capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
- capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
+ capsule_dict = import_util.extract_capsule_files( **capsule_dict )
+ capsule_dict = import_util.validate_capsule( **capsule_dict )
status = capsule_dict.get( 'status', 'error' )
if status == 'ok':
return trans.response.send_redirect( web.url_for( controller='repository',
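Note the revised contract of set_repository_metadata_due_to_new_tip seen in this file's diff: it now returns a ( status, error_message ) pair rather than writing into kwd itself. A call-site sketch, with a stub standing in for the real helper:

def set_repository_metadata_due_to_new_tip_stub( app, host, user, repository, **kwd ):
    # Stand-in for metadata_util.set_repository_metadata_due_to_new_tip.
    return 'done', ''

status, error_message = set_repository_metadata_due_to_new_tip_stub( None, 'localhost', None, None )
if error_message:
    message = error_message
else:
    message = 'The selected files were deleted from the repository. '
print message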
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -167,7 +167,7 @@
# Inspect the contents of the file to see if changeset_revision values are missing and if so,
# set them appropriately.
altered, root_elem, error_message = \
- commit_util.handle_repository_dependencies_definition( trans,
+ commit_util.handle_repository_dependencies_definition( trans.app,
uploaded_file_name,
unpopulate=False )
if error_message:
@@ -184,7 +184,7 @@
# Inspect the contents of the file to see if changeset_revision values are
# missing and if so, set them appropriately.
altered, root_elem, error_message = \
- commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ commit_util.handle_tool_dependencies_definition( trans.app, uploaded_file_name )
if error_message:
ok = False
message = error_message
@@ -200,15 +200,19 @@
if ok:
# See if any admin users have chosen to receive email alerts when a repository is updated.
# If so, check every uploaded file to ensure content is appropriate.
- check_contents = commit_util.check_file_contents_for_email_alerts( trans )
+ check_contents = commit_util.check_file_contents_for_email_alerts( trans.app )
if check_contents and os.path.isfile( full_path ):
content_alert_str = commit_util.check_file_content_for_html_and_images( full_path )
else:
content_alert_str = ''
- commands.add( repo.ui, repo, full_path )
+ hg_util.add_changeset( repo.ui, repo, full_path )
# Convert from unicode to prevent "TypeError: array item must be char"
full_path = full_path.encode( 'ascii', 'replace' )
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ hg_util.commit_changeset( repo.ui,
+ repo,
+ full_path_to_changeset=full_path,
+ username=trans.user.username,
+ message=commit_message )
if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
# Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
# by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables
@@ -218,7 +222,8 @@
message = '%s<br/>%s' % ( message, error_message )
# See if the content of the change set was valid.
admin_only = len( repository.downloadable_revisions ) != 1
- suc.handle_email_alerts( trans,
+ suc.handle_email_alerts( trans.app,
+ trans.request.host,
repository,
content_alert_str=content_alert_str,
new_repo_alert=new_repo_alert,
@@ -253,8 +258,15 @@
( len( files_to_remove ), upload_point )
else:
message += " %d files were removed from the repository root. " % len( files_to_remove )
+ status, error_message = metadata_util.set_repository_metadata_due_to_new_tip( trans.app,
+ trans.request.host,
+ trans.user,
+ repository,
+ content_alert_str=content_alert_str,
+ **kwd )
+ if error_message:
+ message = error_message
kwd[ 'message' ] = message
- metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str, **kwd )
if repository.metadata_revisions:
# A repository's metadata revisions are order descending by update_time, so the zeroth revision
# will be the tip just after an upload.
@@ -355,7 +367,7 @@
# Inspect the contents of the file to see if changeset_revision values are missing and
# if so, set them appropriately.
altered, root_elem, error_message = \
- commit_util.handle_repository_dependencies_definition( trans,
+ commit_util.handle_repository_dependencies_definition( trans.app,
uploaded_file_name,
unpopulate=False )
if error_message:
@@ -365,7 +377,7 @@
shutil.move( tmp_filename, uploaded_file_name )
elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans.app, uploaded_file_name )
if error_message:
return False, error_message, [], '', [], []
if altered:
@@ -382,8 +394,17 @@
os.remove( repo_path )
shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path )
filenames_in_archive.append( relative_path )
- return commit_util.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar,
- new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed )
+ return commit_util.handle_directory_changes( trans.app,
+ trans.request.host,
+ trans.user.username,
+ repository,
+ full_path,
+ filenames_in_archive,
+ remove_repo_files_not_in_tar,
+ new_repo_alert,
+ commit_message,
+ undesirable_dirs_removed,
+ undesirable_files_removed )
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
@@ -422,7 +443,7 @@
uploaded_file_name = os.path.join( full_path, filename )
if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans,
+ altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans.app,
uploaded_file_name,
unpopulate=False )
if error_message:
@@ -432,13 +453,15 @@
shutil.move( tmp_filename, uploaded_file_name )
elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans.app, uploaded_file_name )
if error_message:
return False, error_message, [], '', [], []
if altered:
tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
shutil.move( tmp_filename, uploaded_file_name )
- return commit_util.handle_directory_changes( trans,
+ return commit_util.handle_directory_changes( trans.app,
+ trans.request.host,
+ trans.user.username,
repository,
full_path,
filenames_in_archive,
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
--- a/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
@@ -1,6 +1,7 @@
"""Middle-ware for handling hg authentication for users pushing change sets to local repositories."""
+import json
+import logging
import os
-import logging
import sqlalchemy
import sys
import tempfile
@@ -9,11 +10,9 @@
from paste.httpheaders import REMOTE_USER
from galaxy.util import asbool
-from galaxy.util import json
-from galaxy.webapps.tool_shed import model
from galaxy.util.hash_util import new_secure_hash
-import tool_shed.util.shed_util_common as suc
from tool_shed.util import commit_util
+from tool_shed.util import hg_util
import tool_shed.repository_types.util as rt_util
from galaxy import eggs
@@ -104,7 +103,7 @@
fh.write( chunk )
fh.close()
fh = open( tmp_filename, 'rb' )
- changeset_groups = json.from_json_string( commit_util.bundle_to_json( fh ) )
+ changeset_groups = json.loads( hg_util.bundle_to_json( fh ) )
fh.close()
try:
os.unlink( tmp_filename )
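The json handling above also moves from galaxy.util.json.from_json_string to the standard library; for this purpose both calls parse a JSON document from a string. A synthetic example:

import json

changeset_groups = json.loads( '[ [ "changelog group" ], [ "manifest group" ] ]' )
print changeset_groups[ 0 ]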
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/commit_util.py
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -1,14 +1,11 @@
-import cStringIO
import gzip
+import json
import logging
import os
-import pkg_resources
import shutil
-import struct
import tempfile
from galaxy.datatypes import checkers
from galaxy.util import asbool
-from galaxy.util import json
from galaxy.util.odict import odict
from galaxy.web import url_for
import tool_shed.util.shed_util_common as suc
@@ -18,24 +15,11 @@
from tool_shed.util import xml_util
import tool_shed.repository_types.util as rt_util
-from galaxy import eggs
-eggs.require( 'mercurial' )
-from mercurial import commands
-from mercurial.changegroup import readbundle
-from mercurial.changegroup import readexactly
-
log = logging.getLogger( __name__ )
UNDESIRABLE_DIRS = [ '.hg', '.svn', '.git', '.cvs' ]
UNDESIRABLE_FILES = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ]
-def bundle_to_json( fh ):
- """Convert the received HG10xx data stream (a mercurial 1.0 bundle created using hg push from the command line) to a json object."""
- # See http://www.wstein.org/home/wstein/www/home/was/patches/hg_json
- hg_unbundle10_obj = readbundle( fh, None )
- groups = [ group for group in unpack_groups( hg_unbundle10_obj ) ]
- return json.to_json_string( groups, indent=4 )
-
def check_archive( repository, archive ):
for member in archive.getmembers():
# Allow regular files and directories only
@@ -62,15 +46,16 @@
return False, message
return True, ''
-def check_file_contents_for_email_alerts( trans ):
+def check_file_contents_for_email_alerts( app ):
"""
See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
checked for inappropriate content.
"""
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.email_alerts != None ):
- email_alerts = json.from_json_string( repository.email_alerts )
+ sa_session = app.model.context.current
+ admin_users = app.config.get( "admin_users", "" ).split( "," )
+ for repository in sa_session.query( app.model.Repository ) \
+ .filter( app.model.Repository.table.c.email_alerts != None ):
+ email_alerts = json.loads( repository.email_alerts )
for user_email in email_alerts:
if user_email in admin_users:
return True
@@ -140,14 +125,14 @@
bzipped_file.close()
shutil.move( uncompressed, uploaded_file_name )
-def handle_complex_repository_dependency_elem( trans, elem, sub_elem_index, sub_elem, sub_elem_altered, altered, unpopulate=False ):
+def handle_complex_repository_dependency_elem( app, elem, sub_elem_index, sub_elem, sub_elem_altered, altered, unpopulate=False ):
"""
Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag that defines
a complex repository dependency.
"""
# The received sub_elem looks something like the following:
# <repository name="package_eigen_2_0" owner="test" prior_installation_required="True" />
- revised, repository_elem, error_message = handle_repository_dependency_elem( trans, sub_elem, unpopulate=unpopulate )
+ revised, repository_elem, error_message = handle_repository_dependency_elem( app, sub_elem, unpopulate=unpopulate )
if error_message:
error_message = 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % error_message
if revised:
@@ -157,13 +142,13 @@
altered = True
return altered, sub_elem_altered, elem, error_message
-def handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert,
- commit_message, undesirable_dirs_removed, undesirable_files_removed ):
- repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+def handle_directory_changes( app, host, username, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar,
+ new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
+ repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
content_alert_str = ''
files_to_remove = []
filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
- if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
+ if remove_repo_files_not_in_tar and not repository.is_new( app ):
# We have a repository that is not new (it contains files), so discover those files that are in the
# repository, but not in the uploaded archive.
for root, dirs, files in os.walk( full_path ):
@@ -181,9 +166,10 @@
if full_name not in filenames_in_archive:
files_to_remove.append( full_name )
for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ # Remove files in the repository (relative to the upload point) that are not in
+ # the uploaded archive.
try:
- commands.remove( repo.ui, repo, repo_file, force=True )
+ hg_util.remove_file( repo.ui, repo, repo_file, force=True )
except Exception, e:
log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
relative_selected_file = repo_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
@@ -204,23 +190,33 @@
except OSError, e:
# The directory is not empty.
pass
- # See if any admin users have chosen to receive email alerts when a repository is updated. If so, check every uploaded file to ensure
- # content is appropriate.
- check_contents = check_file_contents_for_email_alerts( trans )
+ # See if any admin users have chosen to receive email alerts when a repository is updated.
+ # If so, check every uploaded file to ensure content is appropriate.
+ check_contents = check_file_contents_for_email_alerts( app )
for filename_in_archive in filenames_in_archive:
# Check file content to ensure it is appropriate.
if check_contents and os.path.isfile( filename_in_archive ):
content_alert_str += check_file_content_for_html_and_images( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
+ hg_util.add_changeset( repo.ui, repo, filename_in_archive )
if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded
+ # by parsing the file and adding new entries to the in-memory app.tool_data_tables
+ # dictionary.
+ error, message = tool_util.handle_sample_tool_data_table_conf_file( app, filename_in_archive )
if error:
return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ hg_util.commit_changeset( repo.ui,
+ repo,
+ full_path_to_changeset=full_path,
+ username=username,
+ message=commit_message )
admin_only = len( repository.downloadable_revisions ) != 1
- suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ suc.handle_email_alerts( app,
+ host,
+ repository,
+ content_alert_str=content_alert_str,
+ new_repo_alert=new_repo_alert,
+ admin_only=admin_only )
return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
def handle_missing_repository_attribute( elem ):
@@ -253,7 +249,7 @@
gzipped_file.close()
shutil.move( uncompressed, uploaded_file_name )
-def handle_repository_dependencies_definition( trans, repository_dependencies_config, unpopulate=False ):
+def handle_repository_dependencies_definition( app, repository_dependencies_config, unpopulate=False ):
"""
Populate or unpopulate the toolshed and changeset_revision attributes of a <repository> tag. Populating will occur when a
dependency definition file is being uploaded to the repository, while depopulating will occur when the repository is being
@@ -269,7 +265,7 @@
for index, elem in enumerate( root ):
if elem.tag == 'repository':
# <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
- revised, elem, error_message = handle_repository_dependency_elem( trans, elem, unpopulate=unpopulate )
+ revised, elem, error_message = handle_repository_dependency_elem( app, elem, unpopulate=unpopulate )
if error_message:
error_message = 'The repository_dependencies.xml file contains an invalid <repository> tag. %s' % error_message
return False, None, error_message
@@ -280,7 +276,7 @@
return altered, root, error_message
return False, None, error_message
-def handle_repository_dependency_elem( trans, elem, unpopulate=False ):
+def handle_repository_dependency_elem( app, elem, unpopulate=False ):
"""Populate or unpopulate repository tags."""
# <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
# <repository changeset_revision="xxx" name="package_xorg_macros_1_17_1" owner="test" toolshed="yyy">
@@ -342,10 +338,10 @@
# Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
# We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
# revision are valid.
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ repository = suc.get_repository_by_name_and_owner( app, name, owner )
if repository:
- repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
- lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans.app, repository, repo )
+ repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
+ lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo )
if lastest_installable_changeset_revision != hg_util.INITIAL_CHANGELOG_HASH:
elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
revised = True
@@ -356,7 +352,7 @@
error_message = 'Unable to locate repository with name %s and owner %s. ' % ( str( name ), str( owner ) )
return revised, elem, error_message
-def handle_repository_dependency_sub_elem( trans, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ):
+def handle_repository_dependency_sub_elem( app, package_altered, altered, actions_elem, action_index, action_elem, unpopulate=False ):
"""
Populate or unpopulate the toolshed and changeset_revision attributes for each of the following tag sets.
<action type="set_environment_for_install">
@@ -367,7 +363,7 @@
for repo_index, repo_elem in enumerate( action_elem ):
# Make sure to skip comments and tags that are not <repository>.
if repo_elem.tag == 'repository':
- revised, repository_elem, message = handle_repository_dependency_elem( trans, repo_elem, unpopulate=unpopulate )
+ revised, repository_elem, message = handle_repository_dependency_elem( app, repo_elem, unpopulate=unpopulate )
if message:
error_message += 'The tool_dependencies.xml file contains an invalid <repository> tag. %s' % message
if revised:
@@ -379,7 +375,7 @@
actions_elem[ action_index ] = action_elem
return package_altered, altered, actions_elem, error_message
-def handle_tool_dependencies_definition( trans, tool_dependencies_config, unpopulate=False ):
+def handle_tool_dependencies_definition( app, tool_dependencies_config, unpopulate=False ):
"""
    Populate or unpopulate the toolshed and changeset_revision attributes of each <repository>
tag defined within a tool_dependencies.xml file.
@@ -401,7 +397,7 @@
if package_elem.tag == 'repository':
# We have a complex repository dependency.
altered, package_altered, root_elem, message = \
- handle_complex_repository_dependency_elem( trans,
+ handle_complex_repository_dependency_elem( app,
root_elem,
package_index,
package_elem,
@@ -440,7 +436,7 @@
if last_actions_elem_package_elem.tag == 'repository':
# We have a complex repository dependency.
altered, last_actions_package_altered, last_actions_elem, message = \
- handle_complex_repository_dependency_elem( trans,
+ handle_complex_repository_dependency_elem( app,
last_actions_elem,
last_actions_elem_package_index,
last_actions_elem_package_elem,
@@ -456,7 +452,7 @@
# Inspect the sub elements of last_actions_elem to locate all <repository> tags and
# populate them with toolshed and changeset_revision attributes if necessary.
last_actions_package_altered, altered, last_actions_elem, message = \
- handle_repository_dependency_sub_elem( trans,
+ handle_repository_dependency_sub_elem( app,
last_actions_package_altered,
altered,
actions_group_elem,
@@ -468,23 +464,25 @@
elif actions_elem.tag == 'actions':
# We are not in an <actions_group> tag set, so we must be in an <actions> tag set.
for action_index, action_elem in enumerate( actions_elem ):
- # Inspect the sub elements of last_actions_elem to locate all <repository> tags and populate them with
- # toolshed and changeset_revision attributes if necessary.
- package_altered, altered, actions_elem, message = handle_repository_dependency_sub_elem( trans,
- package_altered,
- altered,
- actions_elem,
- action_index,
- action_elem,
- unpopulate=unpopulate )
+ # Inspect the sub elements of last_actions_elem to locate all <repository> tags
+ # and populate them with toolshed and changeset_revision attributes if necessary.
+ package_altered, altered, actions_elem, message = \
+ handle_repository_dependency_sub_elem( app,
+ package_altered,
+ altered,
+ actions_elem,
+ action_index,
+ action_elem,
+ unpopulate=unpopulate )
if message:
error_message += message
else:
package_name = root_elem.get( 'name', '' )
package_version = root_elem.get( 'version', '' )
- error_message += 'Version %s of the %s package cannot be installed because ' % ( str( package_version ), str( package_name ) )
- error_message += 'the recipe for installing the package is missing either an <actions> tag set or an <actions_group> '
- error_message += 'tag set.'
+ error_message += 'Version %s of the %s package cannot be installed because ' % \
+ ( str( package_version ), str( package_name ) )
+ error_message += 'the recipe for installing the package is missing either an '
+ error_message += '<actions> tag set or an <actions_group> tag set.'
if package_altered:
package_elem[ actions_index ] = actions_elem
if package_altered:
@@ -534,62 +532,3 @@
if isbz2:
handle_bz2( repository, uploaded_file_name )
return uploaded_file_filename.rstrip( '.bz2' )
-
-def unpack_chunks( hg_unbundle10_obj ):
- """
- This method provides a generator of parsed chunks of a "group" in a mercurial unbundle10 object which
- is created when a changeset that is pushed to a Tool Shed repository using hg push from the command line
- is read using readbundle.
- """
- while True:
- length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) )
- if length <= 4:
- # We found a "null chunk", which ends the group.
- break
- if length < 84:
- raise Exception( "negative data length" )
- node, p1, p2, cs = struct.unpack( '20s20s20s20s', readexactly( hg_unbundle10_obj, 80 ) )
- yield { 'node': node.encode( 'hex' ),
- 'p1': p1.encode( 'hex' ),
- 'p2': p2.encode( 'hex' ),
- 'cs': cs.encode( 'hex' ),
- 'data': [ patch for patch in unpack_patches( hg_unbundle10_obj, length - 84 ) ] }
-
-def unpack_groups( hg_unbundle10_obj ):
- """
- This method provides a generator of parsed groups from a mercurial unbundle10 object which is
- created when a changeset that is pushed to a Tool Shed repository using hg push from the command
- line is read using readbundle.
- """
- # Process the changelog group.
- yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ]
- # Process the manifest group.
- yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ]
- while True:
- length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) )
- if length <= 4:
- # We found a "null meta chunk", which ends the changegroup.
- break
- filename = readexactly( hg_unbundle10_obj, length-4 ).encode( 'string_escape' )
- # Process the file group.
- yield ( filename, [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] )
-
-def unpack_patches( hg_unbundle10_obj, remaining ):
- """
- This method provides a generator of patches from the data field in a chunk. As there is no delimiter
- for this data field, a length argument is required.
- """
- while remaining >= 12:
- start, end, blocklen = struct.unpack( '>lll', readexactly( hg_unbundle10_obj, 12 ) )
- remaining -= 12
- if blocklen > remaining:
- raise Exception( "unexpected end of patch stream" )
- block = readexactly( hg_unbundle10_obj, blocklen )
- remaining -= blocklen
- yield { 'start': start,
- 'end': end,
- 'blocklen': blocklen,
- 'block': block.encode( 'string_escape' ) }
- if remaining > 0:
- print remaining
- raise Exception( "unexpected end of patch stream" )
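The populate/unpopulate handling above amounts to stamping or stripping the toolshed and changeset_revision attributes on <repository> tags. A toy ElementTree illustration (the real code resolves the revision from the Tool Shed; the URL and revision below are placeholders):

import xml.etree.ElementTree as ET

elem = ET.fromstring( '<repository name="molecule_datatypes" owner="test" />' )

def populate( elem, toolshed, changeset_revision ):
    elem.set( 'toolshed', toolshed )
    elem.set( 'changeset_revision', changeset_revision )
    return elem

def unpopulate( elem ):
    for attr in ( 'toolshed', 'changeset_revision' ):
        elem.attrib.pop( attr, None )
    return elem

print ET.tostring( populate( elem, 'http://toolshed.example.org', '1a070566e9c6' ) )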
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/export_util.py
--- a/lib/tool_shed/util/export_util.py
+++ b/lib/tool_shed/util/export_util.py
@@ -151,7 +151,7 @@
if name == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
# Eliminate the toolshed, and changeset_revision attributes from all <repository> tags.
altered, root_elem, error_message = \
- commit_util.handle_repository_dependencies_definition( trans, full_path, unpopulate=True )
+ commit_util.handle_repository_dependencies_definition( trans.app, full_path, unpopulate=True )
if error_message:
return None, error_message
if altered:
@@ -160,7 +160,7 @@
elif name == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
# Eliminate the toolshed, and changeset_revision attributes from all <repository> tags.
altered, root_elem, error_message = \
- commit_util.handle_tool_dependencies_definition( trans, full_path, unpopulate=True )
+ commit_util.handle_tool_dependencies_definition( trans.app, full_path, unpopulate=True )
if error_message:
return None, error_message
if altered:
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/hg_util.py
--- a/lib/tool_shed/util/hg_util.py
+++ b/lib/tool_shed/util/hg_util.py
@@ -1,5 +1,7 @@
+import json
import logging
import os
+import struct
from datetime import datetime
from time import gmtime
@@ -14,6 +16,8 @@
from mercurial import commands
from mercurial import hg
from mercurial import ui
+from mercurial.changegroup import readbundle
+from mercurial.changegroup import readexactly
from tool_shed.util import basic_util
@@ -21,6 +25,19 @@
INITIAL_CHANGELOG_HASH = '000000000000'
+def add_changeset( repo_ui, repo, path_to_filename_in_archive ):
+ commands.add( repo_ui, repo, path_to_filename_in_archive )
+
+def bundle_to_json( fh ):
+ """
+ Convert the received HG10xx data stream (a mercurial 1.0 bundle created using hg push from the
+ command line) to a json object.
+ """
+ # See http://www.wstein.org/home/wstein/www/home/was/patches/hg_json
+ hg_unbundle10_obj = readbundle( fh, None )
+ groups = [ group for group in unpack_groups( hg_unbundle10_obj ) ]
+ return json.dumps( groups, indent=4 )
+
def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
"""
Clone the repository up to the specified changeset_revision. No subsequent revisions will be
@@ -39,6 +56,9 @@
log.debug( error_message )
return False, error_message
+def commit_changeset( repo_ui, repo, full_path_to_changeset, username, message ):
+ commands.commit( repo_ui, repo, full_path_to_changeset, user=username, message=message )
+
def copy_file_from_manifest( repo, ctx, filename, dir ):
"""
Copy the latest version of the file named filename from the repository manifest to the directory
@@ -268,6 +288,9 @@
"""Pull changes from a remote repository to a local one."""
commands.pull( get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
+def remove_file( repo_ui, repo, selected_file, force=True ):
+ commands.remove( repo_ui, repo, selected_file, force=force )
+
def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
"""
Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision,
@@ -300,6 +323,65 @@
"""
return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+def unpack_chunks( hg_unbundle10_obj ):
+ """
+ This method provides a generator of parsed chunks of a "group" in a mercurial unbundle10 object which
+ is created when a changeset that is pushed to a Tool Shed repository using hg push from the command line
+ is read using readbundle.
+ """
+ while True:
+ length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) )
+ if length <= 4:
+ # We found a "null chunk", which ends the group.
+ break
+ if length < 84:
+ raise Exception( "negative data length" )
+ node, p1, p2, cs = struct.unpack( '20s20s20s20s', readexactly( hg_unbundle10_obj, 80 ) )
+ yield { 'node': node.encode( 'hex' ),
+ 'p1': p1.encode( 'hex' ),
+ 'p2': p2.encode( 'hex' ),
+ 'cs': cs.encode( 'hex' ),
+ 'data': [ patch for patch in unpack_patches( hg_unbundle10_obj, length - 84 ) ] }
+
+def unpack_groups( hg_unbundle10_obj ):
+ """
+ This method provides a generator of parsed groups from a mercurial unbundle10 object which is
+ created when a changeset that is pushed to a Tool Shed repository using hg push from the command
+ line is read using readbundle.
+ """
+ # Process the changelog group.
+ yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ]
+ # Process the manifest group.
+ yield [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ]
+ while True:
+ length, = struct.unpack( '>l', readexactly( hg_unbundle10_obj, 4 ) )
+ if length <= 4:
+ # We found a "null meta chunk", which ends the changegroup.
+ break
+ filename = readexactly( hg_unbundle10_obj, length-4 ).encode( 'string_escape' )
+ # Process the file group.
+ yield ( filename, [ chunk for chunk in unpack_chunks( hg_unbundle10_obj ) ] )
+
+def unpack_patches( hg_unbundle10_obj, remaining ):
+ """
+ This method provides a generator of patches from the data field in a chunk. As there is no delimiter
+ for this data field, a length argument is required.
+ """
+ while remaining >= 12:
+ start, end, blocklen = struct.unpack( '>lll', readexactly( hg_unbundle10_obj, 12 ) )
+ remaining -= 12
+ if blocklen > remaining:
+ raise Exception( "unexpected end of patch stream" )
+ block = readexactly( hg_unbundle10_obj, blocklen )
+ remaining -= blocklen
+ yield { 'start': start,
+ 'end': end,
+ 'blocklen': blocklen,
+ 'block': block.encode( 'string_escape' ) }
+ if remaining > 0:
+ print remaining
+ raise Exception( "unexpected end of patch stream" )
+
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
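For reference, the framing that unpack_chunks reads is a 4-byte big-endian length (which counts itself), followed by four 20-byte hashes, then length - 84 bytes of patch data. A synthetic Python 2 illustration of that header layout, no real bundle required:

import struct

chunk = struct.pack( '>l', 84 ) + '\x11' * 20 + '\x22' * 20 + '\x33' * 20 + '\x44' * 20
length, = struct.unpack( '>l', chunk[ :4 ] )
node, p1, p2, cs = struct.unpack( '20s20s20s20s', chunk[ 4:84 ] )
# length == 84 means the chunk carries no patch data.
print length, node.encode( 'hex' )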
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/import_util.py
--- a/lib/tool_shed/util/import_util.py
+++ b/lib/tool_shed/util/import_util.py
@@ -5,50 +5,117 @@
import tempfile
import urllib
from galaxy import util
-from galaxy.datatypes import checkers
from tool_shed.util import commit_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
+from tool_shed.util import repository_maintenance_util
from tool_shed.util import xml_util
import tool_shed.util.shed_util_common as suc
import tool_shed.repository_types.util as rt_util
log = logging.getLogger( __name__ )
-def check_status_and_reset_downloadable( trans, import_results_tups ):
+def check_status_and_reset_downloadable( app, import_results_tups ):
"""Check the status of each imported repository and set downloadable to False if errors."""
+ sa_session = app.model.context.current
flush = False
for import_results_tup in import_results_tups:
ok, name_owner, message = import_results_tup
name, owner = name_owner
if not ok:
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ repository = suc.get_repository_by_name_and_owner( app, name, owner )
if repository is not None:
# Do not allow the repository to be automatically installed if population resulted in errors.
- tip_changeset_revision = repository.tip( trans.app )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app,
- trans.security.encode_id( repository.id ),
+ tip_changeset_revision = repository.tip( app )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app,
+ app.security.encode_id( repository.id ),
tip_changeset_revision )
if repository_metadata:
if repository_metadata.downloadable:
repository_metadata.downloadable = False
- trans.sa_session.add( repository_metadata )
+ sa_session.add( repository_metadata )
if not flush:
flush = True
# Do not allow dependent repository revisions to be automatically installed if population
# resulted in errors.
- dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( trans.app, repository_metadata )
+ dependent_downloadable_revisions = suc.get_dependent_downloadable_revisions( app, repository_metadata )
for dependent_downloadable_revision in dependent_downloadable_revisions:
if dependent_downloadable_revision.downloadable:
dependent_downloadable_revision.downloadable = False
- trans.sa_session.add( dependent_downloadable_revision )
+ sa_session.add( dependent_downloadable_revision )
if not flush:
flush = True
if flush:
- trans.sa_session.flush()
+ sa_session.flush()
-def extract_capsule_files( trans, **kwd ):
+def create_repository_and_import_archive( app, host, user, repository_archive_dict, import_results_tups ):
+ """
+ Create a new repository in the tool shed and populate it with the contents of a gzip compressed
+ tar archive that was exported as part or all of the contents of a capsule.
+ """
+ results_message = ''
+ name = repository_archive_dict.get( 'name', None )
+ username = repository_archive_dict.get( 'owner', None )
+ if name is None or username is None:
+ ok = False
+ results_message += 'Import failed: required repository name <b>%s</b> or owner <b>%s</b> is missing.' % \
+ ( str( name ), str( username ))
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+ else:
+ status = repository_archive_dict.get( 'status', None )
+ if status is None:
+ # The repository does not yet exist in this Tool Shed and the current user is authorized to import
+ # the current archive file.
+ type = repository_archive_dict.get( 'type', 'unrestricted' )
+ description = repository_archive_dict.get( 'description', '' )
+ long_description = repository_archive_dict.get( 'long_description', '' )
+ # The owner entry in the repository_archive_dict is the public username of the user associated with
+ # the exported repository archive.
+ user = suc.get_user_by_username( app, username )
+ if user is None:
+ ok = False
+ results_message += 'Import failed: repository owner <b>%s</b> does not have an account in this Tool Shed.' % str( username )
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+ else:
+ user_id = user.id
+ # The categories entry in the repository_archive_dict is a list of category names. If a name does not
+ # exist in the current Tool Shed, the category will not be created, so it will not be associated with
+ # the repository.
+ category_ids = []
+ category_names = repository_archive_dict.get( 'category_names', [] )
+ for category_name in category_names:
+ category = suc.get_category_by_name( app, category_name )
+ if category is None:
+ results_message += 'This Tool Shed does not have the category <b>%s</b> so it ' % str( category_name )
+ results_message += 'will not be associated with this repository.'
+ else:
+ category_ids.append( app.security.encode_id( category.id ) )
+ # Create the repository record in the database.
+ repository, create_message = repository_maintenance_util.create_repository( app,
+ name,
+ type,
+ description,
+ long_description,
+ user_id=user_id,
+ category_ids=category_ids )
+ if create_message:
+ results_message += create_message
+ # Populate the new repository with the contents of exported repository archive.
+ results_dict = import_repository_archive( app, host, user, repository, repository_archive_dict )
+ ok = results_dict.get( 'ok', False )
+ error_message = results_dict.get( 'error_message', '' )
+ if error_message:
+ results_message += error_message
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+ else:
+ # The repository either already exists in this Tool Shed or the current user is not authorized to create it.
+ ok = True
+ results_message += 'Import not necessary: repository status for this Tool Shed is: %s.' % str( status )
+ import_results_tups.append( ( ok, ( str( name ), str( username ) ), results_message ) )
+ return import_results_tups
+
+def extract_capsule_files( **kwd ):
"""Extract the uploaded capsule archive into a temporary location for inspection, validation and potential import."""
return_dict = {}
tar_archive = kwd.get( 'tar_archive', None )
@@ -168,7 +235,7 @@
repository_info_dicts.append( repository_info_dict )
return repository_info_dicts, error_message
-def get_repository_status_from_tool_shed( trans, repository_info_dicts ):
+def get_repository_status_from_tool_shed( app, user, user_is_admin, repository_info_dicts ):
"""
For each exported repository archive contained in the capsule, inspect the Tool Shed to see if that repository already
exists or if the current user is authorized to create the repository, and set a status appropriately. If repository
@@ -179,7 +246,7 @@
"""
repository_status_info_dicts = []
for repository_info_dict in repository_info_dicts:
- repository = suc.get_repository_by_name_and_owner( trans.app, repository_info_dict[ 'name' ], repository_info_dict[ 'owner' ] )
+ repository = suc.get_repository_by_name_and_owner( app, repository_info_dict[ 'name' ], repository_info_dict[ 'owner' ] )
if repository:
if repository.deleted:
repository_info_dict[ 'status' ] = 'Exists, deleted'
@@ -189,16 +256,16 @@
repository_info_dict[ 'status' ] = 'Exists'
else:
# No repository with the specified name and owner currently exists, so make sure the current user can create one.
- if trans.user_is_admin():
+ if user_is_admin:
repository_info_dict[ 'status' ] = None
- elif trans.app.security_agent.user_can_import_repository_archive( trans.user, repository_info_dict[ 'owner' ] ):
+ elif app.security_agent.user_can_import_repository_archive( user, repository_info_dict[ 'owner' ] ):
repository_info_dict[ 'status' ] = None
else:
repository_info_dict[ 'status' ] = 'Not authorized to import'
repository_status_info_dicts.append( repository_info_dict )
return repository_status_info_dicts
-def import_repository_archive( trans, repository, repository_archive_dict ):
+def import_repository_archive( app, host, user, repository, repository_archive_dict ):
"""Import a repository archive contained within a repository capsule."""
archive_file_name = repository_archive_dict.get( 'archive_file_name', None )
capsule_file_name = repository_archive_dict[ 'capsule_file_name' ]
@@ -207,8 +274,8 @@
results_dict = dict( ok=True, error_message='' )
archive_file_path = os.path.join( file_path, archive_file_name )
archive = tarfile.open( archive_file_path, 'r:*' )
- repo_dir = repository.repo_path( trans.app )
- repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
+ repo_dir = repository.repo_path( app )
+ repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repo_dir, create=False )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
ok, error_message = commit_util.check_archive( repository, archive )
@@ -237,9 +304,8 @@
uploaded_file_name = os.path.join( full_path, filename )
if os.path.split( uploaded_file_name )[ -1 ] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans,
- uploaded_file_name,
- unpopulate=False )
+ altered, root_elem, error_message = \
+ commit_util.handle_repository_dependencies_definition( app, uploaded_file_name, unpopulate=False )
if error_message:
results_dict[ 'ok' ] = False
results_dict[ 'error_message' ] += error_message
@@ -248,7 +314,7 @@
shutil.move( tmp_filename, uploaded_file_name )
elif os.path.split( uploaded_file_name )[ -1 ] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( app, uploaded_file_name )
if error_message:
results_dict[ 'ok' ] = False
results_dict[ 'error_message' ] += error_message
@@ -261,7 +327,9 @@
# Since the repository is new, the following must be False.
remove_repo_files_not_in_tar = False
ok, error_message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
- commit_util.handle_directory_changes( trans,
+ commit_util.handle_directory_changes( app,
+ host,
+ user.username,
repository,
full_path,
filenames_in_archive,
@@ -274,9 +342,14 @@
results_dict[ 'ok' ] = False
results_dict[ 'error_message' ] += error_message
try:
- metadata_util.set_repository_metadata_due_to_new_tip( trans,
- repository,
- content_alert_str=content_alert_str )
+ status, error_message = metadata_util.set_repository_metadata_due_to_new_tip( app,
+ host,
+ user,
+ repository,
+ content_alert_str=content_alert_str )
+ if error_message:
+ results_dict[ 'ok' ] = False
+ results_dict[ 'error_message' ] += error_message
except Exception, e:
log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" % \
( str( repository.name ), str( archive_file_name ), str( e ) ) )
@@ -286,7 +359,7 @@
results_dict[ 'error_message' ] += error_message
return results_dict
-def upload_capsule( trans, **kwd ):
+def upload_capsule( **kwd ):
"""Upload and prepare an exported repository capsule for validation."""
file_data = kwd.get( 'file_data', '' )
url = kwd.get( 'url', '' )
@@ -349,7 +422,7 @@
return return_dict
return return_dict
-def validate_capsule( trans, **kwd ):
+def validate_capsule( **kwd ):
"""Inspect the uploaded capsule's manifest and its contained files to ensure it is a valid repository capsule."""
capsule_dict = {}
capsule_dict.update( kwd )
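A note on the refactoring pattern running through these hunks: functions that previously took the web transaction object now take the application (plus host and user where needed) and derive the SQLAlchemy session themselves via app.model.context.current, so they can be called from contexts that have no web transaction. A minimal runnable sketch of the new shape, using invented _Fake* classes as stand-ins for the real Galaxy objects:

class _FakeSession( object ):
    # Hypothetical in-memory stand-in for the SQLAlchemy scoped session.
    def __init__( self ):
        self.pending = []
    def add( self, record ):
        self.pending.append( record )
    def flush( self ):
        print 'flushed %d record(s)' % len( self.pending )
        self.pending = []

class _FakeApp( object ):
    # Stand-in for the Galaxy application object that replaces trans.
    def __init__( self ):
        self.model = type( 'Model', ( object, ), {} )()
        self.model.context = type( 'Context', ( object, ), {} )()
        self.model.context.current = _FakeSession()

def save_record( app, record ):
    # New style: derive the session from the app instead of trans.sa_session.
    sa_session = app.model.context.current
    sa_session.add( record )
    sa_session.flush()

save_record( _FakeApp(), dict( name='bwa' ) )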
diff -r 7af8ed727418f8eac8499f20853890ab8731101e -r 49b11be8c5c4f1e51eb3e1f10f4b20f9cd77ab8e lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -40,13 +40,14 @@
rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME,
REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]
-def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
+def add_tool_versions( app, id, repository_metadata, changeset_revisions ):
# Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata.
+ sa_session = app.model.context.current
metadata = repository_metadata.metadata
tool_versions_dict = {}
for tool_dict in metadata.get( 'tools', [] ):
# We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans.app,
+ parent_id = get_parent_id( app,
id,
tool_dict[ 'id' ],
tool_dict[ 'version' ],
@@ -55,8 +56,8 @@
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ sa_session.add( repository_metadata )
+ sa_session.flush()
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
@@ -72,10 +73,15 @@
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
-def compare_changeset_revisions( trans, ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
- """Compare the contents of two changeset revisions to determine if a new repository metadata revision should be created."""
- # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
- # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
+def compare_changeset_revisions( app, ancestor_changeset_revision, ancestor_metadata_dict,
+ current_changeset_revision, current_metadata_dict ):
+ """
+ Compare the contents of two changeset revisions to determine if a new repository
+ metadata revision should be created.
+ """
+ # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict.
+ # This changeset_revision is an ancestor of current_changeset_revision which is associated
+ # with current_metadata_dict. A new repository_metadata record will be created only
# when this method returns the constant value NOT_EQUAL_AND_NOT_SUBSET.
ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
@@ -107,14 +113,16 @@
no_data_manager = not ancestor_data_manager and not current_data_manager
if no_datatypes and no_readme_files and no_repository_dependencies and no_tool_dependencies and no_tools and no_workflows and no_data_manager:
return NO_METADATA
- # Uncomment the following if we decide that README files should affect how installable repository revisions are defined. See the NOTE in the
- # compare_readme_files() method.
+ # Uncomment the following if we decide that README files should affect how installable
+ # repository revisions are defined. See the NOTE in the compare_readme_files() method.
# readme_file_comparision = compare_readme_files( ancestor_readme_files, current_readme_files )
- repository_dependency_comparison = compare_repository_dependencies( trans, ancestor_repository_dependencies, current_repository_dependencies )
- tool_dependency_comparison = compare_tool_dependencies( trans, ancestor_tool_dependencies, current_tool_dependencies )
- workflow_comparison = compare_workflows( trans, ancestor_workflows, current_workflows )
- datatype_comparison = compare_datatypes( trans, ancestor_datatypes, current_datatypes )
- data_manager_comparison = compare_data_manager( trans, ancestor_data_manager, current_data_manager )
+ repository_dependency_comparison = compare_repository_dependencies( app,
+ ancestor_repository_dependencies,
+ current_repository_dependencies )
+ tool_dependency_comparison = compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies )
+ workflow_comparison = compare_workflows( ancestor_workflows, current_workflows )
+ datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
+ data_manager_comparison = compare_data_manager( ancestor_data_manager, current_data_manager )
# Handle case where all metadata is the same.
if ancestor_guids == current_guids and \
repository_dependency_comparison == EQUAL and \
@@ -130,7 +138,11 @@
workflow_dependency_is_subset = workflow_comparison in SUBSET_VALUES
datatype_is_subset = datatype_comparison in SUBSET_VALUES
datamanager_is_subset = data_manager_comparison in SUBSET_VALUES
- if repository_dependency_is_subset and tool_dependency_is_subset and workflow_dependency_is_subset and datatype_is_subset and datamanager_is_subset:
+ if repository_dependency_is_subset and \
+ tool_dependency_is_subset and \
+ workflow_dependency_is_subset and \
+ datatype_is_subset and \
+ datamanager_is_subset:
is_subset = True
for guid in ancestor_guids:
if guid not in current_guids:
@@ -140,11 +152,16 @@
return SUBSET
return NOT_EQUAL_AND_NOT_SUBSET
-def compare_data_manager( trans, ancestor_metadata, current_metadata ):
+def compare_data_manager( ancestor_metadata, current_metadata ):
"""Determine if ancestor_metadata is the same as or a subset of current_metadata for data_managers."""
def __data_manager_dict_to_tuple_list( metadata_dict ):
# we do not check tool_guid or tool conf file name
- return set( sorted( [ ( name, tuple( sorted( value.get( 'data_tables', [] ) ) ), value.get( 'guid' ), value.get( 'version' ), value.get( 'name' ), value.get( 'id' ) ) for name, value in metadata_dict.iteritems() ] ) )
+ return set( sorted( [ ( name,
+ tuple( sorted( value.get( 'data_tables', [] ) ) ),
+ value.get( 'guid' ),
+ value.get( 'version' ),
+ value.get( 'name' ),
+ value.get( 'id' ) ) for name, value in metadata_dict.iteritems() ] ) )
# only compare valid entries, any invalid entries are ignored
ancestor_metadata = __data_manager_dict_to_tuple_list( ancestor_metadata.get( 'data_managers', {} ) )
current_metadata = __data_manager_dict_to_tuple_list( current_metadata.get( 'data_managers', {} ) )
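The data manager comparison above reduces each side to a set of hashable tuples, after which equality and subset checks become plain set operations. A toy illustration of the idea; the constant values and tuple contents below are simplified assumptions, not the real metadata shapes:

EQUAL = 'equal'
SUBSET = 'subset'
NOT_EQUAL_AND_NOT_SUBSET = 'not equal and not subset'

def compare_tuple_sets( ancestor, current ):
    # Two set operators cover the three possible outcomes.
    if ancestor == current:
        return EQUAL
    if ancestor.issubset( current ):
        return SUBSET
    return NOT_EQUAL_AND_NOT_SUBSET

ancestor = set( [ ( 'all_fasta', ( 'fasta', ), '1.0' ) ] )
current = set( [ ( 'all_fasta', ( 'fasta', ), '1.0' ),
                 ( 'bwa_indexes', ( 'fasta', ), '1.0' ) ] )
assert compare_tuple_sets( ancestor, current ) == SUBSET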
@@ -155,7 +172,7 @@
return SUBSET
return NOT_EQUAL_AND_NOT_SUBSET
-def compare_datatypes( trans, ancestor_datatypes, current_datatypes ):
+def compare_datatypes( ancestor_datatypes, current_datatypes ):
"""Determine if ancestor_datatypes is the same as or a subset of current_datatypes."""
# Each datatype dict looks something like: {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
if len( ancestor_datatypes ) <= len( current_datatypes ):
@@ -199,8 +216,10 @@
return SUBSET
return NOT_EQUAL_AND_NOT_SUBSET
-def compare_repository_dependencies( trans, ancestor_repository_dependencies, current_repository_dependencies ):
- """Determine if ancestor_repository_dependencies is the same as or a subset of current_repository_dependencies."""
+def compare_repository_dependencies( app, ancestor_repository_dependencies, current_repository_dependencies ):
+ """
+ Determine if ancestor_repository_dependencies is the same as or a subset of current_repository_dependencies.
+ """
# The list of repository_dependencies looks something like:
# [["http://localhost:9009", "emboss_datatypes", "test", "ab03a2a5f407", "False", "False"]].
# Create a string from each tuple in the list for easier comparison.
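Since each dependency definition mixes a URL, two names, a revision, and two stringified booleans, flattening every tuple into one canonical string makes membership checks straightforward. A sketch of that normalization; the helper name and join format are invented for illustration:

def rd_tup_to_string( rd_tup ):
    # Field order follows the example above:
    # [ tool_shed, name, owner, changeset_revision, prior_installation_required,
    #   only_if_compiling_contained_td ]
    return '__'.join( str( item ) for item in rd_tup )

rd_tup = [ 'http://localhost:9009', 'emboss_datatypes', 'test',
           'ab03a2a5f407', 'False', 'False' ]
print rd_tup_to_string( rd_tup )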
@@ -223,9 +242,12 @@
found_in_current = True
break
if not found_in_current:
- # In some cases, the only difference between a dependency definition in the lists is the changeset_revision value. We'll
- # check to see if this is the case, and if the defined dependency is a repository that has metadata set only on its tip.
- if not different_revision_defines_tip_only_repository_dependency( trans, ancestor_tup, current_repository_dependencies ):
+ # In some cases, the only difference between a dependency definition in the lists
+ # is the changeset_revision value. We'll check to see if this is the case, and if
+ # the defined dependency is a repository that has metadata set only on its tip.
+ if not different_revision_defines_tip_only_repository_dependency( app,
+ ancestor_tup,
+ current_repository_dependencies ):
return NOT_EQUAL_AND_NOT_SUBSET
return SUBSET
if len( ancestor_repository_dependencies ) == len( current_repository_dependencies ):
@@ -234,16 +256,17 @@
return SUBSET
return NOT_EQUAL_AND_NOT_SUBSET
-def compare_tool_dependencies( trans, ancestor_tool_dependencies, current_tool_dependencies ):
+def compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies ):
"""Determine if ancestor_tool_dependencies is the same as or a subset of current_tool_dependencies."""
# The tool_dependencies dictionary looks something like:
# {'bwa/0.5.9': {'readme': 'some string', 'version': '0.5.9', 'type': 'package', 'name': 'bwa'}}
if len( ancestor_tool_dependencies ) <= len( current_tool_dependencies ):
for ancestor_td_key, ancestor_requirements_dict in ancestor_tool_dependencies.items():
if ancestor_td_key in current_tool_dependencies:
- # The only values that could have changed between the 2 dictionaries are the "readme" or "type" values. Changing the readme value
- # makes no difference. Changing the type will change the installation process, but for now we'll assume it was a typo, so new metadata
- # shouldn't be generated.
+ # The only values that could have changed between the 2 dictionaries are the
+ # "readme" or "type" values. Changing the readme value makes no difference.
+ # Changing the type will change the installation process, but for now we'll
+ # assume it was a typo, so new metadata shouldn't be generated.
continue
else:
return NOT_EQUAL_AND_NOT_SUBSET
@@ -254,8 +277,11 @@
return SUBSET
return NOT_EQUAL_AND_NOT_SUBSET
-def compare_workflows( trans, ancestor_workflows, current_workflows ):
- """Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows is a subset of current_workflows."""
+def compare_workflows( ancestor_workflows, current_workflows ):
+ """
+ Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows
+ is a subset of current_workflows.
+ """
if len( ancestor_workflows ) <= len( current_workflows ):
for ancestor_workflow_tup in ancestor_workflows:
# ancestor_workflows is a list of tuples where each contained tuple is
@@ -267,8 +293,10 @@
found_in_current = False
for current_workflow_tup in current_workflows:
current_workflow_dict = current_workflow_tup[1]
- # Assume that if the name and number of steps are euqal, then the workflows are the same. Of course, this may not be true...
- if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
+ # Assume that if the name and number of steps are equal, then the workflows
+ # are the same. Of course, this may not be true...
+ if current_workflow_dict[ 'name' ] == ancestor_workflow_name and \
+ len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
found_in_current = True
break
if not found_in_current:
@@ -279,8 +307,9 @@
return SUBSET
return NOT_EQUAL_AND_NOT_SUBSET
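The workflow comparison leans on a deliberately loose heuristic, as the comment above concedes: two workflows are treated as the same if their names and step counts match. A toy version of that check:

def workflows_match( ancestor_workflow_dict, current_workflow_dict ):
    # Same name plus the same number of steps counts as the same workflow.
    return ( ancestor_workflow_dict[ 'name' ] == current_workflow_dict[ 'name' ] and
             len( ancestor_workflow_dict[ 'steps' ] ) == len( current_workflow_dict[ 'steps' ] ) )

ancestor = dict( name='qc', steps={ '0': {}, '1': {} } )
current = dict( name='qc', steps={ '0': {}, '1': {} } )
assert workflows_match( ancestor, current )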
-def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+def create_or_update_repository_metadata( app, id, repository, changeset_revision, metadata_dict ):
"""Create or update a repository_metadatqa record in the tool shed."""
+ sa_session = app.model.context.current
has_repository_dependencies = False
has_repository_dependencies_only_if_compiling_contained_td = False
includes_datatypes = False
@@ -309,7 +338,7 @@
downloadable = True
else:
downloadable = False
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, id, changeset_revision )
if repository_metadata:
# A repository metadata record already exists with the received changeset_revision, so we don't need to
# check the skip_tool_test table.
@@ -325,15 +354,15 @@
# No repository_metadata record exists for the received changeset_revision, so we may need to update the
# skip_tool_test table.
check_skip_tool_test = True
- repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
- changeset_revision=changeset_revision,
- metadata=metadata_dict,
- downloadable=downloadable,
- has_repository_dependencies=has_repository_dependencies,
- includes_datatypes=includes_datatypes,
- includes_tools=includes_tools,
- includes_tool_dependencies=includes_tool_dependencies,
- includes_workflows=includes_workflows )
+ repository_metadata = app.model.RepositoryMetadata( repository_id=repository.id,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ downloadable=downloadable,
+ has_repository_dependencies=has_repository_dependencies,
+ includes_datatypes=includes_datatypes,
+ includes_tools=includes_tools,
+ includes_tool_dependencies=includes_tool_dependencies,
+ includes_workflows=includes_workflows )
# Always set the default values for the following columns. When resetting all metadata on a repository
# this will reset the values.
repository_metadata.tools_functionally_correct = False
@@ -342,20 +371,20 @@
repository_metadata.do_not_test = False
repository_metadata.time_last_tested = None
repository_metadata.tool_test_results = None
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ sa_session.add( repository_metadata )
+ sa_session.flush()
if check_skip_tool_test:
# Since we created a new repository_metadata record, we may need to update the skip_tool_test table to point to it. Inspect each
# changeset revision in the received repository's changelog (up to the received changeset revision) to see if it is contained in the
# skip_tool_test table. If it is, but is not associated with a repository_metadata record, reset that skip_tool_test record to the
# newly created repository_metadata record.
- repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+ repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
- skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( trans.app, changeset_hash )
+ skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( app, changeset_hash )
if skip_tool_test:
# We found a skip_tool_test record associated with the changeset_revision, so see if it has a valid repository_revision.
- repository_revision = get_repository_metadata_by_id( trans.app, trans.security.encode_id( repository_metadata.id ) )
+ repository_revision = get_repository_metadata_by_id( app, app.security.encode_id( repository_metadata.id ) )
if repository_revision:
# The skip_tool_test record is associated with a valid repository_metadata record, so proceed.
continue
@@ -364,14 +393,14 @@
# require updating, so we won't break here, we'll continue to inspect the rest of the changelog up to the received
# changeset_revision.
skip_tool_test.repository_metadata_id = repository_metadata.id
- trans.sa_session.add( skip_tool_test )
- trans.sa_session.flush()
+ sa_session.add( skip_tool_test )
+ sa_session.flush()
if changeset_hash == changeset_revision:
# Proceed no further than the received changeset_revision.
break
return repository_metadata
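The skip_tool_test repair above walks the changelog from the oldest changeset up to the received revision, re-pointing any skip_tool_test record that lacks a valid repository_metadata row at the newly created one. A schematic of that walk, with plain dicts standing in for the database records:

def repoint_orphaned_records( changelog, target_revision, skip_records, new_metadata_id ):
    # skip_records maps changeset hash -> skip_tool_test record (a dict here).
    for changeset_hash in changelog:
        record = skip_records.get( changeset_hash )
        if record is not None and record.get( 'repository_metadata_id' ) is None:
            record[ 'repository_metadata_id' ] = new_metadata_id
        if changeset_hash == target_revision:
            # Proceed no further than the received changeset_revision.
            break

changelog = [ 'aaa111', 'bbb222', 'ccc333' ]
skip_records = { 'bbb222': dict( repository_metadata_id=None ) }
repoint_orphaned_records( changelog, 'bbb222', skip_records, 42 )
assert skip_records[ 'bbb222' ][ 'repository_metadata_id' ] == 42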
-def different_revision_defines_tip_only_repository_dependency( trans, rd_tup, repository_dependencies ):
+def different_revision_defines_tip_only_repository_dependency( app, rd_tup, repository_dependencies ):
"""
Determine if the only difference between rd_tup and a dependency definition in the list of
repository_dependencies is the changeset_revision value.
@@ -386,8 +415,8 @@
cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url( tool_shed )
if cleaned_rd_tool_shed == cleaned_tool_shed and rd_name == name and rd_owner == owner:
# Determine if the repository represented by the dependency tuple is an instance of the repository type TipOnly.
- required_repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- repository_type_class = trans.app.repository_types_registry.get_class_by_label( required_repository.type )
+ required_repository = suc.get_repository_by_name_and_owner( app, name, owner )
+ repository_type_class = app.repository_types_registry.get_class_by_label( required_repository.type )
return isinstance( repository_type_class, TipOnly )
return False
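The tip-only escape hatch asks whether two dependency definitions differ only in their changeset_revision value; if the target repository keeps metadata only on its tip, that difference is harmless. A simplified predicate capturing the tuple comparison, with the field order assumed from the earlier example and the repository-type lookup omitted:

def differs_only_in_revision( rd_tup, other_tup ):
    # [ tool_shed, name, owner, changeset_revision, ... ]: identical except
    # for the revision in position 3.
    return rd_tup[ :3 ] == other_tup[ :3 ] and rd_tup[ 3 ] != other_tup[ 3 ]

ancestor_tup = [ 'http://localhost:9009', 'emboss_datatypes', 'test', 'ab03a2a5f407' ]
current_tup = [ 'http://localhost:9009', 'emboss_datatypes', 'test', 'c5ca9975753e' ]
assert differs_only_in_revision( ancestor_tup, current_tup )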
@@ -1045,20 +1074,17 @@
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
-def get_latest_repository_metadata( trans, decoded_repository_id, downloadable=False ):
+def get_latest_repository_metadata( app, decoded_repository_id, downloadable=False ):
"""Get last metadata defined for a specified repository from the database."""
- repository = trans.sa_session.query( trans.model.Repository ).get( decoded_repository_id )
- repo = hg_util.get_repo_for_repository( trans.app, repository=repository, repo_path=None, create=False )
+ sa_session = app.model.context.current
+ repository = sa_session.query( app.model.Repository ).get( decoded_repository_id )
+ repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
if downloadable:
- changeset_revision = suc.get_latest_downloadable_changeset_revision( trans.app,
- repository,
- repo )
+ changeset_revision = suc.get_latest_downloadable_changeset_revision( app, repository, repo )
else:
- changeset_revision = suc.get_latest_changeset_revision( trans.app,
- repository,
- repo )
- return suc.get_repository_metadata_by_changeset_revision( trans.app,
- trans.security.encode_id( repository.id ),
+ changeset_revision = suc.get_latest_changeset_revision( app, repository, repo )
+ return suc.get_repository_metadata_by_changeset_revision( app,
+ app.security.encode_id( repository.id ),
changeset_revision )
def get_parent_id( app, id, old_id, version, guid, changeset_revisions ):
@@ -1377,10 +1403,11 @@
return repository_metadata.malicious
return False
-def new_datatypes_metadata_required( trans, repository_metadata, metadata_dict ):
+def new_datatypes_metadata_required( repository_metadata, metadata_dict ):
"""
- Compare the last saved metadata for each datatype in the repository with the new metadata in metadata_dict to determine if a new
- repository_metadata table record is required or if the last saved metadata record can be updated for datatypes instead.
+ Compare the last saved metadata for each datatype in the repository with the new metadata
+ in metadata_dict to determine if a new repository_metadata table record is required or if
+ the last saved metadata record can be updated for datatypes instead.
"""
# Datatypes are stored in metadata as a list of dictionaries that looks like:
# [{'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb'}]
@@ -1392,7 +1419,7 @@
if 'datatypes' in metadata:
ancestor_datatypes = metadata[ 'datatypes' ]
# The saved metadata must be a subset of the new metadata.
- datatype_comparison = compare_datatypes( trans, ancestor_datatypes, current_datatypes )
+ datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
if datatype_comparison == NOT_EQUAL_AND_NOT_SUBSET:
return True
else:
@@ -1409,23 +1436,28 @@
# The received metadata_dict includes no metadata for datatypes, so a new repository_metadata table record is not needed.
return False
-def new_metadata_required_for_utilities( trans, repository, new_tip_metadata_dict ):
+def new_metadata_required_for_utilities( app, repository, new_tip_metadata_dict ):
"""
- Galaxy utilities currently consist of datatypes, repository_dependency definitions, tools, tool_dependency definitions and exported
- Galaxy workflows. This method compares the last stored repository_metadata record associated with the received repository against the
- contents of the received new_tip_metadata_dict and returns True or False for the union set of Galaxy utilities contained in both metadata
- dictionaries. The metadata contained in new_tip_metadata_dict may not be a subset of that contained in the last stored repository_metadata
- record associated with the received repository because one or more Galaxy utilities may have been deleted from the repository in the new tip.
+ Galaxy utilities currently consist of datatypes, repository_dependency definitions,
+ tools, tool_dependency definitions and exported Galaxy workflows. This method compares
+ the last stored repository_metadata record associated with the received repository against
+ the contents of the received new_tip_metadata_dict and returns True or False for the union
+ set of Galaxy utilities contained in both metadata dictionaries. The metadata contained
+ in new_tip_metadata_dict may not be a subset of that contained in the last stored
+ repository_metadata record associated with the received repository because one or more
+ Galaxy utilities may have been deleted from the repository in the new tip.
"""
- repository_metadata = get_latest_repository_metadata( trans, repository.id, downloadable=False )
- datatypes_required = new_datatypes_metadata_required( trans, repository_metadata, new_tip_metadata_dict )
- # Uncomment the following if we decide that README files should affect how installable repository revisions are defined. See the NOTE in the
- # compare_readme_files() method.
- # readme_files_required = new_readme_files_metadata_required( trans, repository_metadata, new_tip_metadata_dict )
- repository_dependencies_required = new_repository_dependency_metadata_required( trans, repository_metadata, new_tip_metadata_dict )
- tools_required = new_tool_metadata_required( trans, repository_metadata, new_tip_metadata_dict )
- tool_dependencies_required = new_tool_dependency_metadata_required( trans, repository_metadata, new_tip_metadata_dict )
- workflows_required = new_workflow_metadata_required( trans, repository_metadata, new_tip_metadata_dict )
+ repository_metadata = get_latest_repository_metadata( app, repository.id, downloadable=False )
+ datatypes_required = new_datatypes_metadata_required( repository_metadata, new_tip_metadata_dict )
+ # Uncomment the following if we decide that README files should affect how installable
+ # repository revisions are defined. See the NOTE in the compare_readme_files() method.
+ # readme_files_required = new_readme_files_metadata_required( repository_metadata, new_tip_metadata_dict )
+ repository_dependencies_required = new_repository_dependency_metadata_required( app,
+ repository_metadata,
+ new_tip_metadata_dict )
+ tools_required = new_tool_metadata_required( repository_metadata, new_tip_metadata_dict )
+ tool_dependencies_required = new_tool_dependency_metadata_required( repository_metadata, new_tip_metadata_dict )
+ workflows_required = new_workflow_metadata_required( repository_metadata, new_tip_metadata_dict )
if datatypes_required or repository_dependencies_required or tools_required or tool_dependencies_required or workflows_required:
return True
return False
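Deciding whether the new tip needs its own repository_metadata row is ultimately a disjunction over the per-utility checks, mirroring the closing lines of new_metadata_required_for_utilities() above:

def new_metadata_required( datatypes_required, repository_dependencies_required,
                           tools_required, tool_dependencies_required, workflows_required ):
    # Any single utility comparison failing the subset test forces a new record.
    return ( datatypes_required or repository_dependencies_required or tools_required or
             tool_dependencies_required or workflows_required )

assert new_metadata_required( False, True, False, False, False )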
@@ -1462,7 +1494,7 @@
# The received metadata_dict includes no metadata for readme_files, so a new repository_metadata table record is not needed.
return False
-def new_repository_dependency_metadata_required( trans, repository_metadata, metadata_dict ):
+def new_repository_dependency_metadata_required( app, repository_metadata, metadata_dict ):
"""
Compare the last saved metadata for each repository dependency in the repository with the new metadata in metadata_dict to determine if a new
repository_metadata table record is required or if the last saved metadata record can be updated for repository_dependencies instead.
@@ -1474,24 +1506,29 @@
new_repository_dependencies_metadata = metadata_dict.get( 'repository_dependencies', None )
if new_repository_dependencies_metadata:
new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
- # TODO: We used to include the following here to handle the case where repository dependency definitions were deleted. However,
- # this erroneously returned True in cases where is should not have done so. This usually occurred where multiple single files
- # were uploaded when a single tarball should have been. We need to implement support for handling deleted repository dependency
- # definitions so that we can guarantee reproducibility, but we need to do it in a way that is better than the following.
+ # TODO: We used to include the following here to handle the case where repository
+ # dependency definitions were deleted. However, this erroneously returned True in
+ # cases where it should not have done so. This usually occurred when multiple single
+ # files were uploaded when a single tarball should have been. We need to implement
+ # support for handling deleted repository dependency definitions so that we can guarantee
+ # reproducibility, but we need to do it in a way that is better than the following.
# for new_repository_dependency in new_repository_dependencies:
# if new_repository_dependency not in saved_repository_dependencies:
# return True
# The saved metadata must be a subset of the new metadata.
for saved_repository_dependency in saved_repository_dependencies:
if saved_repository_dependency not in new_repository_dependencies:
- # In some cases, the only difference between a dependency definition in the lists is the changeset_revision value. We'll
- # check to see if this is the case, and if the defined dependency is a repository that has metadata set only on its tip.
- if not different_revision_defines_tip_only_repository_dependency( trans, saved_repository_dependency, new_repository_dependencies ):
+ # In some cases, the only difference between a dependency definition in the lists
+ # is the changeset_revision value. We'll check to see if this is the case, and if
+ # the defined dependency is a repository that has metadata set only on its tip.
+ if not different_revision_defines_tip_only_repository_dependency( app,
+ saved_repository_dependency,
+ new_repository_dependencies ):
return True
return False
else:
- # The repository_dependencies.xml file must have been deleted, so create a new repository_metadata record so we always have
- # access to the deleted file.
+ # The repository_dependencies.xml file must have been deleted, so create a new
+ # repository_metadata record so we always have access to the deleted file.
return True
else:
return False
@@ -1500,13 +1537,15 @@
# There is no saved repository metadata, so we need to create a new repository_metadata record.
return True
else:
- # The received metadata_dict includes no metadata for repository dependencies, so a new repository_metadata record is not needed.
+ # The received metadata_dict includes no metadata for repository dependencies, so
+ # a new repository_metadata record is not needed.
return False
-def new_tool_dependency_metadata_required( trans, repository_metadata, metadata_dict ):
+def new_tool_dependency_metadata_required( repository_metadata, metadata_dict ):
"""
- Compare the last saved metadata for each tool dependency in the repository with the new metadata in metadata_dict to determine if a new
- repository_metadata table record is required or if the last saved metadata record can be updated for tool_dependencies instead.
+ Compare the last saved metadata for each tool dependency in the repository with the new
+ metadata in metadata_dict to determine if a new repository_metadata table record is required
+ or if the last saved metadata record can be updated for tool_dependencies instead.
"""
if repository_metadata:
metadata = repository_metadata.metadata
@@ -1545,10 +1584,11 @@
# The received metadata_dict includes no metadata for tool dependencies, so a new repository_metadata record is not needed.
return False
-def new_tool_metadata_required( trans, repository_metadata, metadata_dict ):
+def new_tool_metadata_required( repository_metadata, metadata_dict ):
"""
- Compare the last saved metadata for each tool in the repository with the new metadata in metadata_dict to determine if a new repository_metadata
- table record is required, or if the last saved metadata record can be updated instead.
+ Compare the last saved metadata for each tool in the repository with the new metadata in
+ metadata_dict to determine if a new repository_metadata table record is required, or if
+ the last saved metadata record can be updated instead.
"""
if 'tools' in metadata_dict:
if repository_metadata:
@@ -1587,10 +1627,11 @@
# The received metadata_dict includes no metadata for tools, so a new repository_metadata table record is not needed.
return False
-def new_workflow_metadata_required( trans, repository_metadata, metadata_dict ):
+def new_workflow_metadata_required( repository_metadata, metadata_dict ):
"""
- Currently everything about an exported workflow except the name is hard-coded, so there's no real way to differentiate versions of
- exported workflows. If this changes at some future time, this method should be enhanced accordingly.
+ Currently everything about an exported workflow except the name is hard-coded, so
+ there's no real way to differentiate versions of exported workflows. If this changes
+ at some future time, this method should be enhanced accordingly.
"""
if 'workflows' in metadata_dict:
if repository_metadata:
@@ -1819,7 +1860,7 @@
# SUBSET - ancestor metadata is a subset of current metadata, so continue from current
# NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current
# metadata, so persist ancestor metadata.
- comparison = compare_changeset_revisions( trans,
+ comparison = compare_changeset_revisions( trans.app,
ancestor_changeset_revision,
ancestor_metadata_dict,
current_changeset_revision,
@@ -1830,7 +1871,7 @@
elif comparison == NOT_EQUAL_AND_NOT_SUBSET:
metadata_changeset_revision = ancestor_changeset_revision
metadata_dict = ancestor_metadata_dict
- repository_metadata = create_or_update_repository_metadata( trans,
+ repository_metadata = create_or_update_repository_metadata( trans.app,
id,
repository,
metadata_changeset_revision,
@@ -1846,7 +1887,7 @@
metadata_changeset_revision = current_changeset_revision
metadata_dict = current_metadata_dict
# We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans,
+ repository_metadata = create_or_update_repository_metadata( trans.app,
id,
repository,
metadata_changeset_revision,
@@ -1858,7 +1899,7 @@
# We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
if not ctx.children():
# We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans,
+ repository_metadata = create_or_update_repository_metadata( trans.app,
id,
repository,
metadata_changeset_revision,
@@ -1901,7 +1942,7 @@
owner = str( repository.owner )
invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans.app, repository_id )
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, None, as_html=False )
log.debug( message )
unsuccessful_count += 1
else:
@@ -1947,20 +1988,21 @@
return False
return True
-def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
+def set_repository_metadata( app, host, user, repository, content_alert_str='', **kwd ):
"""
- Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the
- repository owner that the changeset has problems.
+ Set metadata using the repository's current disk files, returning specific error
+ messages (if any) to alert the repository owner that the changeset has problems.
"""
+ sa_session = app.model.context.current
message = ''
status = 'done'
- encoded_id = trans.security.encode_id( repository.id )
- repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
- repo_dir = repository.repo_path( trans.app )
- repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ encoded_id = app.security.encode_id( repository.id )
+ repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository )
+ repo_dir = repository.repo_path( app )
+ repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repo_dir, create=False )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=app,
repository=repository,
- changeset_revision=repository.tip( trans.app ),
+ changeset_revision=repository.tip( app ),
repository_clone_url=repository_clone_url,
relative_install_dir=repo_dir,
repository_files_dir=None,
@@ -1969,25 +2011,25 @@
persist=False )
if metadata_dict:
repository_metadata = None
- repository_type_class = trans.app.repository_types_registry.get_class_by_label( repository.type )
+ repository_type_class = app.repository_types_registry.get_class_by_label( repository.type )
tip_only = isinstance( repository_type_class, TipOnly )
- if not tip_only and new_metadata_required_for_utilities( trans, repository, metadata_dict ):
+ if not tip_only and new_metadata_required_for_utilities( app, repository, metadata_dict ):
# Create a new repository_metadata table row.
- repository_metadata = create_or_update_repository_metadata( trans,
+ repository_metadata = create_or_update_repository_metadata( app,
encoded_id,
repository,
- repository.tip( trans.app ),
+ repository.tip( app ),
metadata_dict )
# If this is the first record stored for this repository, see if we need to send any email alerts.
if len( repository.downloadable_revisions ) == 1:
- suc.handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
+ suc.handle_email_alerts( app, host, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
else:
# Update the latest stored repository metadata with the contents and attributes of metadata_dict.
- repository_metadata = get_latest_repository_metadata( trans, repository.id, downloadable=False )
+ repository_metadata = get_latest_repository_metadata( app, repository.id, downloadable=False )
if repository_metadata:
downloadable = is_downloadable( metadata_dict )
# Update the last saved repository_metadata table row.
- repository_metadata.changeset_revision = repository.tip( trans.app )
+ repository_metadata.changeset_revision = repository.tip( app )
repository_metadata.metadata = metadata_dict
repository_metadata.downloadable = downloadable
if 'datatypes' in metadata_dict:
@@ -2018,14 +2060,14 @@
repository_metadata.tools_functionally_correct = False
repository_metadata.missing_test_components = False
repository_metadata.tool_test_results = None
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ sa_session.add( repository_metadata )
+ sa_session.flush()
else:
# There are no metadata records associated with the repository.
- repository_metadata = create_or_update_repository_metadata( trans,
+ repository_metadata = create_or_update_repository_metadata( app,
encoded_id,
repository,
- repository.tip( trans.app ),
+ repository.tip( app ),
metadata_dict )
if 'tools' in metadata_dict and repository_metadata and status != 'error':
# Set tool versions on the new downloadable change set. The order of the list of changesets is
@@ -2033,41 +2075,32 @@
changeset_revisions = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
- if suc.get_repository_metadata_by_changeset_revision( trans.app, encoded_id, changeset_revision ):
+ if suc.get_repository_metadata_by_changeset_revision( app, encoded_id, changeset_revision ):
changeset_revisions.append( changeset_revision )
- add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
+ add_tool_versions( app, encoded_id, repository_metadata, changeset_revisions )
elif len( repo ) == 1 and not invalid_file_tups:
- message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % str( repository.tip( trans.app ) )
+ message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % str( repository.tip( app ) )
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict )
+ message = tool_util.generate_message_for_invalid_tools( app, invalid_file_tups, repository, metadata_dict )
status = 'error'
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( trans.app )
+ tool_util.reset_tool_data_tables( app )
return message, status
-def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
+def set_repository_metadata_due_to_new_tip( app, host, user, repository, content_alert_str=None, **kwd ):
"""Set metadata on the repository tip in the tool shed."""
# This method is not called from Galaxy.
- error_message, status = set_repository_metadata( trans,
- repository,
- content_alert_str=content_alert_str,
- **kwd )
- if error_message:
- # FIXME: This probably should not redirect since this method is called from the upload controller as well
- # as the repository controller.
- # If there is an error, display it.
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='manage_repository',
- id=trans.security.encode_id( repository.id ),
- message=error_message,
- status='error' ) )
+ error_message, status = set_repository_metadata( app, host, user, repository, content_alert_str=content_alert_str, **kwd )
+ return status, error_message
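With the redirect removed, set_repository_metadata_due_to_new_tip() now returns a ( status, error_message ) pair, and callers fold any error into their own results, as import_repository_archive() does earlier in this changeset. A minimal caller-side sketch, with a stub standing in for the real function:

def set_metadata_stub():
    # Stand-in for set_repository_metadata_due_to_new_tip(); returns the new
    # ( status, error_message ) contract.
    return 'error', 'changeset contains an invalid tool config; '

results_dict = dict( ok=True, error_message='' )
status, error_message = set_metadata_stub()
if error_message:
    results_dict[ 'ok' ] = False
    results_dict[ 'error_message' ] += error_message
print results_dict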
def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ):
"""
- Update an exsiting tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates
- to an installed tool shed repository. The original_dependency_dict is a single tool dependency definition, an example of which is::
+ Update an existing tool dependency whose definition was updated in a change set
+ pulled by a Galaxy administrator when getting updates to an installed tool shed
+ repository. The original_dependency_dict is a single tool dependency definition,
+ an example of which is::
{"name": "bwa",
"readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fc2eb78580b9/
Changeset: fc2eb78580b9
User: guerler
Date: 2014-06-18 12:53:16
Summary: Charts: Pad y axis
Affected #: 1 file
diff -r 9896943ec1d8ef895b973d496ff6e124bbd46f06 -r fc2eb78580b90809ead1ecde5efa68899205b359 config/plugins/visualizations/charts/static/charts/jqplot/bar/wrapper.js
--- a/config/plugins/visualizations/charts/static/charts/jqplot/bar/wrapper.js
+++ b/config/plugins/visualizations/charts/static/charts/jqplot/bar/wrapper.js
@@ -25,6 +25,9 @@
axes: {
xaxis: {
min : -1
+ },
+ yaxis: {
+ pad : 1.2
}
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/