1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/21babc3112aa/
Changeset: 21babc3112aa
User: natefoo
Date: 2014-06-19 19:22:26
Summary: Fix the output_size and runtime job limits. Heads up to PBS runner users: I did not test this change with the PBS runner.
Affected #: 6 files
diff -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 -r 21babc3112aa28761425367d3a5abfbbd8ce80e9 job_conf.xml.sample_advanced
--- a/job_conf.xml.sample_advanced
+++ b/job_conf.xml.sample_advanced
@@ -412,5 +412,12 @@
will be terminated by Galaxy.
-->
<limit type="walltime">24:00:00</limit>
+ <!-- output_size:
+ Size that any defined tool output can grow to before the job
+ will be terminated. This does not include temporary files
+ created by the job. Format is flexible, e.g.:
+ '10GB' = '10g' = '10240 Mb' = '10737418240'
+ -->
+ <limit type="output_size">10GB</limit>
</limits>
</job_conf>
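The new output_size value is parsed by util.size_to_bytes (see the lib/galaxy/jobs/__init__.py hunk below); its implementation is not part of this diff. A minimal standalone sketch of that kind of parser, assuming it accepts the suffix styles listed in the sample comment:

import re

# Factors for the size suffixes mentioned in the sample comment; the real
# galaxy.util.size_to_bytes may accept more (or different) forms.
SUFFIXES = { '': 1, 'b': 1, 'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3, 't': 1024 ** 4 }

def size_to_bytes( size_str ):
    """Convert e.g. '10GB', '10g', '10240 Mb' or '10737418240' to bytes."""
    match = re.match( r'^\s*(\d+)\s*([a-z]*)\s*$', size_str.lower() )
    if match is None:
        raise ValueError( 'unparseable size string: %s' % size_str )
    number, suffix = match.groups()
    if suffix not in ( '', 'b' ):
        suffix = suffix.rstrip( 'b' )  # treat 'gb' like 'g', 'mb' like 'm', etc.
    if suffix not in SUFFIXES:
        raise ValueError( 'unknown size suffix in: %s' % size_str )
    return int( number ) * SUFFIXES[ suffix ]

assert size_to_bytes( '10GB' ) == size_to_bytes( '10g' ) == size_to_bytes( '10240 Mb' ) == 10737418240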
diff -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 -r 21babc3112aa28761425367d3a5abfbbd8ce80e9 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -22,7 +22,7 @@
from galaxy.exceptions import ObjectInvalid, ObjectNotFound
from galaxy.jobs.actions.post import ActionBox
from galaxy.jobs.mapper import JobRunnerMapper
-from galaxy.jobs.runners import BaseJobRunner
+from galaxy.jobs.runners import BaseJobRunner, JobState
from galaxy.util.bunch import Bunch
from galaxy.util.expressions import ExpressionContext
from galaxy.util.json import from_json_string
@@ -230,7 +230,7 @@
types = dict(registered_user_concurrent_jobs=int,
anonymous_user_concurrent_jobs=int,
walltime=str,
- output_size=int)
+ output_size=util.size_to_bytes)
self.limits = Bunch(registered_user_concurrent_jobs=None,
anonymous_user_concurrent_jobs=None,
@@ -1270,13 +1270,13 @@
def check_limits(self, runtime=None):
if self.app.job_config.limits.output_size > 0:
for outfile, size in self.get_output_sizes():
- if size > self.app.config.output_size_limit:
- log.warning( '(%s) Job output %s is over the output size limit' % ( self.get_id_tag(), os.path.basename( outfile ) ) )
- return 'Job output file grew too large (greater than %s), please try different inputs or parameters' % util.nice_size( self.app.job_config.limits.output_size )
+ if size > self.app.job_config.limits.output_size:
+ log.warning( '(%s) Job output %s has exceeded the global output size limit', self.get_id_tag(), os.path.basename( outfile ) )
+ return JobState.runner_states.OUTPUT_SIZE_LIMIT, 'Job output file grew too large (greater than %s), please try different inputs or parameters' % util.nice_size( self.app.job_config.limits.output_size )
if self.app.job_config.limits.walltime_delta is not None and runtime is not None:
if runtime > self.app.job_config.limits.walltime_delta:
- log.warning( '(%s) Job has reached walltime, it will be terminated' % ( self.get_id_tag() ) )
- return 'Job ran longer than the maximum allowed execution time (%s), please try different inputs or parameters' % self.app.job_config.limits.walltime
+ log.warning( '(%s) Job runtime %s has exceeded the global walltime, it will be terminated', self.get_id_tag(), runtime )
+ return JobState.runner_states.GLOBAL_WALLTIME_REACHED, 'Job ran longer than the maximum allowed execution time (runtime: %s, limit: %s), please try different inputs or parameters' % ( str(runtime).split('.')[0], self.app.job_config.limits.walltime )
return None
def has_limits( self ):
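Note the contract change in this hunk: check_limits() now returns a ( runner_state, message ) tuple instead of a bare message string, so every caller has to unpack it (the local runner below does so via limit_state[1]). A standalone sketch of the unpacking, using a hypothetical stub in place of Galaxy's JobWrapper:

OUTPUT_SIZE_LIMIT = 'output_size_limit'

class StubJobWrapper( object ):
    # Hypothetical stand-in for JobWrapper; always reports a limit violation.
    def check_limits( self, runtime=None ):
        return OUTPUT_SIZE_LIMIT, 'Job output file grew too large'

    def fail( self, message ):
        print( 'failing job: %s' % message )

job_wrapper = StubJobWrapper()
limit_state = job_wrapper.check_limits()
if limit_state is not None:
    runner_state, fail_message = limit_state  # unpack the tuple, not a bare string
    job_wrapper.fail( fail_message )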
diff -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 -r 21babc3112aa28761425367d3a5abfbbd8ce80e9 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -6,6 +6,7 @@
import time
import string
import logging
+import datetime
import threading
import subprocess
@@ -335,7 +336,9 @@
Encapsulate state of jobs.
"""
runner_states = Bunch(
- WALLTIME_REACHED = 'walltime_reached'
+ WALLTIME_REACHED = 'walltime_reached',
+ GLOBAL_WALLTIME_REACHED = 'global_walltime_reached',
+ OUTPUT_SIZE_LIMIT = 'output_size_limit'
)
def __init__( self ):
self.runner_state_handled = False
@@ -374,8 +377,9 @@
def __init__( self, files_dir=None, job_wrapper=None, job_id=None, job_file=None, output_file=None, error_file=None, exit_code_file=None, job_name=None, job_destination=None ):
super( AsynchronousJobState, self ).__init__()
self.old_state = None
- self.running = False
+ self._running = False
self.check_count = 0
+ self.start_time = None
self.job_wrapper = job_wrapper
# job_id is the DRM's job id, not the Galaxy job id
@@ -392,6 +396,33 @@
self.cleanup_file_attributes = [ 'job_file', 'output_file', 'error_file', 'exit_code_file' ]
+ @property
+ def running( self ):
+ return self._running
+
+ @running.setter
+ def running( self, is_running ):
+ self._running = is_running
+ # This will be invalid for job recovery
+ if self.start_time is None:
+ self.start_time = datetime.datetime.now()
+
+ def check_limits( self, runtime=None ):
+ limit_state = None
+ if self.job_wrapper.has_limits():
+ self.check_count += 1
+ if self.running and (self.check_count % 20 == 0):
+ if runtime is None:
+ runtime = datetime.datetime.now() - (self.start_time or datetime.datetime.now())
+ self.check_count = 0
+ limit_state = self.job_wrapper.check_limits( runtime=runtime )
+ if limit_state is not None:
+ # Set up the job for failure, but the runner will do the actual work
+ self.runner_state, self.fail_message = limit_state
+ self.stop_job = True
+ return True
+ return False
+
def cleanup( self ):
for file in [ getattr( self, a ) for a in self.cleanup_file_attributes if hasattr( self, a ) ]:
try:
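The new AsynchronousJobState.check_limits() centralizes the throttling that the local and PBS runners previously did inline: limit checks run only on every 20th status poll while the job is running. A standalone sketch of that counting pattern, with illustrative names:

class PollThrottle( object ):
    def __init__( self, every=20 ):
        self.every = every
        self.count = 0

    def due( self ):
        # Return True on every `every`-th call, then reset the counter.
        self.count += 1
        if self.count % self.every == 0:
            self.count = 0
            return True
        return False

throttle = PollThrottle()
fired = [ poll for poll in range( 1, 61 ) if throttle.due() ]
assert fired == [ 20, 40, 60 ]  # the expensive check runs on 3 of 60 polls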
diff -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 -r 21babc3112aa28761425367d3a5abfbbd8ce80e9 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -273,6 +273,9 @@
if state in ( drmaa.JobState.FAILED, drmaa.JobState.DONE ):
self._complete_terminal_job( ajs, drmaa_state = state )
continue
+ if ajs.check_limits():
+ self.work_queue.put( ( self.fail_job, ajs ) )
+ continue
ajs.old_state = state
new_watched.append( ajs )
# Replace the watch list with the updated version
diff -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 -r 21babc3112aa28761425367d3a5abfbbd8ce80e9 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -191,9 +191,9 @@
while proc.poll() is None:
i += 1
if (i % 20) == 0:
- msg = job_wrapper.check_limits(runtime=datetime.datetime.now() - job_start)
- if msg is not None:
- job_wrapper.fail(msg)
+ limit_state = job_wrapper.check_limits(runtime=datetime.datetime.now() - job_start)
+ if limit_state is not None:
+ job_wrapper.fail(limit_state[1])
log.debug('(%s) Terminating process group' % job_id)
self._terminate(proc)
return True
diff -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 -r 21babc3112aa28761425367d3a5abfbbd8ce80e9 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -400,18 +400,11 @@
if status.job_state == "R" and not pbs_job_state.running:
pbs_job_state.running = True
pbs_job_state.job_wrapper.change_state( model.Job.states.RUNNING )
- if status.job_state == "R" and ( pbs_job_state.check_count % 20 ) == 0:
- # Every 20th time the job status is checked, do limit checks (if configured)
- # Get the job's runtime
- runtime = None
- if status.get( 'resources_used', False ):
- # resources_used may not be in the status for new jobs
- h, m, s = [ int( i ) for i in status.resources_used.walltime.split( ':' ) ]
- runtime = timedelta( 0, s, 0, 0, m, h )
- msg = pbs_job_state.job_wrapper.check_limits(runtime)
- if msg is not None:
- pbs_job_state.fail_message = msg
- pbs_job_state.stop_job = True
+ if status.job_state == "R" and status.get( 'resources_used', False ):
+ # resources_used may not be in the status for new jobs
+ h, m, s = [ int( i ) for i in status.resources_used.walltime.split( ':' ) ]
+ runtime = timedelta( 0, s, 0, 0, m, h )
+ if pbs_job_state.check_limits( runtime=runtime ):
self.work_queue.put( ( self.fail_job, pbs_job_state ) )
continue
elif status.job_state == "C":
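The timedelta( 0, s, 0, 0, m, h ) construction kept in this hunk packs the parsed walltime into positional arguments (days, seconds, microseconds, milliseconds, minutes, hours), which is easy to misread. A quick standalone equivalence check with a made-up walltime string:

from datetime import timedelta

# '12:34:56' stands in for a PBS resources_used.walltime value.
h, m, s = [ int( i ) for i in '12:34:56'.split( ':' ) ]
assert timedelta( 0, s, 0, 0, m, h ) == timedelta( hours=h, minutes=m, seconds=s )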
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/576be32f8aab/
Changeset: 576be32f8aab
User: greg
Date: 2014-06-19 18:21:48
Summary: Introduce a RepositoryDependencyManager class into the process of installing repositories from the Tool Shed into Galaxy.
Affected #: 7 files
diff -r 82379b8b2428fff1c1a0f4d32628566f2c7e581d -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -31,6 +31,7 @@
from tool_shed.galaxy_install import install_manager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
+from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
log = logging.getLogger( __name__ )
@@ -1294,6 +1295,7 @@
Reinstall a tool shed repository that has been previously uninstalled, making sure to handle all repository
and tool dependencies of the repository.
"""
+ rdm = RepositoryDependencyManager( trans.app )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
@@ -1381,13 +1383,12 @@
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
- repository_dependency_util.create_repository_dependency_objects( app=trans.app,
- tool_path=tool_path,
- tool_shed_url=tool_shed_url,
- repo_info_dicts=repo_info_dicts,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section_id=tool_panel_section_id )
+ rdm.create_repository_dependency_objects( tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section_id=tool_panel_section_id )
# Default the selected tool panel location for loading tools included in each newly installed required
# tool shed repository to the location selected for the repository selected for re-installation.
for index, tps_key in enumerate( tool_panel_section_keys ):
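The shape of this refactoring recurs in every hunk of the changeset: a module-level function that received app (and threaded it into each helper call) becomes a method on a small class that captures app once. Schematically, with the body elided:

class RepositoryDependencyManager( object ):
    def __init__( self, app ):
        self.app = app  # captured once at construction

    def create_repository_dependency_objects( self, tool_path, tool_shed_url, repo_info_dicts, **kwds ):
        install_model = self.app.install_model  # app state now reached via self
        # ... body otherwise unchanged from the old repository_dependency_util function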
diff -r 82379b8b2428fff1c1a0f4d32628566f2c7e581d -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -35,6 +35,7 @@
from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
+from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
log = logging.getLogger( __name__ )
@@ -610,15 +611,15 @@
tool_panel_section_id = installation_dict[ 'tool_panel_section_id' ]
tool_path = installation_dict[ 'tool_path' ]
tool_shed_url = installation_dict[ 'tool_shed_url' ]
+ rdm = RepositoryDependencyManager( self.app )
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
- repository_dependency_util.create_repository_dependency_objects( app=self.app,
- tool_path=tool_path,
- tool_shed_url=tool_shed_url,
- repo_info_dicts=repo_info_dicts,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section_id=tool_panel_section_id,
- new_tool_panel_section_label=new_tool_panel_section_label )
+ rdm.create_repository_dependency_objects( tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section_id=tool_panel_section_id,
+ new_tool_panel_section_label=new_tool_panel_section_label )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts
def initiate_repository_installation( self, installation_dict ):
diff -r 82379b8b2428fff1c1a0f4d32628566f2c7e581d -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -16,6 +16,8 @@
from tool_shed.util import xml_util
from galaxy.model.orm import and_
+from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+
log = logging.getLogger( __name__ )
@@ -227,6 +229,7 @@
Return dictionaries containing the sets of installed and missing tool dependencies and repository
dependencies associated with the repository defined by the received repo_info_dict.
"""
+ rdm = RepositoryDependencyManager( self.app )
repository = None
installed_rd = {}
installed_td = {}
@@ -258,9 +261,7 @@
installed_rd, missing_rd = \
self.get_installed_and_missing_repository_dependencies_for_new_or_updated_install( repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
- all_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( self.app,
- tool_shed_url,
- util.listify( repo_info_dict ) )
+ all_repo_info_dict = rdm.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
has_repository_dependencies_only_if_compiling_contained_td = \
all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
@@ -298,9 +299,7 @@
missing_td[ td_key ] = td_dict
else:
# We have a single repository with (possibly) no defined repository dependencies.
- all_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( self.app,
- tool_shed_url,
- util.listify( repo_info_dict ) )
+ all_repo_info_dict = rdm.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
has_repository_dependencies_only_if_compiling_contained_td = \
all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
diff -r 82379b8b2428fff1c1a0f4d32628566f2c7e581d -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
--- /dev/null
+++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
@@ -0,0 +1,386 @@
+"""
+Class encapsulating the management of repository dependencies installed or being installed
+into Galaxy from the Tool Shed.
+"""
+
+import json
+import logging
+import urllib
+import urllib2
+
+from galaxy.util import asbool
+
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import encoding_util
+from tool_shed.util import shed_util_common as suc
+from tool_shed.util import tool_util
+
+log = logging.getLogger( __name__ )
+
+
+class RepositoryDependencyManager( object ):
+
+ def __init__( self, app ):
+ self.app = app
+
+ def build_repository_dependency_relationships( self, repo_info_dicts, tool_shed_repositories ):
+ """
+ Build relationships between installed tool shed repositories and other installed
+ tool shed repositories upon which they depend. These relationships are defined in
+ the repository_dependencies entry for each dictionary in the received list of repo_info_dicts.
+ Each of these dictionaries is associated with a repository in the received tool_shed_repositories
+ list.
+ """
+ install_model = self.app.install_model
+ log.debug( "Building repository dependency relationships..." )
+ for repo_info_dict in repo_info_dicts:
+ for name, repo_info_tuple in repo_info_dict.items():
+ description, \
+ repository_clone_url, \
+ changeset_revision, \
+ ctx_rev, \
+ repository_owner, \
+ repository_dependencies, \
+ tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ d_repository = None
+ repository_components_tuple = container_util.get_components_from_key( key )
+ components_list = suc.extract_components_from_tuple( repository_components_tuple )
+ d_toolshed, d_name, d_owner, d_changeset_revision = components_list[ 0:4 ]
+ for tsr in tool_shed_repositories:
+ # Get the tool_shed_repository defined by name, owner and changeset_revision. This is
+ # the repository that will be dependent upon each of the tool shed repositories contained in
+ # val. We'll need to check tool_shed_repository.tool_shed as well if/when repository dependencies
+ # across tool sheds are supported.
+ if tsr.name == d_name and tsr.owner == d_owner and tsr.changeset_revision == d_changeset_revision:
+ d_repository = tsr
+ break
+ if d_repository is None:
+ # The dependent repository is not in the received list so look in the database.
+ d_repository = suc.get_or_create_tool_shed_repository( self.app,
+ d_toolshed,
+ d_name,
+ d_owner,
+ d_changeset_revision )
+ # Process each repository_dependency defined for the current dependent repository.
+ for repository_dependency_components_list in val:
+ required_repository = None
+ rd_toolshed, \
+ rd_name, \
+ rd_owner, \
+ rd_changeset_revision, \
+ rd_prior_installation_required, \
+ rd_only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependency_components_list )
+ # Get the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision. This
+ # is the repository that will be required by the current d_repository.
+ # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds are supported.
+ for tsr in tool_shed_repositories:
+ if tsr.name == rd_name and tsr.owner == rd_owner and tsr.changeset_revision == rd_changeset_revision:
+ required_repository = tsr
+ break
+ if required_repository is None:
+ # The required repository is not in the received list so look in the database.
+ required_repository = suc.get_or_create_tool_shed_repository( self.app,
+ rd_toolshed,
+ rd_name,
+ rd_owner,
+ rd_changeset_revision )
+ # Ensure there is a repository_dependency relationship between d_repository and required_repository.
+ rrda = None
+ for rd in d_repository.repository_dependencies:
+ if rd.id == required_repository.id:
+ rrda = rd
+ break
+ if not rrda:
+ # Make sure required_repository is in the repository_dependency table.
+ repository_dependency = self.get_repository_dependency_by_repository_id( install_model,
+ required_repository.id )
+ if not repository_dependency:
+ log.debug( 'Creating new repository_dependency record for installed revision %s of repository: %s owned by %s.' % \
+ ( str( required_repository.installed_changeset_revision ),
+ str( required_repository.name ),
+ str( required_repository.owner ) ) )
+ repository_dependency = install_model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
+ install_model.context.add( repository_dependency )
+ install_model.context.flush()
+ # Build the relationship between the d_repository and the required_repository.
+ rrda = install_model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=d_repository.id,
+ repository_dependency_id=repository_dependency.id )
+ install_model.context.add( rrda )
+ install_model.context.flush()
+
+ def create_repository_dependency_objects( self, tool_path, tool_shed_url, repo_info_dicts, install_repository_dependencies=False,
+ no_changes_checked=False, tool_panel_section_id=None, new_tool_panel_section_label=None ):
+ """
+ Discover all repository dependencies and make sure all tool_shed_repository and
+ associated repository_dependency records exist as well as the dependency relationships
+ between installed repositories. This method is called when uninstalled repositories
+ are being reinstalled. If the user elected to install repository dependencies, all
+ items in the all_repo_info_dicts list will be processed. However, if repository
+ dependencies are not to be installed, only those items contained in the received
+ repo_info_dicts list will be processed.
+ """
+ install_model = self.app.install_model
+ log.debug( "Creating repository dependency objects..." )
+ # The following list will be maintained within this method to contain all created
+ # or updated tool shed repositories, including repository dependencies that may not
+ # be installed.
+ all_created_or_updated_tool_shed_repositories = []
+ # There will be a one-to-one mapping between items in 3 lists:
+ # created_or_updated_tool_shed_repositories, tool_panel_section_keys
+ # and filtered_repo_info_dicts. The 3 lists will filter out repository
+ # dependencies that are not to be installed.
+ created_or_updated_tool_shed_repositories = []
+ tool_panel_section_keys = []
+ # Repositories will be filtered (e.g., if already installed, if elected
+ # to not be installed, etc), so filter the associated repo_info_dicts accordingly.
+ filtered_repo_info_dicts = []
+ # Discover all repository dependencies and retrieve information for installing
+ # them. Even if the user elected to not install repository dependencies we have
+ # to make sure all repository dependency objects exist so that the appropriate
+ # repository dependency relationships can be built.
+ all_required_repo_info_dict = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+ all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
+ if not all_repo_info_dicts:
+ # No repository dependencies were discovered so process the received repositories.
+ all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
+ for repo_info_dict in all_repo_info_dicts:
+ # If the user elected to install repository dependencies, all items in the
+ # all_repo_info_dicts list will be processed. However, if repository dependencies
+ # are not to be installed, only those items contained in the received repo_info_dicts
+ # list will be processed but the all_repo_info_dicts list will be used to create all
+ # defined repository dependency relationships.
+ if self.is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
+ for name, repo_info_tuple in repo_info_dict.items():
+ can_update_db_record = False
+ description, \
+ repository_clone_url, \
+ changeset_revision, \
+ ctx_rev, \
+ repository_owner, \
+ repository_dependencies, \
+ tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ # See if the repository has an existing record in the database.
+ repository_db_record, installed_changeset_revision = \
+ suc.repository_was_previously_installed( self.app, tool_shed_url, name, repo_info_tuple, from_tip=False )
+ if repository_db_record:
+ if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.INSTALLED,
+ install_model.ToolShedRepository.installation_status.CLONING,
+ install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+ install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+ debug_msg = "Skipping installation of revision %s of repository '%s' because it was installed " % \
+ ( str( changeset_revision ), str( repository_db_record.name ) )
+ debug_msg += "with the (possibly updated) revision %s and its current installation status is '%s'." % \
+ ( str( installed_changeset_revision ), str( repository_db_record.status ) )
+ log.debug( debug_msg )
+ can_update_db_record = False
+ else:
+ if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.ERROR,
+ install_model.ToolShedRepository.installation_status.NEW,
+ install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ # The current tool shed repository is not currently installed, so we can update its
+ # record in the database.
+ name = repository_db_record.name
+ installed_changeset_revision = repository_db_record.installed_changeset_revision
+ metadata_dict = repository_db_record.metadata
+ dist_to_shed = repository_db_record.dist_to_shed
+ can_update_db_record = True
+ elif repository_db_record.status in [ install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # The current tool shed repository is deactivated, so updating its database record
+ # is not necessary - just activate it.
+ log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
+ self.app.installed_repository_manager.activate_repository( repository_db_record )
+ # No additional updates to the database record are necessary.
+ can_update_db_record = False
+ elif repository_db_record.status not in [ install_model.ToolShedRepository.installation_status.NEW ]:
+ # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find
+ # the previously installed and uninstalled repository instead of creating a new record.
+ changeset_revision = repository_db_record.installed_changeset_revision
+ suc.reset_previously_installed_repository( install_model, repository_db_record )
+ can_update_db_record = True
+ else:
+ # No record exists in the database for the repository currently being processed.
+ installed_changeset_revision = changeset_revision
+ metadata_dict = {}
+ dist_to_shed = False
+ can_update_db_record = True
+ if can_update_db_record:
+ # The database record for the tool shed repository currently being processed can be updated.
+ # Get the repository metadata to see where it was previously located in the tool panel.
+ if repository_db_record and repository_db_record.metadata:
+ tool_section, tool_panel_section_key = \
+ tool_util.handle_tool_panel_selection( toolbox=self.app.toolbox,
+ metadata=repository_db_record.metadata,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section_id=tool_panel_section_id,
+ new_tool_panel_section_label=new_tool_panel_section_label )
+ else:
+ # We're installing a new tool shed repository that does not yet have a database record.
+ tool_panel_section_key, tool_section = \
+ tool_util.handle_tool_panel_section( self.app.toolbox,
+ tool_panel_section_id=tool_panel_section_id,
+ new_tool_panel_section_label=new_tool_panel_section_label )
+ tool_shed_repository = \
+ suc.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=install_model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=changeset_revision,
+ owner=repository_owner,
+ dist_to_shed=False )
+ if tool_shed_repository not in all_created_or_updated_tool_shed_repositories:
+ all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if
+ # it is supposed to be installed.
+ if install_repository_dependencies or self.is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ if tool_shed_repository not in created_or_updated_tool_shed_repositories:
+ # Keep the one-to-one mapping between items in 3 lists.
+ created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ tool_panel_section_keys.append( tool_panel_section_key )
+ filtered_repo_info_dicts.append( repo_info_dict )
+ # Build repository dependency relationships even if the user chose to not install repository dependencies.
+ self.build_repository_dependency_relationships( all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
+ return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts
+
+ def get_repository_dependency_by_repository_id( self, install_model, decoded_repository_id ):
+ return install_model.context.query( install_model.RepositoryDependency ) \
+ .filter( install_model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \
+ .first()
+
+ def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
+ """
+ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
+ them to the list. All repository_dependency entries in each of the received repo_info_dicts include
+ all required repositories, so only one pass through this method is required to retrieve all repository
+ dependencies.
+ """
+ all_required_repo_info_dict = {}
+ all_repo_info_dicts = []
+ if repo_info_dicts:
+ # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
+ # shed to discover repository ids.
+ required_repository_tups = []
+ for repo_info_dict in repo_info_dicts:
+ if repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( repo_info_dict )
+ for repository_name, repo_info_tup in repo_info_dict.items():
+ description, \
+ repository_clone_url, \
+ changeset_revision, \
+ ctx_rev, \
+ repository_owner, \
+ repository_dependencies, \
+ tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tup )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ repository_components_tuple = container_util.get_components_from_key( key )
+ components_list = suc.extract_components_from_tuple( repository_components_tuple )
+ # Skip listing a repository dependency if it is required only to compile a tool dependency
+ # defined for the dependent repository since in this case, the repository dependency is really
+ # a dependency of the dependent repository's contained tool dependency, and only if that
+ # tool dependency requires compilation.
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ prior_installation_required = 'False'
+ only_if_compiling_contained_td = 'False'
+ if len( components_list ) == 4:
+ prior_installation_required = 'False'
+ only_if_compiling_contained_td = 'False'
+ elif len( components_list ) == 5:
+ prior_installation_required = components_list[ 4 ]
+ only_if_compiling_contained_td = 'False'
+ if not asbool( only_if_compiling_contained_td ):
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ for components_list in val:
+ try:
+ only_if_compiling_contained_td = components_list[ 5 ]
+ except:
+ only_if_compiling_contained_td = 'False'
+ # Skip listing a repository dependency if it is required only to compile a tool dependency
+ # defined for the dependent repository (see above comment).
+ if not asbool( only_if_compiling_contained_td ):
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ else:
+ # We have a single repository with no dependencies.
+ components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
+ required_repository_tups.append( components_list )
+ if required_repository_tups:
+ # The value of required_repository_tups is a list of tuples, so we need to encode it.
+ encoded_required_repository_tups = []
+ for required_repository_tup in required_repository_tups:
+ # Convert every item in required_repository_tup to a string.
+ required_repository_tup = [ str( item ) for item in required_repository_tup ]
+ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+ encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+ encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+ if suc.is_tool_shed_client( self.app ):
+ # Handle secure / insecure Tool Shed URL protocol changes and port changes.
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
+ url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
+ # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
+ url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
+ request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
+ response = urllib2.urlopen( request ).read()
+ if response:
+ try:
+ required_repo_info_dict = json.loads( response )
+ except Exception, e:
+ log.exception( e )
+ return all_repo_info_dicts
+ required_repo_info_dicts = []
+ for k, v in required_repo_info_dict.items():
+ if k == 'repo_info_dicts':
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ else:
+ if k not in all_required_repo_info_dict:
+ all_required_repo_info_dict[ k ] = v
+ else:
+ if v and not all_required_repo_info_dict[ k ]:
+ all_required_repo_info_dict[ k ] = v
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
+ # of dictionaries, each of which has a single entry. We'll check keys here rather than
+ # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
+ # lists of discovered repository dependencies, but these lists will be empty in the
+ # required_repo_info_dict since dependency discovery has not yet been performed for these
+ # dictionaries.
+ required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
+ all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
+ if required_repo_info_dict_key not in all_repo_info_dicts_keys:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
+ return all_required_repo_info_dict
+
+ def is_in_repo_info_dicts( self, repo_info_dict, repo_info_dicts ):
+ """Return True if the received repo_info_dict is contained in the list of received repo_info_dicts."""
+ for name, repo_info_tuple in repo_info_dict.items():
+ for rid in repo_info_dicts:
+ for rid_name, rid_repo_info_tuple in rid.items():
+ if rid_name == name:
+ if len( rid_repo_info_tuple ) == len( repo_info_tuple ):
+ for item in rid_repo_info_tuple:
+ if item not in repo_info_tuple:
+ return False
+ return True
+ return False
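For reference, the call sites updated in this changeset construct the manager with the application object and call its methods directly; a condensed sketch of the pattern (app, tool_shed_url and repo_info_dict are placeholders for values available at the real call sites, e.g. trans.app in the controller):

from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager

rdm = RepositoryDependencyManager( app )
all_required_repo_info_dict = rdm.get_required_repo_info_dicts( tool_shed_url, [ repo_info_dict ] )
all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )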
diff -r 82379b8b2428fff1c1a0f4d32628566f2c7e581d -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 lib/tool_shed/util/export_util.py
--- a/lib/tool_shed/util/export_util.py
+++ b/lib/tool_shed/util/export_util.py
@@ -19,6 +19,8 @@
from tool_shed.util import repository_dependency_util
from tool_shed.util import xml_util
+from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+
eggs.require( 'mercurial' )
from mercurial import commands
@@ -238,6 +240,7 @@
Return a list of dictionaries defining repositories that are required by the repository associated with the
received repository_id.
"""
+ rdm = RepositoryDependencyManager( trans.app )
repository = suc.get_repository_in_tool_shed( trans.app, repository_id )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
@@ -261,9 +264,7 @@
str( repository.user.username ),
repository_dependencies,
None )
- all_required_repo_info_dict = repository_dependency_util.get_required_repo_info_dicts( trans.app,
- tool_shed_url,
- [ repo_info_dict ] )
+ all_required_repo_info_dict = rdm.get_required_repo_info_dicts( tool_shed_url, [ repo_info_dict ] )
all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
return all_repo_info_dicts
diff -r 82379b8b2428fff1c1a0f4d32628566f2c7e581d -r 576be32f8aabc6b3124d32ea47c29b2d95b9a544 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -1,8 +1,6 @@
import json
import logging
import os
-import urllib
-import urllib2
from galaxy.util import asbool
from galaxy.util import listify
@@ -17,79 +15,6 @@
log = logging.getLogger( __name__ )
-def build_repository_dependency_relationships( app, repo_info_dicts, tool_shed_repositories ):
- """
- Build relationships between installed tool shed repositories and other installed tool shed repositories upon which they depend. These
- relationships are defined in the repository_dependencies entry for each dictionary in the received list of repo_info_dicts. Each of
- these dictionaries is associated with a repository in the received tool_shed_repositories list.
- """
- install_model = app.install_model
- log.debug( "Building repository dependency relationships..." )
- for repo_info_dict in repo_info_dicts:
- for name, repo_info_tuple in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- if repository_dependencies:
- for key, val in repository_dependencies.items():
- if key in [ 'root_key', 'description' ]:
- continue
- d_repository = None
- repository_components_tuple = container_util.get_components_from_key( key )
- components_list = suc.extract_components_from_tuple( repository_components_tuple )
- d_toolshed, d_name, d_owner, d_changeset_revision = components_list[ 0:4 ]
- for tsr in tool_shed_repositories:
- # Get the the tool_shed_repository defined by name, owner and changeset_revision. This is
- # the repository that will be dependent upon each of the tool shed repositories contained in
- # val. We'll need to check tool_shed_repository.tool_shed as well if/when repository dependencies
- # across tool sheds is supported.
- if tsr.name == d_name and tsr.owner == d_owner and tsr.changeset_revision == d_changeset_revision:
- d_repository = tsr
- break
- if d_repository is None:
- # The dependent repository is not in the received list so look in the database.
- d_repository = suc.get_or_create_tool_shed_repository( app, d_toolshed, d_name, d_owner, d_changeset_revision )
- # Process each repository_dependency defined for the current dependent repository.
- for repository_dependency_components_list in val:
- required_repository = None
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependency_components_list )
- # Get the the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision. This
- # is the repository that will be required by the current d_repository.
- # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported.
- for tsr in tool_shed_repositories:
- if tsr.name == rd_name and tsr.owner == rd_owner and tsr.changeset_revision == rd_changeset_revision:
- required_repository = tsr
- break
- if required_repository is None:
- # The required repository is not in the received list so look in the database.
- required_repository = suc.get_or_create_tool_shed_repository( app,
- rd_toolshed,
- rd_name,
- rd_owner,
- rd_changeset_revision )
- # Ensure there is a repository_dependency relationship between d_repository and required_repository.
- rrda = None
- for rd in d_repository.repository_dependencies:
- if rd.id == required_repository.id:
- rrda = rd
- break
- if not rrda:
- # Make sure required_repository is in the repository_dependency table.
- repository_dependency = get_repository_dependency_by_repository_id( install_model, required_repository.id )
- if not repository_dependency:
- log.debug( 'Creating new repository_dependency record for installed revision %s of repository: %s owned by %s.' % \
- ( str( required_repository.installed_changeset_revision ),
- str( required_repository.name ),
- str( required_repository.owner ) ) )
- repository_dependency = install_model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
- install_model.context.add( repository_dependency )
- install_model.context.flush()
- # Build the relationship between the d_repository and the required_repository.
- rrda = install_model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=d_repository.id,
- repository_dependency_id=repository_dependency.id )
- install_model.context.add( rrda )
- install_model.context.flush()
-
def can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts ):
"""Handle the case where an update to the changeset revision was done."""
k = key_rd_dict.keys()[ 0 ]
@@ -103,131 +28,6 @@
return False
return True
-def create_repository_dependency_objects( app, tool_path, tool_shed_url, repo_info_dicts, install_repository_dependencies=False,
- no_changes_checked=False, tool_panel_section_id=None, new_tool_panel_section_label=None ):
- """
- Discover all repository dependencies and make sure all tool_shed_repository and associated repository_dependency
- records exist as well as the dependency relationships between installed repositories. This method is called when
- uninstalled repositories are being reinstalled. If the user elected to install repository dependencies, all items
- in the all_repo_info_dicts list will be processed. However, if repository dependencies are not to be installed,
- only those items contained in the received repo_info_dicts list will be processed.
- """
- install_model = app.install_model
- log.debug( "Creating repository dependency objects..." )
- # The following list will be maintained within this method to contain all created or updated tool shed repositories,
- # including repository dependencies that may not be installed.
- all_created_or_updated_tool_shed_repositories = []
- # There will be a one-to-one mapping between items in 3 lists: created_or_updated_tool_shed_repositories,
- # tool_panel_section_keys and filtered_repo_info_dicts. The 3 lists will filter out repository dependencies
- # that are not to be installed.
- created_or_updated_tool_shed_repositories = []
- tool_panel_section_keys = []
- # Repositories will be filtered (e.g., if already installed, if elected to not be installed, etc), so filter
- # the associated repo_info_dicts accordingly.
- filtered_repo_info_dicts = []
- # Discover all repository dependencies and retrieve information for installing them. Even if the user elected
- # to not install repository dependencies we have to make sure all repository dependency objects exist so that
- # the appropriate repository dependency relationships can be built.
- all_required_repo_info_dict = get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts )
- all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
- if not all_repo_info_dicts:
- # No repository dependencies were discovered so process the received repositories.
- all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
- for repo_info_dict in all_repo_info_dicts:
- # If the user elected to install repository dependencies, all items in the all_repo_info_dicts list will
- # be processed. However, if repository dependencies are not to be installed, only those items contained
- # in the received repo_info_dicts list will be processed but the all_repo_info_dicts list will be used
- # to create all defined repository dependency relationships.
- if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
- for name, repo_info_tuple in repo_info_dict.items():
- can_update_db_record = False
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- # See if the repository has an existing record in the database.
- repository_db_record, installed_changeset_revision = \
- suc.repository_was_previously_installed( app, tool_shed_url, name, repo_info_tuple, from_tip=False )
- if repository_db_record:
- if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.INSTALLED,
- install_model.ToolShedRepository.installation_status.CLONING,
- install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
- install_model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
- install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
- install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
- debug_msg = "Skipping installation of revision %s of repository '%s' because it was installed " % \
- ( str( changeset_revision ), str( repository_db_record.name ) )
- debug_msg += "with the (possibly updated) revision %s and its current installation status is '%s'." % \
- ( str( installed_changeset_revision ), str( repository_db_record.status ) )
- log.debug( debug_msg )
- can_update_db_record = False
- else:
- if repository_db_record.status in [ install_model.ToolShedRepository.installation_status.ERROR,
- install_model.ToolShedRepository.installation_status.NEW,
- install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
- # The current tool shed repository is not currently installed, so we can update its record in the database.
- name = repository_db_record.name
- installed_changeset_revision = repository_db_record.installed_changeset_revision
- metadata_dict = repository_db_record.metadata
- dist_to_shed = repository_db_record.dist_to_shed
- can_update_db_record = True
- elif repository_db_record.status in [ install_model.ToolShedRepository.installation_status.DEACTIVATED ]:
- # The current tool shed repository is deactivated, so updating its database record is not necessary - just activate it.
- log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
- app.installed_repository_manager.activate_repository( repository_db_record )
- # No additional updates to the database record are necessary.
- can_update_db_record = False
- elif repository_db_record.status not in [ install_model.ToolShedRepository.installation_status.NEW ]:
- # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find the previously installed
- # and uninstalled repository instead of creating a new record.
- changeset_revision = repository_db_record.installed_changeset_revision
- suc.reset_previously_installed_repository( install_model, repository_db_record )
- can_update_db_record = True
- else:
- # No record exists in the database for the repository currently being processed.
- installed_changeset_revision = changeset_revision
- metadata_dict = {}
- dist_to_shed = False
- can_update_db_record = True
- if can_update_db_record:
- # The database record for the tool shed repository currently being processed can be updated. Get the repository metadata
- # to see where it was previously located in the tool panel.
- if repository_db_record and repository_db_record.metadata:
- tool_section, tool_panel_section_key = \
- tool_util.handle_tool_panel_selection( toolbox=app.toolbox,
- metadata=repository_db_record.metadata,
- no_changes_checked=no_changes_checked,
- tool_panel_section_id=tool_panel_section_id,
- new_tool_panel_section_label=new_tool_panel_section_label )
- else:
- # We're installing a new tool shed repository that does not yet have a database record.
- tool_panel_section_key, tool_section = \
- tool_util.handle_tool_panel_section( app.toolbox,
- tool_panel_section_id=tool_panel_section_id,
- new_tool_panel_section_label=new_tool_panel_section_label )
- tool_shed_repository = \
- suc.create_or_update_tool_shed_repository( app=app,
- name=name,
- description=description,
- installed_changeset_revision=installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=install_model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=changeset_revision,
- owner=repository_owner,
- dist_to_shed=False )
- if tool_shed_repository not in all_created_or_updated_tool_shed_repositories:
- all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
- if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
- if tool_shed_repository not in created_or_updated_tool_shed_repositories:
- # Keep the one-to-one mapping between items in 3 lists.
- created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- tool_panel_section_keys.append( tool_panel_section_key )
- filtered_repo_info_dicts.append( repo_info_dict )
- # Build repository dependency relationships even if the user chose to not install repository dependencies.
- build_repository_dependency_relationships( app, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
- return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts
-
def generate_message_for_invalid_repository_dependencies( metadata_dict, error_from_tuple=False ):
"""Get or generate and return an error message associated with an invalid repository dependency."""
message = ''
@@ -347,11 +147,6 @@
prior_installation_required,
only_if_compiling_contained_td )
-def get_repository_dependency_by_repository_id( install_model, decoded_repository_id ):
- return install_model.context.query( install_model.RepositoryDependency ) \
- .filter( install_model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \
- .first()
-
def get_repository_dependencies_for_installed_tool_shed_repository( app, repository ):
"""
Send a request to the appropriate tool shed to retrieve the dictionary of repository dependencies defined
@@ -486,118 +281,6 @@
log.debug( "Cannot locate repository %s owned by %s for inclusion in repository dependency tups." % \
( name, owner ) )
return dependency_tups
-
-def get_required_repo_info_dicts( app, tool_shed_url, repo_info_dicts ):
- """
- Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
- them to the list. All repository_dependency entries in each of the received repo_info_dicts includes
- all required repositories, so only one pass through this method is required to retrieve all repository
- dependencies.
- """
- all_required_repo_info_dict = {}
- all_repo_info_dicts = []
- if repo_info_dicts:
- # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
- # shed to discover repository ids.
- required_repository_tups = []
- for repo_info_dict in repo_info_dicts:
- if repo_info_dict not in all_repo_info_dicts:
- all_repo_info_dicts.append( repo_info_dict )
- for repository_name, repo_info_tup in repo_info_dict.items():
- description, \
- repository_clone_url, \
- changeset_revision, \
- ctx_rev, \
- repository_owner, \
- repository_dependencies, \
- tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tup )
- if repository_dependencies:
- for key, val in repository_dependencies.items():
- if key in [ 'root_key', 'description' ]:
- continue
- repository_components_tuple = container_util.get_components_from_key( key )
- components_list = suc.extract_components_from_tuple( repository_components_tuple )
- # Skip listing a repository dependency if it is required only to compile a tool dependency
- # defined for the dependent repository since in this case, the repository dependency is really
- # a dependency of the dependent repository's contained tool dependency, and only if that
- # tool dependency requires compilation.
- # For backward compatibility to the 12/20/12 Galaxy release.
- prior_installation_required = 'False'
- only_if_compiling_contained_td = 'False'
- if len( components_list ) == 4:
- prior_installation_required = 'False'
- only_if_compiling_contained_td = 'False'
- elif len( components_list ) == 5:
- prior_installation_required = components_list[ 4 ]
- only_if_compiling_contained_td = 'False'
- if not asbool( only_if_compiling_contained_td ):
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- for components_list in val:
- try:
- only_if_compiling_contained_td = components_list[ 5 ]
- except:
- only_if_compiling_contained_td = 'False'
- # Skip listing a repository dependency if it is required only to compile a tool dependency
- # defined for the dependent repository (see above comment).
- if not asbool( only_if_compiling_contained_td ):
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- else:
- # We have a single repository with no dependencies.
- components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
- required_repository_tups.append( components_list )
- if required_repository_tups:
- # The value of required_repository_tups is a list of tuples, so we need to encode it.
- encoded_required_repository_tups = []
- for required_repository_tup in required_repository_tups:
- # Convert every item in required_repository_tup to a string.
- required_repository_tup = [ str( item ) for item in required_repository_tup ]
- encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
- encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
- encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
- if suc.is_tool_shed_client( app ):
- # Handle secure / insecure Tool Shed URL protocol changes and port changes.
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
- url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
- # Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
- url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
- request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
- response = urllib2.urlopen( request ).read()
- if response:
- try:
- required_repo_info_dict = json.loads( response )
- except Exception, e:
- log.exception( e )
- return all_repo_info_dicts
- required_repo_info_dicts = []
- for k, v in required_repo_info_dict.items():
- if k == 'repo_info_dicts':
- encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
- for encoded_dict_str in encoded_dict_strings:
- decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
- required_repo_info_dicts.append( decoded_dict )
- else:
- if k not in all_required_repo_info_dict:
- all_required_repo_info_dict[ k ] = v
- else:
- if v and not all_required_repo_info_dict[ k ]:
- all_required_repo_info_dict[ k ] = v
- if required_repo_info_dicts:
- for required_repo_info_dict in required_repo_info_dicts:
- # Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
- # of dictionaries, each of which has a single entry. We'll check keys here rather than
- # the entire dictionary because a dictionary entry in all_repo_info_dicts will include
- # lists of discovered repository dependencies, but these lists will be empty in the
- # required_repo_info_dict since dependency discovery has not yet been performed for these
- # dictionaries.
- required_repo_info_dict_key = required_repo_info_dict.keys()[ 0 ]
- all_repo_info_dicts_keys = [ d.keys()[ 0 ] for d in all_repo_info_dicts ]
- if required_repo_info_dict_key not in all_repo_info_dicts_keys:
- all_repo_info_dicts.append( required_repo_info_dict )
- all_required_repo_info_dict[ 'all_repo_info_dicts' ] = all_repo_info_dicts
- return all_required_repo_info_dict
def get_updated_changeset_revisions_for_repository_dependencies( app, key_rd_dicts ):
updated_key_rd_dicts = []
@@ -803,19 +486,6 @@
return True
return False
-def is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
- """Return True if the received repo_info_dict is contained in the list of received repo_info_dicts."""
- for name, repo_info_tuple in repo_info_dict.items():
- for rid in repo_info_dicts:
- for rid_name, rid_repo_info_tuple in rid.items():
- if rid_name == name:
- if len( rid_repo_info_tuple ) == len( repo_info_tuple ):
- for item in rid_repo_info_tuple:
- if item not in repo_info_tuple:
- return False
- return True
- return False
-
def filter_only_if_compiling_contained_td( key_rd_dict ):
"""
Return a copy of the received key_rd_dict with repository dependencies that are needed
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a6729bbd904a/
Changeset: a6729bbd904a
User: natefoo
Date: 2014-06-19 14:46:21
Summary: Avoid a race condition in handling LWR status updates received via AMQP at startup.
Affected #: 1 file
diff -r c182d156b93ea7f824b232e6d17d2ed9bf24f430 -r a6729bbd904a2ad44544f9f7077e663482f89309 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -156,6 +156,10 @@
return job_state
def __async_update( self, full_status ):
+ while not hasattr( self.app, 'job_manager' ):
+ # The status update thread can start consuming before app is done initializing
+ log.debug( 'Received a status update message before app is initialized, waiting 5 seconds' )
+ sleep( 5 )
job_id = None
try:
job_id = full_status[ "job_id" ]
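The added guard simply polls until application startup has attached job_manager. A standalone sketch of the wait-until-attribute pattern, with illustrative names (a threading.Event set at the end of startup would avoid the fixed sleep, but the polling loop keeps the fix local to the runner):

from time import sleep

def wait_for_attribute( obj, name, poll_seconds=5 ):
    """Block until `obj` grows attribute `name`, e.g. app.job_manager."""
    while not hasattr( obj, name ):
        sleep( poll_seconds )

# e.g. wait_for_attribute( self.app, 'job_manager' ) before consuming updates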