galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
December 2012
- 1 participant
- 142 discussions
9 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ab827b2c7907/
changeset: ab827b2c7907
user: jmchilton
date: 2012-12-26 19:42:31
summary: Implement new job runner super class ClusterJobRunner intended to reduce amount of duplicated code between drmaa, pbs, and lwr job runners (also I guess cli and condor classes seem like they could benefit from this as well). This super class will manage the monitor and worker threads and queues.
I am submitting only changes to the LWR that use this class, but I would encourage the Galaxy team to refactor the drmaa and pbs runners to use this class as well (or I would be happy to make these changes if given access or a promise the changes will be accepted quickly).
A variant of the drmaa runner that has been refactored to use this class can be found here: https://bitbucket.org/jmchilton/galaxy-central-lwr-enhancement-1/src/tip/li… from the now defunct pull request 80.
---
lib/galaxy/jobs/runners/__init__.py | 160 +++++++++++++++++++++++++++++++++++
1 file changed, 160 insertions(+)
affected #: 1 file
diff -r 4f4875265599424fed16f35dd82eb785167f6c25 -r ab827b2c790750c5fd9b9d815d54aeba8100008a lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -1,5 +1,10 @@
import os, logging, os.path
+from galaxy import model
+from Queue import Queue, Empty
+import time
+import threading
+
log = logging.getLogger( __name__ )
class BaseJobRunner( object ):
@@ -90,3 +95,158 @@
set_extension = False,
kwds = { 'overwrite' : False } )
return commands
+
+class ClusterJobState( object ):
+ """
+ Encapsulate the state of a cluster job, this should be subclassed as
+ needed for various job runners to capture additional information needed
+ to communicate with cluster job manager.
+ """
+
+ def __init__( self ):
+ self.job_wrapper = None
+ self.job_id = None
+ self.old_state = None
+ self.running = False
+ self.runner_url = None
+
+STOP_SIGNAL = object()
+
+JOB_STATUS_QUEUED = 'queue'
+JOB_STATUS_FAILED = 'fail'
+JOB_STATUS_FINISHED = 'finish'
+
+class ClusterJobRunner( BaseJobRunner ):
+ """
+ Not sure this is the best name for this class, but there is common code
+ shared between sge, pbs, drmaa, etc...
+ """
+
+ def __init__( self, app ):
+ self.app = app
+ self.sa_session = app.model.context
+ # 'watched' and 'queue' are both used to keep track of jobs to watch.
+ # 'queue' is used to add new watched jobs, and can be called from
+ # any thread (usually by the 'queue_job' method). 'watched' must only
+ # be modified by the monitor thread, which will move items from 'queue'
+ # to 'watched' and then manage the watched jobs.
+ self.watched = []
+ self.monitor_queue = Queue()
+
+ def _init_monitor_thread(self):
+ self.monitor_thread = threading.Thread( name="%s.monitor_thread" % self.runner_name, target=self.monitor )
+ self.monitor_thread.setDaemon( True )
+ self.monitor_thread.start()
+
+ def _init_worker_threads(self):
+ self.work_queue = Queue()
+ self.work_threads = []
+ nworkers = self.app.config.cluster_job_queue_workers
+ for i in range( nworkers ):
+ worker = threading.Thread( name="%s.work_thread-%d" % (self.runner_name, i), target=self.run_next )
+ worker.start()
+ self.work_threads.append( worker )
+
+ def handle_stop(self):
+ # DRMAA and SGE runners should override this and disconnect.
+ pass
+
+ def monitor( self ):
+ """
+ Watches jobs currently in the cluster queue and deals with state changes
+ (queued to running) and job completion
+ """
+ while 1:
+ # Take any new watched jobs and put them on the monitor list
+ try:
+ while 1:
+ cluster_job_state = self.monitor_queue.get_nowait()
+ if cluster_job_state is STOP_SIGNAL:
+ # TODO: This is where any cleanup would occur
+ self.handle_stop()
+ return
+ self.watched.append( cluster_job_state )
+ except Empty:
+ pass
+ # Iterate over the list of watched jobs and check state
+ self.check_watched_items()
+ # Sleep a bit before the next state check
+ time.sleep( 1 )
+
+ def run_next( self ):
+ """
+ Run the next item in the queue (a job waiting to run or finish )
+ """
+ while 1:
+ ( op, obj ) = self.work_queue.get()
+ if op is STOP_SIGNAL:
+ return
+ try:
+ if op == JOB_STATUS_QUEUED:
+ # If the next item is to be run, then only run it if the
+ # job state is "queued". Otherwise the next item was either
+ # cancelled or one of its siblings encountered an error.
+ job_state = obj.get_state()
+ if model.Job.states.QUEUED == job_state:
+ self.queue_job( obj )
+ else:
+ log.debug( "Not executing job %d in state %s" % ( obj.get_id_tag(), job_state ) )
+ elif op == JOB_STATUS_FINISHED:
+ self.finish_job( obj )
+ elif op == JOB_STATUS_FAILED:
+ self.fail_job( obj )
+ except:
+ log.exception( "Uncaught exception %sing job" % op )
+
+ def monitor_job(self, job_state):
+ self.monitor_queue.put( job_state )
+
+ def put( self, job_wrapper ):
+ """Add a job to the queue (by job identifier)"""
+ # Change to queued state before handing to worker thread so the runner won't pick it up again
+ job_wrapper.change_state( model.Job.states.QUEUED )
+ self.mark_as_queued(job_wrapper)
+
+ def shutdown( self ):
+ """Attempts to gracefully shut down the monitor thread"""
+ log.info( "sending stop signal to worker threads" )
+ self.monitor_queue.put( STOP_SIGNAL )
+ for i in range( len( self.work_threads ) ):
+ self.work_queue.put( ( STOP_SIGNAL, None ) )
+
+ def check_watched_items(self):
+ """
+ This method is responsible for iterating over self.watched and handling
+ state changes and updating self.watched with a new list of watched job
+ states. Subclasses can opt to override this directly (as older job runners will
+ initially) or just override check_watched_item and allow the list processing to
+ reuse the logic here.
+ """
+ new_watched = []
+ for cluster_job_state in self.watched:
+ new_cluster_job_state = self.check_watched_item(cluster_job_state)
+ if new_cluster_job_state:
+ new_watched.append(new_cluster_job_state)
+ self.watched = new_watched
+
+ # Subclasses should implement this unless they override check_watched_items all together.
+ def check_watched_item(self):
+ raise NotImplementedError()
+
+ def queue_job(self, job_wrapper):
+ raise NotImplementedError()
+
+ def finish_job(self, job_state):
+ raise NotImplementedError()
+
+ def fail_job(self, job_state):
+ raise NotImplementedError()
+
+ def mark_as_finished(self, job_state):
+ self.work_queue.put( ( JOB_STATUS_FINISHED, job_state ) )
+
+ def mark_as_failed(self, job_state):
+ self.work_queue.put( ( JOB_STATUS_FAILED, job_state ) )
+
+ def mark_as_queued(self, job_wrapper):
+ self.work_queue.put( ( JOB_STATUS_QUEUED, job_wrapper ) )
https://bitbucket.org/galaxy/galaxy-central/changeset/8c6cf637058a/
changeset: 8c6cf637058a
user: jmchilton
date: 2012-12-26 19:42:31
summary: Refactor the LWRJobRunner to be a ClusterJobRunner and implement a recover method for this runner, the upshot of this is that LWR jobs can now survive Galaxy restarts. Downside is that jobs are no longer queued on Galaxy server, so LWR server should be updated (to changeset 5213f6d or newer) to queue jobs on the remote server. This is not mandatory however; this will still work, it is just that more jobs may run simultaneously than is desired.
---
lib/galaxy/jobs/runners/lwr.py | 211 ++++++++++++++++++++++++----------------
1 file changed, 128 insertions(+), 83 deletions(-)
affected #: 1 file
diff -r ab827b2c790750c5fd9b9d815d54aeba8100008a -r 8c6cf637058a1f14137b5f9caa9af29e077b29ce lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -7,7 +7,7 @@
from galaxy import model
from galaxy.datatypes.data import nice_size
-from galaxy.jobs.runners import BaseJobRunner
+from galaxy.jobs.runners import ClusterJobState, ClusterJobRunner
import os, errno
from time import sleep
@@ -199,12 +199,18 @@
def wait(self):
""" """
while True:
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
- complete = check_complete_response["complete"] == "true"
+ complete = self.check_complete()
if complete:
return check_complete_response
time.sleep(1)
+ def raw_check_complete(self):
+ check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
+ return check_complete_response
+
+ def check_complete(self):
+ return self.raw_check_complete()["complete"] == "true"
+
def clean(self):
self.__raw_execute("clean", { "job_id" : self.job_id })
@@ -213,51 +219,34 @@
-class LwrJobRunner( BaseJobRunner ):
+class LwrJobRunner( ClusterJobRunner ):
"""
- Lwr Job Runner
+ LWR Job Runner
"""
- STOP_SIGNAL = object()
+ runner_name = "LWRRunner"
+
def __init__( self, app ):
- """Start the job runner with 'nworkers' worker threads"""
- self.app = app
- self.sa_session = app.model.context
+ """Start the job runner """
+ super( LwrJobRunner, self ).__init__( app )
+ self._init_monitor_thread()
+ log.info( "starting LWR workers" )
+ self._init_worker_threads()
- # start workers
- self.queue = Queue()
- self.threads = []
- nworkers = app.config.local_job_queue_workers
- log.info( "starting workers" )
- for i in range( nworkers ):
- worker = threading.Thread( ( name="LwrJobRunner.thread-%d" % i ), target=self.run_next )
- worker.setDaemon( True )
- worker.start()
- self.threads.append( worker )
- log.debug( "%d workers ready", nworkers )
+ def check_watched_item(self, job_state):
+ try:
+ client = self.get_client_from_state(job_state)
+ complete = client.check_complete()
+ except Exception:
+ # An orphaned job was put into the queue at app startup, so remote server went down
+ # either way we are done I guess.
+ self.mark_as_finished(job_state)
+ return None
+ if complete:
+ self.mark_as_finished(job_state)
+ return None
+ return job_state
- def run_next( self ):
- """Run the next job, waiting until one is available if neccesary"""
- while 1:
- job_wrapper = self.queue.get()
- if job_wrapper is self.STOP_SIGNAL:
- return
- try:
- self.run_job( job_wrapper )
- except:
- log.exception( "Uncaught exception running job" )
-
- def determine_lwr_url(self, url):
- lwr_url = url[ len( 'lwr://' ) : ]
- return lwr_url
-
- def get_client_from_wrapper(self, job_wrapper):
- return self.get_client( job_wrapper.get_job_runner_url(), job_wrapper.job_id )
-
- def get_client(self, job_runner, job_id):
- lwr_url = self.determine_lwr_url( job_runner )
- return Client(lwr_url, job_id)
-
- def run_job( self, job_wrapper ):
+ def queue_job(self, job_wrapper):
stderr = stdout = command_line = ''
runner_url = job_wrapper.get_job_runner_url()
@@ -277,35 +266,76 @@
return
# If we were able to get a command line, run the job
- if command_line:
- try:
- #log.debug( 'executing: %s' % command_line )
- client = self.get_client_from_wrapper(job_wrapper)
- output_fnames = job_wrapper.get_output_fnames()
- output_files = [ str( o ) for o in output_fnames ]
- input_files = job_wrapper.get_input_fnames()
- file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir)
- rebuilt_command_line = file_stager.get_rewritten_command_line()
- client.launch( rebuilt_command_line )
+ if not command_line:
+ job_wrapper.finish( '', '' )
+ return
- job_wrapper.set_runner( runner_url, job_wrapper.job_id )
- job_wrapper.change_state( model.Job.states.RUNNING )
+ try:
+ #log.debug( 'executing: %s' % command_line )
+ client = self.get_client_from_wrapper(job_wrapper)
+ output_files = self.get_output_files(job_wrapper)
+ input_files = job_wrapper.get_input_fnames()
+ file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir)
+ rebuilt_command_line = file_stager.get_rewritten_command_line()
+ client.launch( rebuilt_command_line )
+ job_wrapper.set_runner( runner_url, job_wrapper.job_id )
+ job_wrapper.change_state( model.Job.states.RUNNING )
- run_results = client.wait()
- log.debug('run_results %s' % run_results )
- stdout = run_results['stdout']
- stderr = run_results['stderr']
+ except Exception, exc:
+ job_wrapper.fail( "failure running job", exception=True )
+ log.exception("failure running job %d" % job_wrapper.job_id)
+ return
-
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
- for output_file in output_files:
- client.download_output(output_file)
- client.clean()
- log.debug('execution finished: %s' % command_line)
- except Exception, exc:
- job_wrapper.fail( "failure running job", exception=True )
- log.exception("failure running job %d" % job_wrapper.job_id)
- return
+ lwr_job_state = ClusterJobState()
+ lwr_job_state.job_wrapper = job_wrapper
+ lwr_job_state.job_id = job_wrapper.job_id
+ lwr_job_state.old_state = True
+ lwr_job_state.running = True
+ lwr_job_state.runner_url = runner_url
+ self.monitor_job(lwr_job_state)
+
+ def get_output_files(self, job_wrapper):
+ output_fnames = job_wrapper.get_output_fnames()
+ return [ str( o ) for o in output_fnames ]
+
+
+ def determine_lwr_url(self, url):
+ lwr_url = url[ len( 'lwr://' ) : ]
+ return lwr_url
+
+ def get_client_from_wrapper(self, job_wrapper):
+ return self.get_client( job_wrapper.get_job_runner_url(), job_wrapper.job_id )
+
+ def get_client_from_state(self, job_state):
+ job_runner = job_state.runner_url
+ job_id = job_state.job_id
+ return self.get_client(job_runner, job_id)
+
+ def get_client(self, job_runner, job_id):
+ lwr_url = self.determine_lwr_url( job_runner )
+ return Client(lwr_url, job_id)
+
+ def finish_job( self, job_state ):
+ stderr = stdout = command_line = ''
+ job_wrapper = job_state.job_wrapper
+ try:
+ client = self.get_client_from_state(job_state)
+
+ run_results = client.raw_check_complete()
+ log.debug('run_results %s' % run_results )
+ stdout = run_results['stdout']
+ stderr = run_results['stderr']
+
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
+ output_files = self.get_output_files(job_wrapper)
+ for output_file in output_files:
+ client.download_output(output_file)
+ client.clean()
+ log.debug('execution finished: %s' % command_line)
+ except Exception, exc:
+ job_wrapper.fail( "failure running job", exception=True )
+ log.exception("failure running job %d" % job_wrapper.job_id)
+ return
#run the metadata setting script here
#this is terminate-able when output dataset/job is deleted
#so that long running set_meta()s can be canceled without having to reboot the server
@@ -321,7 +351,7 @@
job_wrapper.external_output_metadata.set_job_runner_external_pid( external_metadata_proc.pid, self.sa_session )
external_metadata_proc.wait()
log.debug( 'execution of external set_meta finished for job %d' % job_wrapper.job_id )
-
+
# Finish the job
try:
job_wrapper.finish( stdout, stderr )
@@ -329,12 +359,13 @@
log.exception("Job wrapper finish method failed")
job_wrapper.fail("Unable to finish job", exception=True)
- def put( self, job_wrapper ):
- """Add a job to the queue (by job identifier)"""
- # Change to queued state before handing to worker thread so the runner won't pick it up again
- job_wrapper.change_state( model.Job.states.QUEUED )
- self.queue.put( job_wrapper )
-
+ def fail_job( self, job_state ):
+ """
+ Seperated out so we can use the worker threads for it.
+ """
+ self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )
+ job_state.job_wrapper.fail( job_state.fail_message )
+
def shutdown( self ):
"""Attempts to gracefully shut down the worker threads"""
log.info( "sending stop signal to worker threads" )
@@ -383,7 +414,21 @@
log.debug("Attempt remote lwr kill of job with url %s and id %s" % (lwr_url, job_id))
client = self.get_client(lwr_url, job_id)
client.kill()
+
+
def recover( self, job, job_wrapper ):
- # local jobs can't be recovered
- job_wrapper.change_state( model.Job.states.ERROR, info = "This job was killed when Galaxy was restarted. Please retry the job." )
-
+ """Recovers jobs stuck in the queued/running state when Galaxy started"""
+ job_state = ClusterJobState()
+ job_state.job_id = str( job.get_job_runner_external_id() )
+ job_state.runner_url = job_wrapper.get_job_runner_url()
+ job_wrapper.command_line = job.get_command_line()
+ job_state.job_wrapper = job_wrapper
+ if job.get_state() == model.Job.states.RUNNING:
+ log.debug( "(LWR/%s) is still in running state, adding to the LWR queue" % ( job.get_id()) )
+ job_state.old_state = True
+ job_state.running = True
+ self.monitor_queue.put( job_state )
+ elif job.get_state() == model.Job.states.QUEUED:
+ # LWR doesn't queue currently, so this indicates galaxy was shutoff while
+ # job was being staged. Not sure how to recover from that.
+ job_state.job_wrapper.fail( "This job was killed when Galaxy was restarted. Please retry the job." )
https://bitbucket.org/galaxy/galaxy-central/changeset/e6676636cfe7/
changeset: e6676636cfe7
user: jmchilton
date: 2012-12-26 19:42:31
summary: Allow execution of jobs created by task splitting via the LWR job runner.
---
lib/galaxy/jobs/runners/lwr.py | 62 +++++++++++++++++++++++++++++++---------
1 file changed, 49 insertions(+), 13 deletions(-)
affected #: 1 file
diff -r 8c6cf637058a1f14137b5f9caa9af29e077b29ce -r e6676636cfe79de273602f7c68b2174f74a0d2d5 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -27,13 +27,14 @@
class FileStager(object):
- def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir):
+ def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
self.client = client
self.command_line = command_line
self.config_files = config_files
self.input_files = input_files
self.output_files = output_files
self.tool_dir = os.path.abspath(tool_dir)
+ self.working_directory = working_directory
self.file_renames = {}
@@ -46,7 +47,9 @@
self.__initialize_referenced_tool_files()
self.__upload_tool_files()
self.__upload_input_files()
+ self.__upload_working_directory_files()
self.__initialize_output_file_renames()
+ self.__initialize_task_output_file_renames()
self.__initialize_config_file_renames()
self.__rewrite_and_upload_config_files()
self.__rewrite_command_line()
@@ -69,13 +72,27 @@
for input_file in self.input_files:
input_upload_response = self.client.upload_input(input_file)
self.file_renames[input_file] = input_upload_response['path']
-
+
+ def __upload_working_directory_files(self):
+ # Task manager stages files into working directory, these need to be uploaded
+ for working_directory_file in os.listdir(self.working_directory):
+ path = os.path.join(self.working_directory, working_directory_file)
+ working_file_response = self.client.upload_working_directory_file(path)
+ self.file_renames[path] = working_file_response['path']
+
def __initialize_output_file_renames(self):
for output_file in self.output_files:
self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
self.remote_path_separator,
os.path.basename(output_file))
+ def __initialize_task_output_file_renames(self):
+ for output_file in self.output_files:
+ name = os.path.basename(output_file)
+ self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ name)
+
def __initialize_config_file_renames(self):
for config_file in self.config_files:
self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
@@ -172,13 +189,27 @@
def upload_config_file(self, path, contents):
return self.__upload_contents("upload_config_file", path, contents)
-
- def download_output(self, path):
+
+ def upload_working_directory_file(self, path):
+ return self.__upload_file("upload_working_directory_file", path)
+
+ def _get_output_type(self, name):
+ return self.__raw_execute_and_parse('get_output_type', {'name': name,
+ 'job_id': self.job_id})
+
+ def download_output(self, path, working_directory):
""" """
name = os.path.basename(path)
- response = self.__raw_execute('download_output', {'name' : name,
- "job_id" : self.job_id})
- output = open(path, 'wb')
+ output_type = self._get_output_type(name)
+ response = self.__raw_execute('download_output', {'name' : name,
+ "job_id" : self.job_id,
+ 'output_type': output_type})
+ if output_type == 'direct':
+ output = open(path, 'wb')
+ elif output_type == 'task':
+ output = open(os.path.join(working_directory, name), 'wb')
+ else:
+ raise Exception("No remote output found for dataset with path %s" % path)
try:
while True:
buffer = response.read(1024)
@@ -254,7 +285,7 @@
try:
job_wrapper.prepare()
if hasattr(job_wrapper, 'prepare_input_files_cmds') and job_wrapper.prepare_input_files_cmds is not None:
- for cmd in job_wrapper.prepare_input_file_cmds: # run the commands to stage the input files
+ for cmd in job_wrapper.prepare_input_files_cmds: # run the commands to stage the input files
#log.debug( 'executing: %s' % cmd )
if 0 != os.system(cmd):
raise Exception('Error running file staging command: %s' % cmd)
@@ -275,7 +306,8 @@
client = self.get_client_from_wrapper(job_wrapper)
output_files = self.get_output_files(job_wrapper)
input_files = job_wrapper.get_input_fnames()
- file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir)
+ working_directory = job_wrapper.working_directory
+ file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir, working_directory)
rebuilt_command_line = file_stager.get_rewritten_command_line()
client.launch( rebuilt_command_line )
job_wrapper.set_runner( runner_url, job_wrapper.job_id )
@@ -304,7 +336,10 @@
return lwr_url
def get_client_from_wrapper(self, job_wrapper):
- return self.get_client( job_wrapper.get_job_runner_url(), job_wrapper.job_id )
+ job_id = job_wrapper.job_id
+ if hasattr(job_wrapper, 'task_id'):
+ job_id = "%s_%s" % (job_id, job_wrapper.task_id)
+ return self.get_client( job_wrapper.get_job_runner_url(), job_id )
def get_client_from_state(self, job_state):
job_runner = job_state.runner_url
@@ -329,7 +364,7 @@
if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
output_files = self.get_output_files(job_wrapper)
for output_file in output_files:
- client.download_output(output_file)
+ client.download_output(output_file, working_directory=job_wrapper.working_directory)
client.clean()
log.debug('execution finished: %s' % command_line)
except Exception, exc:
@@ -386,8 +421,9 @@
def stop_job( self, job ):
#if our local job has JobExternalOutputMetadata associated, then our primary job has to have already finished
- if job.external_output_metadata:
- pid = job.external_output_metadata[0].job_runner_external_pid #every JobExternalOutputMetadata has a pid set, we just need to take from one of them
+ job_ext_output_metadata = job.get_external_output_metadata()
+ if job_ext_output_metadata:
+ pid = job_ext_output_metadata[0].job_runner_external_pid #every JobExternalOutputMetadata has a pid set, we just need to take from one of them
if pid in [ None, '' ]:
log.warning( "stop_job(): %s: no PID in database for job, unable to stop" % job.id )
return
https://bitbucket.org/galaxy/galaxy-central/changeset/e4adc9ad0bb7/
changeset: e4adc9ad0bb7
user: jmchilton
date: 2012-12-26 19:42:31
summary: Extend LWR job runner to stage an input's extra_files_path (if present).
---
lib/galaxy/jobs/runners/lwr.py | 22 ++++++++++++++++++----
1 file changed, 18 insertions(+), 4 deletions(-)
affected #: 1 file
diff -r e6676636cfe79de273602f7c68b2174f74a0d2d5 -r e4adc9ad0bb72111e502c6a998b697aa43ccbebd lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -72,6 +72,16 @@
for input_file in self.input_files:
input_upload_response = self.client.upload_input(input_file)
self.file_renames[input_file] = input_upload_response['path']
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
def __upload_working_directory_files(self):
# Task manager stages files into working directory, these need to be uploaded
@@ -167,17 +177,18 @@
response = self.__raw_execute(command, args, data)
return simplejson.loads(response.read())
- def __upload_file(self, action, path, contents = None):
+ def __upload_file(self, action, path, name=None, contents = None):
""" """
input = open(path, 'rb')
try:
mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input)
+ return self.__upload_contents(action, path, mmapped_input, name)
finally:
input.close()
- def __upload_contents(self, action, path, contents):
- name = os.path.basename(path)
+ def __upload_contents(self, action, path, contents, name=None):
+ if not name:
+ name = os.path.basename(path)
args = {"job_id" : self.job_id, "name" : name}
return self.__raw_execute_and_parse(action, args, contents)
@@ -187,6 +198,9 @@
def upload_input(self, path):
return self.__upload_file("upload_input", path)
+ def upload_extra_input(self, path, relative_name):
+ return self.__upload_file("upload_extra_input", path, name=relative_name)
+
def upload_config_file(self, path, contents):
return self.__upload_contents("upload_config_file", path, contents)
https://bitbucket.org/galaxy/galaxy-central/changeset/93dd6202175a/
changeset: 93dd6202175a
user: jmchilton
date: 2012-12-26 19:42:31
summary: Refactor much of the lwr client code out into its own module. This will make it easier to keep content in sync with client code from the LWR source.
---
lib/galaxy/jobs/runners/lwr.py | 256 +-----------------------
lib/galaxy/jobs/runners/lwr_client/__init__.py | 246 +++++++++++++++++++++++
2 files changed, 249 insertions(+), 253 deletions(-)
create mode 100644 lib/galaxy/jobs/runners/lwr_client/__init__.py
affected #: 2 files
diff -r e4adc9ad0bb72111e502c6a998b697aa43ccbebd -r 93dd6202175a12a592a38a2d6d6669baa1fbd5fd lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -1,268 +1,18 @@
import logging
import subprocess
-from Queue import Queue
-import threading
-
-import re
from galaxy import model
-from galaxy.datatypes.data import nice_size
from galaxy.jobs.runners import ClusterJobState, ClusterJobRunner
-import os, errno
+import errno
from time import sleep
+from lwr_client import FileStager, Client
+
log = logging.getLogger( __name__ )
__all__ = [ 'LwrJobRunner' ]
-import urllib
-import urllib2
-import httplib
-import mmap
-import tempfile
-import time
-
-import simplejson
-
-class FileStager(object):
-
- def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
- self.client = client
- self.command_line = command_line
- self.config_files = config_files
- self.input_files = input_files
- self.output_files = output_files
- self.tool_dir = os.path.abspath(tool_dir)
- self.working_directory = working_directory
-
- self.file_renames = {}
-
- job_config = client.setup()
-
- self.new_working_directory = job_config['working_directory']
- self.new_outputs_directory = job_config['outputs_directory']
- self.remote_path_separator = job_config['path_separator']
-
- self.__initialize_referenced_tool_files()
- self.__upload_tool_files()
- self.__upload_input_files()
- self.__upload_working_directory_files()
- self.__initialize_output_file_renames()
- self.__initialize_task_output_file_renames()
- self.__initialize_config_file_renames()
- self.__rewrite_and_upload_config_files()
- self.__rewrite_command_line()
-
- def __initialize_referenced_tool_files(self):
- pattern = r"(%s%s\S+)" % (self.tool_dir, os.sep)
- referenced_tool_files = []
- referenced_tool_files += re.findall(pattern, self.command_line)
- if self.config_files != None:
- for config_file in self.config_files:
- referenced_tool_files += re.findall(pattern, self.__read(config_file))
- self.referenced_tool_files = referenced_tool_files
-
- def __upload_tool_files(self):
- for referenced_tool_file in self.referenced_tool_files:
- tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
- self.file_renames[referenced_tool_file] = tool_upload_response['path']
-
- def __upload_input_files(self):
- for input_file in self.input_files:
- input_upload_response = self.client.upload_input(input_file)
- self.file_renames[input_file] = input_upload_response['path']
- # TODO: Determine if this is object store safe and what needs to be
- # done if it is not.
- files_path = "%s_files" % input_file[0:-len(".dat")]
- if os.path.exists(files_path):
- for extra_file in os.listdir(files_path):
- extra_file_path = os.path.join(files_path, extra_file)
- relative_path = os.path.basename(files_path)
- extra_file_relative_path = os.path.join(relative_path, extra_file)
- response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
- self.file_renames[extra_file_path] = response['path']
-
- def __upload_working_directory_files(self):
- # Task manager stages files into working directory, these need to be uploaded
- for working_directory_file in os.listdir(self.working_directory):
- path = os.path.join(self.working_directory, working_directory_file)
- working_file_response = self.client.upload_working_directory_file(path)
- self.file_renames[path] = working_file_response['path']
-
- def __initialize_output_file_renames(self):
- for output_file in self.output_files:
- self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
- self.remote_path_separator,
- os.path.basename(output_file))
-
- def __initialize_task_output_file_renames(self):
- for output_file in self.output_files:
- name = os.path.basename(output_file)
- self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- name)
-
- def __initialize_config_file_renames(self):
- for config_file in self.config_files:
- self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
- self.remote_path_separator,
- os.path.basename(config_file))
-
- def __rewrite_paths(self, contents):
- new_contents = contents
- for local_path, remote_path in self.file_renames.iteritems():
- new_contents = new_contents.replace(local_path, remote_path)
- return new_contents
-
- def __rewrite_and_upload_config_files(self):
- for config_file in self.config_files:
- config_contents = self.__read(config_file)
- new_config_contents = self.__rewrite_paths(config_contents)
- self.client.upload_config_file(config_file, new_config_contents)
-
- def __rewrite_command_line(self):
- self.rewritten_command_line = self.__rewrite_paths(self.command_line)
-
- def get_rewritten_command_line(self):
- return self.rewritten_command_line
-
- def __read(self, path):
- input = open(path, "r")
- try:
- return input.read()
- finally:
- input.close()
-
-
-
-class Client(object):
- """
- """
- """
- """
- def __init__(self, remote_host, job_id, private_key=None):
- if not remote_host.endswith("/"):
- remote_host = remote_host + "/"
- ## If we don't have an explicit private_key defined, check for
- ## one embedded in the URL. A URL of the form
- ## https://moo@cow:8913 will try to contact https://cow:8913
- ## with a private key of moo
- private_key_format = "https?://(.*)@.*/?"
- private_key_match= re.match(private_key_format, remote_host)
- if not private_key and private_key_match:
- private_key = private_key_match.group(1)
- remote_host = remote_host.replace("%s@" % private_key, '', 1)
- self.remote_host = remote_host
- self.job_id = job_id
- self.private_key = private_key
-
- def url_open(self, request, data):
- return urllib2.urlopen(request, data)
-
- def __build_url(self, command, args):
- if self.private_key:
- args["private_key"] = self.private_key
- data = urllib.urlencode(args)
- url = self.remote_host + command + "?" + data
- return url
-
- def __raw_execute(self, command, args = {}, data = None):
- url = self.__build_url(command, args)
- request = urllib2.Request(url=url, data=data)
- response = self.url_open(request, data)
- return response
-
- def __raw_execute_and_parse(self, command, args = {}, data = None):
- response = self.__raw_execute(command, args, data)
- return simplejson.loads(response.read())
-
- def __upload_file(self, action, path, name=None, contents = None):
- """ """
- input = open(path, 'rb')
- try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
- return self.__upload_contents(action, path, mmapped_input, name)
- finally:
- input.close()
-
- def __upload_contents(self, action, path, contents, name=None):
- if not name:
- name = os.path.basename(path)
- args = {"job_id" : self.job_id, "name" : name}
- return self.__raw_execute_and_parse(action, args, contents)
-
- def upload_tool_file(self, path):
- return self.__upload_file("upload_tool_file", path)
-
- def upload_input(self, path):
- return self.__upload_file("upload_input", path)
-
- def upload_extra_input(self, path, relative_name):
- return self.__upload_file("upload_extra_input", path, name=relative_name)
-
- def upload_config_file(self, path, contents):
- return self.__upload_contents("upload_config_file", path, contents)
-
- def upload_working_directory_file(self, path):
- return self.__upload_file("upload_working_directory_file", path)
-
- def _get_output_type(self, name):
- return self.__raw_execute_and_parse('get_output_type', {'name': name,
- 'job_id': self.job_id})
-
- def download_output(self, path, working_directory):
- """ """
- name = os.path.basename(path)
- output_type = self._get_output_type(name)
- response = self.__raw_execute('download_output', {'name' : name,
- "job_id" : self.job_id,
- 'output_type': output_type})
- if output_type == 'direct':
- output = open(path, 'wb')
- elif output_type == 'task':
- output = open(os.path.join(working_directory, name), 'wb')
- else:
- raise Exception("No remote output found for dataset with path %s" % path)
- try:
- while True:
- buffer = response.read(1024)
- if buffer == "":
- break
- output.write(buffer)
- finally:
- output.close()
-
- def launch(self, command_line):
- """ """
- return self.__raw_execute("launch", {"command_line" : command_line,
- "job_id" : self.job_id})
-
- def kill(self):
- return self.__raw_execute("kill", {"job_id" : self.job_id})
-
- def wait(self):
- """ """
- while True:
- complete = self.check_complete()
- if complete:
- return check_complete_response
- time.sleep(1)
-
- def raw_check_complete(self):
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
- return check_complete_response
-
- def check_complete(self):
- return self.raw_check_complete()["complete"] == "true"
-
- def clean(self):
- self.__raw_execute("clean", { "job_id" : self.job_id })
-
- def setup(self):
- return self.__raw_execute_and_parse("setup", { "job_id" : self.job_id })
-
-
class LwrJobRunner( ClusterJobRunner ):
"""
diff -r e4adc9ad0bb72111e502c6a998b697aa43ccbebd -r 93dd6202175a12a592a38a2d6d6669baa1fbd5fd lib/galaxy/jobs/runners/lwr_client/__init__.py
--- /dev/null
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -0,0 +1,246 @@
+import mmap
+import os
+import re
+import time
+import urllib
+import urllib2
+
+import simplejson
+
+
+class FileStager(object):
+
+ def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
+ self.client = client
+ self.command_line = command_line
+ self.config_files = config_files
+ self.input_files = input_files
+ self.output_files = output_files
+ self.tool_dir = os.path.abspath(tool_dir)
+ self.working_directory = working_directory
+
+ self.file_renames = {}
+
+ job_config = client.setup()
+
+ self.new_working_directory = job_config['working_directory']
+ self.new_outputs_directory = job_config['outputs_directory']
+ self.remote_path_separator = job_config['path_separator']
+
+ self.__initialize_referenced_tool_files()
+ self.__upload_tool_files()
+ self.__upload_input_files()
+ self.__upload_working_directory_files()
+ self.__initialize_output_file_renames()
+ self.__initialize_task_output_file_renames()
+ self.__initialize_config_file_renames()
+ self.__rewrite_and_upload_config_files()
+ self.__rewrite_command_line()
+
+ def __initialize_referenced_tool_files(self):
+ pattern = r"(%s%s\S+)" % (self.tool_dir, os.sep)
+ referenced_tool_files = []
+ referenced_tool_files += re.findall(pattern, self.command_line)
+ if self.config_files != None:
+ for config_file in self.config_files:
+ referenced_tool_files += re.findall(pattern, self.__read(config_file))
+ self.referenced_tool_files = referenced_tool_files
+
+ def __upload_tool_files(self):
+ for referenced_tool_file in self.referenced_tool_files:
+ tool_upload_response = self.client.upload_tool_file(referenced_tool_file)
+ self.file_renames[referenced_tool_file] = tool_upload_response['path']
+
+ def __upload_input_files(self):
+ for input_file in self.input_files:
+ input_upload_response = self.client.upload_input(input_file)
+ self.file_renames[input_file] = input_upload_response['path']
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
+
+ def __upload_working_directory_files(self):
+ # Task manager stages files into working directory, these need to be uploaded
+ for working_directory_file in os.listdir(self.working_directory):
+ path = os.path.join(self.working_directory, working_directory_file)
+ working_file_response = self.client.upload_working_directory_file(path)
+ self.file_renames[path] = working_file_response['path']
+
+ def __initialize_output_file_renames(self):
+ for output_file in self.output_files:
+ self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
+ self.remote_path_separator,
+ os.path.basename(output_file))
+
+ def __initialize_task_output_file_renames(self):
+ for output_file in self.output_files:
+ name = os.path.basename(output_file)
+ self.file_renames[os.path.join(self.working_directory, name)] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ name)
+
+ def __initialize_config_file_renames(self):
+ for config_file in self.config_files:
+ self.file_renames[config_file] = r'%s%s%s' % (self.new_working_directory,
+ self.remote_path_separator,
+ os.path.basename(config_file))
+
+ def __rewrite_paths(self, contents):
+ new_contents = contents
+ for local_path, remote_path in self.file_renames.iteritems():
+ new_contents = new_contents.replace(local_path, remote_path)
+ return new_contents
+
+ def __rewrite_and_upload_config_files(self):
+ for config_file in self.config_files:
+ config_contents = self.__read(config_file)
+ new_config_contents = self.__rewrite_paths(config_contents)
+ self.client.upload_config_file(config_file, new_config_contents)
+
+ def __rewrite_command_line(self):
+ self.rewritten_command_line = self.__rewrite_paths(self.command_line)
+
+ def get_rewritten_command_line(self):
+ return self.rewritten_command_line
+
+ def __read(self, path):
+ input = open(path, "r")
+ try:
+ return input.read()
+ finally:
+ input.close()
+
+
+
+class Client(object):
+ """
+ """
+ """
+ """
+ def __init__(self, remote_host, job_id, private_key=None):
+ if not remote_host.endswith("/"):
+ remote_host = remote_host + "/"
+ ## If we don't have an explicit private_key defined, check for
+ ## one embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_key_format = "https?://(.*)@.*/?"
+ private_key_match= re.match(private_key_format, remote_host)
+ if not private_key and private_key_match:
+ private_key = private_key_match.group(1)
+ remote_host = remote_host.replace("%s@" % private_key, '', 1)
+ self.remote_host = remote_host
+ self.job_id = job_id
+ self.private_key = private_key
+
+ def url_open(self, request, data):
+ return urllib2.urlopen(request, data)
+
+ def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
+ data = urllib.urlencode(args)
+ url = self.remote_host + command + "?" + data
+ return url
+
+ def __raw_execute(self, command, args = {}, data = None):
+ url = self.__build_url(command, args)
+ request = urllib2.Request(url=url, data=data)
+ response = self.url_open(request, data)
+ return response
+
+ def __raw_execute_and_parse(self, command, args = {}, data = None):
+ response = self.__raw_execute(command, args, data)
+ return simplejson.loads(response.read())
+
+ def __upload_file(self, action, path, name=None, contents = None):
+ """ """
+ input = open(path, 'rb')
+ try:
+ mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
+ return self.__upload_contents(action, path, mmapped_input, name)
+ finally:
+ input.close()
+
+ def __upload_contents(self, action, path, contents, name=None):
+ if not name:
+ name = os.path.basename(path)
+ args = {"job_id" : self.job_id, "name" : name}
+ return self.__raw_execute_and_parse(action, args, contents)
+
+ def upload_tool_file(self, path):
+ return self.__upload_file("upload_tool_file", path)
+
+ def upload_input(self, path):
+ return self.__upload_file("upload_input", path)
+
+ def upload_extra_input(self, path, relative_name):
+ return self.__upload_file("upload_extra_input", path, name=relative_name)
+
+ def upload_config_file(self, path, contents):
+ return self.__upload_contents("upload_config_file", path, contents)
+
+ def upload_working_directory_file(self, path):
+ return self.__upload_file("upload_working_directory_file", path)
+
+ def _get_output_type(self, name):
+ return self.__raw_execute_and_parse('get_output_type', {'name': name,
+ 'job_id': self.job_id})
+
+ def download_output(self, path, working_directory):
+ """ """
+ name = os.path.basename(path)
+ output_type = self._get_output_type(name)
+ response = self.__raw_execute('download_output', {'name' : name,
+ "job_id" : self.job_id,
+ 'output_type': output_type})
+ if output_type == 'direct':
+ output = open(path, 'wb')
+ elif output_type == 'task':
+ output = open(os.path.join(working_directory, name), 'wb')
+ else:
+ raise Exception("No remote output found for dataset with path %s" % path)
+ try:
+ while True:
+ buffer = response.read(1024)
+ if buffer == "":
+ break
+ output.write(buffer)
+ finally:
+ output.close()
+
+ def launch(self, command_line):
+ """ """
+ return self.__raw_execute("launch", {"command_line" : command_line,
+ "job_id" : self.job_id})
+
+ def kill(self):
+ return self.__raw_execute("kill", {"job_id" : self.job_id})
+
+ def wait(self):
+ """ """
+ while True:
+ complete = self.check_complete()
+ if complete:
+ return check_complete_response
+ time.sleep(1)
+
+ def raw_check_complete(self):
+ check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
+ return check_complete_response
+
+ def check_complete(self):
+ return self.raw_check_complete()["complete"] == "true"
+
+ def clean(self):
+ self.__raw_execute("clean", { "job_id" : self.job_id })
+
+ def setup(self):
+ return self.__raw_execute_and_parse("setup", { "job_id" : self.job_id })
https://bitbucket.org/galaxy/galaxy-central/changeset/50c1edba7fe0/
changeset: 50c1edba7fe0
user: jmchilton
date: 2012-12-26 19:42:31
summary: Documentation and PEP8 fixes for lwr client code.
---
lib/galaxy/jobs/runners/lwr_client/__init__.py | 206 +++++++++++++++++++-----
1 file changed, 164 insertions(+), 42 deletions(-)
affected #: 1 file
diff -r 93dd6202175a12a592a38a2d6d6669baa1fbd5fd -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -1,3 +1,10 @@
+"""
+lwr_client
+==========
+
+This module contains logic for interfacing with an external LWR server.
+
+"""
import mmap
import os
import re
@@ -9,8 +16,32 @@
class FileStager(object):
-
+ """
+ Objects of the FileStager class interact with an LWR client object to
+ stage the files required to run jobs on a remote LWR server.
+
+ **Parameters**
+
+ client : Client
+ LWR client object.
+ command_line : str
+ The local command line to execute, this will be rewritten for the remote server.
+ config_files : list
+ List of Galaxy 'configfile's produced for this job. These will be rewritten and sent to remote server.
+ input_files : list
+ List of input files used by job. These will be transferred and references rewritten.
+ output_files : list
+ List of output_files produced by job.
+ tool_dir : str
+ Directory containing tool to execute (if a wrapper is used, it will be transferred to remote server).
+ working_directory : str
+ Local path created by Galaxy for running this job.
+
+ """
+
def __init__(self, client, command_line, config_files, input_files, output_files, tool_dir, working_directory):
+ """
+ """
self.client = client
self.command_line = command_line
self.config_files = config_files
@@ -67,7 +98,8 @@
self.file_renames[extra_file_path] = response['path']
def __upload_working_directory_files(self):
- # Task manager stages files into working directory, these need to be uploaded
+ # Task manager stages files into working directory, these need to be
+ # uploaded if present.
for working_directory_file in os.listdir(self.working_directory):
path = os.path.join(self.working_directory, working_directory_file)
working_file_response = self.client.upload_working_directory_file(path)
@@ -75,8 +107,8 @@
def __initialize_output_file_renames(self):
for output_file in self.output_files:
- self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
- self.remote_path_separator,
+ self.file_renames[output_file] = r'%s%s%s' % (self.new_outputs_directory,
+ self.remote_path_separator,
os.path.basename(output_file))
def __initialize_task_output_file_renames(self):
@@ -108,6 +140,10 @@
self.rewritten_command_line = self.__rewrite_paths(self.command_line)
def get_rewritten_command_line(self):
+ """
+ Returns the rewritten version of the command line to execute suitable
+ for remote host.
+ """
return self.rewritten_command_line
def __read(self, path):
@@ -117,13 +153,21 @@
finally:
input.close()
-
-
+
class Client(object):
- """
"""
- """
+ Objects of this client class perform low-level communication with a remote LWR server.
+
+ **Parameters**
+
+ remote_host : str
+ Remote URL of the LWR server.
+ job_id : str
+ Galaxy job/task id.
+ private_key : str (optional)
+ Secret key the remote LWR server is configured with.
"""
+
def __init__(self, remote_host, job_id, private_key=None):
if not remote_host.endswith("/"):
remote_host = remote_host + "/"
@@ -132,7 +176,7 @@
## https://moo@cow:8913 will try to contact https://cow:8913
## with a private key of moo
private_key_format = "https?://(.*)@.*/?"
- private_key_match= re.match(private_key_format, remote_host)
+ private_key_match = re.match(private_key_format, remote_host)
if not private_key and private_key_match:
private_key = private_key_match.group(1)
remote_host = remote_host.replace("%s@" % private_key, '', 1)
@@ -140,9 +184,9 @@
self.job_id = job_id
self.private_key = private_key
- def url_open(self, request, data):
+ def _url_open(self, request, data):
return urllib2.urlopen(request, data)
-
+
def __build_url(self, command, args):
if self.private_key:
args["private_key"] = self.private_key
@@ -150,21 +194,20 @@
url = self.remote_host + command + "?" + data
return url
- def __raw_execute(self, command, args = {}, data = None):
+ def __raw_execute(self, command, args={}, data=None):
url = self.__build_url(command, args)
request = urllib2.Request(url=url, data=data)
- response = self.url_open(request, data)
+ response = self._url_open(request, data)
return response
- def __raw_execute_and_parse(self, command, args = {}, data = None):
+ def __raw_execute_and_parse(self, command, args={}, data=None):
response = self.__raw_execute(command, args, data)
return simplejson.loads(response.read())
- def __upload_file(self, action, path, name=None, contents = None):
- """ """
+ def __upload_file(self, action, path, name=None, contents=None):
input = open(path, 'rb')
try:
- mmapped_input = mmap.mmap(input.fileno(), 0, access = mmap.ACCESS_READ)
+ mmapped_input = mmap.mmap(input.fileno(), 0, access=mmap.ACCESS_READ)
return self.__upload_contents(action, path, mmapped_input, name)
finally:
input.close()
@@ -172,39 +215,93 @@
def __upload_contents(self, action, path, contents, name=None):
if not name:
name = os.path.basename(path)
- args = {"job_id" : self.job_id, "name" : name}
+ args = {"job_id": self.job_id, "name": name}
return self.__raw_execute_and_parse(action, args, contents)
-
+
def upload_tool_file(self, path):
+ """
+ Upload a tool related file (e.g. wrapper) required to run job.
+
+ **Parameters**
+
+ path : str
+ Local path tool.
+ """
return self.__upload_file("upload_tool_file", path)
def upload_input(self, path):
+ """
+ Upload input dataset to remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of input dataset.
+ """
return self.__upload_file("upload_input", path)
def upload_extra_input(self, path, relative_name):
+ """
+ Upload extra input file to remote server.
+
+ **Parameters**
+
+ path : str
+ Extra files path of input dataset corresponding to this input.
+ relative_name : str
+ Relative path of extra file to upload relative to inputs extra files path.
+ """
return self.__upload_file("upload_extra_input", path, name=relative_name)
def upload_config_file(self, path, contents):
+ """
+ Upload a job's config file to the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path to the original config file.
+ contents : str
+ Rewritten contents of the config file to upload.
+ """
return self.__upload_contents("upload_config_file", path, contents)
def upload_working_directory_file(self, path):
+ """
+ Upload the supplied file (path) from a job's working directory
+ to remote server.
+
+ **Parameters**
+
+ path : str
+ Path to file to upload.
+ """
return self.__upload_file("upload_working_directory_file", path)
def _get_output_type(self, name):
- return self.__raw_execute_and_parse('get_output_type', {'name': name,
- 'job_id': self.job_id})
+ return self.__raw_execute_and_parse("get_output_type", {"name": name,
+ "job_id": self.job_id})
def download_output(self, path, working_directory):
- """ """
+ """
+ Download an output dataset from the remote server.
+
+ **Parameters**
+
+ path : str
+ Local path of the dataset.
+ working_directory : str
+ Local working_directory for the job.
+ """
name = os.path.basename(path)
output_type = self._get_output_type(name)
- response = self.__raw_execute('download_output', {'name' : name,
- "job_id" : self.job_id,
- 'output_type': output_type})
- if output_type == 'direct':
- output = open(path, 'wb')
- elif output_type == 'task':
- output = open(os.path.join(working_directory, name), 'wb')
+ response = self.__raw_execute("download_output", {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type})
+ if output_type == "direct":
+ output = open(path, "wb")
+ elif output_type == "task":
+ output = open(os.path.join(working_directory, name), "wb")
else:
raise Exception("No remote output found for dataset with path %s" % path)
try:
@@ -215,32 +312,57 @@
output.write(buffer)
finally:
output.close()
-
+
def launch(self, command_line):
- """ """
- return self.__raw_execute("launch", {"command_line" : command_line,
- "job_id" : self.job_id})
+ """
+ Run or queue up the execution of the supplied
+ `command_line` on the remote server.
+
+ **Parameters**
+
+ command_line : str
+ Command to execute.
+ """
+ return self.__raw_execute("launch", {"command_line": command_line,
+ "job_id": self.job_id})
def kill(self):
- return self.__raw_execute("kill", {"job_id" : self.job_id})
-
+ """
+ Cancel remote job, either removing from the queue or killing it.
+ """
+ return self.__raw_execute("kill", {"job_id": self.job_id})
+
def wait(self):
- """ """
+ """
+ Wait for job to finish.
+ """
while True:
- complete = self.check_complete()
- if complete:
- return check_complete_response
+ complete_response = self.raw_check_complete()
+ if complete_response["complete"] == "true":
+ return complete_response
time.sleep(1)
def raw_check_complete(self):
- check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id" : self.job_id })
+ """
+ Get check_complete response from the remote server.
+ """
+ check_complete_response = self.__raw_execute_and_parse("check_complete", {"job_id": self.job_id})
return check_complete_response
def check_complete(self):
+ """
+ Return boolean indicating whether the job is complete.
+ """
return self.raw_check_complete()["complete"] == "true"
def clean(self):
- self.__raw_execute("clean", { "job_id" : self.job_id })
+ """
+ Cleanup the remote job.
+ """
+ self.__raw_execute("clean", {"job_id": self.job_id})
def setup(self):
- return self.__raw_execute_and_parse("setup", { "job_id" : self.job_id })
+ """
+ Setup remote LWR server to run this job.
+ """
+ return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
https://bitbucket.org/galaxy/galaxy-central/changeset/5822038f8c96/
changeset: 5822038f8c96
user: jmchilton
date: 2012-12-26 19:42:31
summary: Extend lwr to allow execution of jobs with outputs specified using 'from_work_dir'.
---
lib/galaxy/jobs/runners/__init__.py | 72 +++++++++++++-----------
lib/galaxy/jobs/runners/lwr.py | 7 ++-
lib/galaxy/jobs/runners/lwr_client/__init__.py | 31 ++++++++--
3 files changed, 72 insertions(+), 38 deletions(-)
affected #: 3 files
diff -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 -r 5822038f8c9677cbcaea60597a8d988cbd65b174 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -8,7 +8,7 @@
log = logging.getLogger( __name__ )
class BaseJobRunner( object ):
- def build_command_line( self, job_wrapper, include_metadata=False ):
+ def build_command_line( self, job_wrapper, include_metadata=False, include_work_dir_outputs=True ):
"""
Compose the sequence of commands necessary to execute a job. This will
currently include:
@@ -19,18 +19,6 @@
- commands to set metadata (if include_metadata is True)
"""
- def in_directory( file, directory ):
- """
- Return true, if the common prefix of both is equal to directory
- e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
- """
-
- # Make both absolute.
- directory = os.path.abspath( directory )
- file = os.path.abspath( file )
-
- return os.path.commonprefix( [ file, directory ] ) == directory
-
commands = job_wrapper.get_command_line()
# All job runners currently handle this case which should never
# occur
@@ -47,6 +35,41 @@
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
# -- Append commands to copy job outputs based on from_work_dir attribute. --
+ if include_work_dir_outputs:
+ work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
+ if work_dir_outputs:
+ commands += "; " + "; ".join( [ "cp %s %s" % ( source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
+
+ # Append metadata setting commands, we don't want to overwrite metadata
+ # that was copied over in init_meta(), as per established behavior
+ if include_metadata and self.app.config.set_metadata_externally:
+ commands += "; cd %s; " % os.path.abspath( os.getcwd() )
+ commands += job_wrapper.setup_external_metadata(
+ exec_dir = os.path.abspath( os.getcwd() ),
+ tmp_dir = job_wrapper.working_directory,
+ dataset_files_path = self.app.model.Dataset.file_path,
+ output_fnames = job_wrapper.get_output_fnames(),
+ set_extension = False,
+ kwds = { 'overwrite' : False } )
+ return commands
+
+ def get_work_dir_outputs( self, job_wrapper ):
+ """
+ Returns list of pairs (source_file, destination) describing path
+ to work_dir output file and ultimate destination.
+ """
+
+ def in_directory( file, directory ):
+ """
+ Return true, if the common prefix of both is equal to directory
+ e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
+ """
+
+ # Make both absolute.
+ directory = os.path.abspath( directory )
+ file = os.path.abspath( file )
+
+ return os.path.commonprefix( [ file, directory ] ) == directory
# Set up dict of dataset id --> output path; output path can be real or
# false depending on outputs_to_working_directory
@@ -57,6 +80,7 @@
path = dataset_path.false_path
output_paths[ dataset_path.dataset_id ] = path
+ output_pairs = []
# Walk job's output associations to find and use from_work_dir attributes.
job = job_wrapper.get_job()
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
@@ -72,30 +96,14 @@
source_file = os.path.join( os.path.abspath( job_wrapper.working_directory ), hda_tool_output.from_work_dir )
destination = output_paths[ dataset.dataset_id ]
if in_directory( source_file, job_wrapper.working_directory ):
- try:
- commands += "; cp %s %s" % ( source_file, destination )
- log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
- except ( IOError, OSError ):
- log.debug( "Could not copy %s to %s as directed by from_work_dir" % ( source_file, destination ) )
+ output_pairs.append( ( source_file, destination ) )
+ log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
+ return output_pairs
-
- # Append metadata setting commands, we don't want to overwrite metadata
- # that was copied over in init_meta(), as per established behavior
- if include_metadata and self.app.config.set_metadata_externally:
- commands += "; cd %s; " % os.path.abspath( os.getcwd() )
- commands += job_wrapper.setup_external_metadata(
- exec_dir = os.path.abspath( os.getcwd() ),
- tmp_dir = job_wrapper.working_directory,
- dataset_files_path = self.app.model.Dataset.file_path,
- output_fnames = job_wrapper.get_output_fnames(),
- set_extension = False,
- kwds = { 'overwrite' : False } )
- return commands
-
class ClusterJobState( object ):
"""
Encapsulate the state of a cluster job, this should be subclassed as
diff -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 -r 5822038f8c9677cbcaea60597a8d988cbd65b174 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -54,7 +54,7 @@
if 0 != os.system(cmd):
raise Exception('Error running file staging command: %s' % cmd)
job_wrapper.prepare_input_files_cmds = None # prevent them from being used in-line
- command_line = self.build_command_line( job_wrapper, include_metadata=False )
+ command_line = self.build_command_line( job_wrapper, include_metadata=False, include_work_dir_outputs=False )
except:
job_wrapper.fail( "failure preparing job", exception=True )
log.exception("failure running job %d" % job_wrapper.job_id)
@@ -126,7 +126,12 @@
stderr = run_results['stderr']
if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ]:
+ work_dir_outputs = self.get_work_dir_outputs(job_wrapper)
output_files = self.get_output_files(job_wrapper)
+ for source_file, output_file in work_dir_outputs:
+ client.download_work_dir_output(source_file, job_wrapper.working_directory, output_file)
+ # Remove from full output_files list so don't try to download directly.
+ output_files.remove(output_file)
for output_file in output_files:
client.download_output(output_file, working_directory=job_wrapper.working_directory)
client.clean()
diff -r 50c1edba7fe058e475c1da4aebf9caf85be435e5 -r 5822038f8c9677cbcaea60597a8d988cbd65b174 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -282,6 +282,24 @@
return self.__raw_execute_and_parse("get_output_type", {"name": name,
"job_id": self.job_id})
+ def download_work_dir_output(self, source, working_directory, output_path):
+ """
+ Download an output dataset specified with from_work_dir from the
+ remote server.
+
+ **Parameters**
+
+ source : str
+ Path in job's working_directory to find output in.
+ working_directory : str
+ Local working_directory for the job.
+ output_path : str
+ Full path to output dataset.
+ """
+ output = open(output_path, "wb")
+ name = os.path.basename(source)
+ self.__raw_download_output(name, self.job_id, "work_dir", output)
+
def download_output(self, path, working_directory):
"""
Download an output dataset from the remote server.
@@ -295,23 +313,26 @@
"""
name = os.path.basename(path)
output_type = self._get_output_type(name)
- response = self.__raw_execute("download_output", {"name": name,
- "job_id": self.job_id,
- "output_type": output_type})
if output_type == "direct":
output = open(path, "wb")
elif output_type == "task":
output = open(os.path.join(working_directory, name), "wb")
else:
raise Exception("No remote output found for dataset with path %s" % path)
+ self.__raw_download_output(name, self.job_id, output_type, output)
+
+ def __raw_download_output(self, name, job_id, output_type, output_file):
+ response = self.__raw_execute("download_output", {"name": name,
+ "job_id": self.job_id,
+ "output_type": output_type})
try:
while True:
buffer = response.read(1024)
if buffer == "":
break
- output.write(buffer)
+ output_file.write(buffer)
finally:
- output.close()
+ output_file.close()
def launch(self, command_line):
"""
https://bitbucket.org/galaxy/galaxy-central/changeset/6265bf3f27ad/
changeset: 6265bf3f27ad
user: jmchilton
date: 2012-12-26 19:42:31
summary: Implement optimization attempting to not transfer unneeded inputs to remote LWR server. More general refactoring and testing of lwr client code.
---
lib/galaxy/jobs/runners/lwr_client/__init__.py | 170 +++++++++++++++++++-----
1 file changed, 136 insertions(+), 34 deletions(-)
affected #: 1 file
diff -r 5822038f8c9677cbcaea60597a8d988cbd65b174 -r 6265bf3f27ad611db6c676e94166c25500c13432 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -15,6 +15,99 @@
import simplejson
+class JobInputs(object):
+ """
+ Abstractions over dynamic inputs created for a given job (namely the command to
+ execute and created configfiles).
+
+ **Parameters**
+
+ command_line : str
+ Local command to execute for this job. (To be rewritten.)
+ config_files : str
+ Config files created for this job. (To be rewritten.)
+
+
+ >>> import tempfile
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> def setup_inputs(tf):
+ ... open(tf.name, "w").write("world /path/to/input the rest")
+ ... inputs = JobInputs("hello /path/to/input", [tf.name])
+ ... return inputs
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.rewrite_paths("/path/to/input", 'C:\\input')
+ >>> inputs.rewritten_command_line
+ 'hello C:\\\\input'
+ >>> inputs.rewritten_config_files[tf.name]
+ 'world C:\\\\input the rest'
+ >>> tf.close()
+ >>> tf = tempfile.NamedTemporaryFile()
+ >>> inputs = setup_inputs(tf)
+ >>> inputs.find_referenced_subfiles('/path/to')
+ ['/path/to/input']
+ >>> inputs.path_referenced('/path/to')
+ True
+ >>> inputs.path_referenced('/path/to/input')
+ True
+ >>> inputs.path_referenced('/path/to/notinput')
+ False
+ >>> tf.close()
+ """
+
+ def __init__(self, command_line, config_files):
+ self.rewritten_command_line = command_line
+ self.rewritten_config_files = {}
+ for config_file in config_files or []:
+ config_contents = _read(config_file)
+ self.rewritten_config_files[config_file] = config_contents
+
+ def find_referenced_subfiles(self, directory):
+ """
+ Return list of files below specified `directory` in job inputs. Could
+ use more sophisticated logic (match quotes to handle spaces, handle
+ subdirectories, etc...).
+
+ **Parameters**
+
+ directory : str
+ Full path to directory to search.
+
+ """
+ pattern = r"(%s%s\S+)" % (directory, os.sep)
+ referenced_files = set()
+ for input_contents in self.__items():
+ referenced_files.update(re.findall(pattern, input_contents))
+ return list(referenced_files)
+
+ def path_referenced(self, path):
+ pattern = r"%s" % path
+ found = False
+ for input_contents in self.__items():
+ if re.findall(pattern, input_contents):
+ found = True
+ break
+ return found
+
+ def rewrite_paths(self, local_path, remote_path):
+ """
+ Rewrite references to `local_path` with `remote_path` in job inputs.
+ """
+ self.__rewrite_command_line(local_path, remote_path)
+ self.__rewrite_config_files(local_path, remote_path)
+
+ def __rewrite_command_line(self, local_path, remote_path):
+ self.rewritten_command_line = self.rewritten_command_line.replace(local_path, remote_path)
+
+ def __rewrite_config_files(self, local_path, remote_path):
+ for config_file, rewritten_contents in self.rewritten_config_files.iteritems():
+ self.rewritten_config_files[config_file] = rewritten_contents.replace(local_path, remote_path)
+
+ def __items(self):
+ items = [self.rewritten_command_line]
+ items.extend(self.rewritten_config_files.values())
+ return items
+
+
class FileStager(object):
"""
Objects of the FileStager class interact with an LWR client object to
@@ -50,6 +143,10 @@
self.tool_dir = os.path.abspath(tool_dir)
self.working_directory = working_directory
+ # Setup job inputs, these will need to be rewritten before
+ # shipping off to remote LWR server.
+ self.job_inputs = JobInputs(self.command_line, self.config_files)
+
self.file_renames = {}
job_config = client.setup()
@@ -65,17 +162,11 @@
self.__initialize_output_file_renames()
self.__initialize_task_output_file_renames()
self.__initialize_config_file_renames()
- self.__rewrite_and_upload_config_files()
- self.__rewrite_command_line()
+ self.__handle_rewrites()
+ self.__upload_rewritten_config_files()
def __initialize_referenced_tool_files(self):
- pattern = r"(%s%s\S+)" % (self.tool_dir, os.sep)
- referenced_tool_files = []
- referenced_tool_files += re.findall(pattern, self.command_line)
- if self.config_files != None:
- for config_file in self.config_files:
- referenced_tool_files += re.findall(pattern, self.__read(config_file))
- self.referenced_tool_files = referenced_tool_files
+ self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
def __upload_tool_files(self):
for referenced_tool_file in self.referenced_tool_files:
@@ -84,18 +175,25 @@
def __upload_input_files(self):
for input_file in self.input_files:
+ self.__upload_input_file(input_file)
+ self.__upload_input_extra_files(input_file)
+
+ def __upload_input_file(self, input_file):
+ if self.job_inputs.path_referenced(input_file):
input_upload_response = self.client.upload_input(input_file)
self.file_renames[input_file] = input_upload_response['path']
- # TODO: Determine if this is object store safe and what needs to be
- # done if it is not.
- files_path = "%s_files" % input_file[0:-len(".dat")]
- if os.path.exists(files_path):
- for extra_file in os.listdir(files_path):
- extra_file_path = os.path.join(files_path, extra_file)
- relative_path = os.path.basename(files_path)
- extra_file_relative_path = os.path.join(relative_path, extra_file)
- response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
- self.file_renames[extra_file_path] = response['path']
+
+ def __upload_input_extra_files(self, input_file):
+ # TODO: Determine if this is object store safe and what needs to be
+ # done if it is not.
+ files_path = "%s_files" % input_file[0:-len(".dat")]
+ if os.path.exists(files_path) and self.job_inputs.path_referenced(files_path):
+ for extra_file in os.listdir(files_path):
+ extra_file_path = os.path.join(files_path, extra_file)
+ relative_path = os.path.basename(files_path)
+ extra_file_relative_path = os.path.join(relative_path, extra_file)
+ response = self.client.upload_extra_input(extra_file_path, extra_file_relative_path)
+ self.file_renames[extra_file_path] = response['path']
def __upload_working_directory_files(self):
# Task manager stages files into working directory, these need to be
@@ -130,28 +228,20 @@
new_contents = new_contents.replace(local_path, remote_path)
return new_contents
- def __rewrite_and_upload_config_files(self):
- for config_file in self.config_files:
- config_contents = self.__read(config_file)
- new_config_contents = self.__rewrite_paths(config_contents)
+ def __handle_rewrites(self):
+ for local_path, remote_path in self.file_renames.iteritems():
+ self.job_inputs.rewrite_paths(local_path, remote_path)
+
+ def __upload_rewritten_config_files(self):
+ for config_file, new_config_contents in self.job_inputs.rewritten_config_files.iteritems():
self.client.upload_config_file(config_file, new_config_contents)
- def __rewrite_command_line(self):
- self.rewritten_command_line = self.__rewrite_paths(self.command_line)
-
def get_rewritten_command_line(self):
"""
Returns the rewritten version of the command line to execute suitable
for remote host.
"""
- return self.rewritten_command_line
-
- def __read(self, path):
- input = open(path, "r")
- try:
- return input.read()
- finally:
- input.close()
+ return self.job_inputs.rewritten_command_line
class Client(object):
@@ -387,3 +477,15 @@
Setup remote LWR server to run this job.
"""
return self.__raw_execute_and_parse("setup", {"job_id": self.job_id})
+
+
+def _read(path):
+ """
+ Utility method to quickly read small files (config files and tool
+ wrappers) into memory as strings.
+ """
+ input = open(path, "r")
+ try:
+ return input.read()
+ finally:
+ input.close()
https://bitbucket.org/galaxy/galaxy-central/changeset/875ac898df00/
changeset: 875ac898df00
user: jmchilton
date: 2012-12-26 19:42:31
summary: Rework job_id handling in LWR runner allowing remote LWR server to assign a job_id during setup, which will serve as the job's external id. This change allows multiple Galaxy instances to submit jobs to the same LWR backend server and will prove useful when implementing additional backends (pbs/drmaa/etc...) for the LWR server.
---
lib/galaxy/jobs/runners/lwr.py | 5 +++--
lib/galaxy/jobs/runners/lwr_client/__init__.py | 22 ++++++++++++++++------
2 files changed, 19 insertions(+), 8 deletions(-)
affected #: 2 files
diff -r 6265bf3f27ad611db6c676e94166c25500c13432 -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -73,8 +73,9 @@
working_directory = job_wrapper.working_directory
file_stager = FileStager(client, command_line, job_wrapper.extra_filenames, input_files, output_files, job_wrapper.tool.tool_dir, working_directory)
rebuilt_command_line = file_stager.get_rewritten_command_line()
+ job_id = file_stager.job_id
client.launch( rebuilt_command_line )
- job_wrapper.set_runner( runner_url, job_wrapper.job_id )
+ job_wrapper.set_runner( runner_url, job_id )
job_wrapper.change_state( model.Job.states.RUNNING )
except Exception, exc:
@@ -84,7 +85,7 @@
lwr_job_state = ClusterJobState()
lwr_job_state.job_wrapper = job_wrapper
- lwr_job_state.job_id = job_wrapper.job_id
+ lwr_job_state.job_id = job_id
lwr_job_state.old_state = True
lwr_job_state.running = True
lwr_job_state.runner_url = runner_url
diff -r 6265bf3f27ad611db6c676e94166c25500c13432 -r 875ac898df00fd919b6b24f58562fadbf03dc5e1 lib/galaxy/jobs/runners/lwr_client/__init__.py
--- a/lib/galaxy/jobs/runners/lwr_client/__init__.py
+++ b/lib/galaxy/jobs/runners/lwr_client/__init__.py
@@ -149,12 +149,7 @@
self.file_renames = {}
- job_config = client.setup()
-
- self.new_working_directory = job_config['working_directory']
- self.new_outputs_directory = job_config['outputs_directory']
- self.remote_path_separator = job_config['path_separator']
-
+ self.__handle_setup()
self.__initialize_referenced_tool_files()
self.__upload_tool_files()
self.__upload_input_files()
@@ -165,6 +160,21 @@
self.__handle_rewrites()
self.__upload_rewritten_config_files()
+ def __handle_setup(self):
+ job_config = self.client.setup()
+
+ self.new_working_directory = job_config['working_directory']
+ self.new_outputs_directory = job_config['outputs_directory']
+ self.remote_path_separator = job_config['path_separator']
+ # If remote LWR server assigned job id, use that otherwise
+ # just use local job_id assigned.
+ galaxy_job_id = self.client.job_id
+ self.job_id = job_config.get('job_id', galaxy_job_id)
+ if self.job_id != galaxy_job_id:
+ # Remote LWR server assigned an id different than the
+ # Galaxy job id, update client to reflect this.
+ self.client.job_id = self.job_id
+
def __initialize_referenced_tool_files(self):
self.referenced_tool_files = self.job_inputs.find_referenced_subfiles(self.tool_dir)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Display installed repository dependencies in a separate container from missing repository dependencies.
by Bitbucket 25 Dec '12
by Bitbucket 25 Dec '12
25 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4f4875265599/
changeset: 4f4875265599
user: greg
date: 2012-12-25 17:49:39
summary: Display installed repository dependencies in a separate container from missing repository dependencies.
affected #: 7 files
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3167,40 +3167,40 @@
def has_readme_files( self ):
return self.metadata and 'readme_files' in self.metadata
@property
- def required_repositories( self ):
+ def repository_dependencies( self ):
required_repositories = []
- for rrda in self.repository_dependencies:
+ for rrda in self.required_repositories:
repository_dependency = rrda.repository_dependency
required_repository = repository_dependency.repository
required_repositories.append( required_repository )
return required_repositories
@property
- def installed_required_repositories( self ):
+ def installed_repository_dependencies( self ):
"""Return the repository's repository dependencies that are currently installed."""
installed_required_repositories = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.INSTALLED:
installed_required_repositories.append( required_repository )
return installed_required_repositories
@property
- def missing_required_repositories( self ):
+ def missing_repository_dependencies( self ):
"""Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
missing_required_repositories = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status not in [ self.installation_status.INSTALLED ]:
missing_required_repositories.append( required_repository )
return missing_required_repositories
@property
- def required_repositories_being_installed( self ):
+ def repository_dependencies_being_installed( self ):
required_repositories_being_installed = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
required_repositories_being_installed.append( required_repository )
return required_repositories_being_installed
@property
- def required_repositories_missing_or_being_installed( self ):
+ def repository_dependencies_missing_or_being_installed( self ):
required_repositories_missing_or_being_installed = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status in [ self.installation_status.ERROR,
self.installation_status.INSTALLING,
self.installation_status.NEVER_INSTALLED,
@@ -3208,17 +3208,17 @@
required_repositories_missing_or_being_installed.append( required_repository )
return required_repositories_missing_or_being_installed
@property
- def required_repositories_with_installation_errors( self ):
+ def repository_dependencies_with_installation_errors( self ):
required_repositories_with_installation_errors = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.ERROR:
required_repositories_with_installation_errors.append( required_repository )
return required_repositories_with_installation_errors
@property
- def uninstalled_required_repositories( self ):
+ def uninstalled_repository_dependencies( self ):
"""Return the repository's repository dependencies that have been uninstalled."""
uninstalled_required_repositories = []
- for required_repository in self.required_repositories:
+ for required_repository in self.repository_dependencies:
if required_repository.status == self.installation_status.UNINSTALLED:
uninstalled_required_repositories.append( required_repository )
return uninstalled_required_repositories
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1758,8 +1758,8 @@
primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ),
order_by=ToolDependency.table.c.name,
backref='tool_shed_repository' ),
- repository_dependencies=relation( RepositoryRepositoryDependencyAssociation,
- primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
+ required_repositories=relation( RepositoryRepositoryDependencyAssociation,
+ primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
assign_mapper( context, RepositoryRepositoryDependencyAssociation, RepositoryRepositoryDependencyAssociation.table,
properties=dict( repository=relation( ToolShedRepository,
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -488,6 +488,40 @@
if idx == count:
break
return headers
+def get_installed_and_missing_repository_dependencies( trans, repository ):
+ missing_repository_dependencies = {}
+ installed_repository_dependencies = {}
+ if repository.has_repository_dependencies:
+ metadata = repository.metadata
+ installed_rd_tups = []
+ missing_rd_tups = []
+ # The repository dependencies container will include only the immediate repository dependencies of this repository, so
+ # the container will be only a single level in depth.
+ for rd in repository.repository_dependencies:
+ rd_tup = [ rd.tool_shed, rd.name, rd.owner, rd.changeset_revision, rd.id, rd.status ]
+ if rd.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
+ installed_rd_tups.append( rd_tup )
+ else:
+ missing_rd_tups.append( rd_tup )
+ if installed_rd_tups or missing_rd_tups:
+ # Get the description from the metadata in case it has a value.
+ repository_dependencies = metadata.get( 'repository_dependencies', {} )
+ description = repository_dependencies.get( 'description', None )
+ # We need to add a root_key entry to one or both of installed_repository_dependencies dictionary and the
+ # missing_repository_dependencies dictionary for proper display parsing.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision )
+ if installed_rd_tups:
+ installed_repository_dependencies[ 'root_key' ] = root_key
+ installed_repository_dependencies[ root_key ] = installed_rd_tups
+ installed_repository_dependencies[ 'description' ] = description
+ if missing_rd_tups:
+ missing_repository_dependencies[ 'root_key' ] = root_key
+ missing_repository_dependencies[ root_key ] = missing_rd_tups
+ missing_repository_dependencies[ 'description' ] = description
+ return installed_repository_dependencies, missing_repository_dependencies
def get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ):
if all_tool_dependencies:
tool_dependencies = {}
@@ -861,8 +895,11 @@
"""
metadata = repository.metadata
if metadata:
+ # Handle proprietary datatypes.
datatypes = metadata.get( 'datatypes', None )
+ # Handle invalid tools.
invalid_tools = metadata.get( 'invalid_tools', None )
+ # Handle README files.
if repository.has_readme_files:
if reinstalling:
# Since we're reinstalling, we need to send a request to the tool shed to get the README files.
@@ -877,33 +914,20 @@
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
- repository_dependencies_dict_for_display = {}
- if repository.has_repository_dependencies:
- rd_tups = []
- # We need to add a root_key entry to the repository_dependencies dictionary for proper display parsing.
- root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
- repository.name,
- repository.owner,
- repository.installed_changeset_revision )
- # The repository dependencies container will include only the immediate repository dependencies of this repository, so
- # the container will be only a single level in depth.
- for rr in repository.required_repositories:
- rd_tup = [ rr.tool_shed, rr.name, rr.owner, rr.changeset_revision, rr.id, rr.status ]
- rd_tups.append( rd_tup )
- repository_dependencies_dict_for_display[ 'root_key' ] = root_key
- repository_dependencies_dict_for_display[ root_key ] = rd_tups
- # Get the description from the metadata in case it has a value.
- repository_dependencies = metadata.get( 'repository_dependencies', {} )
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies.get( 'description', None )
+ # Handle repository dependencies.
+ installed_repository_dependencies, missing_repository_dependencies = get_installed_and_missing_repository_dependencies( trans, repository )
+ # Handle tool dependencies.
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ installed_tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
if reinstalling:
# All tool dependencies will be considered missing since we are reinstalling the repository.
- if tool_dependencies:
- for td in tool_dependencies:
+ if installed_tool_dependencies:
+ for td in installed_tool_dependencies:
missing_tool_dependencies.append( td )
- tool_dependencies = None
+ installed_tool_dependencies = None
+ # Handle valid tools.
valid_tools = metadata.get( 'tools', None )
+ # Handle workflows.
workflows = metadata.get( 'workflows', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
@@ -913,10 +937,11 @@
repository=repository,
datatypes=datatypes,
invalid_tools=invalid_tools,
+ missing_repository_dependencies=missing_repository_dependencies,
missing_tool_dependencies=missing_tool_dependencies,
readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies_dict_for_display,
- tool_dependencies=tool_dependencies,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
valid_tools=valid_tools,
workflows=workflows )
else:
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -76,14 +76,15 @@
log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
return readme_files_dict
def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes,
- invalid_tools, missing_tool_dependencies, readme_files_dict, repository_dependencies, tool_dependencies,
- valid_tools, workflows ):
+ invalid_tools, missing_repository_dependencies, missing_tool_dependencies, readme_files_dict,
+ repository_dependencies, tool_dependencies, valid_tools, workflows ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
missing_tool_dependencies=None,
readme_files=None,
repository_dependencies=None,
+ missing_repository_dependencies=None,
tool_dependencies=None,
valid_tools=None,
workflows=None )
@@ -121,7 +122,7 @@
if readme_files_dict:
folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
- # Repository dependencies container.
+ # Installed repository dependencies container.
if repository_dependencies:
folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
toolshed_base_url=toolshed_base_url,
@@ -129,16 +130,31 @@
repository_owner=repository_owner,
changeset_revision=changeset_revision,
folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ repository_dependencies=repository_dependencies,
+ label='Installed repository dependencies',
+ installed=True )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
- # Tool dependencies container.
+ # Missing repository dependencies container.
+ if missing_repository_dependencies:
+ folder_id, missing_repository_dependencies_root_folder = \
+ container_util.build_repository_dependencies_folder( trans=trans,
+ toolshed_base_url=toolshed_base_url,
+ repository_name=repository_name,
+ repository_owner=repository_owner,
+ changeset_revision=changeset_revision,
+ folder_id=folder_id,
+ repository_dependencies=missing_repository_dependencies,
+ label='Missing repository dependencies',
+ installed=False )
+ containers_dict[ 'missing_repository_dependencies' ] = missing_repository_dependencies_root_folder
+ # Installed tool dependencies container.
if tool_dependencies:
# We only want to display the Status column if the tool_dependency is missing.
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
folder_id,
tool_dependencies,
label='Installed tool dependencies',
- display_status=False )
+ installed=True )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
@@ -147,7 +163,7 @@
folder_id,
missing_tool_dependencies,
label='Missing tool dependencies',
- display_status=True )
+ installed=False )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
@@ -210,7 +226,9 @@
repository_owner=repository.user.username,
changeset_revision=changeset_revision,
folder_id=folder_id,
- repository_dependencies=repository_dependencies )
+ repository_dependencies=repository_dependencies,
+ label='Repository dependencies',
+ installed=False )
if repository_dependencies_root_folder:
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
# Tool dependencies container.
@@ -219,7 +237,7 @@
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
folder_id,
tool_dependencies,
- display_status=False )
+ installed=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Valid tools container.
if metadata and 'tools' in metadata:
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -201,7 +201,7 @@
readme_files_root_folder = None
return folder_id, readme_files_root_folder
def build_repository_dependencies_folder( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, folder_id, repository_dependencies,
- label='Repository dependencies' ):
+ label='Repository dependencies', installed=False ):
"""Return a folder hierarchy containing repository dependencies."""
if repository_dependencies:
repository_dependency_id = 0
@@ -274,8 +274,11 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', display_status=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', installed=False ):
"""Return a folder hierarchy containing tool dependencies."""
+ # The status will be displayed only if the value of the received installed parameter is False. When this is the case, we're in Galaxy
+ # (not the tool shed), and the tool dependencies are not installed or are in an error state, so they are considered missing.
+ # The tool dependency status will be displayed only if the tool dependency is not installed.
if tool_dependencies:
tool_dependency_id = 0
folder_id += 1
@@ -283,11 +286,10 @@
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
if trans.webapp.name == 'galaxy':
- if display_status:
- # The status will be displayed only if the tool dependency status is not 'Installed'.
+ if installed:
+ folder.description = 'click the name to browse the dependency installation directory'
+ else:
folder.description = 'click the name to install the missing dependency'
- else:
- folder.description = 'click the name to browse the dependency installation directory'
tool_dependencies_root_folder.folders.append( folder )
# Insert a header row.
tool_dependency_id += 1
@@ -297,17 +299,15 @@
name='Name',
version='Version',
type='Type' )
- if display_status:
+ if installed:
+ tool_dependency.install_dir = 'Install directory'
+ else:
tool_dependency.installation_status = 'Status'
- else:
- tool_dependency.install_dir = 'Install directory'
else:
tool_dependency = ToolDependency( id=tool_dependency_id,
name='Name',
version='Version',
type='Type' )
- if display_status:
- tool_dependency.installation_status = 'Status'
folder.tool_dependencies.append( tool_dependency )
for dependency_key, requirements_dict in tool_dependencies.items():
tool_dependency_id += 1
@@ -317,7 +317,7 @@
type = set_environment_dict[ 'type' ]
repository_id = set_environment_dict.get( 'repository_id', None )
td_id = set_environment_dict.get( 'tool_dependency_id', None )
- if display_status:
+ if trans.webapp.name == 'galaxy':
installation_status = set_environment_dict.get( 'status', None )
else:
installation_status = None
@@ -335,7 +335,7 @@
install_dir = requirements_dict.get( 'install_dir', None )
repository_id = requirements_dict.get( 'repository_id', None )
td_id = requirements_dict.get( 'tool_dependency_id', None )
- if display_status:
+ if trans.webapp.name == 'galaxy':
installation_status = requirements_dict.get( 'status', None )
else:
installation_status = None
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -44,7 +44,7 @@
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
bgcolor = trans.model.ToolShedRepository.states.WARNING
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
- if tool_shed_repository.missing_required_repositories:
+ if tool_shed_repository.missing_repository_dependencies:
bgcolor = trans.model.ToolShedRepository.states.WARNING
status_label = '%s, missing repository dependencies' % status_label
elif tool_shed_repository.missing_tool_dependencies:
@@ -1202,6 +1202,7 @@
repository=None,
datatypes=None,
invalid_tools=None,
+ missing_repository_dependencies=None,
missing_tool_dependencies=None,
readme_files_dict=readme_files_dict,
repository_dependencies=repository_dependencies,
diff -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 -r 4f4875265599424fed16f35dd82eb785167f6c25 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -472,15 +472,18 @@
<${cell_type} style="padding-left: ${pad+20}px;">
%if row_is_header:
${tool_dependency.name | h}
- %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id and tool_dependency.repository_id and not tool_dependency.installation_status:
- ## tool_dependency.installation_status will be None if the status value in the database is 'Installed'.
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.tool_dependency_id ), repository_id=trans.security.encode_id( tool_dependency.repository_id ) )}">
+ %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id:
+ %if tool_dependency.repository_id and tool_dependency.installation_status == 'Installed':
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.tool_dependency_id ), repository_id=trans.security.encode_id( tool_dependency.repository_id ) )}">
+ ${tool_dependency.name | h}
+ </a>
+ %elif tool_dependency.installation_status != 'Installed':
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
+ ${tool_dependency.name}
+ </a>
+ %else:
${tool_dependency.name | h}
- </a>
- %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id and tool_dependency.installation_status:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
- ${tool_dependency.name}
- </a>
+ %endif
%else:
${tool_dependency.name | h}
%endif
@@ -551,6 +554,7 @@
invalid_tools_root_folder = containers_dict.get( 'invalid_tools', None )
readme_files_root_folder = containers_dict.get( 'readme_files', None )
repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+ missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
valid_tools_root_folder = containers_dict.get( 'valid_tools', none )
@@ -578,10 +582,17 @@
</div></div>
%endif
- %if repository_dependencies_root_folder or tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
+ %if missing_repository_dependencies_root_folder or repository_dependencies_root_folder or tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
<div class="toolForm"><div class="toolFormTitle">Dependencies of this repository</div><div class="toolFormBody">
+ %if missing_repository_dependencies_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="missing_repository_dependencies">
+ ${render_folder( missing_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
%if repository_dependencies_root_folder:
<p/><% row_counter = RowCounter() %>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Slight enhancements for managing simple repository dependencies.
by Bitbucket 22 Dec '12
by Bitbucket 22 Dec '12
22 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a3915a264f6c/
changeset: a3915a264f6c
user: greg
date: 2012-12-22 21:12:41
summary: Slight enhancements for managing simple repository dependencies.
affected #: 3 files
diff -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3014,6 +3014,7 @@
installation_status = Bunch( NEW='New',
CLONING='Cloning',
SETTING_TOOL_VERSIONS='Setting tool versions',
+ INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
INSTALLED='Installed',
@@ -3166,6 +3167,62 @@
def has_readme_files( self ):
return self.metadata and 'readme_files' in self.metadata
@property
+ def required_repositories( self ):
+ required_repositories = []
+ for rrda in self.repository_dependencies:
+ repository_dependency = rrda.repository_dependency
+ required_repository = repository_dependency.repository
+ required_repositories.append( required_repository )
+ return required_repositories
+ @property
+ def installed_required_repositories( self ):
+ """Return the repository's repository dependencies that are currently installed."""
+ installed_required_repositories = []
+ for required_repository in self.required_repositories:
+ if required_repository.status == self.installation_status.INSTALLED:
+ installed_required_repositories.append( required_repository )
+ return installed_required_repositories
+ @property
+ def missing_required_repositories( self ):
+ """Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
+ missing_required_repositories = []
+ for required_repository in self.required_repositories:
+ if required_repository.status not in [ self.installation_status.INSTALLED ]:
+ missing_required_repositories.append( required_repository )
+ return missing_required_repositories
+ @property
+ def required_repositories_being_installed( self ):
+ required_repositories_being_installed = []
+ for required_repository in self.required_repositories:
+ if tool_dependency.status == ToolDependency.installation_status.INSTALLING:
+ required_repositories_being_installed.append( required_repository )
+ return required_repositories_being_installed
+ @property
+ def required_repositories_missing_or_being_installed( self ):
+ required_repositories_missing_or_being_installed = []
+ for required_repository in self.required_repositories:
+ if required_repository.status in [ self.installation_status.ERROR,
+ self.installation_status.INSTALLING,
+ self.installation_status.NEVER_INSTALLED,
+ self.installation_status.UNINSTALLED ]:
+ required_repositories_missing_or_being_installed.append( required_repository )
+ return required_repositories_missing_or_being_installed
+ @property
+ def required_repositories_with_installation_errors( self ):
+ required_repositories_with_installation_errors = []
+ for required_repository in self.required_repositories:
+ if required_repository.status == self.installation_status.ERROR:
+ required_repositories_with_installation_errors.append( required_repository )
+ return required_repositories_with_installation_errors
+ @property
+ def uninstalled_required_repositories( self ):
+ """Return the repository's repository dependencies that have been uninstalled."""
+ uninstalled_required_repositories = []
+ for required_repository in self.required_repositories:
+ if required_repository.status == self.installation_status.UNINSTALLED:
+ uninstalled_required_repositories.append( required_repository )
+ return uninstalled_required_repositories
+ @property
def installed_tool_dependencies( self ):
"""Return the repository's tool dependencies that are currently installed."""
installed_dependencies = []
diff -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -856,7 +856,8 @@
def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This method is called only
- from Galaxy and not the tool shed.
+ from Galaxy (not the tool shed) when displaying repository dependencies for installed repositories and when displaying them for uninstalled
+ repositories that are being reinstalled.
"""
metadata = repository.metadata
if metadata:
@@ -876,43 +877,24 @@
readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
- repository_dependencies = metadata.get( 'repository_dependencies', None )
repository_dependencies_dict_for_display = {}
- if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool shed repository metadata.
+ if repository.has_repository_dependencies:
+ rd_tups = []
+ # We need to add a root_key entry to the repository_dependencies dictionary for proper display parsing.
root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
repository.name,
repository.owner,
- repository.installed_changeset_revision )
- rd_tups_for_display = []
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- for index, rd_tup in enumerate( rd_tups ):
- # Get the id and the installation status of the required repository.
- tool_shed, name, owner, changeset_revision = rd_tup
- required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
- # TODO: Since the changeset revision defined in the tool shed repository's repository_dependencies.xml file may have a changeset_revision
-            #       value that is outdated, we need to make a call to the tool shed to get the updated changeset revision if repository is still None here.
- if required_repository:
- rd_tup.append( required_repository.id )
- rd_tup.append( str( required_repository.status ) )
- else:
- # See above TODO. For now, we'll take a short cut and attempt to find the repository by name and owner only. This will not work long
-                # term because multiple revisions of a repository with the same name and owner could be installed into a Galaxy instance. The long term
- # fix is to call get_update_to_changeset_revision_and_ctx_rev( trans, repository ) for each required repository.
- required_repository = trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
- trans.model.ToolShedRepository.table.c.owner == owner ) ) \
- .first()
- if required_repository:
- rd_tup.append( required_repository.id )
- rd_tup.append( str( required_repository.status ) )
- else:
- rd_tup.append( None )
- rd_tup.append( None )
- rd_tups[ index ] = rd_tup
+ repository.installed_changeset_revision )
+ # The repository dependencies container will include only the immediate repository dependencies of this repository, so
+ # the container will be only a single level in depth.
+ for rr in repository.required_repositories:
+ rd_tup = [ rr.tool_shed, rr.name, rr.owner, rr.changeset_revision, rr.id, rr.status ]
+ rd_tups.append( rd_tup )
repository_dependencies_dict_for_display[ 'root_key' ] = root_key
repository_dependencies_dict_for_display[ root_key ] = rd_tups
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
+ # Get the description from the metadata in case it has a value.
+ repository_dependencies = metadata.get( 'repository_dependencies', {} )
+ repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies.get( 'description', None )
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
if reinstalling:
diff -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 -r a3915a264f6c3ae7cec12778b3c3e7794ca5a8e8 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -32,6 +32,7 @@
status_label = tool_shed_repository.status
if tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
bgcolor = trans.model.ToolShedRepository.states.INSTALLING
@@ -43,9 +44,12 @@
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
bgcolor = trans.model.ToolShedRepository.states.WARNING
elif tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
- if tool_shed_repository.missing_tool_dependencies:
+ if tool_shed_repository.missing_required_repositories:
bgcolor = trans.model.ToolShedRepository.states.WARNING
- status_label = '%s, missing dependencies' % status_label
+ status_label = '%s, missing repository dependencies' % status_label
+ elif tool_shed_repository.missing_tool_dependencies:
+ bgcolor = trans.model.ToolShedRepository.states.WARNING
+ status_label = '%s, missing tool dependencies' % status_label
else:
bgcolor = trans.model.ToolShedRepository.states.OK
else:
@@ -182,6 +186,7 @@
[ model.ToolShedRepository.installation_status.NEW,
model.ToolShedRepository.installation_status.CLONING,
model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES,
model.ToolShedRepository.installation_status.UNINSTALLED ], \
@@ -500,7 +505,7 @@
removed = False
if removed:
tool_shed_repository.uninstalled = True
- # Remove all installed tool dependencies.
+            # Remove all installed tool dependencies, but don't touch any repository dependencies.
for tool_dependency in tool_shed_repository.installed_tool_dependencies:
uninstalled, error_message = shed_util.remove_tool_dependency( trans, tool_dependency )
if error_message:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: james_taylor: Fix shutdown on python >= 2.6.2 by calling setDaemon when creating threads (these are still cleanly shutdown by atexit). Also add descriptive names to most job worker threads
by Bitbucket 21 Dec '12
by Bitbucket 21 Dec '12
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/32ea53484cec/
changeset: 32ea53484cec
user: james_taylor
date: 2012-12-21 22:43:30
summary: Fix shutdown on python >= 2.6.2 by calling setDaemon when creating threads (these are still cleanly shutdown by atexit). Also add descriptive names to most job worker threads
affected #: 7 files
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -61,7 +61,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.__monitor )
+ self.monitor_thread = threading.Thread( name="JobHandlerQueue.monitor_thread", target=self.__monitor )
+ self.monitor_thread.setDaemon( True )
def start( self ):
"""
@@ -353,7 +354,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="JobHandlerStopQueue.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
log.info( "job handler stop queue started" )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -68,7 +68,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.__monitor )
+ self.monitor_thread = threading.Thread( name="JobManagerQueue.monitor_thread", target=self.__monitor )
+ self.monitor_thread.setDaemon( True )
# Recover jobs at startup
self.__check_jobs_at_startup()
# Start the queue
@@ -219,7 +220,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="JobManagerStopQueue.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
log.info( "job manager stop queue started" )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -105,13 +105,14 @@
self.monitor_queue = Queue()
self.ds = drmaa.Session()
self.ds.initialize()
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="DRMAAJobRunner.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
self.work_queue = Queue()
self.work_threads = []
nworkers = app.config.cluster_job_queue_workers
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "DRMAAJobRunner.work_threads-%d" % i ), target=self.run_next )
worker.start()
self.work_threads.append( worker )
log.debug( "%d workers ready" % nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -37,7 +37,8 @@
nworkers = app.config.local_job_queue_workers
log.info( "starting workers" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "LocalJobRunner.threads-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -229,7 +229,8 @@
nworkers = app.config.local_job_queue_workers
log.info( "starting workers" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( ( name="LwrJobRunner.thread-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -29,7 +29,8 @@
nworkers = app.config.local_task_queue_workers
log.info( "Starting tasked-job runners" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "TaskedJobRunner-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec -r 32ea53484cec638021a38c0c252d5dc1d4bf5da4 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -38,6 +38,8 @@
import traceback, sys
traceback.print_exc()
sys.exit( 1 )
+    # Call app's shutdown method when the interpreter exits; this cleanly stops
+ # the various Galaxy application daemon threads
atexit.register( app.shutdown )
# Create the universe WSGI application
webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Fixes for sorting and merging genomic regions during visual analysis.
by Bitbucket 21 Dec '12
by Bitbucket 21 Dec '12
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c54ebfe2c008/
changeset: c54ebfe2c008
user: jgoecks
date: 2012-12-21 22:08:16
summary: Fixes for sorting and merging genomic regions during visual analysis.
affected #: 1 file
diff -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d -r c54ebfe2c0086b6e690e98a958e2d41ecf722bec lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -158,17 +158,16 @@
if len( regions ) > 1:
# Sort by chrom name, start so that data is not fetched out of order.
- regions.sort( key=lambda r: r.chrom )
- regions.sort( key=lambda r: r.start )
-
+ regions = sorted(regions, key=lambda r: (r.chrom.lower(), r.start))
+
# Merge overlapping regions so that regions do not overlap
# and hence data is not included multiple times.
prev = regions[0]
cur = regions[1]
index = 1
while True:
- if cur.start <= prev.end:
- # Found overlapping regions, so join them.
+ if cur.chrom == prev.chrom and cur.start <= prev.end:
+ # Found overlapping regions, so join them into prev.
prev.end = cur.end
del regions[ index ]
else:
@@ -182,7 +181,7 @@
break
else:
cur = regions[ index ]
-
+
run_on_regions = True
# Dataset check.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Tool shed functional test enhancements. Functional tests for uninstalling, deactivating, reinstalling, and reactivating installed repositories.
by Bitbucket 21 Dec '12
by Bitbucket 21 Dec '12
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/692a1e8b6999/
changeset: 692a1e8b6999
user: inithello
date: 2012-12-21 18:47:30
summary: Tool shed functional test enhancements. Functional tests for uninstalling, deactivating, reinstalling, and reactivating installed repositories.
affected #: 9 files
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -68,13 +68,10 @@
for dependency in installed_repository.metadata[ 'tool_dependencies' ]:
tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ]
strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] )
- """
- TODO: Uncomment these when Greg enhances the tool dependencies and missing tool dependencies containers to display the status.
if dependencies_installed:
strings_displayed.append( 'Installed' )
else:
strings_displayed.append( 'Never installed' )
- """
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
@@ -322,6 +319,14 @@
file( xml_filename, 'w' ).write( repository_dependency_xml )
def generate_temp_path( self, test_script_path, additional_paths=[] ):
return os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ def get_datatypes_count( self ):
+ url = '/admin/view_datatypes_registry'
+ self.visit_galaxy_url( url )
+ html = self.last_page()
+ datatypes_count = re.search( 'registry contains (\d+) data types', html )
+ if datatypes_count:
+ return datatypes_count.group( 1 )
+ return None
def get_filename( self, filename, filepath=None ):
if filepath is not None:
return os.path.abspath( os.path.join( filepath, filename ) )
@@ -404,15 +409,16 @@
# group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
# admin_toolshed controller.
install_parameters = re.search( 'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
- iri_ids = install_parameters.group(1)
- encoded_kwd = install_parameters.group(2)
- reinstalling = install_parameters.group(3)
- url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
- ( iri_ids, encoded_kwd, reinstalling )
- self.visit_galaxy_url( url )
- def install_repository( self, name, owner, category_name, install_tool_dependencies=False, changeset_revision=None, strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[], **kwd ):
- if test_db_util.get_installed_repository_by_name_owner( name, owner ) is not None:
- return
+ if install_parameters:
+ iri_ids = install_parameters.group(1)
+ encoded_kwd = install_parameters.group(2)
+ reinstalling = install_parameters.group(3)
+ url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
+ ( iri_ids, encoded_kwd, reinstalling )
+ self.visit_galaxy_url( url )
+ def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
+ changeset_revision=None, strings_displayed=[], strings_not_displayed=[],
+ preview_strings_displayed=[], post_submit_strings_displayed=[], **kwd ):
self.browse_tool_shed( url=self.url )
self.browse_category( test_db_util.get_category_by_name( category_name ) )
self.preview_repository_in_tool_shed( name, common.test_user_1_name, strings_displayed=preview_strings_displayed )
@@ -435,6 +441,7 @@
if 'shed_tool_conf' not in kwd:
kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
+ self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
self.initiate_installation_process()
self.wait_for_repository_installation( repository, changeset_revision )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
@@ -460,6 +467,11 @@
self.visit_url( '/repository/preview_tools_in_changeset?repository_id=%s&changeset_revision=%s' % \
( self.security.encode_id( repository.id ), changeset_revision ) )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def reactivate_repository( self, installed_repository ):
+ url = '/admin_toolshed/browse_repositories?operation=activate+or+reinstall&id=%s' % self.security.encode_id( installed_repository.id )
+ self.visit_galaxy_url( url )
+ strings_displayed = [ installed_repository.name, 'repository has been activated' ]
+ self.check_for_strings( strings_displayed, [] )
def reinstall_repository( self, installed_repository ):
url = '/admin_toolshed/reinstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
@@ -513,13 +525,16 @@
def uninstall_repository( self, installed_repository, remove_from_disk=True ):
url = '/admin_toolshed/deactivate_or_uninstall_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
-# form = tc.browser.get_form( 'deactivate_or_uninstall_repository' )
- tc.fv ( 1, "remove_from_disk", '1' )
-# checkbox.readonly = False
-# if remove_from_disk:
-# checkbox.selected = True
+ if remove_from_disk:
+ tc.fv ( 1, "remove_from_disk", 'true' )
+ else:
+ tc.fv ( 1, "remove_from_disk", 'false' )
tc.submit( 'deactivate_or_uninstall_repository_button' )
- strings_displayed = [ 'has been uninstalled', 'The repository named' ]
+ strings_displayed = [ 'The repository named' ]
+ if remove_from_disk:
+ strings_displayed.append( 'has been uninstalled' )
+ else:
+ strings_displayed.append( 'has been deactivated' )
self.check_for_strings( strings_displayed, strings_not_displayed=[] )
def update_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/check_for_updates?id=%s' % self.security.encode_id( installed_repository.id )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -26,7 +26,7 @@
def test_0005_create_category( self ):
"""Create a category for this test suite"""
self.create_category( name='test_0040_repository_circular_dependencies', description='Testing handling of circular repository dependencies.' )
- def test_0010_create_freebayes_repository_name( self ):
+ def test_0010_create_freebayes_repository( self ):
'''Create and populate freebayes_0040.'''
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -65,6 +65,7 @@
self.preview_repository_in_tool_shed( 'emboss_0020', common.test_user_1_name, strings_displayed=[ 'emboss_0020', 'Valid tools' ] )
def test_0015_install_emboss_repository( self ):
'''Install the emboss repository without installing tool dependencies.'''
+ old_datatypes = self.get_datatypes_count()
self.install_repository( 'emboss_0020',
common.test_user_1_name,
'Test 0020 Basic Repository Dependencies',
@@ -81,6 +82,8 @@
strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
+ new_datatypes = self.get_datatypes_count()
+ assert new_datatypes > old_datatypes, 'Installing emboss did not add datatypes to the registry'
def test_0020_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
self.verify_installed_repository_metadata_unchanged( 'emboss_0020', common.test_user_1_name )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -102,11 +102,10 @@
def test_0015_install_emboss_repository( self ):
'''Install the emboss repository without installing tool dependencies.'''
repository = test_db_util.get_repository_by_name_and_owner( 'emboss_0030', common.test_user_1_name )
- revisions = self.get_repository_metadata_revisions( repository )
+ old_datatypes = self.get_datatypes_count()
self.install_repository( 'emboss_0030',
common.test_user_1_name,
'Test 0030 Repository Dependency Revisions',
- changeset_revision=revisions[1],
install_tool_dependencies=False,
new_tool_panel_section='test_1030' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
@@ -121,6 +120,8 @@
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
+ new_datatypes = self.get_datatypes_count()
+ assert new_datatypes > old_datatypes, 'Installing emboss did not add datatypes to the registry.'
def test_0025_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
self.verify_installed_repository_metadata_unchanged( 'emboss_0030', common.test_user_1_name )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
--- a/test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
+++ b/test/tool_shed/functional/test_1200_uninstall_and_reinstall_basic_repository.py
@@ -1,8 +1,8 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
-class BasicToolShedFeatures( ShedTwillTestCase ):
- '''Test installing a basic repository.'''
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a basic repository.'''
def test_0000_initiate_users( self ):
"""Create necessary user accounts."""
self.logout()
@@ -54,7 +54,6 @@
def test_0015_uninstall_filtering_repository( self ):
'''Uninstall the filtering repository.'''
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
- old_metadata = installed_repository.metadata
self.uninstall_repository( installed_repository, remove_from_disk=True )
strings_not_displayed = [ installed_repository.name,
installed_repository.description,
@@ -84,7 +83,7 @@
def test_0030_reactivate_filtering_repository( self ):
'''Reactivate the filtering repository and verify that it now shows up in the list of installed repositories.'''
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
- self.reinstall_repository( installed_repository )
+ self.reactivate_repository( installed_repository )
strings_displayed = [ installed_repository.name,
installed_repository.description,
installed_repository.owner,
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -0,0 +1,110 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a repository with tool dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_ensure_repositories_and_categories_exist( self ):
+ '''Create the 0010 category and upload the freebayes repository to the tool shed, if necessary.'''
+ category = self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name='freebayes_0010',
+ description="Galaxy's freebayes tool",
+ long_description="Long description of Galaxy's freebayes tool",
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ) )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'freebayes/freebayes.xml',
+ valid_tools_only=False,
+ commit_message="Uploaded freebayes.xml." )
+ self.upload_file( repository,
+ 'freebayes/tool_data_table_conf.xml.sample',
+ valid_tools_only=False,
+ commit_message="Uploaded tool_data_table_conf.xml.",
+ remove_repo_files_not_in_tar='No' )
+ self.upload_file( repository,
+ 'freebayes/sam_fa_indices.loc.sample',
+ commit_message="Uploaded sam_fa_indices.loc.sample.",
+ valid_tools_only=False,
+ remove_repo_files_not_in_tar='No' )
+ self.upload_file( repository,
+ 'freebayes/invalid_tool_dependencies/tool_dependencies.xml',
+ valid_tools_only=False,
+ commit_message="Uploaded invalid_tool_dependencies/tool_dependencies.xml.",
+ remove_repo_files_not_in_tar='No' )
+ self.upload_file( repository,
+ 'freebayes/tool_dependencies.xml',
+ valid_tools_only=False,
+ commit_message="Uploaded tool_dependencies.xml",
+ remove_repo_files_not_in_tar='No' )
+ def test_0010_install_freebayes_repository( self ):
+ '''Install the freebayes repository into the Galaxy instance.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( 'freebayes_0010',
+ common.test_user_1_name,
+ 'Test 0010 Repository With Tool Dependencies',
+ new_tool_panel_section='test_1210' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ def test_0015_uninstall_freebayes_repository( self ):
+ '''Uninstall the freebayes repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=True )
+ strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0020_reinstall_freebayes_repository( self ):
+ '''Reinstall the freebayes repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.reinstall_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0025_deactivate_freebayes_repository( self ):
+ '''Deactivate the freebayes repository without removing it from disk.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0030_reactivate_freebayes_repository( self ):
+ '''Reactivate the freebayes repository and verify that it now shows up in the list of installed repositories.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ self.reactivate_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
@@ -0,0 +1,123 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0020'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0020'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a repository with tool dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_ensure_repositories_and_categories_exist( self ):
+ '''Create the 0020 category and upload the emboss repository to the tool shed, if necessary.'''
+ category = self.create_category( name='Test 0020 Basic Repository Dependencies', description='Tests for a repository with tool dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( datatypes_repository ):
+ self.upload_file( datatypes_repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1020', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ def test_0010_install_emboss_repository( self ):
+ '''Install the emboss repository into the Galaxy instance.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( emboss_repository_name,
+ common.test_user_1_name,
+ 'Test 0020 Basic Repository Dependencies',
+ new_tool_panel_section='test_1210' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ def test_0015_uninstall_emboss_repository( self ):
+ '''Uninstall the emboss repository.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=True )
+ strings_not_displayed = [ installed_repository.name, installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes < old_datatypes, 'Uninstalling emboss did not remove datatypes from the registry.'
+ def test_0020_reinstall_emboss_repository( self ):
+ '''Reinstall the emboss repository.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reinstall_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes > old_datatypes, 'Reinstalling emboss did not add datatypes to the registry.'
+ def test_0025_deactivate_emboss_repository( self ):
+ '''Deactivate the emboss repository without removing it from disk.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_repository.name,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes < old_datatypes, 'Deactivating emboss did not remove datatypes from the registry.'
+ def test_0030_reactivate_emboss_repository( self ):
+ '''Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.'''
+# old_datatypes = self.get_datatypes_count()
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reactivate_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+# new_datatypes = self.get_datatypes_count()
+# assert new_datatypes > old_datatypes, 'Reactivating emboss did not add datatypes to the registry.'
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
@@ -0,0 +1,146 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0030'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0030'
+emboss_5_repository_name = 'emboss_5_0030'
+emboss_6_repository_name = 'emboss_6_0030'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+class UninstallingAndReinstallingRepositories( ShedTwillTestCase ):
+ '''Test uninstalling and reinstalling a repository with repository dependency revisions.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_galaxy_private_role( admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_ensure_repositories_and_categories_exist( self ):
+ '''Create the 0030 category and upload the emboss repository to the tool shed, if necessary.'''
+ category = self.create_category( name='Test 0030 Repository Dependency Revisions', description='Tests for a repository with tool dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ datatypes_repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( datatypes_repository ):
+ self.upload_file( datatypes_repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ emboss_5_repository = self.get_or_create_repository( name=emboss_5_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_5_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_5_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ emboss_6_repository = self.get_or_create_repository( name=emboss_6_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_6_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '6' ] )
+ self.generate_repository_dependency_xml( [ datatypes_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_6_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ emboss_repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( emboss_repository, 'emboss/emboss.tar', commit_message='Uploaded emboss.tar' )
+ repository_dependencies_path = self.generate_temp_path( 'test_1030', additional_paths=[ 'emboss', '5' ] )
+ self.generate_repository_dependency_xml( [ emboss_5_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ self.generate_repository_dependency_xml( [ emboss_6_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ) )
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded repository_dependencies.xml' )
+ def test_0010_install_emboss_repository( self ):
+ '''Install the emboss repository into the Galaxy instance.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( emboss_repository_name,
+ common.test_user_1_name,
+ 'Test 0030 Repository Dependency Revisions',
+ new_tool_panel_section='test_1210' )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ def test_0015_uninstall_emboss_repository( self ):
+ '''Uninstall the emboss repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=True )
+ strings_not_displayed = [ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0020_reinstall_emboss_repository( self ):
+ '''Reinstall the emboss repository.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reinstall_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0025_deactivate_emboss_repository( self ):
+ '''Deactivate the emboss repository without removing it from disk.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_repository, remove_from_disk=False )
+ strings_not_displayed = [ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
+ def test_0030_reactivate_emboss_repository( self ):
+ '''Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.'''
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.reactivate_repository( installed_repository )
+ strings_displayed = [ installed_repository.name,
+ installed_repository.description,
+ installed_repository.owner,
+ installed_repository.tool_shed,
+ installed_repository.installed_changeset_revision ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
+ self.display_installed_repository_manage_page( installed_repository,
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'emboss' ] )
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
diff -r b268bc0bbc63f525a981547d06c20911f553f1b2 -r 692a1e8b69998d99465838cb5c8b1d56f3a37d2d test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -270,7 +270,7 @@
file_path = galaxy_file_path,
tool_path = tool_path,
tool_data_path = tool_data_path,
- shed_tool_path=galaxy_shed_tool_path,
+ shed_tool_path = galaxy_shed_tool_path,
update_integrated_tool_panel = False,
tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
tool_sheds_config_file = galaxy_tool_sheds_conf_file,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes and enhancements for rendering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
by Bitbucket 21 Dec '12
by Bitbucket 21 Dec '12
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b268bc0bbc63/
changeset: b268bc0bbc63
user: greg
date: 2012-12-21 18:35:28
summary: Fixes and enhancements for rendering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
affected #: 6 files
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -853,7 +853,7 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository ):
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False ):
"""
Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This methos is called only
from Galaxy and not the tool shed.
@@ -863,25 +863,64 @@
datatypes = metadata.get( 'datatypes', None )
invalid_tools = metadata.get( 'invalid_tools', None )
if repository.has_readme_files:
- readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
+ if reinstalling:
+ # Since we're reinstalling, we need to send a request to the tool shed to get the README files.
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( repository.name, repository.owner, repository.installed_changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = from_json_string( raw_text )
+ else:
+ readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
repository_dependencies = metadata.get( 'repository_dependencies', None )
repository_dependencies_dict_for_display = {}
if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
- # shed repository metadata.
+ # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool shed repository metadata.
root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
repository.name,
repository.owner,
repository.installed_changeset_revision )
rd_tups_for_display = []
rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ for index, rd_tup in enumerate( rd_tups ):
+ # Get the id and the installation status of the required repository.
+ tool_shed, name, owner, changeset_revision = rd_tup
+ required_repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
+ # TODO: Since the changeset revision defined in the tool shed repository's repository_dependencies.xml file may have a changeset_revision
+ # value that is outdated, we need to make a call to the tool shed to get the updated changeset revision if repository is still None here.
+ if required_repository:
+ rd_tup.append( required_repository.id )
+ rd_tup.append( str( required_repository.status ) )
+ else:
+ # See above TODO. For now, we'll take a short cut and attempt to find the repository by name and owner only. This will not work long
+ # term because multiple revisions of a repository with the same name and owner could be installed into a Galaxy instance. The long term
+ # fix is to call get_update_to_changeset_revision_and_ctx_rev( trans, repository ) for each required repository.
+ required_repository = trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
+ trans.model.ToolShedRepository.table.c.owner == owner ) ) \
+ .first()
+ if required_repository:
+ rd_tup.append( required_repository.id )
+ rd_tup.append( str( required_repository.status ) )
+ else:
+ rd_tup.append( None )
+ rd_tup.append( None )
+ rd_tups[ index ] = rd_tup
repository_dependencies_dict_for_display[ 'root_key' ] = root_key
repository_dependencies_dict_for_display[ root_key ] = rd_tups
repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ if reinstalling:
+ # All tool dependencies will be considered missing since we are reinstalling the repository.
+ if tool_dependencies:
+ for td in tool_dependencies:
+ missing_tool_dependencies.append( td )
+ tool_dependencies = None
valid_tools = metadata.get( 'tools', None )
workflows = metadata.get( 'workflows', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -106,11 +106,12 @@
folder_id = 0
# Datatypes container.
if datatypes:
- folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
# Invalid tools container.
if invalid_tools:
- folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
+ folder_id,
invalid_tools,
changeset_revision,
repository=repository,
@@ -118,11 +119,12 @@
containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
# Readme files container.
if readme_files_dict:
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
if repository_dependencies:
- folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
+ toolshed_base_url=toolshed_base_url,
repository_name=repository_name,
repository_owner=repository_owner,
changeset_revision=changeset_revision,
@@ -132,44 +134,40 @@
# Tool dependencies container.
if tool_dependencies:
# We only want to display the Status column if the tool_dependency is missing.
- description = 'click the name to browse the dependency installation directory'
- folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
+ folder_id,
tool_dependencies,
label='Installed tool dependencies',
- for_galaxy=True,
- description=description,
display_status=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Missing tool dependencies container.
if missing_tool_dependencies:
- description = 'click the name to install the missing dependency'
# We only want to display the Status column if the tool_dependency is missing.
- folder_id, missing_tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
+ folder_id, missing_tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
+ folder_id,
missing_tool_dependencies,
label='Missing tool dependencies',
- for_galaxy=True,
- description=description,
display_status=True )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
# Valid tools container.
if valid_tools:
- folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
+ folder_id,
valid_tools,
repository,
changeset_revision,
- label='Valid tools',
- description='click the name to inspect the tool metadata' )
+ label='Valid tools' )
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Workflows container.
if workflows:
- folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
finally:
lock.release()
return containers_dict
-def build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata ):
+def build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata ):
"""Return a dictionary of containers for the received repository's dependencies and contents for display in the tool shed."""
containers_dict = dict( datatypes=None,
invalid_tools=None,
@@ -187,12 +185,13 @@
# Datatypes container.
if metadata and 'datatypes' in metadata:
datatypes = metadata[ 'datatypes' ]
- folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( trans, folder_id, datatypes )
containers_dict[ 'datatypes' ] = datatypes_root_folder
# Invalid tools container.
if metadata and 'invalid_tools' in metadata:
invalid_tool_configs = metadata[ 'invalid_tools' ]
- folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( trans,
+ folder_id,
invalid_tool_configs,
changeset_revision,
repository=repository,
@@ -201,11 +200,12 @@
# Readme files container.
if metadata and 'readme_files' in metadata:
readme_files_dict = build_readme_files_dict( metadata )
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( trans, folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
toolshed_base_url = str( url_for( '/', qualified=True ) ).rstrip( '/' )
- folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
+ folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( trans=trans,
+ toolshed_base_url=toolshed_base_url,
repository_name=repository.name,
repository_owner=repository.user.username,
changeset_revision=changeset_revision,
@@ -216,16 +216,16 @@
# Tool dependencies container.
if metadata and 'tool_dependencies' in metadata:
tool_dependencies = metadata[ 'tool_dependencies' ]
- folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
+ folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( trans,
+ folder_id,
tool_dependencies,
- for_galaxy=False,
- description=None,
display_status=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
# Valid tools container.
if metadata and 'tools' in metadata:
valid_tools = metadata[ 'tools' ]
- folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( trans,
+ folder_id,
valid_tools,
repository,
changeset_revision,
@@ -234,7 +234,7 @@
# Workflows container.
if metadata and 'workflows' in metadata:
workflows = metadata[ 'workflows' ]
- folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' )
containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) )
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1850,7 +1850,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/manage_repository.mako',
cntrller=cntrller,
repo_name=repo_name,
@@ -1953,7 +1953,7 @@
selected_value=changeset_revision,
add_id_to_name=False,
downloadable=False )
- containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
repository=repository,
containers_dict=containers_dict,
@@ -2481,7 +2481,7 @@
review_id = trans.security.encode_id( review.id )
else:
review_id = None
- containers_dict = suc.build_repository_containers_for_tool_shed( repository, changeset_revision, repository_dependencies, repository_metadata )
+ containers_dict = suc.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
return trans.fill_template( '/webapps/community/repository/view_repository.mako',
cntrller=cntrller,
repo=repo,
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -71,13 +71,14 @@
class RepositoryDependency( object ):
"""Repository dependency object"""
- def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, installation_status=None ):
+ def __init__( self, id=None, toolshed=None, repository_name=None, repository_owner=None, changeset_revision=None, installation_status=None, tool_shed_repository_id=None ):
self.id = id
self.toolshed = toolshed
self.repository_name = repository_name
self.repository_owner = repository_owner
self.changeset_revision = changeset_revision
self.installation_status = installation_status
+ self.tool_shed_repository_id = tool_shed_repository_id
@property
def listify( self ):
return [ self.toolshed, self.repository_name, self.repository_owner, self.changeset_revision ]
@@ -120,7 +121,7 @@
self.format_version = format_version
self.annotation = annotation
-def build_datatypes_folder( folder_id, datatypes, label='Datatypes', description=None ):
+def build_datatypes_folder( trans, folder_id, datatypes, label='Datatypes' ):
"""Return a folder hierarchy containing datatypes."""
if datatypes:
datatype_id = 0
@@ -128,8 +129,6 @@
datatypes_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='datatypes', label=label, parent=datatypes_root_folder )
- if description:
- folder.description = description
datatypes_root_folder.folders.append( folder )
# Insert a header row.
datatype_id += 1
@@ -150,7 +149,7 @@
else:
datatypes_root_folder = None
return folder_id, datatypes_root_folder
-def build_invalid_tools_folder( folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools', description=None ):
+def build_invalid_tools_folder( trans, folder_id, invalid_tool_configs, changeset_revision, repository=None, label='Invalid tools' ):
"""Return a folder hierarchy containing invalid tools."""
# TODO: Should we display invalid tools on the tool panel selection page when installing the repository into Galaxy?
if invalid_tool_configs:
@@ -159,8 +158,6 @@
invalid_tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='invalid_tools', label=label, parent=invalid_tools_root_folder )
- if description:
- folder.description = description
invalid_tools_root_folder.folders.append( folder )
for invalid_tool_config in invalid_tool_configs:
invalid_tool_id += 1
@@ -176,7 +173,7 @@
else:
invalid_tools_root_folder = None
return folder_id, invalid_tools_root_folder
-def build_readme_files_folder( folder_id, readme_files_dict, label='Readme files', description=None ):
+def build_readme_files_folder( trans, folder_id, readme_files_dict, label='Readme files' ):
"""Return a folder hierarchy containing readme text files."""
if readme_files_dict:
multiple_readme_files = len( readme_files_dict ) > 1
@@ -186,8 +183,6 @@
if multiple_readme_files:
folder_id += 1
readme_files_folder = Folder( id=folder_id, key='readme_files', label=label, parent=readme_files_root_folder )
- if description:
- readme_files_folder.description = description
readme_files_root_folder.folders.append( readme_files_folder )
for readme_file_name, readme_file_text in readme_files_dict.items():
readme_id += 1
@@ -205,8 +200,8 @@
else:
readme_files_root_folder = None
return folder_id, readme_files_root_folder
-def build_repository_dependencies_folder( toolshed_base_url, repository_name, repository_owner, changeset_revision, folder_id, repository_dependencies,
- label='Repository dependencies', description=None ):
+def build_repository_dependencies_folder( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, folder_id, repository_dependencies,
+ label='Repository dependencies' ):
"""Return a folder hierarchy containing repository dependencies."""
if repository_dependencies:
repository_dependency_id = 0
@@ -217,8 +212,6 @@
# Create the Repository dependencies folder and add it to the root folder.
repository_dependencies_folder_key = repository_dependencies[ 'root_key' ]
repository_dependencies_folder = Folder( id=folder_id, key=repository_dependencies_folder_key, label=label, parent=repository_dependencies_root_folder )
- if description:
- repository_dependencies_folder.description = description
del repository_dependencies[ 'root_key' ]
# The received repository_dependencies is a dictionary with keys: 'root_key', 'description', and one or more repository_dependency keys.
# We want the description value associated with the repository_dependencies_folder.
@@ -226,12 +219,12 @@
repository_dependencies_root_folder.folders.append( repository_dependencies_folder )
del repository_dependencies[ 'description' ]
repository_dependencies_folder, folder_id, repository_dependency_id = \
- populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
+ populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id )
repository_dependencies_folder = prune_repository_dependencies( repository_dependencies_folder )
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
-def build_tools_folder( folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools', description=None ):
+def build_tools_folder( trans, folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
"""Return a folder hierarchy containing valid tools."""
if tool_dicts:
tool_id = 0
@@ -239,8 +232,8 @@
tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='tools', label=label, parent=tools_root_folder )
- if description:
- folder.description = description
+ if trans.webapp.name == 'galaxy':
+ folder.description = 'click the name to inspect the tool metadata'
tools_root_folder.folders.append( folder )
# Insert a header row.
tool_id += 1
@@ -281,7 +274,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( folder_id, tool_dependencies, label='Tool dependencies', for_galaxy=False, description=None, display_status=False ):
+def build_tool_dependencies_folder( trans, folder_id, tool_dependencies, label='Tool dependencies', display_status=False ):
"""Return a folder hierarchy containing tool dependencies."""
if tool_dependencies:
tool_dependency_id = 0
@@ -289,12 +282,16 @@
tool_dependencies_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='tool_dependencies', label=label, parent=tool_dependencies_root_folder )
- if description:
- folder.description = description
+ if trans.webapp.name == 'galaxy':
+ if display_status:
+ # The status will be displayed only if the tool dependency status is not 'Installed'.
+ folder.description = 'click the name to install the missing dependency'
+ else:
+ folder.description = 'click the name to browse the dependency installation directory'
tool_dependencies_root_folder.folders.append( folder )
# Insert a header row.
tool_dependency_id += 1
- if for_galaxy:
+ if trans.webapp.name == 'galaxy':
# Include the installation directory.
tool_dependency = ToolDependency( id=tool_dependency_id,
name='Name',
@@ -354,7 +351,7 @@
else:
tool_dependencies_root_folder = None
return folder_id, tool_dependencies_root_folder
-def build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows', description=None ):
+def build_workflows_folder( trans, folder_id, workflows, repository_metadata, label='Workflows' ):
"""Return a folder hierarchy containing invalid tools."""
if workflows:
workflow_id = 0
@@ -362,8 +359,6 @@
workflows_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='workflows', label=label, parent=workflows_root_folder )
- if description:
- folder.description = description
workflows_root_folder.folders.append( folder )
# Insert a header row.
workflow_id += 1
@@ -436,7 +431,7 @@
repository_owner = items[ 2 ]
changeset_revision = items[ 3 ]
return toolshed_base_url, repository_name, repository_owner, changeset_revision
-def handle_repository_dependencies_container_entry( repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
+def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
folder = get_folder( repository_dependencies_folder, rd_key )
label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, changeset_revision, repository_dependencies_folder.key )
@@ -451,7 +446,25 @@
folder_id += 1
sub_folder = Folder( id=folder_id, key=rd_key, label=label, parent=repository_dependencies_folder )
repository_dependencies_folder.folders.append( sub_folder )
+ if trans.webapp.name == 'galaxy':
+ # Insert a header row.
+ repository_dependency_id += 1
+ repository_dependency = RepositoryDependency( id=repository_dependency_id,
+ repository_name='Name',
+ changeset_revision='Revision',
+ repository_owner='Owner',
+ installation_status='Installation status' )
+ # Insert the header row into the folder.
+ sub_folder.repository_dependencies.append( repository_dependency )
for repository_dependency in rd_value:
+ if trans.webapp.name == 'galaxy':
+ # We have two extra items in the tuple, repository.id and repository.status.
+ tool_shed_repository_id = repository_dependency[ 4 ]
+ installation_status = repository_dependency[ 5 ]
+ repository_dependency = repository_dependency[ 0:4 ]
+ else:
+ tool_shed_repository_id = None
+ installation_status = None
can_create_dependency = not is_subfolder_of( sub_folder, repository_dependency )
if can_create_dependency:
toolshed, repository_name, repository_owner, changeset_revision = repository_dependency
@@ -460,7 +473,9 @@
toolshed=toolshed,
repository_name=repository_name,
repository_owner=repository_owner,
- changeset_revision=changeset_revision )
+ changeset_revision=changeset_revision,
+ installation_status=installation_status,
+ tool_shed_repository_id=tool_shed_repository_id )
# Insert the repository_dependency into the folder.
sub_folder.repository_dependencies.append( repository_dependency )
return repository_dependencies_folder, folder_id, repository_dependency_id
@@ -474,11 +489,11 @@
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, key ):
toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
return repository_name == key_name and repository_owner == key_owner and changeset_revision == key_changeset_revision
-def populate_repository_dependencies_container( repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
+def populate_repository_dependencies_container( trans, repository_dependencies_folder, repository_dependencies, folder_id, repository_dependency_id ):
folder_keys = repository_dependencies.keys()
for key, value in repository_dependencies.items():
repository_dependencies_folder, folder_id, repository_dependency_id = \
- handle_repository_dependencies_container_entry( repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
+ handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, key, value, folder_id, repository_dependency_id, folder_keys )
return repository_dependencies_folder, folder_id, repository_dependency_id
def print_folders( pad, folder ):
# For debugging...
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -877,7 +877,7 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The repository information has been updated."
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
@@ -1409,6 +1409,7 @@
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
+ tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
repository_name=tool_shed_repository.name,
@@ -1457,66 +1458,7 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- if metadata:
- datatypes = metadata.get( 'datatypes', None )
- invalid_tools = metadata.get( 'invalid_tools', None )
- if tool_shed_repository.has_readme_files:
- url = suc.url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = from_json_string( raw_text )
- else:
- readme_files_dict = None
- repository_dependencies = metadata.get( 'repository_dependencies', None )
- repository_dependencies_dict_for_display = {}
- if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
- # shed repository metadata.
- root_key = container_util.generate_repository_dependencies_key_for_repository( tool_shed_repository.tool_shed,
- tool_shed_repository.name,
- tool_shed_repository.owner,
- tool_shed_repository.installed_changeset_revision )
- rd_tups_for_display = []
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- repository_dependencies_dict_for_display[ 'root_key' ] = root_key
- repository_dependencies_dict_for_display[ root_key ] = rd_tups
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans,
- tool_shed_repository,
- all_tool_dependencies )
- valid_tools = metadata.get( 'tools', None )
- workflows = metadata.get( 'workflows', None )
- # All tool dependencies will be considered missing since we are reinstalling the repository.
- if tool_dependencies:
- for td in tool_dependencies:
- missing_tool_dependencies.append( td )
- tool_dependencies = None
- containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- repository=tool_shed_repository,
- datatypes=datatypes,
- invalid_tools=invalid_tools,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies,
- tool_dependencies=missing_tool_dependencies,
- valid_tools=valid_tools,
- workflows=workflows )
- else:
- containers_dict = dict( datatypes=None,
- invalid_tools=None,
- readme_files_dict=None,
- repository_dependencies=None,
- tool_dependencies=None,
- valid_tools=None,
- workflows=None )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, tool_shed_repository, reinstalling=True )
# Handle repository dependencies check box.
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
# Handle tool dependencies check box.
@@ -1656,7 +1598,7 @@
status = 'error'
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository, reinstalling=False )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
diff -r 5e60f799a8687d92fb41dd8a764d434623837772 -r b268bc0bbc63f525a981547d06c20911f553f1b2 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -214,6 +214,8 @@
folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
else:
folder_label = "%s<i> - this repository requires installation of these additional repositories</i>" % folder_label
+ if trans.webapp.name == 'galaxy':
+ col_span_str = 'colspan="4"'
elif folder.label == 'Valid tools':
col_span_str = 'colspan="3"'
if folder.description:
@@ -252,8 +254,9 @@
%for readme in folder.readme_files:
${render_readme( readme, pad, my_row, row_counter )}
%endfor
- %for repository_dependency in folder.repository_dependencies:
- ${render_repository_dependency( repository_dependency, pad, my_row, row_counter )}
+ %for index, repository_dependency in enumerate( folder.repository_dependencies ):
+ <% row_is_header = index == 0 %>
+ ${render_repository_dependency( repository_dependency, pad, my_row, row_counter, row_is_header )}
%endfor
%for index, tool_dependency in enumerate( folder.tool_dependencies ):
<% row_is_header = index == 0 %>
@@ -349,21 +352,60 @@
%></%def>
-<%def name="render_repository_dependency( repository_dependency, pad, parent, row_counter )">
+<%def name="render_repository_dependency( repository_dependency, pad, parent, row_counter, row_is_header=False )"><%
encoded_id = trans.security.encode_id( repository_dependency.id )
+ if trans.webapp.name == 'galaxy':
+ if repository_dependency.tool_shed_repository_id:
+ encoded_required_repository_id = trans.security.encode_id( repository_dependency.tool_shed_repository_id )
+ else:
+ encoded_required_repository_id = None
+ if repository_dependency.installation_status:
+ installation_status = str( repository_dependency.installation_status )
+ else:
+ installation_status = None
repository_name = str( repository_dependency.repository_name )
changeset_revision = str( repository_dependency.changeset_revision )
repository_owner = str( repository_dependency.repository_owner )
+
+ if trans.webapp.name == 'galaxy':
+ if row_is_header:
+ cell_type = 'th'
+ else:
+ cell_type = 'td'
+ else:
+ cell_type = 'td'
%><tr class="datasetRow"
%if parent is not None:
parent="${parent}"
%endif
id="libraryItem-${encoded_id}">
- ##<td style="padding-left: ${pad+20}px;">${repository_dependency.toolshed | h}</td>
- <td style="padding-left: ${pad+20}px;">Repository <b>${repository_name | h}</b> revision <b>${changeset_revision | h}</b> owned by <b>${repository_owner | h}</b></td>
+ %if trans.webapp.name == 'galaxy':
+ <${cell_type} style="padding-left: ${pad+20}px;">
+ %if row_is_header:
+ ${repository_name | h}
+ %elif encoded_required_repository_id:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=encoded_required_repository_id )}">${repository_name | h}</a>
+ %else:
+ ${repository_name | h}
+ %endif
+ </${cell_type}>
+ <${cell_type}>
+ ${changeset_revision | h}
+ </${cell_type}>
+ <${cell_type}>
+ ${repository_owner | h}
+ </${cell_type}>
+ <${cell_type}>
+ ${installation_status}
+ </${cell_type}>
+ %else:
+ <td style="padding-left: ${pad+20}px;">
+ Repository <b>${repository_name | h}</b> revision <b>${changeset_revision | h}</b> owned by <b>${repository_owner | h}</b>
+ </td>
+ %endif
</tr><%
my_row = row_counter.count
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8990b28bb0e7/
changeset: 8990b28bb0e7
user: inithello
date: 2012-12-21 16:32:45
summary: Fix handling of repository tools and tool dependencies. Fix issue with functional tests altering the integrated tool panel.
affected #: 2 files
diff -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -75,8 +75,9 @@
# Replace the old list of in-memory config_elems with the new list for this shed_tool_conf_dict.
shed_tool_conf_dict[ 'config_elems' ] = config_elems
app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
- # Write the current in-memory version of the integrated_tool_panel.xml file to disk.
- app.toolbox.write_integrated_tool_panel_config_file()
+ if app.config.update_integrated_tool_panel:
+ # Write the current in-memory version of the integrated_tool_panel.xml file to disk.
+ app.toolbox.write_integrated_tool_panel_config_file()
app.toolbox_search = ToolBoxSearch( app.toolbox )
def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
"""
@@ -1055,7 +1056,7 @@
shed_tool_conf_dict[ 'config_elems' ] = config_elems
trans.app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
trans.app.toolbox_search = ToolBoxSearch( trans.app.toolbox )
- if uninstall:
+ if uninstall and trans.app.config.update_integrated_tool_panel:
# Write the current in-memory version of the integrated_tool_panel.xml file to disk.
trans.app.toolbox.write_integrated_tool_panel_config_file()
def remove_tool_dependency( trans, tool_dependency ):
diff -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1088,14 +1088,16 @@
if ( not includes_tools and not includes_repository_dependencies ) or \
( ( includes_tools or includes_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
- if includes_tool_dependencies:
- install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
+ if includes_tools:
shed_tool_conf = kwd[ 'shed_tool_conf' ]
else:
- install_tool_dependencies = False
# If installing a repository that includes no tools, get the relative tool_path from the file to which the migrated_tools_config
# setting points.
shed_tool_conf = trans.app.config.migrated_tools_config
+ if includes_tool_dependencies:
+ install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
+ else:
+ install_tool_dependencies = False
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
created_or_updated_tool_shed_repositories, repo_info_dicts, filtered_repo_info_dicts, message = \
shed_util.create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False )
https://bitbucket.org/galaxy/galaxy-central/changeset/5e60f799a868/
changeset: 5e60f799a868
user: inithello
date: 2012-12-21 16:33:18
summary: Make functional tests explicitly specify which shed tool config to use.
affected #: 2 files
diff -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 -r 5e60f799a8687d92fb41dd8a764d434623837772 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -28,6 +28,7 @@
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
self.tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
+ self.shed_tool_conf = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF' )
# TODO: Figure out a way to alter these attributes during tests.
self.galaxy_tool_dependency_dir = None # os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
@@ -431,6 +432,8 @@
checkbox.selected = True
else:
checkbox.selected = False
+ if 'shed_tool_conf' not in kwd:
+ kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.initiate_installation_process()
self.wait_for_repository_installation( repository, changeset_revision )
diff -r 8990b28bb0e77dbf06a83e47e0f76045c28968f3 -r 5e60f799a8687d92fb41dd8a764d434623837772 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -256,6 +256,7 @@
shed_tool_conf_template_parser = string.Template( shed_tool_conf_xml_template )
shed_tool_conf_xml = shed_tool_conf_template_parser.safe_substitute( shed_tool_path=galaxy_shed_tool_path )
file( galaxy_shed_tool_conf_file, 'w' ).write( shed_tool_conf_xml )
+ os.environ[ 'GALAXY_TEST_SHED_TOOL_CONF' ] = galaxy_shed_tool_conf_file
# ---- Build Galaxy Application --------------------------------------------------
galaxy_global_conf = { '__file__' : 'universe_wsgi.ini.sample' }
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for setting tool versions for tools contained in tool shed repositories installed into a Galaxy instance.
by Bitbucket 21 Dec '12
by Bitbucket 21 Dec '12
21 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5c9971aba1b4/
changeset: 5c9971aba1b4
user: greg
date: 2012-12-21 15:00:34
summary: Fix for setting tool versions for tools contained in tool shed repositories installed into a Galaxy instance.
affected #: 2 files
diff -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -852,6 +852,60 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
+def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository ):
+ """
+ Retrieve necessary information from the received repository's metadata to populate the containers_dict for display. This methos is called only
+ from Galaxy and not the tool shed.
+ """
+ metadata = repository.metadata
+ if metadata:
+ datatypes = metadata.get( 'datatypes', None )
+ invalid_tools = metadata.get( 'invalid_tools', None )
+ if repository.has_readme_files:
+ readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
+ else:
+ readme_files_dict = None
+ repository_dependencies = metadata.get( 'repository_dependencies', None )
+ repository_dependencies_dict_for_display = {}
+ if repository_dependencies:
+ # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
+ # shed repository metadata.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision )
+ rd_tups_for_display = []
+ rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ repository_dependencies_dict_for_display[ 'root_key' ] = root_key
+ repository_dependencies_dict_for_display[ root_key ] = rd_tups
+ repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
+ all_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ tool_dependencies, missing_tool_dependencies = get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ valid_tools = metadata.get( 'tools', None )
+ workflows = metadata.get( 'workflows', None )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=repository.name,
+ repository_owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ repository=repository,
+ datatypes=datatypes,
+ invalid_tools=invalid_tools,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies_dict_for_display,
+ tool_dependencies=tool_dependencies,
+ valid_tools=valid_tools,
+ workflows=workflows )
+ else:
+ containers_dict = dict( datatypes=None,
+ invalid_tools=None,
+ readme_files_dict=None,
+ repository_dependencies=None,
+ tool_dependencies=None,
+ valid_tools=None,
+ workflows=None )
+ return containers_dict
def pull_repository( repo, repository_clone_url, ctx_rev ):
"""Pull changes from a remote repository to a local one."""
commands.pull( suc.get_configured_ui(), repo, source=repository_clone_url, rev=[ ctx_rev ] )
diff -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 -r 5c9971aba1b4f4062e27dcbdd9262e3e81c88ddf lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -877,45 +877,7 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The repository information has been updated."
- metadata = repository.metadata
- datatypes = metadata.get( 'datatypes', None )
- invalid_tools = metadata.get( 'invalid_tools', None )
- if repository.has_readme_files:
- readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
- else:
- readme_files_dict = None
- repository_dependencies = metadata.get( 'repository_dependencies', None )
- repository_dependencies_dict_for_display = {}
- if repository_dependencies:
- # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
- # shed repository metadata.
- root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
- repository.name,
- repository.owner,
- repository.installed_changeset_revision )
- rd_tups_for_display = []
- rd_tups = repository_dependencies[ 'repository_dependencies' ]
- repository_dependencies_dict_for_display[ 'root_key' ] = root_key
- repository_dependencies_dict_for_display[ root_key ] = rd_tups
- repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
- all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
- valid_tools = metadata.get( 'tools', None )
- workflows = metadata.get( 'workflows', None )
- containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
- toolshed_base_url=tool_shed_url,
- repository_name=repository.name,
- repository_owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- repository=repository,
- datatypes=datatypes,
- invalid_tools=invalid_tools,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=repository_dependencies_dict_for_display,
- tool_dependencies=tool_dependencies,
- valid_tools=valid_tools,
- workflows=workflows )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
@@ -1668,7 +1630,10 @@
@web.expose
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
- # Get the tool_versions from the tool shed for each tool in the installed change set.
+ """
+ Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed repository and update the
+ metadata for the repository's revision in the Galaxy database.
+ """
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
@@ -1689,10 +1654,12 @@
status = 'error'
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ containers_dict = shed_util.populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
repo_files_dir=repo_files_dir,
+ containers_dict=containers_dict,
message=message,
status=status )
@web.json
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for administering tool dependencies associated with tool shed repositories installed into a Galaxy instance.
by Bitbucket 20 Dec '12
by Bitbucket 20 Dec '12
20 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0a3e4bc2b5a5/
changeset: 0a3e4bc2b5a5
user: greg
date: 2012-12-21 02:48:11
summary: Fixes for administering tool dependencies associated with tool shed repositories installed into a Galaxy instance.
affected #: 5 files
diff -r c159eafdf9d3e97e12b7c9db53509f13842c5b96 -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -499,6 +499,8 @@
type = td_info_dict[ 'type' ]
tool_dependency = get_tool_dependency_by_name_type_repository( trans, repository, name, type )
if tool_dependency:
+ td_info_dict[ 'repository_id' ] = repository.id
+ td_info_dict[ 'tool_dependency_id' ] = tool_dependency.id
td_info_dict[ 'status' ] = str( tool_dependency.status )
val[ index ] = td_info_dict
if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED:
@@ -510,12 +512,14 @@
version = val[ 'version' ]
type = val[ 'type' ]
tool_dependency = get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type )
- val[ 'status' ] = str( tool_dependency.status )
- if tool_dependency:
- if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED:
- tool_dependencies[ td_key ] = val
- else:
- missing_tool_dependencies[ td_key ] = val
+ if tool_dependency:
+ val[ 'repository_id' ] = repository.id
+ val[ 'tool_dependency_id' ] = tool_dependency.id
+ val[ 'status' ] = str( tool_dependency.status )
+ if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED:
+ tool_dependencies[ td_key ] = val
+ else:
+ missing_tool_dependencies[ td_key ] = val
else:
tool_dependencies = None
missing_tool_dependencies = None
diff -r c159eafdf9d3e97e12b7c9db53509f13842c5b96 -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -135,8 +135,8 @@
description = 'click the name to browse the dependency installation directory'
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
tool_dependencies,
+ label='Installed tool dependencies',
for_galaxy=True,
- repository_id=repository_id ,
description=description,
display_status=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
@@ -148,7 +148,6 @@
missing_tool_dependencies,
label='Missing tool dependencies',
for_galaxy=True,
- repository_id=repository_id,
description=description,
display_status=True )
containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
@@ -220,7 +219,6 @@
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id,
tool_dependencies,
for_galaxy=False,
- repository_id=None,
description=None,
display_status=False )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
@@ -837,7 +835,7 @@
env_name = env_elem.get( 'name', None )
if env_name:
requirements_dict [ 'name' ] = env_name
- requirements_dict [ 'type' ] = 'environment variable'
+ requirements_dict [ 'type' ] = 'set_environment'
if requirements_dict:
if 'set_environment' in tool_dependencies_dict:
tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
diff -r c159eafdf9d3e97e12b7c9db53509f13842c5b96 -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -98,7 +98,8 @@
class ToolDependency( object ):
"""Tool dependency object"""
- def __init__( self, id=None, name=None, version=None, type=None, install_dir=None, readme=None, installation_status=None, repository_id=None, ):
+ def __init__( self, id=None, name=None, version=None, type=None, install_dir=None, readme=None, installation_status=None, repository_id=None,
+ tool_dependency_id=None ):
self.id = id
self.name = name
self.version = version
@@ -107,6 +108,7 @@
self.readme = readme
self.installation_status = installation_status
self.repository_id = repository_id
+ self.tool_dependency_id = tool_dependency_id
class Workflow( object ):
"""Workflow object"""
@@ -279,8 +281,7 @@
else:
tools_root_folder = None
return folder_id, tools_root_folder
-def build_tool_dependencies_folder( folder_id, tool_dependencies, label='Installed tool dependencies', for_galaxy=False, repository_id=None, description=None,
- display_status=False ):
+def build_tool_dependencies_folder( folder_id, tool_dependencies, label='Tool dependencies', for_galaxy=False, description=None, display_status=False ):
"""Return a folder hierarchy containing tool dependencies."""
if tool_dependencies:
tool_dependency_id = 0
@@ -298,8 +299,7 @@
tool_dependency = ToolDependency( id=tool_dependency_id,
name='Name',
version='Version',
- type='Type',
- repository_id=repository_id )
+ type='Type' )
if display_status:
tool_dependency.installation_status = 'Status'
else:
@@ -308,8 +308,7 @@
tool_dependency = ToolDependency( id=tool_dependency_id,
name='Name',
version='Version',
- type='Type',
- repository_id=repository_id )
+ type='Type' )
if display_status:
tool_dependency.installation_status = 'Status'
folder.tool_dependencies.append( tool_dependency )
@@ -317,25 +316,30 @@
tool_dependency_id += 1
if dependency_key == 'set_environment':
for set_environment_dict in requirements_dict:
- name = set_environment_dict[ 'name' ]
+ name = set_environment_dict.get( 'name', None )
type = set_environment_dict[ 'type' ]
+ repository_id = set_environment_dict.get( 'repository_id', None )
+ td_id = set_environment_dict.get( 'tool_dependency_id', None )
if display_status:
- installation_status = set_environment_dict[ 'status' ]
+ installation_status = set_environment_dict.get( 'status', None )
else:
installation_status = None
tool_dependency = ToolDependency( id=tool_dependency_id,
name=name,
type=type,
installation_status=installation_status,
- repository_id=repository_id )
+ repository_id=repository_id,
+ tool_dependency_id=td_id )
folder.tool_dependencies.append( tool_dependency )
else:
name = requirements_dict[ 'name' ]
version = requirements_dict[ 'version' ]
type = requirements_dict[ 'type' ]
install_dir = requirements_dict.get( 'install_dir', None )
+ repository_id = requirements_dict.get( 'repository_id', None )
+ td_id = requirements_dict.get( 'tool_dependency_id', None )
if display_status:
- installation_status = requirements_dict[ 'status' ]
+ installation_status = requirements_dict.get( 'status', None )
else:
installation_status = None
tool_dependency = ToolDependency( id=tool_dependency_id,
@@ -344,7 +348,8 @@
type=type,
install_dir=install_dir,
installation_status=installation_status,
- repository_id=repository_id )
+ repository_id=repository_id,
+ tool_dependency_id=td_id )
folder.tool_dependencies.append( tool_dependency )
else:
tool_dependencies_root_folder = None
diff -r c159eafdf9d3e97e12b7c9db53509f13842c5b96 -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -430,17 +430,15 @@
<${cell_type} style="padding-left: ${pad+20}px;">
%if row_is_header:
${tool_dependency.name | h}
- %elif tool_dependency.repository_id:
- %if not tool_dependency.installation_status:
- ## tool_dependency.installation_status will be None if the status value in the database is 'Installed'.
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( tool_dependency.repository_id ) )}">
- ${tool_dependency.name | h}
- </a>
- %else:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.id ) )}">
- ${tool_dependency.name}
- </a>
- %endif
+ %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id and tool_dependency.repository_id and not tool_dependency.installation_status:
+ ## tool_dependency.installation_status will be None if the status value in the database is 'Installed'.
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.tool_dependency_id ), repository_id=trans.security.encode_id( tool_dependency.repository_id ) )}">
+ ${tool_dependency.name | h}
+ </a>
+ %elif trans.webapp.name == 'galaxy' and tool_dependency.tool_dependency_id and tool_dependency.installation_status:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.tool_dependency_id ) )}">
+ ${tool_dependency.name}
+ </a>
%else:
${tool_dependency.name | h}
%endif
diff -r c159eafdf9d3e97e12b7c9db53509f13842c5b96 -r 0a3e4bc2b5a510820b8aba73b78433db810a3481 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -62,9 +62,7 @@
self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0010 Repository With Tool Dependencies' ] )
category = test_db_util.get_category_by_name( 'Test 0010 Repository With Tool Dependencies' )
self.browse_category( category, strings_displayed=[ 'freebayes_0010' ] )
- self.preview_repository_in_tool_shed( 'freebayes_0010', common.test_user_1_name, strings_displayed=[ 'freebayes_0010',
- 'Valid tools',
- 'Installed tool dependencies' ] )
+ self.preview_repository_in_tool_shed( 'freebayes_0010', common.test_user_1_name, strings_displayed=[ 'freebayes_0010', 'Valid tools', 'Tool dependencies' ] )
def test_0015_install_freebayes_repository( self ):
'''Install the freebayes repository without installing tool dependencies.'''
strings_displayed=[ 'set your tool_dependency_dir', 'can be automatically installed', 'Set the tool_dependency_dir' ]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0