galaxy-commits
June 2012: 1 participant, 98 discussions
commit/galaxy-central: Scott McManus: PBS runner now writes and reads an exit code for each command
by Bitbucket 20 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6fe91b7bfe6e/
changeset: 6fe91b7bfe6e
user: Scott McManus
date: 2012-06-20 19:02:16
summary: PBS runner now writes and reads an exit code for each command
affected #: 1 file
diff -r 663e03e40c86ffa0766dd6638ea7c0886632117d -r 6fe91b7bfe6e4bdcf62299ae88a6206fcb50d3f1 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -33,6 +33,8 @@
__all__ = [ 'PBSJobRunner' ]
+# The last two lines execute the command and then retrieve the command's
+# exit code ($?) and write it to a file.
pbs_template = """#!/bin/sh
GALAXY_LIB="%s"
if [ "$GALAXY_LIB" != "None" ]; then
@@ -45,8 +47,11 @@
%s
cd %s
%s
+echo $? > %s
"""
+# The last two lines execute the command and then retrieve the command's
+# exit code ($?) and write it to a file.
pbs_symlink_template = """#!/bin/sh
GALAXY_LIB="%s"
if [ "$GALAXY_LIB" != "None" ]; then
@@ -65,6 +70,7 @@
%s
cd %s
%s
+echo $? > %s
"""
# From pbs' job.h
@@ -93,6 +99,7 @@
self.job_file = None
self.ofile = None
self.efile = None
+ self.ecfile = None
self.runner_url = None
self.check_count = 0
self.stop_job = False
@@ -233,6 +240,7 @@
# define job attributes
ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+ ecfile = "%s/%s.ec" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
output_fnames = job_wrapper.get_output_fnames()
@@ -273,12 +281,15 @@
self.app.config.pbs_stage_path,
job_wrapper.get_env_setup_clause(),
exec_dir,
- command_line )
+ command_line,
+ ecfile )
+
else:
script = pbs_template % ( job_wrapper.galaxy_lib_dir,
job_wrapper.get_env_setup_clause(),
exec_dir,
- command_line )
+ command_line,
+ ecfile )
job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
fh = file(job_file, "w")
fh.write(script)
@@ -289,7 +300,7 @@
log.debug( "Job %s deleted by user before it entered the PBS queue" % job_wrapper.job_id )
pbs.pbs_disconnect(c)
if self.app.config.cleanup_job in ( "always", "onsuccess" ):
- self.cleanup( ( ofile, efile, job_file ) )
+ self.cleanup( ( ofile, efile, ecfile, job_file ) )
job_wrapper.cleanup()
return
@@ -321,6 +332,7 @@
pbs_job_state.job_id = job_id
pbs_job_state.ofile = ofile
pbs_job_state.efile = efile
+ pbs_job_state.ecfile = ecfile
pbs_job_state.job_file = job_file
pbs_job_state.old_state = 'N'
pbs_job_state.running = False
@@ -510,27 +522,35 @@
"""
ofile = pbs_job_state.ofile
efile = pbs_job_state.efile
+ ecfile = pbs_job_state.ecfile
job_file = pbs_job_state.job_file
# collect the output
try:
ofh = file(ofile, "r")
efh = file(efile, "r")
+ ecfh = file(ecfile, "r")
stdout = ofh.read( 32768 )
stderr = efh.read( 32768 )
+ # This should be an 8-bit exit code, but read ahead anyway:
+ exit_code = ecfh.read(32)
except:
stdout = ''
stderr = 'Job output not returned by PBS: the output datasets were deleted while the job was running, the job was manually dequeued or there was a cluster error.'
+ # By default, the exit code is 0, which usually indicates success
+ # (although clearly some error happened).
+ exit_code = 0
log.debug(stderr)
+ log.debug( "Job exit code: " + exit_code )
try:
- pbs_job_state.job_wrapper.finish( stdout, stderr )
+ pbs_job_state.job_wrapper.finish( stdout, stderr, exit_code )
except:
log.exception("Job wrapper finish method failed")
pbs_job_state.job_wrapper.fail("Unable to finish job", exception=True)
# clean up the pbs files
if self.app.config.cleanup_job == "always" or ( not stderr and self.app.config.cleanup_job == "onsuccess" ):
- self.cleanup( ( ofile, efile, job_file ) )
+ self.cleanup( ( ofile, efile, ecfile, job_file ) )
def fail_job( self, pbs_job_state ):
"""
@@ -594,6 +614,7 @@
pbs_job_state = PBSJobState()
pbs_job_state.ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job.id)
+ pbs_job_state.ecfile = "%s/%s.ec" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.job_file = "%s/%s.sh" % (self.app.config.cluster_files_directory, job.id)
pbs_job_state.job_id = str( job.job_runner_external_id )
pbs_job_state.runner_url = job_wrapper.get_job_runner()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
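The pattern this changeset introduces is compact: the generated job script appends the command's exit status ($?) to a dedicated .ec file, and the runner reads that file back when it collects output, falling back to 0 if the read fails. Below is a minimal sketch of the same idea, with hypothetical paths standing in for Galaxy's cluster_files_directory plumbing; it is an illustration of the pattern, not the runner's actual code.

import subprocess

def run_and_record_exit_code(command_line, ec_path):
    # Mirrors the template change: run the command, then persist $? to a file.
    script = "%s\necho $? > %s\n" % (command_line, ec_path)
    subprocess.call(["sh", "-c", script])

def read_exit_code(ec_path):
    # Mirrors the collection step: read the code back; default to 0 when the
    # file is missing, the same fallback the changeset uses on a read failure.
    try:
        with open(ec_path) as ecfh:
            # Should be an 8-bit exit code, but read a few extra bytes anyway.
            return int(ecfh.read(32).strip() or 0)
    except (IOError, ValueError):
        return 0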
commit/galaxy-central: dannon: Remove debugging exception from 06249bb8e6c9.
by Bitbucket 20 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/663e03e40c86/
changeset: 663e03e40c86
user: dannon
date: 2012-06-20 09:39:05
summary: Remove debugging exception from 06249bb8e6c9.
affected #: 1 file
diff -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 -r 663e03e40c86ffa0766dd6638ea7c0886632117d lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -284,7 +284,6 @@
return open( dataset.file_name )
def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
- raise Exception
""" Old display method, for transition """
#Relocate all composite datatype display to a common location.
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
commit/galaxy-central: Scott McManus: Adding in exit code management for local changes; I'm holding off on the PBS and generic DRMAA runners until they're tested.
by Bitbucket 20 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/06249bb8e6c9/
changeset: 06249bb8e6c9
user: Scott McManus
date: 2012-06-20 08:14:55
summary: Adding in exit code management for local changes; I'm holding off on the PBS and generic DRMAA runners until they're tested.
affected #: 4 files
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -284,6 +284,7 @@
return open( dataset.file_name )
def display_data(self, trans, data, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
+ raise Exception
""" Old display method, for transition """
#Relocate all composite datatype display to a common location.
composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -285,12 +285,14 @@
self.sa_session.add( job )
self.sa_session.flush()
- def finish( self, stdout, stderr ):
+ def finish( self, stdout, stderr, tool_exit_code=0 ):
"""
Called to indicate that the associated command has been run. Updates
the output datasets based on stderr and stdout from the command, and
the contents of the output files.
"""
+ # TODO: Eliminate debugging code after testing all runners
+ log.debug( "JobWrapper.finish: exit code:" + str(tool_exit_code) )
# default post job setup
self.sa_session.expunge_all()
job = self.get_job()
@@ -317,17 +319,15 @@
# that range, then apply the error level and add in a message.
# If we've reached a fatal error rule, then stop.
max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
- for exit_code in self.tool.stdio_exit_codes:
- # TODO: Fetch the exit code from the .rc file:
- tool_exit_code = 0
- if ( tool_exit_code >= exit_code.range_start and
- tool_exit_code <= exit_code.range_end ):
- if None != exit_code.desc:
- err_msg += exit_code.desc
+ for stdio_exit_code in self.tool.stdio_exit_codes:
+ if ( tool_exit_code >= stdio_exit_code.range_start and
+ tool_exit_code <= stdio_exit_code.range_end ):
+ if None != stdio_exit_code.desc:
+ err_msg += stdio_exit_code.desc
# TODO: Find somewhere to stick the err_msg - possibly to
# the source (stderr/stdout), possibly in a new db column.
max_error_level = max( max_error_level,
- exit_code.error_level )
+ stdio_exit_code.error_level )
if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
break
# If there is a regular expression for scanning stdout/stderr,
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -368,6 +368,7 @@
start_job_runners.append("tasks")
for name in start_job_runners:
self._load_plugin( name )
+ log.debug( "Job runners: " + ':'.join( start_job_runners ) )
def _load_plugin( self, name ):
module_name = 'galaxy.jobs.runners.' + name
@@ -397,6 +398,7 @@
def put( self, job_wrapper ):
try:
runner_name = self.__get_runner_name( job_wrapper )
+ log.debug( "Runner_name: " + runner_name )
if self.app.config.use_tasked_jobs and job_wrapper.tool.parallelism is not None and isinstance(job_wrapper, TaskWrapper):
#DBTODO Refactor
log.debug( "dispatching task %s, of job %d, to %s runner" %( job_wrapper.task_id, job_wrapper.job_id, runner_name ) )
diff -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 -r 06249bb8e6c9be1e2f94fbd860f123f4ae51a356 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -54,6 +54,7 @@
def run_job( self, job_wrapper ):
job_wrapper.set_runner( 'local:///', None )
stderr = stdout = command_line = ''
+ exit_code = 0
# Prepare the job to run
try:
job_wrapper.prepare()
@@ -99,7 +100,11 @@
if sleep_time < 8:
# So we don't stat every second
sleep_time *= 2
- proc.wait() # reap
+ # Reap the process and get the exit code. The exit code should
+ # only be None if the process isn't finished, but check anyway.
+ exit_code = proc.wait() # reap
+ if None == exit_code:
+ exit_code = 0
stdout_file.seek( 0 )
stderr_file.seek( 0 )
stdout = stdout_file.read( 32768 )
@@ -128,9 +133,9 @@
external_metadata_proc.wait()
log.debug( 'execution of external set_meta for job %d finished' % job_wrapper.job_id )
- # Finish the job
+ # Finish the job!
try:
- job_wrapper.finish( stdout, stderr )
+ job_wrapper.finish( stdout, stderr, exit_code )
except:
log.exception("Job wrapper finish method failed")
job_wrapper.fail("Unable to finish job", exception=True)
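The local-runner half of this change is the simplest illustration of the series: the subprocess module already reports the child's exit status, so the runner only has to guard against None and pass the value through to finish(). A stripped-down sketch of that flow, with a hypothetical finish callback standing in for JobWrapper.finish:

import subprocess

def run_local_job(command_line, finish):
    # finish(stdout, stderr, exit_code) mirrors the new JobWrapper.finish signature.
    proc = subprocess.Popen(command_line, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    exit_code = proc.returncode
    if exit_code is None:  # defensive; communicate() should have reaped the child
        exit_code = 0
    finish(stdout, stderr, exit_code)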
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a8d96b0464c8/
changeset: a8d96b0464c8
user: fubar
date: 2012-06-20 02:45:10
summary: Removed hard coded sizes of 40 characters from templates/form.mako to allow parameter size= settings to work correctly
affected #: 1 file
diff -r 8d6138fa2251566b5d17d64e9f56f129d9dc624a -r a8d96b0464c85bd7996e4c9764f769c5be31d682 templates/form.mako
--- a/templates/form.mako
+++ b/templates/form.mako
@@ -81,7 +81,7 @@
%endif
<div class="form-row-input">
%if input.type == 'textarea':
- <textarea name="${input.name}" cols="40">${input.value}</textarea>
+ <textarea name="${input.name}">${input.value}</textarea>
%elif input.type == 'select':
<select name="${input.name}">
%for (name, value) in input.options:
@@ -89,7 +89,7 @@
%endfor
</select>
%else:
- <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
+ <input type="${input.type}" name="${input.name}" value="${input.value}">
%endif
</div>
%if input.error:
@@ -107,4 +107,4 @@
</form></div></div>
-</%def>
\ No newline at end of file
+</%def>
https://bitbucket.org/galaxy/galaxy-central/changeset/a1276b504934/
changeset: a1276b504934
user: fubar
date: 2012-06-20 02:58:54
summary: Needed a base.css change also to fix the restriction on tool form parameter widths
It will be interesting to see if anyone notices this....
affected #: 1 file
diff -r a8d96b0464c85bd7996e4c9764f769c5be31d682 -r a1276b5049345152c02fc1cffff341de4904f164 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -569,14 +569,14 @@
div.form-row-error{background:#ffcccc;}
div.form-row label{font-weight:bold;display:block;margin-bottom:.2em;}
div.form-row label.inline{display:inline;}
-div.form-row-input{width:300px;float:left;}
+div.form-row-input{width:90%;float:left;}
div.form-row-input label{font-weight:normal;display:inline;}
div.form-row-error-message{width:300px;float:left;color:red;font-weight:bold;padding:3px 0;}
.form-row .help,.toolParamHelp{color:#666;}.form-row .help a,.toolParamHelp a{color:#666;}
.form-row.form-actions{background:whiteSmoke;border-top:solid #ddd 1px;padding-top:10px;padding-bottom:10px;margin-top:5px;}
select{padding:2px;font-size:12px;line-height:16px;}
select,input,textarea{font:inherit;}
-select,textarea,input[type="text"],input[type="file"],input[type="password"]{max-width:300px;}
+select,textarea,input[type="text"],input[type="file"],input[type="password"]{max-width:90%;}
textarea,input[type="text"],input[type="password"]{font-size:12px;line-height:16px;border:1px solid #999999;padding:3px;}
.search-query{display:inline-block;padding:4px;font-size:12px;line-height:16px;color:#555555;border:1px solid #999999;padding-left:14px !important;padding-right:14px !important;margin-bottom:0;-webkit-border-radius:14px;-moz-border-radius:14px;border-radius:14px;max-width:auto;}
.search-query:focus{border-color:rgba(24, 132, 218, 0.8);-webkit-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(82, 168, 236, 0.6);-moz-box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(82, 168, 236, 0.6);box-shadow:inset 0 1px 1px rgba(0, 0, 0, 0.075),0 0 8px rgba(82, 168, 236, 0.6);outline:0;outline:thin dotted \9;}
@@ -724,7 +724,7 @@
div.toolSectionBody div.toolPanelLabel{padding-top:5px;padding-bottom:5px;margin-left:16px;margin-right:10px;display:list-item;list-style:none outside;}
div.toolTitleNoSection{padding-bottom:5px;font-weight:bold;}
#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
-#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;display:none;background:url(largespinner.gif);}
+#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;background:url(largespinner.gif);}
#content_table td{text-align:right;white-space:nowrap;padding:2px 10px;}
#content_table td.stringalign{text-align:left;}
.toolMenuAndView .toolForm{float:left;background-color:white;margin:10px;}
https://bitbucket.org/galaxy/galaxy-central/changeset/f1c5d6639f2e/
changeset: f1c5d6639f2e
user: fubar
date: 2012-06-20 03:02:58
summary: Can't be too careful - reverted base.css loading_indicator to tip version
affected #: 1 file
diff -r a1276b5049345152c02fc1cffff341de4904f164 -r f1c5d6639f2efcf0eec530d1901c9816d65b32b0 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -724,7 +724,7 @@
div.toolSectionBody div.toolPanelLabel{padding-top:5px;padding-bottom:5px;margin-left:16px;margin-right:10px;display:list-item;list-style:none outside;}
div.toolTitleNoSection{padding-bottom:5px;font-weight:bold;}
#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
-#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;background:url(largespinner.gif);}
+#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;display:none;background:url(largespinner.gif);}
#content_table td{text-align:right;white-space:nowrap;padding:2px 10px;}
#content_table td.stringalign{text-align:left;}
.toolMenuAndView .toolForm{float:left;background-color:white;margin:10px;}
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8d6138fa2251/
changeset: 8d6138fa2251
user: dannon
date: 2012-06-19 22:42:25
summary: S3 object store now functional.
Whitespace cleanup.
affected #: 1 file
diff -r 958d1f8b2caf9595d79e66d1eb1705110a1a6784 -r 8d6138fa2251566b5d17d64e9f56f129d9dc624a lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -40,20 +40,20 @@
def __init__(self):
self.running = True
self.extra_dirs = {}
-
+
def shutdown(self):
self.running = False
-
+
def exists(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Returns True if the object identified by `obj` exists in this file
store, False otherwise.
-
+
FIELD DESCRIPTIONS (these apply to all the methods in this class):
:type obj: object
:param obj: A Galaxy object with an assigned database ID accessible via
the .id attribute.
-
+
:type base_dir: string
:param base_dir: A key in self.extra_dirs corresponding to the base
directory in which this object should be created, or
@@ -64,19 +64,19 @@
identified by `obj` should be located, not the dataset
itself. This option applies to `extra_dir` argument as
well.
-
+
:type extra_dir: string
:param extra_dir: Append `extra_dir` to the directory structure where
the dataset identified by `obj` should be located.
(e.g., 000/extra_dir/obj.id)
-
+
:type extra_dir_at_root: bool
:param extra_dir_at_root: Applicable only if `extra_dir` is set.
If True, the `extra_dir` argument is placed at
root of the created directory structure rather
than at the end (e.g., extra_dir/000/obj.id
vs. 000/extra_dir/obj.id)
-
+
:type alt_name: string
:param alt_name: Use this name as the alternative name for the created
dataset rather than the default.
@@ -84,10 +84,10 @@
raise NotImplementedError()
def file_ready(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
- """ A helper method that checks if a file corresponding to a dataset
+ """ A helper method that checks if a file corresponding to a dataset
is ready and available to be used. Return True if so, False otherwise."""
return True
-
+
def create(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Mark the object identified by `obj` as existing in the store, but with
@@ -104,7 +104,7 @@
See `exists` method for the description of the fields.
"""
raise NotImplementedError()
-
+
def size(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Return size of the object identified by `obj`.
@@ -112,13 +112,13 @@
See `exists` method for the description of the fields.
"""
raise NotImplementedError()
-
+
def delete(self, obj, entire_dir=False, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Deletes the object identified by `obj`.
See `exists` method for the description of other fields.
:type entire_dir: bool
- :param entire_dir: If True, delete the entire directory pointed to by
+ :param entire_dir: If True, delete the entire directory pointed to by
extra_dir. For safety reasons, this option applies
only for and in conjunction with the extra_dir option.
"""
@@ -130,15 +130,15 @@
object identified uniquely by `obj`.
If the object does not exist raises `ObjectNotFound`.
See `exists` method for the description of other fields.
-
+
:type start: int
:param start: Set the position to start reading the dataset file
-
+
:type count: int
:param count: Read at most `count` bytes from the dataset
"""
raise NotImplementedError()
-
+
def get_filename(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
Get the expected filename (including the absolute path) which can be used
@@ -146,7 +146,7 @@
See `exists` method for the description of the fields.
"""
raise NotImplementedError()
-
+
def update_from_file(self, obj, base_dir=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, file_name=None, create=False):
"""
Inform the store that the file associated with the object has been
@@ -154,16 +154,16 @@
of the default.
If the object does not exist raises `ObjectNotFound`.
See `exists` method for the description of other fields.
-
+
:type file_name: string
- :param file_name: Use file pointed to by `file_name` as the source for
+ :param file_name: Use file pointed to by `file_name` as the source for
updating the dataset identified by `obj`
-
+
:type create: bool
:param create: If True and the default dataset does not exist, create it first.
"""
raise NotImplementedError()
-
+
def get_object_url(self, obj, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""
If the store supports direct URL access, return a URL. Otherwise return
@@ -178,7 +178,7 @@
Return the percentage indicating how full the store is
"""
raise NotImplementedError()
-
+
## def get_staging_command( id ):
## """
## Return a shell command that can be prepended to the job script to stage the
@@ -212,21 +212,21 @@
self.extra_dirs['temp'] = config.new_file_path
if extra_dirs is not None:
self.extra_dirs.update( extra_dirs )
-
+
def _get_filename(self, obj, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None):
"""Class method that returns the absolute path for the file corresponding
- to the `obj`.id regardless of whether the file exists.
+ to the `obj`.id regardless of whether the file exists.
"""
path = self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name, old_style=True)
- # For backward compatibility, check the old style root path first; otherwise,
+ # For backward compatibility, check the old style root path first; otherwise,
# construct hashed path
if not os.path.exists(path):
return self._construct_path(obj, base_dir=base_dir, dir_only=dir_only, extra_dir=extra_dir, extra_dir_at_root=extra_dir_at_root, alt_name=alt_name)
-
+
def _construct_path(self, obj, old_style=False, base_dir=None, dir_only=False, extra_dir=None, extra_dir_at_root=False, alt_name=None, **kwargs):
""" Construct the expected absolute path for accessing the object
identified by `obj`.id.
-
+
:type base_dir: string
:param base_dir: A key in self.extra_dirs corresponding to the base
directory in which this object should be created, or
@@ -237,16 +237,16 @@
identified by `obj` should be located, not the
dataset itself. This option applies to `extra_dir`
argument as well.
-
+
:type extra_dir: string
:param extra_dir: Append the value of this parameter to the expected path
used to access the object identified by `obj`
(e.g., /files/000/<extra_dir>/dataset_10.dat).
-
+
:type alt_name: string
:param alt_name: Use this name as the alternative name for the returned
dataset rather than the default.
-
+
:type old_style: bool
param old_style: This option is used for backward compatibility. If True
the composed directory structure does not include a hash id
@@ -274,7 +274,7 @@
def exists(self, obj, **kwargs):
path = self._construct_path(obj, old_style=True, **kwargs)
- # For backward compatibility, check root path first; otherwise, construct
+ # For backward compatibility, check root path first; otherwise, construct
# and check hashed path
if os.path.exists(path):
return True
@@ -292,12 +292,12 @@
os.makedirs(dir)
# Create the file if it does not exist
if not dir_only:
- open(path, 'w').close()
+ open(path, 'w').close()
util.umask_fix_perms(path, self.config.umask, 0666)
def empty(self, obj, **kwargs):
return os.path.getsize(self.get_filename(obj, **kwargs)) > 0
-
+
def size(self, obj, **kwargs):
if self.exists(obj, **kwargs):
try:
@@ -306,7 +306,7 @@
return 0
else:
return 0
-
+
def delete(self, obj, entire_dir=False, **kwargs):
path = self.get_filename(obj, **kwargs)
extra_dir = kwargs.get('extra_dir', None)
@@ -327,16 +327,16 @@
content = data_file.read(count)
data_file.close()
return content
-
+
def get_filename(self, obj, **kwargs):
path = self._construct_path(obj, old_style=True, **kwargs)
- # For backward compatibility, check root path first; otherwise, construct
+ # For backward compatibility, check root path first; otherwise, construct
# and return hashed path
if os.path.exists(path):
return path
else:
return self._construct_path(obj, **kwargs)
-
+
def update_from_file(self, obj, file_name=None, create=False, **kwargs):
""" `create` parameter is not used in this implementation """
if create:
@@ -345,12 +345,12 @@
try:
shutil.copy(file_name, self.get_filename(obj, **kwargs))
except IOError, ex:
- log.critical('Error copying %s to %s: %s' % (file_name,
+ log.critical('Error copying %s to %s: %s' % (file_name,
self._get_filename(obj, **kwargs), ex))
-
+
def get_object_url(self, obj, **kwargs):
return None
-
+
def get_store_usage_percent(self):
st = os.statvfs(self.file_path)
return (float(st.f_blocks - st.f_bavail)/st.f_blocks) * 100
@@ -361,10 +361,9 @@
Object store that uses a directory for caching files, but defers and writes
back to another object store.
"""
-
+
def __init__(self, path, backend):
super(CachingObjectStore, self).__init__(self, path, backend)
-
class S3ObjectStore(ObjectStore):
@@ -381,16 +380,24 @@
self.s3_conn = S3Connection()
self.bucket = self._get_bucket(self.config.s3_bucket)
self.use_rr = self.config.use_reduced_redundancy
- self.cache_size = self.config.object_store_cache_size * 1073741824 # Convert GBs to bytes
+ self.cache_size = self.config.object_store_cache_size
self.transfer_progress = 0
# Clean cache only if value is set in universe_wsgi.ini
if self.cache_size != -1:
+ # Convert GBs to bytes for comparison
+ self.cache_size = self.cache_size * 1073741824
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.cache_monitor_thread = threading.Thread(target=self.__cache_monitor)
self.cache_monitor_thread.start()
log.info("Cache cleaner manager started")
-
+ # Test if 'axel' is available for parallel download and pull the key into cache
+ try:
+ subprocess.call('axel')
+ self.use_axel = True
+ except OSError:
+ self.use_axel = False
+
def __cache_monitor(self):
time.sleep(2) # Wait for things to load before starting the monitor
while self.running:
@@ -421,19 +428,19 @@
delete_this_much = total_size - cache_limit
self.__clean_cache(file_list, delete_this_much)
self.sleeper.sleep(30) # Test cache size every 30 seconds?
-
+
def __clean_cache(self, file_list, delete_this_much):
""" Keep deleting files from the file_list until the size of the deleted
files is greater than the value in delete_this_much parameter.
-
+
:type file_list: list
:param file_list: List of candidate files that can be deleted. This method
will start deleting files from the beginning of the list so the list
should be sorted accordingly. The list must contains 3-element tuples,
positioned as follows: position 0 holds file last accessed timestamp
- (as time.struct_time), position 1 holds file path, and position 2 has
+ (as time.struct_time), position 1 holds file path, and position 2 has
file size (e.g., (<access time>, /mnt/data/dataset_1.dat), 472394)
-
+
:type delete_this_much: int
:param delete_this_much: Total size of files, in bytes, that should be deleted.
"""
@@ -454,7 +461,7 @@
else:
log.debug("Cache cleaning done. Total space freed: %s" % convert_bytes(deleted_amount))
return
-
+
def _get_bucket(self, bucket_name):
""" Sometimes a handle to a bucket is not established right away so try
it a few times. Raise error is connection is not established. """
@@ -463,13 +470,13 @@
bucket = self.s3_conn.get_bucket(bucket_name)
log.debug("Using S3 object store; got bucket '%s'" % bucket.name)
return bucket
- except S3ResponseError:
+ except S3ResponseError:
log.debug("Could not get bucket '%s', attempt %s/5" % (bucket_name, i+1))
time.sleep(2)
# All the attempts have been exhausted and connection was not established,
# raise error
raise S3ResponseError
-
+
def _fix_permissions(self, rel_path):
""" Set permissions on rel_path"""
for basedir, dirs, files in os.walk(rel_path):
@@ -478,10 +485,10 @@
path = os.path.join(basedir, f)
# Ignore symlinks
if os.path.islink(path):
- continue
+ continue
util.umask_fix_perms( path, self.config.umask, 0666, self.config.gid )
-
- def _construct_path(self, obj, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None):
+
+ def _construct_path(self, obj, dir_only=None, extra_dir=None, extra_dir_at_root=False, alt_name=None, **kwargs):
rel_path = os.path.join(*directory_hash_id(obj.id))
if extra_dir is not None:
if extra_dir_at_root:
@@ -496,10 +503,10 @@
def _get_cache_path(self, rel_path):
return os.path.abspath(os.path.join(self.staging_path, rel_path))
-
+
def _get_transfer_progress(self):
return self.transfer_progress
-
+
def _get_size_in_s3(self, rel_path):
try:
key = self.bucket.get_key(rel_path)
@@ -510,7 +517,7 @@
except Exception, ex:
log.error("Could not get reference to the key object '%s'; returning -1 for key size: %s" % (rel_path, ex))
return -1
-
+
def _key_exists(self, rel_path):
exists = False
try:
@@ -532,15 +539,13 @@
if rel_path[0] == '/':
raise
return exists
-
+
def _in_cache(self, rel_path):
""" Check if the given dataset is in the local cache and return True if so. """
# log.debug("------ Checking cache for rel_path %s" % rel_path)
cache_path = self._get_cache_path(rel_path)
- exists = os.path.exists(cache_path)
- # print "Checking chache for %s; returning %s" % (cache_path, exists)
- return exists
- # EATODO: Part of checking if a file is in cache should be to ensure the
+ return os.path.exists(cache_path)
+ # TODO: Part of checking if a file is in cache should be to ensure the
# size of the cached file matches that on S3. Once the upload tool explicitly
# creates, this check sould be implemented- in the mean time, it's not
# looking likely to be implementable reliably.
@@ -562,11 +567,7 @@
# # print "***3 %s found in cache but not in S3 (in_cache=True)" % cache_path
# exists = True
# else:
- # # print "***4 %s does not exist (in_cache=False)" % cache_path
- # exists = False
- # # print "Checking cache for %s; returning %s" % (cache_path, exists)
- # return exists
- # # return False
+ # return False
def _pull_into_cache(self, rel_path):
# Ensure the cache directory structure exists (e.g., dataset_#_files/)
@@ -577,45 +578,39 @@
ok = self._download(rel_path)
self._fix_permissions(self._get_cache_path(rel_path_dir))
return ok
-
+
def _transfer_cb(self, complete, total):
self.transfer_progress += 10
- # print "Dataset transfer progress: %s" % self.transfer_progress
-
+
def _download(self, rel_path):
try:
log.debug("Pulling key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
key = self.bucket.get_key(rel_path)
- # Test is cache is large enough to hold the new file
- if key.size > self.cache_size:
+ # Test if cache is large enough to hold the new file
+ if self.cache_size > 0 and key.size > self.cache_size:
log.critical("File %s is larger (%s) than the cache size (%s). Cannot download." \
% (rel_path, key.size, self.cache_size))
return False
- # Test if 'axel' is available for parallel download and pull the key into cache
- try:
- ret_code = subprocess.call('axel')
- except OSError:
- ret_code = 127
- if ret_code == 127:
- self.transfer_progress = 0 # Reset transfer progress counter
- key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
- #print "(ssss1) Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
- return True
- else:
+ if self.use_axel:
+ log.debug("Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
ncores = multiprocessing.cpu_count()
url = key.generate_url(7200)
ret_code = subprocess.call("axel -a -n %s '%s'" % (ncores, url))
if ret_code == 0:
- #print "(ssss2) Parallel pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path))
return True
+ else:
+ log.debug("Pulled key '%s' into cache to %s" % (rel_path, self._get_cache_path(rel_path)))
+ self.transfer_progress = 0 # Reset transfer progress counter
+ key.get_contents_to_filename(self._get_cache_path(rel_path), cb=self._transfer_cb, num_cb=10)
+ return True
except S3ResponseError, ex:
log.error("Problem downloading key '%s' from S3 bucket '%s': %s" % (rel_path, self.bucket.name, ex))
return False
-
+
def _push_to_s3(self, rel_path, source_file=None, from_string=None):
- """
- Push the file pointed to by `rel_path` to S3 naming the key `rel_path`.
- If `source_file` is provided, push that file instead while still using
+ """
+ Push the file pointed to by `rel_path` to S3 naming the key `rel_path`.
+ If `source_file` is provided, push that file instead while still using
`rel_path` as the key name.
If `from_string` is provided, set contents of the file to the value of
the string
@@ -651,16 +646,16 @@
except S3ResponseError, ex:
log.error("Trouble pushing S3 key '%s' from file '%s': %s" % (rel_path, source_file, ex))
return False
-
+
def file_ready(self, obj, **kwargs):
- """ A helper method that checks if a file corresponding to a dataset
+ """ A helper method that checks if a file corresponding to a dataset
is ready and available to be used. Return True if so, False otherwise."""
rel_path = self._construct_path(obj, **kwargs)
# Make sure the size in cache is available in its entirety
if self._in_cache(rel_path) and os.path.getsize(self._get_cache_path(rel_path)) == self._get_size_in_s3(rel_path):
return True
return False
-
+
def exists(self, obj, **kwargs):
in_cache = in_s3 = False
rel_path = self._construct_path(obj, **kwargs)
@@ -685,10 +680,10 @@
return True
else:
return False
-
+
def create(self, obj, **kwargs):
if not self.exists(obj, **kwargs):
- #print "S3 OS creating a dataset with ID %s" % dataset_id
+ #print "S3 OS creating a dataset with ID %s" % kwargs
# Pull out locally used fields
extra_dir = kwargs.get('extra_dir', None)
extra_dir_at_root = kwargs.get('extra_dir_at_root', False)
@@ -696,7 +691,8 @@
alt_name = kwargs.get('alt_name', None)
# print "---- Processing: %s; %s" % (alt_name, locals())
# Construct hashed path
- rel_path = os.path.join(*directory_hash_id(obj))
+ rel_path = os.path.join(*directory_hash_id(obj.id))
+
# Optionally append extra_dir
if extra_dir is not None:
if extra_dir_at_root:
@@ -717,13 +713,13 @@
rel_path = os.path.join(rel_path, alt_name if alt_name else "dataset_%s.dat" % obj.id)
open(os.path.join(self.staging_path, rel_path), 'w').close()
self._push_to_s3(rel_path, from_string='')
-
+
def empty(self, obj, **kwargs):
if self.exists(obj, **kwargs):
return bool(self.size(obj, **kwargs) > 0)
else:
raise ObjectNotFound()
-
+
def size(self, obj, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
if self._in_cache(rel_path):
@@ -735,7 +731,7 @@
return self._get_size_in_s3(rel_path)
log.warning("Did not find dataset '%s', returning 0 for size" % rel_path)
return 0
-
+
def delete(self, obj, entire_dir=False, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
extra_dir = kwargs.get('extra_dir', None)
@@ -765,7 +761,7 @@
except OSError, ex:
log.error('%s delete error %s' % (self._get_filename(obj, **kwargs), ex))
return False
-
+
def get_data(self, obj, start=0, count=-1, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
# Check cache first and get file if not there
@@ -779,7 +775,7 @@
content = data_file.read(count)
data_file.close()
return content
-
+
def get_filename(self, obj, **kwargs):
#print "S3 get_filename for dataset: %s" % dataset_id
dir_only = kwargs.get('dir_only', False)
@@ -809,8 +805,8 @@
# return cache_path
raise ObjectNotFound()
# return cache_path # Until the upload tool does not explicitly create the dataset, return expected path
-
- def update_from_file(self, obj, file_name=None, create=False, **kwargs):
+
+ def update_from_file(self, obj, file_name=None, create=False, **kwargs):
if create:
self.create(obj, **kwargs)
if self.exists(obj, **kwargs):
@@ -833,7 +829,7 @@
self._push_to_s3(rel_path, source_file)
else:
raise ObjectNotFound()
-
+
def get_object_url(self, obj, **kwargs):
if self.exists(obj, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
@@ -854,7 +850,7 @@
first store where the object exists is used, objects are created in a
store selected randomly, but with weighting.
"""
-
+
def __init__(self, config):
super(DistributedObjectStore, self).__init__()
self.distributed_config = config.distributed_object_store_config_file
@@ -1003,7 +999,7 @@
first store where the object exists is used, objects are always created
in the first store.
"""
-
+
def __init__(self, backends=[]):
super(HierarchicalObjectStore, self).__init__()
@@ -1027,7 +1023,7 @@
if bytes is None:
bytes = 0
bytes = float(bytes)
-
+
if bytes >= 1099511627776:
terabytes = bytes / 1099511627776
size = '%.2fTB' % terabytes
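One detail in this object-store refactor that generalizes well: the availability probe for axel now runs once in __init__ and the result is cached on self.use_axel, instead of being re-run inside every _download call. A reduced sketch of that probe-once pattern, under the same assumption the diff makes (an OSError from subprocess.call means the binary is absent); the download helper here is illustrative, not the store's actual method:

import multiprocessing
import subprocess

def detect_axel():
    # Probe once at startup; OSError means 'axel' is not on PATH.
    try:
        subprocess.call('axel')
        return True
    except OSError:
        return False

def download(url, use_axel):
    if use_axel:
        # Parallel download, one connection per core.
        ncores = multiprocessing.cpu_count()
        cmd = "axel -a -n %s '%s'" % (ncores, url)
        return subprocess.call(cmd, shell=True) == 0
    # Otherwise a single-stream fetch would go here (boto's
    # get_contents_to_filename in the actual changeset).
    return False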
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/958d1f8b2caf/
changeset: 958d1f8b2caf
user: Scott McManus
date: 2012-06-19 20:10:08
summary: Merge issues.
affected #: 2 files
diff -r 22eea899376f142134c30abafb6e156a514574dd -r 958d1f8b2caf9595d79e66d1eb1705110a1a6784 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -310,7 +310,7 @@
# Check exit codes and match regular expressions against stdout and
# stderr if this tool was configured to do so.
if ( len( self.tool.stdio_regexes ) > 0 or
- len( self.tool.exit_codes ) > 0 ):
+ len( self.tool.stdio_exit_codes ) > 0 ):
# We will check the exit code ranges in the order in which
# they were specified. Each exit_code is a ToolStdioExitCode
# that includes an applicable range. If the exit code was in
diff -r 22eea899376f142134c30abafb6e156a514574dd -r 958d1f8b2caf9595d79e66d1eb1705110a1a6784 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -77,6 +77,7 @@
self.job_file = None
self.ofile = None
self.efile = None
+ self.rcfile = None
self.runner_url = None
class DRMAAJobRunner( BaseJobRunner ):
@@ -168,6 +169,7 @@
# define job attributes
ofile = "%s.drmout" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
efile = "%s.drmerr" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
+ rcfile = "%s.drmrc" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
job_name = "g%s_%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id, job_wrapper.user )
job_name = ''.join( map( lambda x: x if x in ( string.letters + string.digits + '_' ) else '_', job_name ) )
@@ -176,6 +178,7 @@
jt.jobName = job_name
jt.outputPath = ":%s" % ofile
jt.errorPath = ":%s" % efile
+ jt.returnCodePath = ":%s" % rcfile
native_spec = self.get_native_spec( runner_url )
if native_spec is not None:
jt.nativeSpecification = native_spec
@@ -228,6 +231,7 @@
drm_job_state.job_id = job_id
drm_job_state.ofile = ofile
drm_job_state.efile = efile
+ drm_job_state.rcfile = rcfile
drm_job_state.job_file = jt.remoteCommand
drm_job_state.old_state = 'new'
drm_job_state.running = False
@@ -312,6 +316,7 @@
"""
ofile = drm_job_state.ofile
efile = drm_job_state.efile
+ rcfile = drm_job_state.rcfile
job_file = drm_job_state.job_file
# collect the output
# wait for the files to appear
@@ -377,6 +382,7 @@
drm_job_state = DRMAAJobState()
drm_job_state.ofile = "%s.drmout" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
drm_job_state.efile = "%s.drmerr" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
+ drm_job_state.rcfile = "%s.drmrc" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
drm_job_state.job_file = "%s/galaxy_%s.sh" % (self.app.config.cluster_files_directory, job.id)
drm_job_state.job_id = str( job.job_runner_external_id )
drm_job_state.runner_url = job_wrapper.get_job_runner()
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/22eea899376f/
changeset: 22eea899376f
user: Scott McManus
date: 2012-06-19 18:18:11
summary: Fixing vimdiff errors.
affected #: 1 file
diff -r 3b2dc0a51d147c3fc8f0560d8fd3ebefc0792e99 -r 22eea899376f142134c30abafb6e156a514574dd lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2361,12 +2361,12 @@
installed_tool_dependencies = self.tool_shed_repository.tool_dependencies
else:
installed_tool_dependencies = None
- for requirement in self.requirements:
- # TODO: currently only supporting requirements of type package,
- # need to implement some mechanism for mapping other types
- # back to packages
+ for requirement in self.requirements:
+ # TODO: currently only supporting requirements of type package,
+ # need to implement some mechanism for mapping other types
+ # back to packages
log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
- if requirement.type == 'package':
+ if requirement.type == 'package':
script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
version=requirement.version,
type=requirement.type,
@@ -2656,7 +2656,7 @@
param_dict.update( { 'type' : 'number', 'init_value' : input.value,
'html' : urllib.quote( input.get_html( trans ) ),
'min': input.min,
- 'max': input.max
+ 'max': input.max,
'value': input.value
} )
else:
commit/galaxy-central: Scott McManus: Added application of regular expressions and exit code. Pulling exit codes from the runners will be part of a separate submission.
by Bitbucket 19 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3b2dc0a51d14/
changeset: 3b2dc0a51d14
user: Scott McManus
date: 2012-06-19 17:35:36
summary: Added application of regular expressions and exit code. Pulling exit
codes from the runners will be part of a separate submission.
affected #: 3 files
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 3b2dc0a51d147c3fc8f0560d8fd3ebefc0792e99 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -305,10 +305,81 @@
if job.state == job.states.DELETED or job.state == job.states.ERROR:
#ERROR at this point means the job was deleted by an administrator.
return self.fail( job.info )
- if stderr:
- job.state = job.states.ERROR
+
+ err_msg = ""
+ # Check exit codes and match regular expressions against stdout and
+ # stderr if this tool was configured to do so.
+ if ( len( self.tool.stdio_regexes ) > 0 or
+ len( self.tool.exit_codes ) > 0 ):
+ # We will check the exit code ranges in the order in which
+ # they were specified. Each exit_code is a ToolStdioExitCode
+ # that includes an applicable range. If the exit code was in
+ # that range, then apply the error level and add in a message.
+ # If we've reached a fatal error rule, then stop.
+ max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
+ for exit_code in self.tool.stdio_exit_codes:
+ # TODO: Fetch the exit code from the .rc file:
+ tool_exit_code = 0
+ if ( tool_exit_code >= exit_code.range_start and
+ tool_exit_code <= exit_code.range_end ):
+ if None != exit_code.desc:
+ err_msg += exit_code.desc
+ # TODO: Find somewhere to stick the err_msg - possibly to
+ # the source (stderr/stdout), possibly in a new db column.
+ max_error_level = max( max_error_level,
+ exit_code.error_level )
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ break
+ # If there is a regular expression for scanning stdout/stderr,
+ # then we assume that the tool writer overwrote the default
+ # behavior of just setting an error if there is *anything* on
+ # stderr.
+ if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
+ # We'll examine every regex. Each regex specifies whether
+ # it is to be run on stdout, stderr, or both. (It is
+ # possible for neither stdout nor stderr to be scanned,
+ # but those won't be scanned.) We record the highest
+ # error level, which are currently "warning" and "fatal".
+ # If fatal, then we set the job's state to ERROR.
+ # If warning, then we still set the job's state to OK
+ # but include a message. We'll do this if we haven't seen
+ # a fatal error yet
+ for regex in self.tool.stdio_regexes:
+ # If ( this regex should be matched against stdout )
+ # - Run the regex's match pattern against stdout
+ # - If it matched, then determine the error level.
+ # o If it was fatal, then we're done - break.
+ # Repeat the stdout stuff for stderr.
+ # TODO: Collapse this into a single function.
+ if ( regex.stdout_match ):
+ regex_match = re.search( regex.match, stdout )
+ if ( regex_match ):
+ err_msg += self.regex_err_msg( regex_match, regex )
+ max_error_level = max( max_error_level, regex.error_level )
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ break
+ if ( regex.stderr_match ):
+ regex_match = re.search( regex.match, stderr )
+ if ( regex_match ):
+ err_msg += self.regex_err_msg( regex_match, regex )
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ break
+ # If we encountered a fatal error, then we'll need to set the
+ # job state accordingly. Otherwise the job is ok:
+ if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ job.state = job.states.ERROR
+ else:
+ job.state = job.states.OK
+ # When there are no regular expressions and no exit codes to check,
+ # default to the previous behavior: when there's anything on stderr
+ # the job has an error, and the job is ok otherwise.
else:
- job.state = job.states.OK
+ if stderr:
+ job.state = job.states.ERROR
+ else:
+ job.state = job.states.OK
if self.version_string_cmd:
version_filename = self.get_version_string_path()
if os.path.exists(version_filename):
@@ -330,6 +401,7 @@
return self.fail( "Job %s's output dataset(s) could not be read" % job.id )
job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
+
def in_directory( file, directory ):
# Make both absolute.
directory = os.path.abspath( directory )
@@ -370,7 +442,11 @@
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
self.app.object_store.update_from_file(dataset.dataset, create=True)
- if context['stderr']:
+ # TODO: The context['stderr'] holds stderr's contents. An error
+ # only really occurs if the job also has an error. So check the
+ # job's state:
+ #if context['stderr']:
+ if job.states.ERROR == job.state:
dataset.blurb = "error"
elif dataset.has_data():
# If the tool was expected to set the extension, attempt to retrieve it
@@ -385,7 +461,14 @@
( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) \
and self.app.config.retry_metadata_internally ):
dataset.set_meta( overwrite = False )
- elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and not context['stderr']:
+ # TODO: The context['stderr'] used to indicate that there
+ # was an error. Now we must rely on the job's state instead;
+ # that indicates whether the tool relied on stderr to indicate
+ # the state or whether the tool used exit codes and regular
+ # expressions to do so. So we use
+ # job.state == job.states.ERROR to replace this same test.
+ #elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and not context['stderr']:
+ elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != job.state:
dataset._state = model.Dataset.states.FAILED_METADATA
else:
#load metadata from file
@@ -415,7 +498,12 @@
if dataset.ext == 'auto':
dataset.extension = 'txt'
self.sa_session.add( dataset )
- if context['stderr']:
+ # TODO: job.states.ERROR == job.state now replaces checking
+ # stderr for a problem:
+ #if context['stderr']:
+ if job.states.ERROR == job.state:
+ log.debug( "setting dataset state to ERROR" )
+ # TODO: This is where the state is being set to error. Change it!
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
@@ -480,6 +568,29 @@
if self.app.config.cleanup_job == 'always' or ( not stderr and self.app.config.cleanup_job == 'onsuccess' ):
self.cleanup()
+ def regex_err_msg( self, match, regex ):
+ """
+ Return a message about the match on tool output using the given
+ ToolStdioRegex regex object. The regex_match is a MatchObject
+ that will contain the string matched on.
+ """
+ # Get the description for the error level:
+ err_msg = galaxy.tools.StdioErrorLevel.desc( regex.error_level ) + ": "
+ # If there's a description for the regular expression, then use it.
+ # Otherwise, we'll take the first 256 characters of the match.
+ if None != regex.desc:
+ err_msg += regex.desc
+ else:
+ mstart = match.start()
+ mend = match.end()
+ err_msg += "Matched on "
+ # TODO: Move the constant 256 somewhere else besides here.
+ if mend - mstart > 256:
+ err_msg += match.string[ mstart : mstart+256 ] + "..."
+ else:
+ err_msg += match.string[ mstart: mend ]
+ return err_msg
+
def cleanup( self ):
# remove temporary files
try:
diff -r 7c495f835a1d436ad33dff6107784f106cc24980 -r 3b2dc0a51d147c3fc8f0560d8fd3ebefc0792e99 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -37,6 +37,23 @@
log = logging.getLogger( __name__ )
+# These determine stdio-based error levels from matching on regular expressions
+# and exit codes. They are meant to be used comparatively, such as showing
+# that warning < fatal. This is really meant to just be an enum.
+class StdioErrorLevel( object ):
+ NO_ERROR = 0
+ WARNING = 1
+ FATAL = 2
+ MAX = 2
+ descs = {NO_ERROR : 'No error', WARNING : 'Warning', FATAL : 'Fatal error'}
+ @staticmethod
+ def desc( error_level ):
+ err_msg = "Unknown error"
+ if ( error_level > 0 and
+ error_level <= StdioErrorLevel.MAX ):
+ err_msg = StdioErrorLevel.descs[ error_level ]
+ return err_msg
+
class ToolNotFoundException( Exception ):
pass
@@ -1140,6 +1157,12 @@
# a warning and skip to the next.
for exit_code_elem in ( stdio_elem.findall( "exit_code" ) ):
exit_code = ToolStdioExitCode()
+ # Each exit code has an optional description that can be
+ # part of the "desc" or "description" attributes:
+ exit_code.desc = exit_code_elem.get( "desc" )
+ if None == exit_code.desc:
+ exit_code.desc = exit_code_elem.get( "description" )
+ # Parse the error level:
exit_code.error_level = (
self.parse_error_level( exit_code_elem.get( "level" )))
code_range = exit_code_elem.get( "range", "" )
@@ -1155,11 +1178,9 @@
# X:Y - Split on the colon. We do not allow a colon
# without a beginning or end, though we could.
# Also note that whitespace is eliminated.
- # TODO: Turn this into a single match - it will be
- # more efficient
- string.strip( code_range )
+ # TODO: Turn this into a single match - it should be
+ # more efficient.
code_range = re.sub( "\s", "", code_range )
- log.debug( "Code range after sub: %s" % code_range )
code_ranges = re.split( ":", code_range )
if ( len( code_ranges ) == 2 ):
if ( None == code_ranges[0] or '' == code_ranges[0] ):
@@ -1216,6 +1237,12 @@
for regex_elem in ( stdio_elem.findall( "regex" ) ):
# TODO: Fill in ToolStdioRegex
regex = ToolStdioRegex()
+ # Each regex has an optional description that can be
+ # part of the "desc" or "description" attributes:
+ regex.desc = regex_elem.get( "desc" )
+ if None == regex.desc:
+ regex.desc = regex_elem.get( "description" )
+ # Parse the error level
regex.error_level = (
self.parse_error_level( regex_elem.get( "level" ) ) )
regex.match = regex_elem.get( "match", "" )
@@ -1243,9 +1270,9 @@
# and anything to do with "err". If neither stdout nor
# stderr were specified, then raise a warning and scan both.
for src in src_list:
- if re.match( "out", src, re.IGNORECASE ):
+ if re.search( "out", src, re.IGNORECASE ):
regex.stdout_match = True
- if re.match( "err", src, re.IGNORECASE ):
+ if re.search( "err", src, re.IGNORECASE ):
regex.stderr_match = True
if (not regex.stdout_match and not regex.stderr_match):
log.warning( "Unable to determine if tool stream "
@@ -1262,24 +1289,25 @@
trace_msg = repr( traceback.format_tb( trace ) )
log.error( "Traceback: %s" % trace_msg )
+ # TODO: This method doesn't have to be part of the Tool class.
def parse_error_level( self, err_level ):
"""
Return fatal or warning depending on what's in the error level.
This will assume that the error level fatal is returned if it's
- unparsable. (This doesn't have to be part of the Tool class.)
+ unparsable.
"""
# What should the default be? I'm claiming it should be fatal:
# if you went to the trouble to write the rule, then it's
# probably a problem. I think there are easily three substantial
# camps: make it fatal, make it a warning, or, if it's missing,
- # just throw an exception and ignore it.
- return_level = "fatal"
+ # just throw an exception and ignore the exit_code element.
+ return_level = StdioErrorLevel.FATAL
try:
if ( None != err_level ):
if ( re.search( "warning", err_level, re.IGNORECASE ) ):
- return_level = "warning"
+ return_level = StdioErrorLevel.WARNING
elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
- return_level = "fatal"
+ return_level = StdioErrorLevel.FATAL
except Exception, e:
log.error( "Exception in parse_error_level "
+ str(sys.exc_info() ) )
@@ -2333,16 +2361,18 @@
installed_tool_dependencies = self.tool_shed_repository.tool_dependencies
else:
installed_tool_dependencies = None
- for requirement in self.requirements:
- # TODO: currently only supporting requirements of type package,
- # need to implement some mechanism for mapping other types
- # back to packages
+ for requirement in self.requirements:
+ # TODO: currently only supporting requirements of type package,
+ # need to implement some mechanism for mapping other types
+ # back to packages
log.debug( "Building dependency shell command for dependency '%s'", requirement.name )
- if requirement.type == 'package':
+ if requirement.type == 'package':
script_file, base_path, version = self.app.toolbox.dependency_manager.find_dep( name=requirement.name,
version=requirement.version,
type=requirement.type,
installed_tool_dependencies=installed_tool_dependencies )
if script_file is None and base_path is None:
log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
elif script_file is None:
@@ -2617,7 +2647,7 @@
elif isinstance( input, SelectToolParameter ):
param_dict.update( { 'type' : 'select',
'html' : urllib.quote( input.get_html( trans ) ),
- 'options': input.static_options
+ 'options': input.static_options
} )
elif isinstance( input, Conditional ):
# TODO.
@@ -2626,7 +2656,7 @@
param_dict.update( { 'type' : 'number', 'init_value' : input.value,
'html' : urllib.quote( input.get_html( trans ) ),
'min': input.min,
- 'max': input.max,
+ 'max': input.max,
'value': input.value
} )
else:
@@ -2798,6 +2828,7 @@
self.stderr_match = False
# TODO: Define a common class or constant for error level:
self.error_level = "fatal"
+ self.desc = ""
class ToolStdioExitCode( object ):
"""
@@ -2809,6 +2840,7 @@
self.range_end = float( "inf" )
# TODO: Define a common class or constant for error level:
self.error_level = "fatal"
+ self.desc = ""
class ToolParameterValueWrapper( object ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Add the ability to browse, uninstall and reinstall tool dependencies installed with tool shed repositories.
by Bitbucket 18 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7c495f835a1d/
changeset: 7c495f835a1d
user: greg
date: 2012-06-18 18:15:04
summary: Add the ability to browse, uninstall and reinstall tool dependencies installed with tool shed repositories.
affected #: 19 files
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2701,13 +2701,8 @@
return installed_dependencies
@property
def missing_tool_dependencies( self ):
- """Return the repository's tool dependencies that are not currently installed."""
- def add_missing_dependency( missing_dependencies_dict, name, version, type, installed_changeset_revision=None ):
- missing_dependencies_dict[ name ] = dict( version=version,
- type=type,
- installed_changeset_revision=installed_changeset_revision )
- return missing_dependencies_dict
- missing_dependencies = {}
+ """Return the repository's tool dependencies that are not currently installed, and may not ever have been installed."""
+ missing_dependencies = []
# Get the dependency information from the metadata for comparison against the installed tool dependencies.
tool_dependencies = self.metadata.get( 'tool_dependencies', None )
if tool_dependencies:
@@ -2717,25 +2712,27 @@
type = requirements_dict[ 'type' ]
if self.tool_dependencies:
found = False
- for installed_dependency in self.tool_dependencies:
- if installed_dependency.name==name and installed_dependency.version==version and installed_dependency.type==type:
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.name==name and tool_dependency.version==version and tool_dependency.type==type:
found = True
- if installed_dependency.uninstalled:
- missing_dependencies = add_missing_dependency( missing_dependencies,
- installed_dependency.name,
- installed_dependency.version,
- installed_dependency.type,
- installed_dependency.installed_changeset_revision )
+ if tool_dependency.uninstalled:
+ missing_dependencies.append( ( tool_dependency.name, tool_dependency.version, tool_dependency.type ) )
break
if not found:
- missing_dependencies = add_missing_dependency( missing_dependencies, name, version, type )
- return missing_dependencies
- return None
+ missing_dependencies.append( ( name, version, type ) )
+ return missing_dependencies
+ @property
+ def uninstalled_tool_dependencies( self ):
+ """Return the repository's tool dependencies that have been uninstalled."""
+ uninstalled_tool_dependencies = []
+ for tool_dependency in self.tool_dependencies:
+ if tool_dependency.uninstalled:
+ uninstalled_tool_dependencies.append( tool_dependency )
+ return uninstalled_tool_dependencies
class ToolDependency( object ):
- def __init__( self, tool_shed_repository_id=None, installed_changeset_revision=None, name=None, version=None, type=None, uninstalled=False ):
+ def __init__( self, tool_shed_repository_id=None, name=None, version=None, type=None, uninstalled=False ):
self.tool_shed_repository_id = tool_shed_repository_id
- self.installed_changeset_revision = installed_changeset_revision
self.name = name
self.version = version
self.type = type
@@ -2746,7 +2743,8 @@
self.version,
self.tool_shed_repository.owner,
self.tool_shed_repository.name,
- self.installed_changeset_revision )
+ self.tool_shed_repository.installed_changeset_revision )
+
class ToolVersion( object ):
def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
self.id = id
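Since missing_tool_dependencies now returns a plain list of ( name, version, type ) tuples rather than a dictionary, callers unpack the tuples directly, as the admin_toolshed controller further down does:

    for name, version, type in repository.missing_tool_dependencies:
        dependency_key = '%s/%s' % ( name, version )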
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -395,7 +395,6 @@
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
- Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "name", TrimmedString( 255 ) ),
Column( "version", Text ),
Column( "type", TrimmedString( 40 ) ),
@@ -1681,6 +1680,7 @@
backref='tool_shed_repository' ),
tool_dependencies=relation( ToolDependency,
primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ),
+ order_by=ToolDependency.table.c.name,
backref='tool_shed_repository' ) ) )
assign_mapper( context, ToolDependency, ToolDependency.table )
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0101_drop_installed_changeset_revision_column.py
@@ -0,0 +1,39 @@
+"""
+Migration script to drop the installed_changeset_revision column from the tool_dependency table.
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+import sys, logging
+from galaxy.model.custom_types import *
+from sqlalchemy.exc import *
+import datetime
+now = datetime.datetime.utcnow
+
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ try:
+ ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
+ except NoSuchTableError:
+ ToolDependency_table = None
+ log.debug( "Failed loading table tool_dependency" )
+ if ToolDependency_table:
+ try:
+ col = ToolDependency_table.c.installed_changeset_revision
+ col.drop()
+ except Exception, e:
+ log.debug( "Dropping column 'installed_changeset_revision' from tool_dependency table failed: %s" % ( str( e ) ) )
+def downgrade():
+ pass
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -120,8 +120,8 @@
if not is_displayed:
is_displayed = True
return is_displayed, tool_sections
- def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, installed_changeset_revision,
- ctx_rev, install_dependencies ):
+ def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description,
+ installed_changeset_revision, ctx_rev, install_dependencies ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
# The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places
@@ -176,7 +176,6 @@
# Install tool dependencies.
status, message = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
- installed_changeset_revision=installed_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
if status != 'ok' and message:
print 'The following error occurred from the InstallManager while installing tool dependencies:'
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -48,7 +48,6 @@
tree = util.parse_xml( tool_panel_config )
root = tree.getroot()
for elem in root:
- missing_tool_dependencies = []
if elem.tag == 'tool':
missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
elif elem.tag == 'section':
@@ -116,6 +115,8 @@
self.datatypes_registry = galaxy.datatypes.registry.Registry()
# Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+ # Tool data tables
+ self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_path, self.config.tool_data_table_config_path )
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
if self.config.migrated_tools_config not in tool_configs:
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,9 +1,9 @@
import os, shutil, tarfile, urllib2
from galaxy.datatypes.checkers import *
-DIRECTORY_BUILD_COMMAND_NAMES = [ 'change_directory' ]
-MOVE_BUILD_COMMAND_NAMES = [ 'move_directory_files', 'move_file' ]
-ALL_BUILD_COMMAND_NAMES = DIRECTORY_BUILD_COMMAND_NAMES + MOVE_BUILD_COMMAND_NAMES
+MISCELLANEOUS_ACTIONS = [ 'change_directory' ]
+MOVE_ACTIONS = [ 'move_directory_files', 'move_file' ]
+ALL_ACTIONS = MISCELLANEOUS_ACTIONS + MOVE_ACTIONS
def extract_tar( file_name, file_path ):
if isgzip( file_name ) or isbz2( file_name ):
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -32,32 +32,20 @@
yield work_dir
if os.path.exists( work_dir ):
local( 'rm -rf %s' % work_dir )
-def handle_post_build_processing( tool_dependency_dir, install_dir, package_name=None ):
- cmd = "echo 'PATH=%s/bin:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( install_dir, install_dir, install_dir )
+def handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=None ):
+ cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
message = ''
output = local( cmd, capture=True )
log_results( cmd, output, os.path.join( install_dir, 'env_sh.log' ) )
if output.return_code:
message = '%s %s' % ( message, str( output.stderr ) )
- """
- Since automatic dependency installation requires a version attribute in the tool's <requirement> tag, we don't have to
- create a default symlink, but we'll keep this code around for a bit just in case we need it later.
- if package_name:
- package_dir = os.path.join( tool_dependency_dir, package_name )
- package_default = os.path.join( package_dir, 'default' )
- if not os.path.islink( package_default ):
- cmd = 'ln -s %s %s' % ( install_dir, package_default )
- output = local( cmd, capture=True )
- if output.return_code:
- message = '%s\n%s' % ( message, str( output.stderr ) )
- """
return message
def install_and_build_package( params_dict ):
"""Install a Galaxy tool dependency package either via a url or a mercurial or git clone command."""
install_dir = params_dict[ 'install_dir' ]
download_url = params_dict.get( 'download_url', None )
clone_cmd = params_dict.get( 'clone_cmd', None )
- build_commands = params_dict.get( 'build_commands', None )
+ actions = params_dict.get( 'actions', None )
package_name = params_dict.get( 'package_name', None )
with make_tmp_dir() as work_dir:
with lcd( work_dir ):
@@ -75,36 +63,36 @@
if output.return_code:
return '%s. ' % str( output.stderr )
dir = package_name
- if build_commands:
+ if actions:
with lcd( dir ):
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
- for build_command_tup in build_commands:
- build_command_key, build_command_dict = build_command_tup
- if build_command_key.find( 'v^v^v' ) >= 0:
- build_command_items = build_command_key.split( 'v^v^v' )
- build_command_name = build_command_items[ 0 ]
- build_command = build_command_items[ 1 ]
- elif build_command_key in common_util.ALL_BUILD_COMMAND_NAMES:
- build_command_name = build_command_key
+ for action_tup in actions:
+ action_key, action_dict = action_tup
+ if action_key.find( 'v^v^v' ) >= 0:
+ action_items = action_key.split( 'v^v^v' )
+ action_name = action_items[ 0 ]
+ action = action_items[ 1 ]
+ elif action_key in common_util.ALL_ACTIONS:
+ action_name = action_key
else:
- build_command_name = None
- if build_command_name:
- if build_command_name == 'change_directory':
- current_dir = os.path.join( current_dir, build_command )
+ action_name = None
+ if action_name:
+ if action_name == 'change_directory':
+ current_dir = os.path.join( current_dir, action )
lcd( current_dir )
- elif build_command_name == 'move_directory_files':
+ elif action_name == 'move_directory_files':
common_util.move_directory_files( current_dir=current_dir,
- source_dir=os.path.join( build_command_dict[ 'source_directory' ] ),
- destination_dir=os.path.join( build_command_dict[ 'destination_directory' ] ) )
- elif build_command_name == 'move_file':
+ source_dir=os.path.join( action_dict[ 'source_directory' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination_directory' ] ) )
+ elif action_name == 'move_file':
common_util.move_file( current_dir=current_dir,
- source=os.path.join( build_command_dict[ 'source' ] ),
- destination_dir=os.path.join( build_command_dict[ 'destination' ] ) )
+ source=os.path.join( action_dict[ 'source' ] ),
+ destination_dir=os.path.join( action_dict[ 'destination' ] ) )
else:
- build_command = build_command_key
+ action = action_key
with settings( warn_only=True ):
- output = local( build_command, capture=True )
- log_results( build_command, output, os.path.join( install_dir, 'build_commands.log' ) )
+ output = local( action, capture=True )
+ log_results( action, output, os.path.join( install_dir, 'actions.log' ) )
if output.return_code:
return '%s. ' % str( output.stderr )
return ''
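The 'v^v^v' string is just an improbable separator used to pack an action name and its argument into a single key; a minimal round trip of the encoding looks like:

    action_key = '%sv^v^v%s' % ( 'change_directory', 'bin' )   # built in install_util
    action_name, action = action_key.split( 'v^v^v' )          # unpacked here in fabric_util
    # action_name == 'change_directory', action == 'bin'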
@@ -119,10 +107,10 @@
logfile = open( file_path, 'wb' )
logfile.write( "\n#############################################" )
logfile.write( '\n%s\nSTDOUT\n' % command )
+ logfile.write( str( fabric_AttributeString.stdout ) )
logfile.write( "#############################################\n" )
- logfile.write( str( fabric_AttributeString.stdout ) )
logfile.write( "\n#############################################" )
logfile.write( '\n%s\nSTDERR\n' % command )
+ logfile.write( str( fabric_AttributeString.stderr ) )
logfile.write( "#############################################\n" )
- logfile.write( str( fabric_AttributeString.stderr ) )
logfile.close()
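With the new env_dependency_path argument, handle_post_build_processing above writes an env.sh along these lines (a sketch; the install_dir value is illustrative, and the default path is computed in install_util's run_fabric_method below):

    import os

    install_dir = '/deps/samtools/0.1.18/owner/repo/rev'      # illustrative only
    env_dependency_path = os.path.join( install_dir, 'bin' )  # default when install_dir is not itself a bin directory
    cmd = "echo 'PATH=%s:$PATH; export PATH' > %s/env.sh;chmod +x %s/env.sh" % ( env_dependency_path, install_dir, install_dir )
    # The generated env.sh then contains the single line:
    # PATH=/deps/samtools/0.1.18/owner/repo/rev/bin:$PATH; export PATH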
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -11,131 +11,132 @@
from elementtree import ElementTree, ElementInclude
from elementtree.ElementTree import Element, SubElement
-def create_or_update_tool_dependency( app, tool_shed_repository, changeset_revision, name, version, type ):
- """
- This method is called from Galaxy (never the tool shed) when a new tool_shed_repository is being installed or when an ininstalled repository is
- being reinstalled.
- """
- # First see if a tool_dependency record exists for the received changeset_revision.
+def create_or_update_tool_dependency( app, tool_shed_repository, name, version, type ):
+ # Called from Galaxy (never the tool shed) when a new repository is being installed or when an uninstalled repository is being reinstalled.
+ # First see if an appropriate tool_dependency record exists for the received tool_shed_repository.
sa_session = app.model.context.current
- tool_dependency = get_tool_dependency_by_shed_changeset_revision( app, tool_shed_repository, name, version, type, changeset_revision )
+ tool_dependency = get_tool_dependency_by_name_version_type_repository( app, tool_shed_repository, name, version, type )
if tool_dependency:
tool_dependency.uninstalled = False
else:
- # Check the tool_shed_repository's set of tool_depnedency records for any that are marked uninstalled. If one is found, set uninstalled to
- # False and update the value of installed_changeset_revision.
- found = False
- for tool_dependency in tool_shed_repository.tool_dependencies:
- if tool_dependency.name == name and tool_dependency.version == version and tool_dependency.type == type and tool_dependency.uninstalled:
- found = True
- tool_dependency.uninstalled = False
- tool_dependency.installed_changeset_revision = changeset_revision
- break
- if not found:
- # Create a new tool_dependency record for the tool_shed_repository.
- tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
- installed_changeset_revision=changeset_revision,
- name=name,
- version=version,
- type=type )
+ # Create a new tool_dependency record for the tool_shed_repository.
+ tool_dependency = app.model.ToolDependency( tool_shed_repository_id=tool_shed_repository.id,
+ name=name,
+ version=version,
+ type=type )
sa_session.add( tool_dependency )
sa_session.flush()
return tool_dependency
-def get_tool_dependency_install_dir( app, repository, installed_changeset_revision, package_name, package_version ):
+def get_tool_dependency_by_name_version_type_repository( app, repository, name, version, type ):
+ sa_session = app.model.context.current
+ return sa_session.query( app.model.ToolDependency ) \
+ .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
+ app.model.ToolDependency.table.c.name == name,
+ app.model.ToolDependency.table.c.version == version,
+ app.model.ToolDependency.table.c.type == type ) ) \
+ .first()
+def get_tool_dependency_install_dir( app, repository, package_name, package_version ):
return os.path.abspath( os.path.join( app.config.tool_dependency_dir,
package_name,
package_version,
repository.owner,
repository.name,
- installed_changeset_revision ) )
-def get_tool_dependency_by_shed_changeset_revision( app, repository, dependency_name, dependency_version, dependency_type, changeset_revision ):
- sa_session = app.model.context.current
- return sa_session.query( app.model.ToolDependency ) \
- .filter( and_( app.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
- app.model.ToolDependency.table.c.name == dependency_name,
- app.model.ToolDependency.table.c.version == dependency_version,
- app.model.ToolDependency.table.c.type == dependency_type,
- app.model.ToolDependency.table.c.installed_changeset_revision == changeset_revision ) ) \
- .first()
-def install_package( app, elem, tool_shed_repository, installed_changeset_revision ):
+ repository.installed_changeset_revision ) )
+def install_package( app, elem, tool_shed_repository, name=None, version=None ):
+ # If name and version are not None, then a specific tool dependency is being installed.
+ message = ''
# The value of package_name should match the value of the "package" type in the tool config's <requirements> tag set, but it's not required.
- message = ''
package_name = elem.get( 'name', None )
package_version = elem.get( 'version', None )
if package_name and package_version:
- install_dir = get_install_dir( app, tool_shed_repository, installed_changeset_revision, package_name, package_version )
- if not_installed( install_dir ):
- for package_elem in elem:
- if package_elem.tag == 'proprietary_fabfile':
- # TODO: This is not yet working...
- # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
- if not fabric_version_checked:
- check_fabric_version()
- fabric_version_checked = True
- fabfile_name = package_elem.get( 'name', None )
- fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
- print 'Installing tool dependencies via fabric script ', fabfile_path
- elif package_elem.tag == 'fabfile':
- # Handle tool dependency installation using a fabric script provided by Galaxy. Example tag set definition:
- fabfile_path = None
- for method_elem in package_elem.findall( 'method' ):
- error_message = run_fabric_method( app,
- method_elem,
- fabfile_path,
- app.config.tool_dependency_dir,
- install_dir,
- package_name=package_name )
- if error_message:
- message += '%s' % error_message
- else:
- tool_dependency = create_or_update_tool_dependency( app,
- tool_shed_repository,
- installed_changeset_revision,
- name=package_name,
- version=package_version,
- type='package' )
- print package_name, 'version', package_version, 'installed in', install_dir
- else:
- print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
+ if ( not name and not version ) or ( name and version and name==package_name and version==package_version ):
+ install_dir = get_tool_dependency_install_dir( app, tool_shed_repository, package_name, package_version )
+ if not os.path.exists( install_dir ):
+ for package_elem in elem:
+ if package_elem.tag == 'proprietary_fabfile':
+ # TODO: This is not yet working...
+ # Handle tool dependency installation where the repository includes one or more proprietary fabric scripts.
+ if not fabric_version_checked:
+ check_fabric_version()
+ fabric_version_checked = True
+ fabfile_name = package_elem.get( 'name', None )
+ fabfile_path = os.path.abspath( os.path.join( os.path.split( tool_dependencies_config )[ 0 ], fabfile_name ) )
+ print 'Installing tool dependencies via fabric script ', fabfile_path
+ elif package_elem.tag == 'fabfile':
+ # Handle tool dependency installation using a fabric method included in the Galaxy framework.
+ fabfile_path = None
+ for method_elem in package_elem:
+ error_message = run_fabric_method( app,
+ method_elem,
+ fabfile_path,
+ app.config.tool_dependency_dir,
+ install_dir,
+ package_name=package_name )
+ if error_message:
+ message += '%s' % error_message
+ else:
+ tool_dependency = create_or_update_tool_dependency( app,
+ tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package' )
+ print package_name, 'version', package_version, 'installed in', install_dir
+ else:
+ print '\nSkipping installation of tool dependency', package_name, 'version', package_version, 'since it is installed in', install_dir, '\n'
return message
-def not_installed( install_dir ):
- # TODO: try executing a binary or something in addition to just seeing if the install_dir exists.
- return not os.path.exists( install_dir )
def run_fabric_method( app, elem, fabfile_path, tool_dependency_dir, install_dir, package_name=None, **kwd ):
"""Parse a tool_dependency.xml file's fabfile <method> tag set to build the method parameters and execute the method."""
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
+ # Default value for env_dependency_path.
+ install_path, install_directory = os.path.split( install_dir )
+ if install_directory != 'bin':
+ env_dependency_path = os.path.join( install_dir, 'bin' )
+ else:
+ env_dependency_path = install_dir
method_name = elem.get( 'name', None )
params_dict = dict( install_dir=install_dir )
- build_commands = []
+ actions = []
for param_elem in elem:
param_name = param_elem.get( 'name' )
if param_name:
- if param_name == 'build_commands':
- for build_command_elem in param_elem:
- build_command_dict = {}
- build_command_name = build_command_elem.get( 'name' )
- if build_command_name:
- if build_command_name in MOVE_BUILD_COMMAND_NAMES:
- build_command_key = build_command_name
- for move_elem in build_command_elem:
- move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
- if move_elem_text:
- build_command_dict[ move_elem.tag ] = move_elem_text
- elif build_command_elem.text:
- build_command_key = '%sv^v^v%s' % ( build_command_name, build_command_elem.text )
- else:
+ if param_name == 'actions':
+ for action_elem in param_elem:
+ action_dict = {}
+ action_type = action_elem.get( 'type', 'shell_command' )
+ if action_type == 'shell_command':
+ # Example: <action type="shell_command">make</action>
+ action_key = action_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if not action_key:
continue
+ elif action_type in MOVE_ACTIONS:
+ # Examples:
+ # <action type="move_file">
+ # <source>misc/some_file</source>
+ # <destination>$INSTALL_DIR/bin</destination>
+ # </action>
+ # <action type="move_directory_files">
+ # <source_directory>bin</source_directory>
+ # <destination_directory>$INSTALL_DIR/bin</destination_directory>
+ # </action>
+ action_key = action_type
+ for move_elem in action_elem:
+ move_elem_text = move_elem.text.replace( '$INSTALL_DIR', install_dir )
+ if move_elem_text:
+ action_dict[ move_elem.tag ] = move_elem_text
+ elif action_elem.text:
+ # Example: <action type="change_directory">bin</action>
+ action_key = '%sv^v^v%s' % ( action_type, action_elem.text )
else:
- build_command_key = build_command_elem.text.replace( '$INSTALL_DIR', install_dir )
- if not build_command_key:
- continue
- build_commands.append( ( build_command_key, build_command_dict ) )
- if build_commands:
- params_dict[ 'build_commands' ] = build_commands
+ continue
+ actions.append( ( action_key, action_dict ) )
+ if actions:
+ params_dict[ 'actions' ] = actions
+ elif param_name == 'env_dependency_path':
+ env_dependency_path = param_elem.text.replace( '$INSTALL_DIR', install_dir )
else:
if param_elem.text:
- params_dict[ param_name ] = param_elem.text
+ params_dict[ param_name ] = param_elem.text.replace( '$INSTALL_DIR', install_dir )
if package_name:
params_dict[ 'package_name' ] = package_name
if fabfile_path:
@@ -151,7 +152,7 @@
except Exception, e:
return '%s. ' % str( e )
try:
- message = handle_post_build_processing( tool_dependency_dir, install_dir, package_name=package_name )
+ message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
if message:
return message
except:
@@ -164,17 +165,19 @@
"""
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
+ # Default value for env_dependency_path.
+ env_dependency_path = install_dir
method_name = elem.get( 'name', None )
params_str = ''
- build_commands = []
+ actions = []
for param_elem in elem:
param_name = param_elem.get( 'name' )
if param_name:
- if param_name == 'build_commands':
- for build_command_elem in param_elem:
- build_commands.append( build_command_elem.text.replace( '$INSTALL_DIR', install_dir ) )
- if build_commands:
- params_str += 'build_commands=%s,' % tool_shed_encode( encoding_sep.join( build_commands ) )
+ if param_name == 'actions':
+ for action_elem in param_elem:
+ actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) )
+ if actions:
+ params_str += 'actions=%s,' % tool_shed_encode( encoding_sep.join( actions ) )
else:
if param_elem.text:
param_value = tool_shed_encode( param_elem.text )
@@ -190,7 +193,7 @@
return "Exception executing fabric script %s: %s. " % ( str( fabfile_path ), str( e ) )
if returncode:
return message
- message = handle_post_build_processing( tool_dependency_dir, install_dir, package_name=package_name )
+ message = handle_post_build_processing( tool_dependency_dir, install_dir, env_dependency_path, package_name=package_name )
if message:
return message
else:
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -798,11 +798,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return get_tool_shed_repository_by_shed_name_owner_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/tools/deps/__init__.py
--- a/lib/galaxy/tools/deps/__init__.py
+++ b/lib/galaxy/tools/deps/__init__.py
@@ -44,18 +44,20 @@
def _find_dep_versioned( self, name, version, type='package', installed_tool_dependencies=None ):
installed_dependency = None
if installed_tool_dependencies:
- for installed_dependency in installed_tool_dependencies:
- if not installed_dependency.uninstalled:
- if installed_dependency.name==name and installed_dependency.version==version and installed_dependency.type==type:
+ for installed_tool_dependency in installed_tool_dependencies:
+ if not installed_tool_dependency.uninstalled:
+ if installed_tool_dependency.name==name and installed_tool_dependency.version==version and installed_tool_dependency.type==type:
+ installed_dependency = installed_tool_dependency
break
for base_path in self.base_paths:
if installed_dependency:
tool_shed_repository = installed_dependency.tool_shed_repository
path = os.path.join( base_path,
- name, version,
+ name,
+ version,
tool_shed_repository.owner,
tool_shed_repository.name,
- installed_dependency.installed_changeset_revision )
+ tool_shed_repository.installed_changeset_revision )
else:
path = os.path.join( base_path, name, version )
script = os.path.join( path, 'env.sh' )
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -326,7 +326,11 @@
if not owner:
owner = get_repository_owner_from_clone_url( repository_clone_url )
includes_datatypes = 'datatypes' in metadata_dict
- tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision )
+ tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app,
+ tool_shed,
+ name,
+ owner,
+ installed_changeset_revision )
if tool_shed_repository:
tool_shed_repository.description = description
tool_shed_repository.changeset_revision = current_changeset_revision
@@ -425,10 +429,10 @@
for tool_dict in metadata_dict[ 'tools' ]:
requirements = tool_dict.get( 'requirements', [] )
for requirement_dict in requirements:
- requirement_name = requirement_dict.get( 'name', None )
- requirement_version = requirement_dict.get( 'version', None )
- requirement_type = requirement_dict.get( 'type', None )
- if requirement_name == tool_dependency_name and requirement_version == tool_dependency_version and requirement_type == tool_dependency_type:
+ req_name = requirement_dict.get( 'name', None )
+ req_version = requirement_dict.get( 'version', None )
+ req_type = requirement_dict.get( 'type', None )
+ if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type:
can_generate_dependency_metadata = True
break
if not can_generate_dependency_metadata:
@@ -1159,13 +1163,13 @@
message = str( e )
error = True
return error, message
-def handle_tool_dependencies( app, tool_shed_repository, installed_changeset_revision, tool_dependencies_config ):
+def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, name=None, version=None, type='package' ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
methods in Galaxy's tool_dependencies module. In the future, proprietary fabric scripts contained in the repository will be supported.
Future enhancements to handling tool dependencies may provide installation processes in addition to fabric based processes. The dependencies
will be installed in:
- ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repository_owner>/<repository_name>/<installed_changeset_revision>
+ ~/<app.config.tool_dependency_dir>/<package_name>/<package_version>/<repo_owner>/<repo_name>/<repo_installed_changeset_revision>
"""
status = 'ok'
message = ''
@@ -1175,8 +1179,8 @@
ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
- if elem.tag == 'package':
- error_message = install_package( app, elem, tool_shed_repository, installed_changeset_revision )
+ if elem.tag == type:
+ error_message = install_package( app, elem, tool_shed_repository, name=name, version=version )
if error_message:
message += ' %s' % error_message
if message:
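The layout named in the docstring above matches get_tool_dependency_install_dir in install_util; with illustrative values it resolves to:

    import os

    os.path.join( app.config.tool_dependency_dir,
                  'samtools',      # package_name (illustrative)
                  '0.1.18',        # package_version (illustrative)
                  'devteam',       # repository owner (illustrative)
                  'samtools',      # repository name (illustrative)
                  '1a2b3c4d' )     # repository.installed_changeset_revision (illustrative)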
@@ -1288,12 +1292,9 @@
tool_shed_repository,
current_changeset_revision,
work_dir )
- # Install dependencies for repository tools. The tool_dependency.installed_changeset_revision value will be the value of
- # tool_shed_repository.changeset_revision (this method's current_changeset_revision). This approach will allow for different
- # versions of the same tool_dependency to be installed for associated versions of tools included in the installed repository.
+ # Install dependencies for repository tools.
status, message = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
- installed_changeset_revision=current_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
if status != 'ok' and message:
print 'The following error occurred from load_repository_contents while installing tool dependencies:'
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -2,7 +2,7 @@
from galaxy.web.controllers.admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
-from galaxy.tool_shed.tool_dependencies.install_util import get_tool_dependency_install_dir, not_installed
+from galaxy.tool_shed.tool_dependencies.install_util import get_tool_dependency_install_dir
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
@@ -196,6 +196,7 @@
repository = get_repository( trans, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repository_install_dir = os.path.abspath ( relative_install_dir )
+ errors = ''
if params.get( 'deactivate_or_uninstall_repository_button', False ):
if repository.includes_tools:
# Handle tool panel alterations.
@@ -218,32 +219,24 @@
removed = False
if removed:
repository.uninstalled = True
- # Remove all installed tool dependencies - this is required when uninstalling the repository.
- for tool_dependency in repository.tool_dependencies:
- dependency_install_dir = os.path.abspath( os.path.join( trans.app.config.tool_dependency_dir,
- tool_dependency.name,
- tool_dependency.version,
- repository.owner,
- repository.name,
- tool_dependency.installed_changeset_revision ) )
- try:
- shutil.rmtree( dependency_install_dir )
- log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) )
- removed = True
- except Exception, e:
- log.debug( "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) ) )
- removed = False
- if removed:
- tool_dependency.uninstalled = True
- trans.sa_session.add( tool_dependency )
+ # Remove all installed tool dependencies.
+ for tool_dependency in repository.installed_tool_dependencies:
+ uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ if error_message:
+ errors = '%s %s' % ( errors, error_message )
repository.deleted = True
trans.sa_session.add( repository )
trans.sa_session.flush()
if remove_from_disk_checked:
- message = 'The repository named <b>%s</b> has been uninstalled.' % repository.name
+ message = 'The repository named <b>%s</b> has been uninstalled. ' % repository.name
+ if errors:
+ message += 'Attempting to uninstall tool dependencies resulted in errors: %s' % errors
+ status = 'error'
+ else:
+ status = 'done'
else:
- message = 'The repository named <b>%s</b> has been deactivated.' % repository.name
- status = 'done'
+ message = 'The repository named <b>%s</b> has been deactivated. ' % repository.name
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
@@ -277,33 +270,6 @@
return get_repository_file_contents( file_path )
@web.expose
@web.require_admin
- def install_tool_dependencies( self, trans, **kwd ):
- """Install dependencies for tools included in the repository when the repository is being installed."""
- message = kwd.get( 'message', '' )
- status = kwd.get( 'status', 'done' )
- tool_shed_url = kwd[ 'tool_shed_url' ]
- repo_info_dict = kwd[ 'repo_info_dict' ]
- includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
- # Decode the encoded repo_info_dict param value.
- dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
- # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
- new_repo_info_dict = {}
- for name, repo_info_tuple in dict_with_tool_dependencies.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
- new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
- repo_info_dict = tool_shed_encode( new_repo_info_dict )
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
- return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
- tool_shed_url=tool_shed_url,
- repo_info_dict=repo_info_dict,
- dict_with_tool_dependencies=dict_with_tool_dependencies,
- includes_tools=includes_tools,
- install_tool_dependencies_check_box=install_tool_dependencies_check_box,
- message=message,
- status=status )
- @web.expose
- @web.require_admin
def install_missing_tool_dependencies( self, trans, **kwd ):
"""
Install dependencies for tools included in the repository that were not installed when the repository was installed or that are
@@ -327,18 +293,15 @@
install_dir=relative_install_dir )
status, message = handle_tool_dependencies( app=trans.app,
tool_shed_repository=repository,
- installed_changeset_revision=repository.installed_changeset_revision,
tool_dependencies_config=tool_dependencies_config )
try:
shutil.rmtree( work_dir )
except:
pass
- tool_dependencies_missing = status == 'error'
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=repository.description,
repo_files_dir=repo_files_dir,
- tool_dependencies_missing=tool_dependencies_missing,
message=message,
status=status )
if reinstalling and kwd.get( 'install_missing_tool_dependencies_button', False ):
@@ -352,12 +315,11 @@
if not reinstalling:
# Filter the tool_dependencies dictionary to eliminate successfully installed dependencies.
filtered_tool_dependencies = {}
- for dependency_key, requirements_dict in tool_dependencies.items():
- name = requirements_dict[ 'name' ]
- version = requirements_dict[ 'version' ]
- install_dir = get_tool_dependency_install_dir( trans.app, repository, repository.changeset_revision, name, version )
- if not_installed( install_dir ):
- filtered_tool_dependencies[ dependency_key ] = requirements_dict
+ for missing_dependency_tup in repository.missing_tool_dependencies:
+ name, version, type = missing_dependency_tup
+ dependency_key = '%s/%s' % ( name, version )
+ install_dir = get_tool_dependency_install_dir( trans.app, repository, name, version )
+ filtered_tool_dependencies[ dependency_key ] = dict( name=name, type=type, version=version )
tool_dependencies = filtered_tool_dependencies
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
@@ -557,6 +519,66 @@
status=status )
@web.expose
@web.require_admin
+ def install_tool_dependencies( self, trans, **kwd ):
+ """Install dependencies for tools included in the repository when the repository is being installed."""
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ tool_shed_url = kwd[ 'tool_shed_url' ]
+ repo_info_dict = kwd[ 'repo_info_dict' ]
+ includes_tools = util.string_as_bool( kwd.get( 'includes_tools', False ) )
+ # Decode the encoded repo_info_dict param value.
+ dict_with_tool_dependencies = tool_shed_decode( repo_info_dict )
+ # The repo_info_dict includes tool dependencies which we need to display so the user knows what will be installed.
+ new_repo_info_dict = {}
+ for name, repo_info_tuple in dict_with_tool_dependencies.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ # Create a new repo_info_dict by eliminating tool-dependencies from the repo_info_tuple.
+ new_repo_info_dict[ name ] = ( description, repository_clone_url, changeset_revision, ctx_rev )
+ repo_info_dict = tool_shed_encode( new_repo_info_dict )
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=True )
+ return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
+ tool_shed_url=tool_shed_url,
+ repo_info_dict=repo_info_dict,
+ dict_with_tool_dependencies=dict_with_tool_dependencies,
+ includes_tools=includes_tools,
+ install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def install_tool_dependency( self, trans, name, version, type, repository_id, **kwd ):
+ """Install dependencies for tools included in the repository when the repository is being installed."""
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ repository = get_repository( trans, repository_id )
+ shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ # Get the tool_dependencies.xml file from the repository.
+ work_dir = make_tmp_directory()
+ tool_dependencies_config = get_config_from_repository( trans.app,
+ 'tool_dependencies.xml',
+ repository,
+ repository.changeset_revision,
+ work_dir,
+ install_dir=relative_install_dir )
+ status, message = handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=repository,
+ tool_dependencies_config=tool_dependencies_config,
+ name=name,
+ version=version,
+ type=type )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
+ repository=repository,
+ description=repository.description,
+ repo_files_dir=repo_files_dir,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def manage_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -791,6 +813,24 @@
status=status )
@web.expose
@web.require_admin
+ def uninstall_tool_dependency( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, kwd[ 'repository_id' ] )
+ tool_dependency = get_tool_dependency( trans, kwd[ 'id' ] )
+ uninstalled, error_message = remove_tool_dependency( trans, tool_dependency )
+ if uninstalled:
+ message = "The '%s' tool dependency has been uninstalled." % tool_dependency.name
+ else:
+ message = "Error attempting to uninstall the '%s' tool dependency: %s" % ( tool_dependency.name, error_message )
+ status = 'error'
+ return trans.fill_template( '/admin/tool_shed_repository/manage_tool_dependencies.mako',
+ repository=repository,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def update_to_changeset_revision( self, trans, **kwd ):
"""Update a cloned repository to the latest revision possible."""
params = util.Params( kwd )
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/browse_repository.mako
--- a/templates/admin/tool_shed_repository/browse_repository.mako
+++ b/templates/admin/tool_shed_repository/browse_repository.mako
@@ -19,10 +19,13 @@
<div popupmenu="repository-${repository.id}-popup"><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
%if repository.tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
+ %if repository.missing_tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ %endif
</div></ul>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/browse_tool_dependency.mako
--- a/templates/admin/tool_shed_repository/browse_tool_dependency.mako
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -17,12 +17,12 @@
<ul class="manage-table-actions"><li><a class="action-button" id="tool_dependency-${tool_dependency.id}-popup" class="menubutton">Repository Actions</a></li><div popupmenu="tool_dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
- %if repository.tool_dependencies:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
- %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this tool dependency</a></div></ul>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/install_missing_tool_dependencies.mako
@@ -3,6 +3,23 @@
<% import os %>
+<br/><br/>
+<ul class="manage-table-actions">
+ <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
+ <div popupmenu="repository-${repository.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ %if repository.includes_tools:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
+ %endif
+ %if repository.tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
+ </div>
+</ul>
+
%if message:
${render_msg( message, status )}
%endif
@@ -33,10 +50,10 @@
<form name="install_missing_tool_dependencies" id="install_missing_tool_dependencies" action="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ), tool_panel_section=tool_panel_section, new_tool_panel_section=new_tool_panel_section, reinstalling=reinstalling )}" method="post" ><div style="clear: both"></div><div class="form-row">
- <label>Install tool dependencies?</label>
+ <label>Install missing tool dependencies?</label>
${install_tool_dependencies_check_box.get_html()}
<div class="toolParamHelp" style="clear: both;">
- Un-check to skip installation of these tool dependencies.
+ Un-check to skip installation of these missing tool dependencies.
</div>
## Fake the no_changes_check_box value.
%if no_changes_checked:
@@ -48,7 +65,7 @@
<div style="clear: both"></div><div class="form-row"><table class="grid">
- <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr>
+ <tr><td colspan="4" bgcolor="#D8D8D8"><b>Missing tool dependencies</b></td></tr><tr><th>Name</th><th>Version</th>
@@ -68,6 +85,7 @@
repository.changeset_revision )
readme_text = requirements_dict.get( 'readme', None )
%>
+ %if not os.path.exists( install_dir ):
<tr><td>${name}</td><td>${version}</td>
@@ -78,6 +96,7 @@
<tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr><tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
%endif
+ %endif
%endfor
</table><div style="clear: both"></div>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/install_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/install_tool_dependencies.mako
@@ -61,6 +61,7 @@
changeset_revision )
readme_text = requirements_dict.get( 'readme', None )
%>
+ %if not os.path.exists( install_dir ):
<tr><td>${name}</td><td>${version}</td>
@@ -71,6 +72,7 @@
<tr><td colspan="4" bgcolor="#FFFFCC">${name} ${version} requirements and installation information</td></tr><tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
%endif
+ %endif
%endfor
%endfor
</table>
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -14,7 +14,10 @@
%if repository.tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+ %if repository.missing_tool_dependencies:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a></div></ul>
@@ -85,14 +88,24 @@
<table class="grid"><tr><td><b>name</b></td>
+ <td><b>version</b></td><td><b>type</b></td>
- <td><b>version</b></td></tr>
- %for name, requirements_dict in missing_tool_dependencies.items():
+ %for index, missing_dependency_tup in enumerate( missing_tool_dependencies ):
+ <% name, version, type = missing_dependency_tup %><tr>
- <td>${requirements_dict[ 'name' ]}</td>
- <td>${requirements_dict[ 'type' ]}</td>
- <td>${requirements_dict[ 'version' ]}</td>
+ <td>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="missing_dependency-${index}-popup">
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
+ ${name}
+ </a>
+ </div>
+ <div popupmenu="missing_dependency-${index}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
+ </div>
+ </td>
+ <td>${version}</td>
+ <td>${type}</td></tr>
%endfor
</table>
@@ -112,18 +125,23 @@
<table class="grid"><tr><td><b>name</b></td>
+ <td><b>version</b></td><td><b>type</b></td>
- <td><b>version</b></td></tr>
%for installed_tool_dependency in installed_tool_dependencies:
<tr><td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${installed_tool_dependency.name}
- </a>
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${installed_tool_dependency.id}-popup">
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${installed_tool_dependency.name}
+ </a>
+ </div>
+ <div popupmenu="dependency-${installed_tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
+            </div>
+        </td>
+        <td>${installed_tool_dependency.version}</td>
+        <td>${installed_tool_dependency.type}</td>
-        <td>${installed_tool_dependency.version}</td>
    </tr>
%endfor
</table>
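[Editor's note] The hunks above also change the shape of the data the template iterates: missing_tool_dependencies moves from a dict of requirements dicts to a sequence of ( name, version, type ) tuples walked with enumerate(), whose index feeds the per-row popup ids. A sketch with invented example values:

# Illustrative only -- the tuples below are invented, but they show the
# shape the template now expects.
missing_tool_dependencies = [
    ( 'samtools', '0.1.18', 'package' ),
    ( 'bwa', '0.5.9', 'package' ),
]
for index, missing_dependency_tup in enumerate( missing_tool_dependencies ):
    name, version, type = missing_dependency_tup
    # index feeds the per-row popup id, e.g. "missing_dependency-0-popup"
    print( index, name, version, type )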
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/manage_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/manage_tool_dependencies.mako
@@ -7,6 +7,7 @@
<ul class="manage-table-actions">
    <li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
    <div popupmenu="repository-${repository.id}-popup">
+        <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( repository.id ) )}">Manage repository</a>
        <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository files</a>
        <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
%if repository.includes_tools:
@@ -15,7 +16,7 @@
%if repository.missing_tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_missing_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Install missing tool dependencies</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+        <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
    </div>
</ul>
@@ -24,7 +25,7 @@
%endif
<div class="toolForm">
- <div class="toolFormTitle">${repository.name} repository's tool dependencies</div>
+    <div class="toolFormTitle">Repository '${repository.name}' tool dependencies</div>
    <div class="toolFormBody">
        <div class="form-row">
            <table class="grid">
@@ -33,32 +34,46 @@
name = tool_dependency.name
version = tool_dependency.version
type = tool_dependency.type
- installed_changeset_revision = tool_dependency.installed_changeset_revision
uninstalled = tool_dependency.uninstalled
- install_dir = os.path.abspath( os.path.join( trans.app.config.tool_dependency_dir,
- name,
- version,
- repository.owner,
- repository.name,
- installed_changeset_revision ) )
+ install_dir = tool_dependency.installation_directory( trans.app )
%>
- <tr><td bgcolor="#D8D8D8"><b>Name</b></td><td bgcolor="#D8D8D8">${name}</td></tr>
- <tr><th>Version</th><td>${version}</td></tr>
- <tr><th>Type</th><td>${type}</td></tr>
- <tr>
- <th>Install directory</th>
- <td>
+ <tr>
+ <td bgcolor="#D8D8D8">
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dependency-${tool_dependency.id}-popup">
%if uninstalled:
- This dependency is not currently installed
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">
+ <b>Name</b>
+ </a>
+ <div popupmenu="dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='install_tool_dependency', name=name, version=version, type=type, repository_id=trans.security.encode_id( repository.id ) )}">Install this dependency</a>
+ </div>
%else:
<a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${install_dir}
+ <b>Name</b></a>
+ <div popupmenu="dependency-${tool_dependency.id}-popup">
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='uninstall_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">Uninstall this dependency</a>
+ </div>
%endif
- </td>
- </tr>
- <tr><th>Installed changeset revision</th><td>${installed_changeset_revision}</td></tr>
- <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
+ </div>
+ </td>
+ <td bgcolor="#D8D8D8">${name}</td>
+ </tr>
+ <tr><th>Version</th><td>${version}</td></tr>
+ <tr><th>Type</th><td>${type}</td></tr>
+ <tr>
+ <th>Install directory</th>
+ <td>
+ %if uninstalled:
+ This dependency is not currently installed
+ %else:
+ <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
+ ${install_dir}
+ </a>
+ %endif
+ </td>
+ </tr>
+ <tr><th>Uninstalled</th><td>${uninstalled}</td></tr>
%endfor
</table>
<div style="clear: both"></div>
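[Editor's note] The hunk above replaces the template's inline os.path.join() construction with a single model call, tool_dependency.installation_directory( trans.app ). A sketch of what such a helper computes, reconstructed from the removed lines; the class body and the tool_shed_repository relationship name are assumptions, so the real model method may differ in detail:

import os

class ToolDependency( object ):
    # Sketch only: fields mirror the values the removed template code used.
    def __init__( self, name, version, tool_shed_repository, installed_changeset_revision ):
        self.name = name
        self.version = version
        self.tool_shed_repository = tool_shed_repository  # assumed relationship name
        self.installed_changeset_revision = installed_changeset_revision
    def installation_directory( self, app ):
        # <tool_dependency_dir>/<name>/<version>/<owner>/<repository>/<changeset>
        return os.path.abspath( os.path.join( app.config.tool_dependency_dir,
                                              self.name,
                                              self.version,
                                              self.tool_shed_repository.owner,
                                              self.tool_shed_repository.name,
                                              self.installed_changeset_revision ) )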
diff -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd -r 7c495f835a1d436ad33dff6107784f106cc24980 templates/admin/tool_shed_repository/view_tool_metadata.mako
--- a/templates/admin/tool_shed_repository/view_tool_metadata.mako
+++ b/templates/admin/tool_shed_repository/view_tool_metadata.mako
@@ -11,7 +11,7 @@
%if repository.tool_dependencies:
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( repository.id ) )}">Manage tool dependencies</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a>
+        <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or uninstall repository</a>
    </div>
</ul>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: afgane: Fix API history purge; a few documentation corrections
by Bitbucket 17 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/87be4c8d7f7f/
changeset: 87be4c8d7f7f
user: afgane
date: 2012-06-18 06:53:02
summary: Fix API history purge; a few documentation corrections
affected #: 3 files
diff -r aaf5c82a55794c97125696aecb27056b5087fdb7 -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -130,6 +130,7 @@
history.deleted = True
if purge and trans.app.config.allow_user_dataset_purge:
+ # First purge all the datasets
for hda in history.datasets:
if hda.purged:
continue
@@ -143,6 +144,9 @@
except:
pass
trans.sa_session.flush()
+ # Now mark the history as purged
+ history.purged = True
+ self.sa_session.add( history )
trans.sa_session.flush()
return 'OK'
@@ -150,8 +154,8 @@
@web.expose_api
def undelete( self, trans, id, **kwd ):
"""
- POST /api/histories/deleted/{encoded_quota_id}/undelete
- Undeletes a quota
+ POST /api/histories/deleted/{encoded_history_id}/undelete
+ Undeletes a history
"""
history_id = id
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
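[Editor's note] The fix in the histories.py hunk above is one of ordering: every dataset is purged first, and only then is the history itself flagged as purged, the step the previous code skipped. A simplified sketch of that flow (the real controller's error handling and actual dataset file removal are omitted):

def purge_history( sa_session, history ):
    # Simplified sketch of the corrected flow, not the full controller.
    history.deleted = True
    # First purge all the datasets ...
    for hda in history.datasets:
        if hda.purged:
            continue
        hda.purged = True
        sa_session.add( hda )
        sa_session.flush()
    # ... then mark the history itself as purged -- the missing step.
    history.purged = True
    sa_session.add( history )
    sa_session.flush()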
diff -r aaf5c82a55794c97125696aecb27056b5087fdb7 -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd lib/galaxy/web/api/users.py
--- a/lib/galaxy/web/api/users.py
+++ b/lib/galaxy/web/api/users.py
@@ -65,7 +65,7 @@
@web.expose_api
def create( self, trans, payload, **kwd ):
"""
- /api/users
+ POST /api/users
Creates a new Galaxy user.
"""
if not trans.app.config.allow_user_creation:
diff -r aaf5c82a55794c97125696aecb27056b5087fdb7 -r 87be4c8d7f7fa32f6023f0e251e3c9af8447f5cd scripts/api/library_upload_from_import_dir.py
--- a/scripts/api/library_upload_from_import_dir.py
+++ b/scripts/api/library_upload_from_import_dir.py
@@ -1,5 +1,8 @@
#!/usr/bin/env python
-
+"""
+Example usage:
+./library_upload_from_import_dir.py <key> http://127.0.0.1:8080/api/libraries/dda47097d9189f15/contents Fdda47097d9189f15 auto /Users/EnisAfgan/projects/pprojects/galaxy/lib_upload_dir ?
+"""
import os, sys
sys.path.insert( 0, os.path.dirname( __file__ ) )
from common import submit
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.