galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
January 2013
- 1 participant
- 160 discussions
commit/galaxy-central: natefoo: DRMAA: Don't attempt to stop jobs that have no external ID set.
by Bitbucket 10 Jan '13
by Bitbucket 10 Jan '13
10 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c015b82b3944/
changeset: c015b82b3944
user: natefoo
date: 2013-01-10 18:25:27
summary: DRMAA: Don't attempt to stop jobs that have no external ID set.
affected #: 1 file
diff -r caaab03824478384c256fc6b5678bb39dbf8f9f2 -r c015b82b3944f967e2c859d5552c00e3e38a2da0 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -400,16 +400,18 @@
def stop_job( self, job ):
"""Attempts to delete a job from the DRM queue"""
try:
+ ext_id = job.get_job_runner_external_id()
+ assert ext_id not in ( None, 'None' ), 'External job id is None'
if self.external_killJob_script is None:
- self.ds.control( job.get_job_runner_external_id(), drmaa.JobControlAction.TERMINATE )
+ self.ds.control( ext_id, drmaa.JobControlAction.TERMINATE )
else:
# FIXME: hardcoded path
- subprocess.Popen( [ '/usr/bin/sudo', '-E', self.external_killJob_script, str( job.get_job_runner_external_id() ), str( self.userid ) ], shell=False )
- log.debug( "(%s/%s) Removed from DRM queue at user's request" % ( job.get_id(), job.get_job_runner_external_id() ) )
+ subprocess.Popen( [ '/usr/bin/sudo', '-E', self.external_killJob_script, str( ext_id ), str( self.userid ) ], shell=False )
+ log.debug( "(%s/%s) Removed from DRM queue at user's request" % ( job.get_id(), ext_id ) )
except drmaa.InvalidJobException:
- log.debug( "(%s/%s) User killed running job, but it was already dead" % ( job.get_id(), job.get_job_runner_external_id() ) )
+ log.debug( "(%s/%s) User killed running job, but it was already dead" % ( job.get_id(), ext_id ) )
except Exception, e:
- log.debug( "(%s/%s) User killed running job, but error encountered removing from DRM queue: %s" % ( job.get_id(), job.get_job_runner_external_id(), e ) )
+ log.debug( "(%s/%s) User killed running job, but error encountered removing from DRM queue: %s" % ( job.get_id(), ext_id, e ) )
def recover( self, job, job_wrapper ):
"""Recovers jobs stuck in the queued/running state when Galaxy started"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/087a08c43cb5/
changeset: 087a08c43cb5
user: jmchilton
date: 2012-12-07 18:48:07
summary: Fix upload of files to history through API (as outlined here http://dev.list.galaxyproject.org/Uploading-large-files-to-history-through-…)
affected #: 2 files
diff -r 330f05e523aee275c91f3e5de1177f8eb832d512 -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -142,6 +142,14 @@
named_args, _, _, _ = inspect.getargspec(func)
for arg in named_args:
payload.pop(arg, None)
+ for k, v in payload.iteritems():
+ if isinstance(v, (str, unicode)):
+ try:
+ payload[k] = simplejson.loads(v)
+ except:
+ # may not actually be json, just continue
+ pass
+ payload = util.recursively_stringify_dictionary_keys( payload )
else:
# Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
# should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
diff -r 330f05e523aee275c91f3e5de1177f8eb832d512 -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -72,6 +72,11 @@
# Set up inputs.
inputs = payload[ 'inputs' ]
+ # Find files coming in as multipart file data and add to inputs.
+ for k, v in payload.iteritems():
+ if k.startswith("files_"):
+ inputs[k] = v
+
# HACK: add run button so that tool.handle_input will run tool.
inputs['runtool_btn'] = 'Execute'
# TODO: encode data ids and decode ids.
https://bitbucket.org/galaxy/galaxy-central/commits/caaab0382447/
changeset: caaab0382447
user: jmchilton
date: 2013-01-10 17:51:57
summary: Merge.
affected #: 253 files
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -74,6 +74,10 @@
*/variables.less
static/june_2007_style/blue/base_sprites.less
+# Testing
+selenium-server.jar
+selenium_results.html
+
# Documentation build files.
doc/build
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/Makefile
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -6,6 +6,9 @@
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
+UPDATEWORKDIR = /tmp/galaxySphinxUpdate
+UPDATEWORKSOURCELIB = $(UPDATEWORKDIR)/source/lib
+SPHINXAPIDOC = sphinx-apidoc
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
@@ -21,7 +24,7 @@
$(TOOLDATASHAREDDIR)/ncbi/builds.txt \
$(TOOLDATASHAREDDIR)/ucsc/publicbuilds.txt
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext updaterst
# Sphinx wants the build files to be there; Copy the sample files into
@@ -51,9 +54,13 @@
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " updaterst to update sphinx rst to reflect code structure changes"
+
+# might also want to do
+# cd source/lib; hg revert; rm *.rst.orig; or not.
clean:
- -rm -rf $(BUILDDIR)/*
+ -rm -rf $(BUILDDIR)/* $(UPDATEWORKDIR)
html: $(TOOLDATABUILDFILES)
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@@ -165,3 +172,20 @@
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
+
+# Targets for updating the structure of the Sphinx RST doc for lib/
+
+$(UPDATEWORKSOURCELIB):
+ mkdir -p $(UPDATEWORKSOURCELIB)
+
+# Create a fresh version of the RST files for the lib, and then create a
+# unified patch file (ignore all emacs leftovers).
+# Feed that to our custom version of patch.py, which applies patches that
+# are only adds, and reports everything else to the user to deal with manually
+#
+# Note: this is still a very rough process. the run of patch.py gets some
+# errors that don't mean anything to us. And the manual process is not fun.
+updaterst: $(UPDATEWORKSOURCELIB)
+ $(SPHINXAPIDOC) -o $(UPDATEWORKSOURCELIB) ../lib
+ -diff -x '*.rst~' -ru source/lib $(UPDATEWORKSOURCELIB) > $(UPDATEWORKDIR)/alldifs.patch
+ ./patch.py $(UPDATEWORKDIR)/alldifs.patch
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/patch.py
--- /dev/null
+++ b/doc/patch.py
@@ -0,0 +1,1085 @@
+#!/usr/bin/env python
+""" Patch utility to apply unified diffs
+
+ Brute-force line-by-line non-recursive parsing
+
+ Copyright (c) 2008-2012 anatoly techtonik
+ Available under the terms of MIT license
+
+ Project home: http://code.google.com/p/python-patch/
+
+
+ $Id: patch.py 181 2012-11-23 16:03:05Z techtonik $
+ $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
+
+ This program needs further tweaking for how we use it at Galaxy.
+"""
+
+__author__ = "anatoly techtonik <techtonik(a)gmail.com>"
+__version__ = "1.12.11"
+
+import copy
+import logging
+import re
+# cStringIO doesn't support unicode in 2.5
+from StringIO import StringIO
+import urllib2
+
+from os.path import exists, isfile, abspath
+import os
+import shutil
+
+#------------------------------------------------
+# Logging is controlled by logger named after the
+# module name (e.g. 'patch' for patch.py module)
+
+debugmode = False
+
+logger = logging.getLogger(__name__)
+
+debug = logger.debug
+info = logger.info
+warning = logger.warning
+
+class NullHandler(logging.Handler):
+ """ Copied from Python 2.7 to avoid getting
+ `No handlers could be found for logger "patch"`
+ http://bugs.python.org/issue16539
+ """
+ def handle(self, record):
+ pass
+ def emit(self, record):
+ pass
+ def createLock(self):
+ self.lock = None
+
+logger.addHandler(NullHandler())
+
+#------------------------------------------------
+# Constants for Patch/PatchSet types
+
+DIFF = PLAIN = "plain"
+GIT = "git"
+HG = MERCURIAL = "mercurial"
+SVN = SUBVERSION = "svn"
+# mixed type is only actual when PatchSet contains
+# Patches of different type
+MIXED = MIXED = "mixed"
+
+
+#------------------------------------------------
+# Helpers (these could come with Python stdlib)
+
+# x...() function are used to work with paths in
+# cross-platform manner - all paths use forward
+# slashes even on Windows.
+
+def xisabs(filename):
+ """ Cross-platform version of `os.path.isabs()`
+ Returns True if `filename` is absolute on
+ Linux, OS X or Windows.
+ """
+ if filename.startswith('/'): # Linux/Unix
+ return True
+ elif filename.startswith('\\'): # Windows
+ return True
+ elif re.match(r'\w:[\\/]', filename): # Windows
+ return True
+ return False
+
+def xnormpath(path):
+ """ Cross-platform version of os.path.normpath """
+ return os.path.normpath(path).replace(os.sep, '/')
+
+def xstrip(filename):
+ """ Make relative path out of absolute by stripping
+ prefixes used on Linux, OS X and Windows.
+
+ This function is critical for security.
+ """
+ while xisabs(filename):
+ # strip windows drive with all slashes
+ if re.match(r'\w:[\\/]', filename):
+ filename = re.sub(r'^\w+:[\\/]+', '', filename)
+ # strip all slashes
+ elif re.match(r'[\\/]', filename):
+ filename = re.sub(r'^[\\/]+', '', filename)
+ return filename
+
+#-----------------------------------------------
+# Main API functions
+
+def fromfile(filename):
+ """ Parse patch file. If successful, returns
+ PatchSet() object. Otherwise returns False.
+ """
+ patchset = PatchSet()
+ debug("reading %s" % filename)
+ fp = open(filename, "rb")
+ res = patchset.parse(fp)
+ fp.close()
+ if res == True:
+ return patchset
+ return False
+
+
+def fromstring(s):
+ """ Parse text string and return PatchSet()
+ object (or False if parsing fails)
+ """
+ ps = PatchSet( StringIO(s) )
+ if ps.errors == 0:
+ return ps
+ return False
+
+
+def fromurl(url):
+ """ Parse patch from an URL, return False
+ if an error occured. Note that this also
+ can throw urlopen() exceptions.
+ """
+ ps = PatchSet( urllib2.urlopen(url) )
+ if ps.errors == 0:
+ return ps
+ return False
+
+
+# --- Utility functions ---
+# [ ] reuse more universal pathsplit()
+def pathstrip(path, n):
+ """ Strip n leading components from the given path """
+ pathlist = [path]
+ while os.path.dirname(pathlist[0]) != '':
+ pathlist[0:1] = os.path.split(pathlist[0])
+ return '/'.join(pathlist[n:])
+# --- /Utility function ---
+
+
+class Hunk(object):
+ """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
+
+ def __init__(self):
+ self.startsrc=None #: line count starts with 1
+ self.linessrc=None
+ self.starttgt=None
+ self.linestgt=None
+ self.invalid=False
+ self.hasplus=False # True if any "+" lines in hunk
+ self.hasminus=False # True if any "-" lines in hunk
+ self.text=[]
+
+ def originalText(self):
+
+ return("@@ -" + str(self.startsrc) +
+ "," + str(self.linessrc) +
+ " +" + str(self.starttgt) +
+ "," + str(self.linestgt) +
+ "\n" +
+ self.printableText())
+
+ def printableText(self):
+ """Reformat text into printable text"""
+
+ # yeah, there must be a better way to do this.
+ printable = ""
+ for line in self.text:
+ printable += line
+
+ return printable
+
+
+
+# def apply(self, estream):
+# """ write hunk data into enumerable stream
+# return strings one by one until hunk is
+# over
+#
+# enumerable stream are tuples (lineno, line)
+# where lineno starts with 0
+# """
+# pass
+
+
+class Patch(object):
+ """ Patch for a single file """
+ def __init__(self):
+ self.source = None
+ self.target = None
+ self.hunks = []
+ self.hunkends = []
+ self.header = []
+
+ self.type = None
+
+
+class PatchSet(object):
+
+ def __init__(self, stream=None):
+ # --- API accessible fields ---
+
+ # name of the PatchSet (filename or ...)
+ self.name = None
+ # patch set type - one of constants
+ self.type = None
+
+ # list of Patch objects
+ self.items = []
+
+ self.errors = 0 # fatal parsing errors
+ self.warnings = 0 # non-critical warnings
+ # --- /API ---
+
+ if stream:
+ self.parse(stream)
+
+ def __len__(self):
+ return len(self.items)
+
+ def parse(self, stream):
+ """ parse unified diff
+ return True on success
+ """
+ lineends = dict(lf=0, crlf=0, cr=0)
+ nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1
+
+ p = None
+ hunk = None
+ # hunkactual variable is used to calculate hunk lines for comparison
+ hunkactual = dict(linessrc=None, linestgt=None)
+
+
+ class wrapumerate(enumerate):
+ """Enumerate wrapper that uses boolean end of stream status instead of
+ StopIteration exception, and properties to access line information.
+ """
+
+ def __init__(self, *args, **kwargs):
+ # we don't call parent, it is magically created by __new__ method
+
+ self._exhausted = False
+ self._lineno = False # after end of stream equal to the num of lines
+ self._line = False # will be reset to False after end of stream
+
+ def next(self):
+ """Try to read the next line and return True if it is available,
+ False if end of stream is reached."""
+ if self._exhausted:
+ return False
+
+ try:
+ self._lineno, self._line = super(wrapumerate, self).next()
+ except StopIteration:
+ self._exhausted = True
+ self._line = False
+ return False
+ return True
+
+ @property
+ def is_empty(self):
+ return self._exhausted
+
+ @property
+ def line(self):
+ return self._line
+
+ @property
+ def lineno(self):
+ return self._lineno
+
+ # define states (possible file regions) that direct parse flow
+ headscan = True # start with scanning header
+ filenames = False # lines starting with --- and +++
+
+ hunkhead = False # @@ -R +R @@ sequence
+ hunkbody = False #
+ hunkskip = False # skipping invalid hunk mode
+
+ hunkparsed = False # state after successfully parsed hunk
+
+ # regexp to match start of hunk, used groups - 1,3,4,6
+ re_hunk_start = re.compile("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?")
+
+ self.errors = 0
+ # temp buffers for header and filenames info
+ header = []
+ srcname = None
+ tgtname = None
+
+ # start of main cycle
+ # each parsing block already has line available in fe.line
+ fe = wrapumerate(stream)
+ while fe.next():
+
+ # -- deciders: these only switch state to decide who should process
+ # -- line fetched at the start of this cycle
+ if hunkparsed:
+ hunkparsed = False
+ if re_hunk_start.match(fe.line):
+ hunkhead = True
+ elif fe.line.startswith("--- "):
+ filenames = True
+ else:
+ headscan = True
+ # -- ------------------------------------
+
+ # read out header
+ if headscan:
+ while not fe.is_empty and not fe.line.startswith("--- "):
+ header.append(fe.line)
+ fe.next()
+ if fe.is_empty:
+ if p == None:
+ debug("no patch data found") # error is shown later
+ self.errors += 1
+ else:
+ info("%d unparsed bytes left at the end of stream" % len(''.join(header)))
+ self.warnings += 1
+ # TODO check for \No new line at the end..
+ # TODO test for unparsed bytes
+ # otherwise error += 1
+ # this is actually a loop exit
+ continue
+
+ headscan = False
+ # switch to filenames state
+ filenames = True
+
+ line = fe.line
+ lineno = fe.lineno
+
+
+ # hunkskip and hunkbody code skipped until definition of hunkhead is parsed
+ if hunkbody:
+ # [x] treat empty lines inside hunks as containing single space
+ # (this happens when diff is saved by copy/pasting to editor
+ # that strips trailing whitespace)
+ if line.strip("\r\n") == "":
+ debug("expanding empty line in a middle of hunk body")
+ self.warnings += 1
+ line = ' ' + line
+
+ # process line first
+ if re.match(r"^[- \+\\]", line):
+ # gather stats about line endings
+ if line.endswith("\r\n"):
+ p.hunkends["crlf"] += 1
+ elif line.endswith("\n"):
+ p.hunkends["lf"] += 1
+ elif line.endswith("\r"):
+ p.hunkends["cr"] += 1
+
+ if line.startswith("-"):
+ hunkactual["linessrc"] += 1
+ hunk.hasminus = True
+ elif line.startswith("+"):
+ hunkactual["linestgt"] += 1
+ hunk.hasplus = True
+ elif not line.startswith("\\"):
+ hunkactual["linessrc"] += 1
+ hunkactual["linestgt"] += 1
+ hunk.text.append(line)
+ # todo: handle \ No newline cases
+ else:
+ warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, p.target))
+ # add hunk status node
+ hunk.invalid = True
+ p.hunks.append(hunk)
+ self.errors += 1
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+
+ # check exit conditions
+ if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt:
+ warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno+1, p.target))
+ # add hunk status node
+ hunk.invalid = True
+ p.hunks.append(hunk)
+ self.errors += 1
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+ elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]:
+ # hunk parsed successfully
+ p.hunks.append(hunk)
+ # switch to hunkparsed state
+ hunkbody = False
+ hunkparsed = True
+
+ # detect mixed window/unix line ends
+ ends = p.hunkends
+ if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
+ warning("inconsistent line ends in patch hunks for %s" % p.source)
+ self.warnings += 1
+ if debugmode:
+ debuglines = dict(ends)
+ debuglines.update(file=p.target, hunk=nexthunkno)
+ debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
+ # fetch next line
+ continue
+
+ if hunkskip:
+ if re_hunk_start.match(line):
+ # switch to hunkhead state
+ hunkskip = False
+ hunkhead = True
+ elif line.startswith("--- "):
+ # switch to filenames state
+ hunkskip = False
+ filenames = True
+ if debugmode and len(self.items) > 0:
+ debug("- %2d hunks for %s" % (len(p.hunks), p.source))
+
+ if filenames:
+ if line.startswith("--- "):
+ if srcname != None:
+ # XXX testcase
+ warning("skipping false patch for %s" % srcname)
+ srcname = None
+ # XXX header += srcname
+ # double source filename line is encountered
+ # attempt to restart from this second line
+ re_filename = "^--- ([^\t]+)"
+ match = re.match(re_filename, line)
+ # todo: support spaces in filenames
+ if match:
+ srcname = match.group(1).strip()
+ else:
+ warning("skipping invalid filename at line %d" % lineno)
+ self.errors += 1
+ # XXX p.header += line
+ # switch back to headscan state
+ filenames = False
+ headscan = True
+ elif not line.startswith("+++ "):
+ if srcname != None:
+ warning("skipping invalid patch with no target for %s" % srcname)
+ self.errors += 1
+ srcname = None
+ # XXX header += srcname
+ # XXX header += line
+ else:
+ # this should be unreachable
+ warning("skipping invalid target patch")
+ filenames = False
+ headscan = True
+ else:
+ if tgtname != None:
+ # XXX seems to be a dead branch
+ warning("skipping invalid patch - double target at line %d" % lineno)
+ self.errors += 1
+ srcname = None
+ tgtname = None
+ # XXX header += srcname
+ # XXX header += tgtname
+ # XXX header += line
+ # double target filename line is encountered
+ # switch back to headscan state
+ filenames = False
+ headscan = True
+ else:
+ re_filename = "^\+\+\+ ([^\t]+)"
+ match = re.match(re_filename, line)
+ if not match:
+ warning("skipping invalid patch - no target filename at line %d" % lineno)
+ self.errors += 1
+ srcname = None
+ # switch back to headscan state
+ filenames = False
+ headscan = True
+ else:
+ if p: # for the first run p is None
+ self.items.append(p)
+ p = Patch()
+ p.source = srcname
+ srcname = None
+ p.target = match.group(1).strip()
+ p.header = header
+ header = []
+ # switch to hunkhead state
+ filenames = False
+ hunkhead = True
+ nexthunkno = 0
+ p.hunkends = lineends.copy()
+ continue
+
+ if hunkhead:
+ match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+ if not match:
+ if not p.hunks:
+ warning("skipping invalid patch with no hunks for file %s" % p.source)
+ self.errors += 1
+ # XXX review switch
+ # switch to headscan state
+ hunkhead = False
+ headscan = True
+ continue
+ else:
+ # TODO review condition case
+ # switch to headscan state
+ hunkhead = False
+ headscan = True
+ else:
+ hunk = Hunk()
+ hunk.startsrc = int(match.group(1))
+ hunk.linessrc = 1
+ if match.group(3): hunk.linessrc = int(match.group(3))
+ hunk.starttgt = int(match.group(4))
+ hunk.linestgt = 1
+ if match.group(6): hunk.linestgt = int(match.group(6))
+ hunk.invalid = False
+ hunk.text = []
+
+ hunkactual["linessrc"] = hunkactual["linestgt"] = 0
+
+ # switch to hunkbody state
+ hunkhead = False
+ hunkbody = True
+ nexthunkno += 1
+ continue
+
+ # /while fe.next()
+
+ if p:
+ self.items.append(p)
+
+ if not hunkparsed:
+ if hunkskip:
+ warning("warning: finished with errors, some hunks may be invalid")
+ elif headscan:
+ if len(self.items) == 0:
+ warning("error: no patch data found!")
+ return False
+ else: # extra data at the end of file
+ pass
+ else:
+ warning("error: patch stream is incomplete!")
+ self.errors += 1
+ if len(self.items) == 0:
+ return False
+
+ if debugmode and len(self.items) > 0:
+ debug("- %2d hunks for %s" % (len(p.hunks), p.source))
+
+ # XXX fix total hunks calculation
+ debug("total files: %d total hunks: %d" % (len(self.items),
+ sum(len(p.hunks) for p in self.items)))
+
+ # ---- detect patch and patchset types ----
+ for idx, p in enumerate(self.items):
+ self.items[idx].type = self._detect_type(p)
+
+ types = set([p.type for p in self.items])
+ if len(types) > 1:
+ self.type = MIXED
+ else:
+ self.type = types.pop()
+ # --------
+
+ self._normalize_filenames()
+
+ return (self.errors == 0)
+
+ def _detect_type(self, p):
+ """ detect and return type for the specified Patch object
+ analyzes header and filenames info
+
+ NOTE: must be run before filenames are normalized
+ """
+
+ # check for SVN
+ # - header starts with Index:
+ # - next line is ===... delimiter
+ # - filename is followed by revision number
+ # TODO add SVN revision
+ if (len(p.header) > 1 and p.header[-2].startswith("Index: ")
+ and p.header[-1].startswith("="*67)):
+ return SVN
+
+ # common checks for both HG and GIT
+ DVCS = ((p.source.startswith('a/') or p.source == '/dev/null')
+ and (p.target.startswith('b/') or p.target == '/dev/null'))
+
+ # GIT type check
+ # - header[-2] is like "diff --git a/oldname b/newname"
+ # - header[-1] is like "index <hash>..<hash><mode>"
+ # TODO add git rename diffs and add/remove diffs
+ # add git diff with spaced filename
+ # TODO http://www.kernel.org/pub/software/scm/git/docs/git-diff.html
+
+ # detect the start of diff header - there might be some comments before
+ if len(p.header) > 1:
+ for idx in reversed(range(len(p.header))):
+ if p.header[idx].startswith("diff --git"):
+ break
+ if re.match(r'diff --git a/[\w/.]+ b/[\w/.]+', p.header[idx]):
+ if (idx+1 < len(p.header)
+ and re.match(r'index \w{7}..\w{7} \d{6}', p.header[idx+1])):
+ if DVCS:
+ return GIT
+
+ # HG check
+ #
+ # - for plain HG format header is like "diff -r b2d9961ff1f5 filename"
+ # - for Git-style HG patches it is "diff --git a/oldname b/newname"
+ # - filename starts with a/, b/ or is equal to /dev/null
+ # - exported changesets also contain the header
+ # # HG changeset patch
+ # # User name(a)example.com
+ # ...
+ # TODO add MQ
+ # TODO add revision info
+ if len(p.header) > 0:
+ if DVCS and re.match(r'diff -r \w{12} .*', p.header[-1]):
+ return HG
+ if DVCS and p.header[-1].startswith('diff --git a/'):
+ if len(p.header) == 1: # native Git patch header len is 2
+ return HG
+ elif p.header[0].startswith('# HG changeset patch'):
+ return HG
+
+ return PLAIN
+
+
+ def _normalize_filenames(self):
+ """ sanitize filenames, normalizing paths, i.e.:
+ 1. strip a/ and b/ prefixes from GIT and HG style patches
+ 2. remove all references to parent directories (with warning)
+ 3. translate any absolute paths to relative (with warning)
+
+ [x] always use forward slashes to be crossplatform
+ (diff/patch were born as a unix utility after all)
+
+ return None
+ """
+ for i,p in enumerate(self.items):
+ if p.type in (HG, GIT):
+ # TODO: figure out how to deal with /dev/null entries
+ debug("stripping a/ and b/ prefixes")
+ if p.source != '/dev/null':
+ if not p.source.startswith("a/"):
+ warning("invalid source filename")
+ else:
+ p.source = p.source[2:]
+ if p.target != '/dev/null':
+ if not p.target.startswith("b/"):
+ warning("invalid target filename")
+ else:
+ p.target = p.target[2:]
+
+ p.source = xnormpath(p.source)
+ p.target = xnormpath(p.target)
+
+ sep = '/' # sep value can be hardcoded, but it looks nice this way
+
+ # references to parent are not allowed
+ if p.source.startswith(".." + sep):
+ warning("error: stripping parent path for source file patch no.%d" % (i+1))
+ self.warnings += 1
+ while p.source.startswith(".." + sep):
+ p.source = p.source.partition(sep)[2]
+ if p.target.startswith(".." + sep):
+ warning("error: stripping parent path for target file patch no.%d" % (i+1))
+ self.warnings += 1
+ while p.target.startswith(".." + sep):
+ p.target = p.target.partition(sep)[2]
+ # absolute paths are not allowed
+ if xisabs(p.source) or xisabs(p.target):
+ warning("error: absolute paths are not allowed - file no.%d" % (i+1))
+ self.warnings += 1
+ if xisabs(p.source):
+ warning("stripping absolute path from source name '%s'" % p.source)
+ p.source = xstrip(p.source)
+ if xisabs(p.target):
+ warning("stripping absolute path from target name '%s'" % p.target)
+ p.target = xstrip(p.target)
+
+ self.items[i].source = p.source
+ self.items[i].target = p.target
+
+
+ def diffstat(self):
+ """ calculate diffstat and return as a string
+ Notes:
+ - original diffstat ouputs target filename
+ - single + or - shouldn't escape histogram
+ """
+ names = []
+ insert = []
+ delete = []
+ namelen = 0
+ maxdiff = 0 # max number of changes for single file
+ # (for histogram width calculation)
+ for patch in self.items:
+ i,d = 0,0
+ for hunk in patch.hunks:
+ for line in hunk.text:
+ if line.startswith('+'):
+ i += 1
+ elif line.startswith('-'):
+ d += 1
+ names.append(patch.target)
+ insert.append(i)
+ delete.append(d)
+ namelen = max(namelen, len(patch.target))
+ maxdiff = max(maxdiff, i+d)
+ output = ''
+ statlen = len(str(maxdiff)) # stats column width
+ for i,n in enumerate(names):
+ # %-19s | %-4d %s
+ format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n"
+
+ hist = ''
+ # -- calculating histogram --
+ width = len(format % ('', '', ''))
+ histwidth = max(2, 80 - width)
+ if maxdiff < histwidth:
+ hist = "+"*insert[i] + "-"*delete[i]
+ else:
+ iratio = (float(insert[i]) / maxdiff) * histwidth
+ dratio = (float(delete[i]) / maxdiff) * histwidth
+
+ # make sure every entry gets at least one + or -
+ iwidth = 1 if 0 < iratio < 1 else int(iratio)
+ dwidth = 1 if 0 < dratio < 1 else int(dratio)
+ #print iratio, dratio, iwidth, dwidth, histwidth
+ hist = "+"*int(iwidth) + "-"*int(dwidth)
+ # -- /calculating +- histogram --
+ output += (format % (names[i], insert[i] + delete[i], hist))
+
+ output += (" %d files changed, %d insertions(+), %d deletions(-)"
+ % (len(names), sum(insert), sum(delete)))
+ return output
+
+
+ def apply(self, strip=0):
+ """ apply parsed patch
+ return True on success
+ """
+
+ total = len(self.items)
+ errors = 0
+ if strip:
+ # [ ] test strip level exceeds nesting level
+ # [ ] test the same only for selected files
+ # [ ] test if files end up being on the same level
+ try:
+ strip = int(strip)
+ except ValueError:
+ errors += 1
+ warning("error: strip parameter '%s' must be an integer" % strip)
+ strip = 0
+
+ #for fileno, filename in enumerate(self.source):
+ for i,p in enumerate(self.items):
+ f2patch = p.source
+ if strip:
+ debug("stripping %s leading component from '%s'" % (strip, f2patch))
+ f2patch = pathstrip(f2patch, strip)
+ if not exists(f2patch):
+ f2patch = p.target
+ if strip:
+ debug("stripping %s leading component from '%s'" % (strip, f2patch))
+ f2patch = pathstrip(f2patch, strip)
+ if not exists(f2patch):
+ warning("source/target file does not exist\n--- %s\n+++ %s" % (p.source, f2patch))
+ errors += 1
+ continue
+ if not isfile(f2patch):
+ warning("not a file - %s" % f2patch)
+ errors += 1
+ continue
+ filename = f2patch
+
+ debug("processing %d/%d:\t %s" % (i+1, total, filename))
+
+ # validate before patching
+ f2fp = open(filename)
+ hunkno = 0
+ hunk = p.hunks[hunkno]
+ hunkfind = []
+ hunkreplace = []
+ validhunks = 0
+ canpatch = False
+ for lineno, line in enumerate(f2fp):
+ if lineno+1 < hunk.startsrc:
+ continue
+ elif lineno+1 == hunk.startsrc:
+ hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
+ hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
+ #pprint(hunkreplace)
+ hunklineno = 0
+
+ # todo \ No newline at end of file
+
+ # check hunks in source file
+ if lineno+1 < hunk.startsrc+len(hunkfind)-1:
+ if line.rstrip("\r\n") == hunkfind[hunklineno]:
+ hunklineno+=1
+ else:
+ info("file %d/%d:\t %s" % (i+1, total, filename))
+ info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno))
+ info(" expected: %s" % hunkfind[hunklineno])
+ info(" actual : %s" % line.rstrip("\r\n"))
+ # not counting this as error, because file may already be patched.
+ # check if file is already patched is done after the number of
+ # invalid hunks if found
+ # TODO: check hunks against source/target file in one pass
+ # API - check(stream, srchunks, tgthunks)
+ # return tuple (srcerrs, tgterrs)
+
+ # continue to check other hunks for completeness
+ hunkno += 1
+ if hunkno < len(p.hunks):
+ hunk = p.hunks[hunkno]
+ continue
+ else:
+ break
+
+ # check if processed line is the last line
+ if lineno+1 == hunk.startsrc+len(hunkfind)-1:
+ debug(" hunk no.%d for file %s -- is ready to be patched" % (hunkno+1, filename))
+ hunkno+=1
+ validhunks+=1
+ if hunkno < len(p.hunks):
+ hunk = p.hunks[hunkno]
+ else:
+ if validhunks == len(p.hunks):
+ # patch file
+ canpatch = True
+ break
+ else:
+ if hunkno < len(p.hunks):
+ warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
+ errors += 1
+
+ f2fp.close()
+
+ if validhunks < len(p.hunks):
+ if self._match_file_hunks(filename, p.hunks):
+ warning("already patched %s" % filename)
+ else:
+ warning("source file is different - %s" % filename)
+ errors += 1
+ if canpatch:
+ backupname = filename+".orig"
+ if exists(backupname):
+ warning("can't backup original file to %s - aborting" % backupname)
+ else:
+ import shutil
+ shutil.move(filename, backupname)
+ if self.write_hunks(backupname, filename, p.hunks):
+ info("successfully patched %d/%d:\t %s" % (i+1, total, filename))
+ os.unlink(backupname)
+ else:
+ errors += 1
+ warning("error patching file %s" % filename)
+ shutil.copy(filename, filename+".invalid")
+ warning("invalid version is saved to %s" % filename+".invalid")
+ # todo: proper rejects
+ shutil.move(backupname, filename)
+
+ # todo: check for premature eof
+ return (errors == 0)
+
+
+ def can_patch(self, filename):
+ """ Check if specified filename can be patched. Returns None if file can
+ not be found among source filenames. False if patch can not be applied
+ cleanly. True otherwise.
+
+ :returns: True, False or None
+ """
+ filename = abspath(filename)
+ for p in self.items:
+ if filename == abspath(p.source):
+ return self._match_file_hunks(filename, p.hunks)
+ return None
+
+
+ def _match_file_hunks(self, filepath, hunks):
+ matched = True
+ fp = open(abspath(filepath))
+
+ class NoMatch(Exception):
+ pass
+
+ lineno = 1
+ line = fp.readline()
+ hno = None
+ try:
+ for hno, h in enumerate(hunks):
+ # skip to first line of the hunk
+ while lineno < h.starttgt:
+ if not len(line): # eof
+ debug("check failed - premature eof before hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+ for hline in h.text:
+ if hline.startswith("-"):
+ continue
+ if not len(line):
+ debug("check failed - premature eof on hunk: %d" % (hno+1))
+ # todo: \ No newline at the end of file
+ raise NoMatch
+ if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
+ debug("file is not patched - failed hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+
+ except NoMatch:
+ matched = False
+ # todo: display failed hunk, i.e. expected/found
+
+ fp.close()
+ return matched
+
+
+ def patch_stream(self, instream, hunks):
+ """ Generator that yields stream patched with hunks iterable
+
+ Converts lineends in hunk lines to the best suitable format
+ autodetected from input
+ """
+
+ # todo: At the moment substituted lineends may not be the same
+ # at the start and at the end of patching. Also issue a
+ # warning/throw about mixed lineends (is it really needed?)
+
+ hunks = iter(hunks)
+
+ srclineno = 1
+
+ lineends = {'\n':0, '\r\n':0, '\r':0}
+ def get_line():
+ """
+ local utility function - return line from source stream
+ collecting line end statistics on the way
+ """
+ line = instream.readline()
+ # 'U' mode works only with text files
+ if line.endswith("\r\n"):
+ lineends["\r\n"] += 1
+ elif line.endswith("\n"):
+ lineends["\n"] += 1
+ elif line.endswith("\r"):
+ lineends["\r"] += 1
+ return line
+
+ for hno, h in enumerate(hunks):
+ debug("hunk %d" % (hno+1))
+ if h.hasminus:
+ warning("Change removes/replaces some text; INVESTIGATE AND APPLY (OR NOT) MANUALLY")
+ warning("Change:")
+ changeText = h.originalText()
+ if len(changeText) > 1000:
+ changeText = changeText[0:999] + "...\n"
+ warning(changeText)
+ else:
+ # skip to line just before hunk starts
+ while srclineno < h.startsrc:
+ yield get_line()
+ srclineno += 1
+
+ for hline in h.text:
+ # todo: check \ No newline at the end of file
+ if hline.startswith("-") or hline.startswith("\\"):
+ get_line()
+ srclineno += 1
+ continue
+ else:
+ if not hline.startswith("+"):
+ get_line()
+ srclineno += 1
+ line2write = hline[1:]
+ # detect if line ends are consistent in source file
+ if sum([bool(lineends[x]) for x in lineends]) == 1:
+ newline = [x for x in lineends if lineends[x] != 0][0]
+ yield line2write.rstrip("\r\n")+newline
+ else: # newlines are mixed
+ yield line2write
+
+ for line in instream:
+ yield line
+
+
+ def write_hunks(self, srcname, tgtname, hunks):
+ src = open(srcname, "rb")
+ tgt = open(tgtname, "wb")
+
+ debug("processing target file %s" % tgtname)
+
+ tgt.writelines(self.patch_stream(src, hunks))
+
+ tgt.close()
+ src.close()
+ # [ ] TODO: add test for permission copy
+ shutil.copymode(srcname, tgtname)
+ return True
+
+
+
+if __name__ == "__main__":
+ from optparse import OptionParser
+ from os.path import exists
+ import sys
+
+ opt = OptionParser(usage="1. %prog [options] unified.diff\n"
+ " 2. %prog [options] http://host/patch\n"
+ " 3. %prog [options] -- < unified.diff",
+ version="python-patch %s" % __version__)
+ opt.add_option("-q", "--quiet", action="store_const", dest="verbosity",
+ const=0, help="print only warnings and errors", default=1)
+ opt.add_option("-v", "--verbose", action="store_const", dest="verbosity",
+ const=2, help="be verbose")
+ opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
+ opt.add_option("--diffstat", action="store_true", dest="diffstat",
+ help="print diffstat and exit")
+ opt.add_option("-p", "--strip", type="int", metavar='N', default=0,
+ help="strip N path components from filenames")
+ (options, args) = opt.parse_args()
+
+ if not args and sys.argv[-1:] != ['--']:
+ opt.print_version()
+ opt.print_help()
+ sys.exit()
+ readstdin = (sys.argv[-1:] == ['--'] and not args)
+
+ debugmode = options.debugmode
+
+ verbosity_levels = {0:logging.WARNING, 1:logging.INFO, 2:logging.DEBUG}
+ loglevel = verbosity_levels[options.verbosity]
+ logformat = "%(message)s"
+ if debugmode:
+ loglevel = logging.DEBUG
+ logformat = "%(levelname)8s %(message)s"
+ logger.setLevel(loglevel)
+ loghandler = logging.StreamHandler()
+ loghandler.setFormatter(logging.Formatter(logformat))
+ logger.addHandler(loghandler)
+
+
+ if readstdin:
+ patch = PatchSet(sys.stdin)
+ else:
+ patchfile = args[0]
+ urltest = patchfile.split(':')[0]
+ if (':' in patchfile and urltest.isalpha()
+ and len(urltest) > 1): # one char before : is a windows drive letter
+ patch = fromurl(patchfile)
+ else:
+ if not exists(patchfile) or not isfile(patchfile):
+ sys.exit("patch file does not exist - %s" % patchfile)
+ patch = fromfile(patchfile)
+
+ if options.diffstat:
+ print patch.diffstat()
+ sys.exit(0)
+
+ #pprint(patch)
+ patch.apply(options.strip) or sys.exit(-1)
+
+ # todo: document and test line ends handling logic - patch.py detects proper line-endings
# for inserted hunks and issues a warning if patched file has inconsistent line ends
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/conf.py
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -274,6 +274,6 @@
return Mock()
# adding pbs_python, DRMAA_python, markupsafe, and drmaa here had no effect.
-MOCK_MODULES = ['tables', 'decorator']
+MOCK_MODULES = ['tables', 'decorator', 'numpy']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/index.rst
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,12 +1,41 @@
Galaxy Code Documentation
*************************
-Galaxy is an open, web-based platform for accessible, reproducible, and
+Galaxy_ is an open, web-based platform for accessible, reproducible, and
transparent computational biomedical research.
-- Accessible: Users without programming experience can easily specify parameters and run tools and workflows.
-- Reproducible: Galaxy captures information so that any user can repeat and understand a complete computational analysis.
-- Transparent: Users share and publish analyses via the web and create Pages, interactive, web-based documents that describe a complete analysis.
+- *Accessible:* Users without programming experience can easily specify parameters and run tools and workflows.
+- *Reproducible:* Galaxy captures information so that any user can repeat and understand a complete computational analysis.
+- *Transparent:* Users share and publish analyses via the web and create Pages, interactive, web-based documents that describe a complete analysis.
+
+Two copies of the Galaxy code documentation are published by the Galaxy Project:
+
+- Galaxy-Dist_: This describes the code in the `most recent official release`_ of Galaxy.
+- Galaxy-Central_: Describes the `current code in the development branch`_ of Galaxy. This is the latest checkin, bleeding edge version of the code. The documentation should never be more than an hour behind the code.
+
+Both copies are hosted at ReadTheDocs_, a publicly supported web site for hosting project documentation.
+
+If you have your own copy of the Galaxy source code, you can also generate your own version of this documentation:
+
+::
+
+ $ cd doc
+ $ make html
+
+The generated documentation will be in ``doc/build/html/`` and can be viewed with a web browser. Note that you will need to install Sphinx and a fair number of module dependencies before this will produce output.
+
+.. _Galaxy: http://galaxyproject.org/
+.. _Galaxy-Dist: https://galaxy-dist.readthedocs.org/
+.. _most recent official release: https://bitbucket.org/galaxy/galaxy-dist
+.. _Galaxy-Central: https://galaxy-central.readthedocs.org/
+.. _current code in the development branch: https://bitbucket.org/galaxy/galaxy-central
+.. _ReadTheDocs: https://readthedocs.org/
+
+
+For more on the Galaxy Project, please visit the `project home page`_.
+
+.. _project home page: http://galaxyproject.org/
+
Contents
========
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.jobs.rst
--- a/doc/source/lib/galaxy.jobs.rst
+++ b/doc/source/lib/galaxy.jobs.rst
@@ -48,6 +48,7 @@
galaxy.jobs.actions
galaxy.jobs.deferred
+ galaxy.jobs.rules
galaxy.jobs.runners
galaxy.jobs.splitters
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.jobs.runners.rst
--- a/doc/source/lib/galaxy.jobs.runners.rst
+++ b/doc/source/lib/galaxy.jobs.runners.rst
@@ -57,14 +57,6 @@
:undoc-members:
:show-inheritance:
-:mod:`sge` Module
------------------
-
-.. automodule:: galaxy.jobs.runners.sge
- :members:
- :undoc-members:
- :show-inheritance:
-
:mod:`tasks` Module
-------------------
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.tool_shed.rst
--- a/doc/source/lib/galaxy.tool_shed.rst
+++ b/doc/source/lib/galaxy.tool_shed.rst
@@ -9,6 +9,14 @@
:undoc-members:
:show-inheritance:
+:mod:`common_util` Module
+-------------------------
+
+.. automodule:: galaxy.tool_shed.common_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`encoding_util` Module
---------------------------
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.util.rst
--- a/doc/source/lib/galaxy.util.rst
+++ b/doc/source/lib/galaxy.util.rst
@@ -25,6 +25,14 @@
:undoc-members:
:show-inheritance:
+:mod:`debugging` Module
+-----------------------
+
+.. automodule:: galaxy.util.debugging
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`expressions` Module
-------------------------
@@ -113,6 +121,14 @@
:undoc-members:
:show-inheritance:
+:mod:`shed_util_common` Module
+------------------------------
+
+.. automodule:: galaxy.util.shed_util_common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`streamball` Module
------------------------
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.webapps.community.rst
--- a/doc/source/lib/galaxy.webapps.community.rst
+++ b/doc/source/lib/galaxy.webapps.community.rst
@@ -42,4 +42,5 @@
galaxy.webapps.community.framework
galaxy.webapps.community.model
galaxy.webapps.community.security
+ galaxy.webapps.community.util
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.webapps.community.util.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.util.rst
@@ -0,0 +1,27 @@
+util Package
+============
+
+:mod:`container_util` Module
+----------------------------
+
+.. automodule:: galaxy.webapps.community.util.container_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hgweb_config` Module
+--------------------------
+
+.. automodule:: galaxy.webapps.community.util.hgweb_config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`shed_statistics` Module
+-----------------------------
+
+.. automodule:: galaxy.webapps.community.util.shed_statistics
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 doc/source/lib/galaxy.webapps.galaxy.api.rst
--- a/doc/source/lib/galaxy.webapps.galaxy.api.rst
+++ b/doc/source/lib/galaxy.webapps.galaxy.api.rst
@@ -293,6 +293,14 @@
:undoc-members:
:show-inheritance:
+:mod:`item_tags` Module
+-----------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.item_tags
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`libraries` Module
-----------------------
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -29,6 +29,7 @@
simplejson = 2.1.1
threadframe = 0.2
guppy = 0.1.8
+; msgpack_python = 0.2.4
[eggs:noplatform]
amqplib = 0.6.1
@@ -65,6 +66,7 @@
Babel = 0.9.4
wchartype = 0.1
Whoosh = 0.3.18
+; fluent_logger = 0.3.3
; extra version information
[tags]
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -23,11 +23,13 @@
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+ self.name = 'galaxy'
self.new_installation = False
# Read config file and check for errors
self.config = config.Configuration( **kwargs )
self.config.check()
config.configure_logging( self.config )
+ self.configure_fluent_log()
# Determine the database url
if self.config.database_connection:
db_url = self.config.database_connection
@@ -53,7 +55,8 @@
db_url,
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
- object_store = self.object_store )
+ object_store = self.object_store,
+ trace_logger=self.trace_logger )
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
@@ -148,6 +151,7 @@
self.job_stop_queue = self.job_manager.job_stop_queue
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection( self.config.external_service_type_config_file, self.config.external_service_type_path, self )
+
def shutdown( self ):
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -160,3 +164,10 @@
os.unlink( self.datatypes_registry.integrated_datatypes_configs )
except:
pass
+
+ def configure_fluent_log( self ):
+ if self.config.fluent_log:
+ from galaxy.util.log.fluent_log import FluentTraceLogger
+ self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ else:
+ self.trace_logger = None
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -261,6 +261,10 @@
self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
# This is for testing new library browsing capabilities.
self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+ # Logging with fluentd
+ self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
+ self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
+ self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
def __read_tool_job_config( self, global_conf_parser, section, key ):
try:
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -114,7 +114,8 @@
self.datatype_elems.remove( in_memory_elem )
else:
# Keep an in-memory list of datatype elems to enable persistence.
- self.datatype_elems.append( elem )
+ if extension not in self.datatypes_by_extension:
+ self.datatype_elems.append( elem )
if extension and extension in self.datatypes_by_extension and deactivate:
# We are deactivating an installed tool shed repository, so eliminate the datatype from the registry.
# TODO: Handle deactivating datatype converters, etc before removing from self.datatypes_by_extension.
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -267,20 +267,20 @@
def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None):
if chunk:
return self.get_chunk(trans, dataset, chunk)
+ elif to_ext or not preview:
+ return self._serve_raw(trans, dataset, to_ext)
elif dataset.metadata.columns > 50:
#Fancy tabular display is only suitable for datasets without an incredibly large number of columns.
#We should add a new datatype 'matrix', with it's own draw method, suitable for this kind of data.
#For now, default to the old behavior, ugly as it is. Remove this after adding 'matrix'.
max_peek_size = 1000000 # 1 MB
- if not preview or os.stat( dataset.file_name ).st_size < max_peek_size:
+ if os.stat( dataset.file_name ).st_size < max_peek_size:
return open( dataset.file_name )
else:
trans.response.set_content_type( "text/html" )
return trans.stream_template_mako( "/dataset/large_file.mako",
truncated_data = open( dataset.file_name ).read(max_peek_size),
data = dataset)
- elif to_ext or not preview:
- return self._serve_raw(trans, dataset, to_ext)
else:
column_names = 'null'
if dataset.metadata.column_names:
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -686,7 +686,7 @@
if self.app.config.set_metadata_externally:
self.external_output_metadata.cleanup_external_metadata( self.sa_session )
galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
- galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
+ galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job()
galaxy.tools.genome_index.GenomeIndexToolWrapper( self.job_id ).postprocessing( self.sa_session, self.app )
self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, extra_dir=str(self.job_id))
except:
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -61,7 +61,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.__monitor )
+ self.monitor_thread = threading.Thread( name="JobHandlerQueue.monitor_thread", target=self.__monitor )
+ self.monitor_thread.setDaemon( True )
def start( self ):
"""
@@ -353,7 +354,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="JobHandlerStopQueue.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
log.info( "job handler stop queue started" )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -68,7 +68,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.__monitor )
+ self.monitor_thread = threading.Thread( name="JobManagerQueue.monitor_thread", target=self.__monitor )
+ self.monitor_thread.setDaemon( True )
# Recover jobs at startup
self.__check_jobs_at_startup()
# Start the queue
@@ -219,7 +220,8 @@
# Helper for interruptable sleep
self.sleeper = Sleeper()
self.running = True
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="JobManagerStopQueue.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
log.info( "job manager stop queue started" )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -105,13 +105,14 @@
self.monitor_queue = Queue()
self.ds = drmaa.Session()
self.ds.initialize()
- self.monitor_thread = threading.Thread( target=self.monitor )
+ self.monitor_thread = threading.Thread( name="DRMAAJobRunner.monitor_thread", target=self.monitor )
+ self.monitor_thread.setDaemon( True )
self.monitor_thread.start()
self.work_queue = Queue()
self.work_threads = []
nworkers = app.config.cluster_job_queue_workers
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "DRMAAJobRunner.work_threads-%d" % i ), target=self.run_next )
worker.start()
self.work_threads.append( worker )
log.debug( "%d workers ready" % nworkers )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -37,7 +37,8 @@
nworkers = app.config.local_job_queue_workers
log.info( "starting workers" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "LocalJobRunner.threads-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -229,7 +229,8 @@
nworkers = app.config.local_job_queue_workers
log.info( "starting workers" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "LwrJobRunner.thread-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -29,7 +29,8 @@
nworkers = app.config.local_task_queue_workers
log.info( "Starting tasked-job runners" )
for i in range( nworkers ):
- worker = threading.Thread( target=self.run_next )
+ worker = threading.Thread( name=( "TaskedJobRunner-%d" % i ), target=self.run_next )
+ worker.setDaemon( True )
worker.start()
self.threads.append( worker )
log.debug( "%d workers ready", nworkers )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1335,7 +1335,9 @@
# Loop through sources until viable one is found.
for source in source_list:
msg = self.convert_dataset( trans, source )
- if msg == self.conversion_messages.PENDING:
+ # No message or PENDING means that source is viable. No
+ # message indicates conversion was done and is successful.
+ if not msg or msg == self.conversion_messages.PENDING:
data_source = source
break
@@ -3014,6 +3016,7 @@
installation_status = Bunch( NEW='New',
CLONING='Cloning',
SETTING_TOOL_VERSIONS='Setting tool versions',
+ INSTALLING_REPOSITORY_DEPENDENCIES='Installing repository dependencies',
INSTALLING_TOOL_DEPENDENCIES='Installing tool dependencies',
LOADING_PROPRIETARY_DATATYPES='Loading proprietary datatypes',
INSTALLED='Installed',
@@ -3148,20 +3151,89 @@
def can_reinstall_or_activate( self ):
return self.deleted
@property
+ def has_repository_dependencies( self ):
+ if self.metadata:
+ return 'repository_dependencies' in self.metadata
+ return False
+ @property
def includes_tools( self ):
- return self.metadata and 'tools' in self.metadata
+ if self.metadata:
+ return 'tools' in self.metadata
+ return False
@property
def includes_tool_dependencies( self ):
- return self.metadata and 'tool_dependencies' in self.metadata
+ if self.metadata:
+ return 'tool_dependencies' in self.metadata
+ return False
@property
def includes_workflows( self ):
- return self.metadata and 'workflows' in self.metadata
+ if self.metadata:
+ return 'workflows' in self.metadata
+ return False
@property
def in_error_state( self ):
return self.status == self.installation_status.ERROR
@property
def has_readme_files( self ):
- return self.metadata and 'readme_files' in self.metadata
+ if self.metadata:
+ return 'readme_files' in self.metadata
+ return False
+ @property
+ def repository_dependencies( self ):
+ required_repositories = []
+ for rrda in self.required_repositories:
+ repository_dependency = rrda.repository_dependency
+ required_repository = repository_dependency.repository
+ required_repositories.append( required_repository )
+ return required_repositories
+ @property
+ def installed_repository_dependencies( self ):
+ """Return the repository's repository dependencies that are currently installed."""
+ installed_required_repositories = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.INSTALLED:
+ installed_required_repositories.append( required_repository )
+ return installed_required_repositories
+ @property
+ def missing_repository_dependencies( self ):
+ """Return the repository's repository dependencies that are not currently installed, and may not ever have been installed."""
+ missing_required_repositories = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status not in [ self.installation_status.INSTALLED ]:
+ missing_required_repositories.append( required_repository )
+ return missing_required_repositories
+ @property
+ def repository_dependencies_being_installed( self ):
+ required_repositories_being_installed = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.INSTALLING:
+ required_repositories_being_installed.append( required_repository )
+ return required_repositories_being_installed
+ @property
+ def repository_dependencies_missing_or_being_installed( self ):
+ required_repositories_missing_or_being_installed = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status in [ self.installation_status.ERROR,
+ self.installation_status.INSTALLING,
+ self.installation_status.NEVER_INSTALLED,
+ self.installation_status.UNINSTALLED ]:
+ required_repositories_missing_or_being_installed.append( required_repository )
+ return required_repositories_missing_or_being_installed
+ @property
+ def repository_dependencies_with_installation_errors( self ):
+ required_repositories_with_installation_errors = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.ERROR:
+ required_repositories_with_installation_errors.append( required_repository )
+ return required_repositories_with_installation_errors
+ @property
+ def uninstalled_repository_dependencies( self ):
+ """Return the repository's repository dependencies that have been uninstalled."""
+ uninstalled_required_repositories = []
+ for required_repository in self.repository_dependencies:
+ if required_repository.status == self.installation_status.UNINSTALLED:
+ uninstalled_required_repositories.append( required_repository )
+ return uninstalled_required_repositories
@property
def installed_tool_dependencies( self ):
"""Return the repository's tool dependencies that are currently installed."""
@@ -3211,6 +3283,15 @@
uninstalled_tool_dependencies.append( tool_dependency )
return uninstalled_tool_dependencies
+class RepositoryRepositoryDependencyAssociation( object ):
+ def __init__( self, tool_shed_repository_id=None, repository_dependency_id=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+ self.repository_dependency_id = repository_dependency_id
+
+class RepositoryDependency( object ):
+ def __init__( self, tool_shed_repository_id=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+
class ToolDependency( object ):
installation_status = Bunch( NEVER_INSTALLED='Never installed',
INSTALLING='Installing',
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -409,6 +409,19 @@
Column( "status", TrimmedString( 255 ) ),
Column( "error_message", TEXT ) )
+RepositoryRepositoryDependencyAssociation.table = Table( 'repository_repository_dependency_association', metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+ Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+RepositoryDependency.table = Table( "repository_dependency", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
ToolDependency.table = Table( "tool_dependency", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -1744,7 +1757,19 @@
tool_dependencies=relation( ToolDependency,
primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ),
order_by=ToolDependency.table.c.name,
- backref='tool_shed_repository' ) ) )
+ backref='tool_shed_repository' ),
+ required_repositories=relation( RepositoryRepositoryDependencyAssociation,
+ primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
+
+assign_mapper( context, RepositoryRepositoryDependencyAssociation, RepositoryRepositoryDependencyAssociation.table,
+ properties=dict( repository=relation( ToolShedRepository,
+ primaryjoin=( RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id == ToolShedRepository.table.c.id ) ),
+ repository_dependency=relation( RepositoryDependency,
+ primaryjoin=( RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == RepositoryDependency.table.c.id ) ) ) )
+
+assign_mapper( context, RepositoryDependency, RepositoryDependency.table,
+ properties=dict( repository=relation( ToolShedRepository,
+ primaryjoin=( RepositoryDependency.table.c.tool_shed_repository_id == ToolShedRepository.table.c.id ) ) ) )
assign_mapper( context, ToolDependency, ToolDependency.table )
@@ -1925,7 +1950,7 @@
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None ):
+def init( file_path, url, engine_options={}, create_tables=False, database_query_profiling_proxy=False, object_store=None, trace_logger=None ):
"""Connect mappings to the database"""
# Connect dataset to the file path
Dataset.file_path = file_path
@@ -1937,6 +1962,10 @@
if database_query_profiling_proxy:
import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
proxy = logging_connection_proxy.LoggingProxy()
+ # If metlog is enabled, do micrologging
+ elif trace_logger:
+ import galaxy.model.orm.logging_connection_proxy as logging_connection_proxy
+ proxy = logging_connection_proxy.TraceLoggerProxy( trace_logger )
else:
proxy = None
# Create the database engine
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
@@ -0,0 +1,58 @@
+"""
+Migration script to add the repository_dependency and repository_repository_dependency_association tables.
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+import sys, logging
+from galaxy.model.custom_types import *
+from sqlalchemy.exc import *
+import datetime
+now = datetime.datetime.utcnow
+
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+
+RepositoryDependency_table = Table( "repository_dependency", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
+RepositoryRepositoryDependencyAssociation_table = Table( "repository_repository_dependency_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+ Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ try:
+ RepositoryDependency_table.create()
+ except Exception, e:
+ log.debug( "Creating repository_dependency table failed: %s" % str( e ) )
+ try:
+ RepositoryRepositoryDependencyAssociation_table.create()
+ except Exception, e:
+ log.debug( "Creating repository_repository_dependency_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ try:
+ RepositoryRepositoryDependencyAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping repository_repository_dependency_association table failed: %s" % str( e ) )
+ try:
+ RepositoryDependency_table.drop()
+ except Exception, e:
+ log.debug( "Dropping repository_dependency table failed: %s" % str( e ) )
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/model/orm/logging_connection_proxy.py
--- a/lib/galaxy/model/orm/logging_connection_proxy.py
+++ b/lib/galaxy/model/orm/logging_connection_proxy.py
@@ -18,13 +18,31 @@
rval = []
for frame, fname, line, funcname, _, _ in inspect.stack()[2:]:
rval.append( "%s:%s@%d" % ( stripwd( fname ), funcname, line ) )
- return " > ".join( rval )
+ return rval
class LoggingProxy(ConnectionProxy):
+ """
+ Logs SQL statements using standard logging module
+ """
def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
start = time.clock()
rval = execute(cursor, statement, parameters, context)
duration = time.clock() - start
log.debug( "statement: %r parameters: %r executemany: %r duration: %r stack: %r",
- statement, parameters, executemany, duration, pretty_stack() )
+ statement, parameters, executemany, duration, " > ".join( pretty_stack() ) )
return rval
+
+class TraceLoggerProxy(ConnectionProxy):
+ """
+ Logs SQL statements using a metlog client
+ """
+ def __init__( self, trace_logger ):
+ self.trace_logger = trace_logger
+ def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
+ start = time.clock()
+ rval = execute(cursor, statement, parameters, context)
+ duration = time.clock() - start
+ self.trace_logger.log( "sqlalchemy_query",
+ message="Query executed", statement=statement, parameters=parameters,
+ executemany=executemany, duration=duration )
+ return rval
\ No newline at end of file
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -3,7 +3,8 @@
"""
import os
import galaxy.util.shed_util
-from galaxy.model.orm import *
+import galaxy.util.shed_util_common
+from galaxy.model.orm import and_
from galaxy import eggs
import pkg_resources
@@ -27,7 +28,7 @@
ElementInclude.include( root )
tool_path = root.get( 'tool_path', None )
if tool_path:
- tool_shed = galaxy.util.shed_util.clean_tool_shed_url( tool_shed_repository.tool_shed )
+ tool_shed = galaxy.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed )
relative_path = os.path.join( tool_path,
tool_shed,
'repos',
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/tool_shed/common_util.py
--- a/lib/galaxy/tool_shed/common_util.py
+++ b/lib/galaxy/tool_shed/common_util.py
@@ -1,7 +1,7 @@
import os, urllib2
from galaxy import util
from galaxy.util.odict import odict
-from galaxy.tool_shed.encoding_util import *
+from galaxy.tool_shed import encoding_util
REPOSITORY_OWNER = 'devteam'
@@ -36,7 +36,7 @@
print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
if tool_shed_accessible:
if text:
- tool_dependencies_dict = tool_shed_decode( text )
+ tool_dependencies_dict = encoding_util.tool_shed_decode( text )
for dependency_key, requirements_dict in tool_dependencies_dict.items():
tool_dependency_name = requirements_dict[ 'name' ]
tool_dependency_version = requirements_dict[ 'version' ]
diff -r 087a08c43cb5ac21b5c3f73240dbcde1355fa089 -r caaab03824478384c256fc6b5678bb39dbf8f9f2 lib/galaxy/tool_shed/encoding_util.py
--- a/lib/galaxy/tool_shed/encoding_util.py
+++ b/lib/galaxy/tool_shed/encoding_util.py
@@ -1,5 +1,5 @@
-import binascii
-from galaxy.util.hash_util import *
+import binascii, logging
+from galaxy.util.hash_util import hmac_new
from galaxy.util.json import json_fix
from galaxy import eggs
@@ -8,7 +8,10 @@
pkg_resources.require( "simplejson" )
import simplejson
+log = logging.getLogger( __name__ )
+
encoding_sep = '__esep__'
+encoding_sep2 = '__esepii__'
def tool_shed_decode( value ):
# Extract and verify hash
@@ -21,12 +24,12 @@
try:
values = simplejson.loads( value )
except Exception, e:
- log.debug( "Decoding json value from tool shed threw exception: %s" % str( e ) )
+ log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
if values is not None:
try:
return json_fix( values )
except Exception, e:
- log.debug( "Fixing decoded json value from tool shed threw exception: %s" % str( e ) )
+ log.debug( "Fixing decoded json values '%s' from tool shed threw exception: %s" % ( str( values ), str( e ) ) )
fixed_values = values
if values is None:
values = value
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Handle deactivated repository dependencies when installing or reinstalling a tool shed repository.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bcb446fd0248/
changeset: bcb446fd0248
user: greg
date: 2013-01-10 00:08:51
summary: Handle deactivated repository dependencies when installing or reinstalling a tool shed repository.
affected #: 2 files
diff -r a91d8ea3c9e2e9d6cc8cfd85c7085bf1175b4f5c -r bcb446fd02481aaba2bbe72d8298d93ff6698c99 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -23,6 +23,38 @@
log = logging.getLogger( __name__ )
+def activate_repository( trans, repository ):
+ repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ repository.deleted = False
+ repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
+ if repository.includes_tools:
+ metadata = repository.metadata
+ repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata )
+ # Reload tools into the appropriate tool panel section.
+ tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
+ add_to_tool_panel( trans.app,
+ repository.name,
+ repository_clone_url,
+ repository.installed_changeset_revision,
+ repository_tools_tups,
+ repository.owner,
+ shed_tool_conf,
+ tool_panel_dict,
+ new_install=False )
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ if repository.includes_datatypes:
+ if tool_path:
+ repository_install_dir = os.path.abspath ( os.path.join( tool_path, relative_install_dir ) )
+ else:
+ repository_install_dir = os.path.abspath ( relative_install_dir )
+ # Activate proprietary datatypes.
+ installed_repository_dict = load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=False )
+ if installed_repository_dict and 'converter_path' in installed_repository_dict:
+ load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=False )
+ if installed_repository_dict and 'display_path' in installed_repository_dict:
+ load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
# A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -227,6 +259,10 @@
installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
metadata_dict = installed_tool_shed_repository.metadata
dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ elif installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ # The current tool shed repository is deactivated, so updating its database record is not necessary - just activate it.
+ activate_repository( trans, installed_tool_shed_repository )
+ can_update = False
else:
# The tool shed repository currently being processed is already installed or is in the process of being installed, so it's record
# in the database cannot be updated.
diff -r a91d8ea3c9e2e9d6cc8cfd85c7085bf1175b4f5c -r bcb446fd02481aaba2bbe72d8298d93ff6698c99 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -321,43 +321,19 @@
"""Activate a repository that was deactivated but not uninstalled."""
repository_id = kwd[ 'id' ]
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
- shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, repository )
- repository.deleted = False
- repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
- if repository.includes_tools:
- metadata = repository.metadata
- try:
- repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata )
- except Exception, e:
- error = "Error activating repository %s: %s" % ( repository.name, str( e ) )
- log.debug( error )
- return trans.show_error_message( '%s.<br/>You may be able to resolve this by uninstalling and then reinstalling the repository. Click <a href="%s">here</a> to uninstall the repository.'
- % ( error, web.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) ) ) )
- # Reload tools into the appropriate tool panel section.
- tool_panel_dict = repository.metadata[ 'tool_panel_section' ]
- shed_util.add_to_tool_panel( trans.app,
- repository.name,
- repository_clone_url,
- repository.installed_changeset_revision,
- repository_tools_tups,
- repository.owner,
- shed_tool_conf,
- tool_panel_dict,
- new_install=False )
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- if repository.includes_datatypes:
- if tool_path:
- repository_install_dir = os.path.abspath ( os.path.join( tool_path, relative_install_dir ) )
- else:
- repository_install_dir = os.path.abspath ( relative_install_dir )
- # Activate proprietary datatypes.
- installed_repository_dict = shed_util.load_installed_datatypes( trans.app, repository, repository_install_dir, deactivate=False )
- if installed_repository_dict and 'converter_path' in installed_repository_dict:
- shed_util.load_installed_datatype_converters( trans.app, installed_repository_dict, deactivate=False )
- if installed_repository_dict and 'display_path' in installed_repository_dict:
- shed_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
+ try:
+ shed_util.activate_repository( trans, repository )
+ except Exception, e:
+ error_message = "Error activating repository %s: %s" % ( repository.name, str( e ) )
+ log.debug( error_message )
+ message = '%s.<br/>You may be able to resolve this by uninstalling and then reinstalling the repository. Click <a href="%s">here</a> to uninstall the repository.' \
+ % ( error_message, web.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) ) )
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_repository',
+ id=repository_id,
+ message=message,
+ status=status ) )
message = 'The <b>%s</b> repository has been activated.' % repository.name
status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Tool shed functional tests for installing repositories with circular dependencies into separate tool panel sections.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a91d8ea3c9e2/
changeset: a91d8ea3c9e2
user: inithello
date: 2013-01-09 22:20:07
summary: Tool shed functional tests for installing repositories with circular dependencies into separate tool panel sections.
affected #: 2 files
diff -r a7049f42a9d9b4bef6804dee53a2bc8bcc1a2916 -r a91d8ea3c9e2e9d6cc8cfd85c7085bf1175b4f5c test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -60,6 +60,10 @@
def check_for_valid_tools( self, repository, strings_displayed=[], strings_not_displayed=[] ):
strings_displayed.append( 'Valid tools' )
self.display_manage_repository_page( repository, strings_displayed, strings_not_displayed )
+ def check_galaxy_repository_db_status( self, repository_name, owner, expected_status ):
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, owner )
+ assert installed_repository.status == expected_status, 'Status in database is %s, expected %s' % \
+ ( installed_repository.status, expected_status )
def check_galaxy_repository_tool_panel_section( self, repository, expected_tool_panel_section ):
metadata = repository.metadata
assert 'tools' in metadata, 'Tools not found in metadata: %s' % metadata
diff -r a7049f42a9d9b4bef6804dee53a2bc8bcc1a2916 -r a91d8ea3c9e2e9d6cc8cfd85c7085bf1175b4f5c test/tool_shed/functional/test_1040_install_repository_basic_circular_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1040_install_repository_basic_circular_dependencies.py
@@ -0,0 +1,149 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+freebayes_repository_name = 'freebayes_0040'
+freebayes_repository_description = "Galaxy's freebayes tool"
+freebayes_repository_long_description = "Long description of Galaxy's freebayes tool"
+
+filtering_repository_name = 'filtering_0040'
+filtering_repository_description = "Galaxy's filtering tool"
+filtering_repository_long_description = "Long description of Galaxy's filtering tool"
+
+category_name = 'test_0040_repository_circular_dependencies'
+
+running_standalone = False
+
+class TestInstallingCircularDependencies( ShedTwillTestCase ):
+ '''Verify that the code correctly handles installing repositories with circular dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_freebayes_repository( self ):
+ '''Create and populate freebayes_0040.'''
+ global running_standalone
+ category = self.create_category( name=category_name, description='Testing handling of circular repository dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=freebayes_repository_name,
+ description=freebayes_repository_description,
+ long_description=freebayes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'freebayes/freebayes.tar',
+ strings_displayed=[],
+ commit_message='Uploaded freebayes.tar.' )
+ def test_0015_create_filtering_repository( self ):
+ '''Create and populate filtering_0040.'''
+ global running_standalone
+ category = self.create_category( name=category_name, description='Testing handling of circular repository dependencies.' )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=filtering_repository_name,
+ description=filtering_repository_description,
+ long_description=filtering_repository_long_description,
+ owner=common.test_user_1_name,
+ categories=[ 'test_0040_repository_circular_dependencies' ],
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'filtering/filtering_1.1.0.tar',
+ strings_displayed=[],
+ commit_message='Uploaded filtering.tar.' )
+ def test_0020_create_repository_dependencies( self ):
+ '''Set up the filtering and freebayes repository dependencies.'''
+ # The dependency structure should look like:
+ # Filtering revision 0 -> freebayes revision 0.
+ # Freebayes revision 0 -> filtering revision 1.
+ # Filtering will have two revisions, one with just the filtering tool, and one with the filtering tool and a dependency on freebayes.
+ global running_standalone
+ if running_standalone:
+ repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'filtering' ] )
+ self.generate_repository_dependency_xml( [ repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Filtering 1.1.0 depends on the freebayes repository.' )
+ self.upload_file( filtering_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on freebayes' )
+ repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_0040', additional_paths=[ 'freebayes' ] )
+ self.generate_repository_dependency_xml( [ repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Freebayes depends on the filtering repository.' )
+ self.upload_file( freebayes_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on filtering' )
+ def test_0025_install_freebayes_repository( self ):
+ '''Install freebayes with blank tool panel section, without tool dependencies but with repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ strings_displayed = [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
+ self.install_repository( freebayes_repository_name,
+ common.test_user_1_name,
+ category_name,
+ strings_displayed=strings_displayed,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True )
+ def test_0030_uninstall_freebayes_repository( self ):
+ '''Uninstall freebayes, verify tool panel section and missing repository dependency.'''
+ installed_freebayes_repository = test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name )
+ installed_filtering_repository = test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_freebayes_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_freebayes_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_freebayes_repository, '' )
+ strings_displayed = [ 'Missing repository', 'freebayes' ]
+ self.display_installed_repository_manage_page( installed_filtering_repository, strings_displayed=strings_displayed )
+ self.check_galaxy_repository_db_status( freebayes_repository_name,
+ common.test_user_1_name,
+ 'Uninstalled' )
+ def test_0035_reinstall_freebayes_repository( self ):
+ '''Reinstall freebayes into 'freebayes' tool panel section.'''
+ installed_freebayes_repository = test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_freebayes_repository,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ new_tool_panel_section='freebayes',
+ no_changes=False )
+ def test_0040_uninstall_filtering_repository( self ):
+ '''Uninstall filtering, verify tool panel section.'''
+ installed_filtering_repository = test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+ installed_freebayes_repository = test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_filtering_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_filtering_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_filtering_repository, '' )
+ strings_displayed = [ 'Missing repository', 'filtering' ]
+ self.display_installed_repository_manage_page( installed_freebayes_repository, strings_displayed=strings_displayed )
+ self.check_galaxy_repository_db_status( filtering_repository_name,
+ common.test_user_1_name,
+ 'Uninstalled' )
+ def test_0045_uninstall_freebayes_repository( self ):
+ '''Uninstall freebayes, verify tool panel section and missing repository dependency.'''
+ installed_freebayes_repository = test_db_util.get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name )
+ installed_filtering_repository = test_db_util.get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name )
+ self.uninstall_repository( installed_freebayes_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_freebayes_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_freebayes_repository, 'freebayes' )
+ strings_displayed = [ 'Missing repository', 'freebayes' ]
+ self.display_installed_repository_manage_page( installed_filtering_repository, strings_displayed=strings_displayed )
+ self.check_galaxy_repository_db_status( freebayes_repository_name,
+ common.test_user_1_name,
+ 'Uninstalled' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Add parameter to Tophat2 so that output BAM is ordered using reference contigs ordering rather than sorted lexicographically.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a7049f42a9d9/
changeset: a7049f42a9d9
user: jgoecks
date: 2013-01-09 19:36:33
summary: Add parameter to Tophat2 so that output BAM is ordered using reference contigs ordering rather than sorted lexicographically.
affected #: 1 file
diff -r 184c5e409425da36556de7edda910a7ffe6bc9d0 -r a7049f42a9d9b4bef6804dee53a2bc8bcc1a2916 tools/ngs_rna/tophat2_wrapper.py
--- a/tools/ngs_rna/tophat2_wrapper.py
+++ b/tools/ngs_rna/tophat2_wrapper.py
@@ -134,7 +134,7 @@
index_path = options.index_path
# Build tophat command.
- cmd = 'tophat2 %s %s %s'
+ cmd = 'tophat2 --keep-fasta-order %s %s %s'
reads = options.input1
if options.input2:
reads += ' ' + options.input2
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Tool shed functional test enhancements - verify tool dependency display for all relevant repositories.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/184c5e409425/
changeset: 184c5e409425
user: inithello
date: 2013-01-09 19:27:44
summary: Tool shed functional test enhancements - verify tool dependency display for all relevant repositories.
affected #: 10 files
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -63,12 +63,19 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded repository_dependencies.xml' )
- def test_0030_verify_emboss_5_repository_dependency_on_emboss_datatypes( self ):
+ def test_0030_verify_emboss_5_dependencies( self ):
'''Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.'''
repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
changeset_revision = self.get_repository_tip( datatypes_repository )
- strings_displayed = [ datatypes_repository_name, common.test_user_1_name, changeset_revision, 'Repository dependencies' ]
+ strings_displayed = [ 'Tool dependencies',
+ 'emboss',
+ '5.0.0',
+ 'package',
+ datatypes_repository_name,
+ common.test_user_1_name,
+ changeset_revision,
+ 'Repository dependencies' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
def test_0040_verify_repository_metadata( self ):
'''Verify that resetting the metadata does not change it.'''
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -108,17 +108,18 @@
datatypes_tip = self.get_repository_tip( datatypes_repository )
# Iterate through all metadata revisions and check for repository dependencies.
for metadata, changeset_revision in repository_metadata:
- repository_dependency_metadata = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ][ 0 ]
+ strings_displayed = [ str( metadata_elem ) for metadata_elem in metadata[ 'repository_dependencies' ][ 'repository_dependencies' ][ 0 ] ]
# Remove the tool shed URL, because that's not displayed on the page (yet?)
- repository_dependency_metadata.pop( repository_dependency_metadata.index( self.url ) )
+ strings_displayed.pop( strings_displayed.index( self.url ) )
# Add the dependency description and datatypes repository details to the strings to check.
- repository_dependency_metadata.extend( [ metadata[ 'repository_dependencies' ][ 'description' ],
- datatypes_repository_name,
- datatypes_repository.user.username,
- datatypes_tip ] )
+ strings_displayed.extend( [ metadata[ 'repository_dependencies' ][ 'description' ],
+ datatypes_repository_name,
+ datatypes_repository.user.username,
+ datatypes_tip ] )
+ strings_displayed.extend( [ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
self.display_manage_repository_page( repository,
changeset_revision=changeset_revision,
- strings_displayed=[ str( metadata ) for metadata in repository_dependency_metadata ] )
+ strings_displayed=strings_displayed )
def test_0040_verify_repository_metadata( self ):
'''Verify that resetting the metadata does not change it.'''
emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -103,3 +103,9 @@
filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
for repository in [ freebayes_repository, filtering_repository ]:
self.verify_unchanged_repository_metadata( repository )
+ def test_0040_verify_tool_dependencies( self ):
+ '''Verify that freebayes displays tool dependencies.'''
+ repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ self.display_manage_repository_page( repository,
+ strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Valid tools' ],
+ strings_not_displayed=[ 'Invalid tools' ] )
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
--- a/test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_dependencies_4_levels.py
@@ -148,7 +148,14 @@
self.check_repository_dependency( freebayes_repository, emboss_repository, changeset_revision )
self.display_manage_repository_page( freebayes_repository,
strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
- def test_0035_verify_repository_metadata( self ):
+ def test_0035_verify_tool_dependencies( self ):
+ '''Check that freebayes and emboss display tool dependencies.'''
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ self.display_manage_repository_page( freebayes_repository,
+ strings_displayed=[ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18', 'Tool dependencies' ] )
+ self.display_manage_repository_page( emboss_repository, strings_displayed=[ 'Tool dependencies', 'emboss', '5.0.0', 'package' ] )
+ def test_0040_verify_repository_metadata( self ):
'''Verify that resetting the metadata does not change it.'''
emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -1,9 +1,11 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
-
+import logging
repository_name = 'freebayes_0010'
-repository_description="Galaxy's freebayes tool",
-repository_long_description="Long description of Galaxy's freebayes tool",
+repository_description="Galaxy's freebayes tool"
+repository_long_description="Long description of Galaxy's freebayes tool"
+category_name = 'Test 0010 Repository With Tool Dependencies'
+log = logging.getLogger( __name__ )
class ToolWithToolDependencies( ShedTwillTestCase ):
'''Test installing a repository with tool dependencies.'''
@@ -26,7 +28,7 @@
admin_user_private_role = test_db_util.get_private_role( admin_user )
def test_0005_ensure_repositories_and_categories_exist( self ):
'''Create the 0010 category and upload the freebayes repository to it, if necessary.'''
- category = self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
+ category = self.create_category( name=category_name, description='Tests for a repository with tool dependencies.' )
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
repository = self.get_or_create_repository( name=repository_name,
@@ -63,16 +65,18 @@
"""Browse the available tool sheds in this Galaxy instance and preview the freebayes tool."""
self.galaxy_logout()
self.galaxy_login( email=common.admin_email, username=common.admin_username )
- self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0010 Repository With Tool Dependencies' ] )
- category = test_db_util.get_category_by_name( 'Test 0010 Repository With Tool Dependencies' )
+ self.browse_tool_shed( url=self.url, strings_displayed=[ category_name ] )
+ category = test_db_util.get_category_by_name( category_name )
self.browse_category( category, strings_displayed=[ repository_name ] )
- self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=[ repository_name, 'Valid tools', 'Tool dependencies' ] )
+ strings_displayed = [ repository_name, 'Valid tools', 'Tool dependencies' ]
+ self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=strings_displayed )
def test_0015_install_freebayes_repository( self ):
'''Install the freebayes repository without installing tool dependencies.'''
strings_displayed=[ 'set your tool_dependency_dir', 'can be automatically installed', 'Set the tool_dependency_dir' ]
+ strings_displayed.extend( [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ] )
self.install_repository( repository_name,
common.test_user_1_name,
- 'Test 0010 Repository With Tool Dependencies',
+ category_name,
strings_displayed=strings_displayed,
install_tool_dependencies=False,
new_tool_panel_section='test_1010' )
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -74,9 +74,11 @@
'''Install the emboss repository without installing tool dependencies.'''
global repository_datatypes_count
global base_datatypes_count
+ strings_displayed = [ 'Handle', 'Missing', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
self.install_repository( 'emboss_0020',
common.test_user_1_name,
- 'Test 0020 Basic Repository Dependencies',
+ 'Test 0020 Basic Repository Dependencies',
+ strings_displayed=strings_displayed,
install_tool_dependencies=False,
new_tool_panel_section='test_1020' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0020', common.test_user_1_name )
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -113,9 +113,11 @@
global base_datatypes_count
global running_standalone
base_datatypes_count = int( self.get_datatypes_count() )
+ strings_displayed = [ 'Handle', 'Missing', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
self.install_repository( 'emboss_0030',
common.test_user_1_name,
- 'Test 0030 Repository Dependency Revisions',
+ 'Test 0030 Repository Dependency Revisions',
+ strings_displayed=strings_displayed,
install_tool_dependencies=False,
new_tool_panel_section='test_1030' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -59,9 +59,11 @@
'''Install the freebayes repository into the Galaxy instance.'''
self.galaxy_logout()
self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ strings_displayed = [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ]
self.install_repository( 'freebayes_0010',
common.test_user_1_name,
'Test 0010 Repository With Tool Dependencies',
+ strings_displayed=strings_displayed,
new_tool_panel_section='test_1210' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
strings_displayed = [ installed_repository.name,
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1220_uninstall_reinstall_repository_with_repository_dependencies.py
@@ -70,9 +70,11 @@
self.galaxy_logout()
self.galaxy_login( email=common.admin_email, username=common.admin_username )
base_datatypes_count = int( self.get_datatypes_count() )
+ strings_displayed = [ 'Handle', 'Missing', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
self.install_repository( emboss_repository_name,
common.test_user_1_name,
- 'Test 0020 Basic Repository Dependencies',
+ 'Test 0020 Basic Repository Dependencies',
+ strings_displayed=strings_displayed,
new_tool_panel_section='test_1210' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
strings_displayed = [ installed_repository.name,
diff -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 -r 184c5e409425da36556de7edda910a7ffe6bc9d0 test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py
@@ -107,9 +107,11 @@
self.galaxy_logout()
self.galaxy_login( email=common.admin_email, username=common.admin_username )
base_datatypes_count = int( self.get_datatypes_count() )
+ strings_displayed = [ 'Handle', 'Missing', 'tool dependencies', 'emboss', '5.0.0', 'package' ]
self.install_repository( emboss_repository_name,
common.test_user_1_name,
'Test 0030 Repository Dependency Revisions',
+ strings_displayed=strings_displayed,
new_tool_panel_section='test_1210' )
installed_repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
strings_displayed = [ installed_repository.name,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1
0
commit/galaxy-central: jgoecks: Create and use function for checking a slug's validity.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b97bdb63115c/
changeset: b97bdb63115c
user: jgoecks
date: 2013-01-09 17:27:53
summary: Create and use function for checking a slug's validity.
affected #: 3 files
diff -r 757217fbae1981bc66d05076ab93dcbc92dca072 -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -28,8 +28,12 @@
# States for passing messages
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
-# RE that tests for valid slug.
-VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
+def _is_valid_slug( slug ):
+ """ Returns true if slug is valid. """
+
+ VALID_SLUG_RE = re.compile( "^[a-z0-9\-]+$" )
+ return VALID_SLUG_RE.match( slug )
+
class BaseController( object ):
"""
@@ -186,6 +190,7 @@
self.type_extension = type_extension
self.mimetype = mimetype
self.display_in_upload = display_in_upload
+
#
# -- Mixins for working with Galaxy objects. --
#
@@ -210,6 +215,7 @@
raise ItemAccessibilityException( "%s is not accessible to the current user" % item.__class__.__name__, type='error' )
return item
+
class UsesHistoryDatasetAssociationMixin:
""" Mixin for controllers that use HistoryDatasetAssociation objects. """
@@ -552,7 +558,7 @@
title_err = slug_err = ""
if not title:
title_err = "visualization name is required"
- elif slug and not VALID_SLUG_RE.match( slug ):
+ elif slug and not _is_valid_slug( slug ):
slug_err = "visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
elif slug and trans.sa_session.query( trans.model.Visualization ).filter_by( user=user, slug=slug, deleted=False ).first():
slug_err = "visualization identifier must be unique"
@@ -1465,6 +1471,10 @@
""" Mixin for a controller that manages an item that can be shared. """
# -- Implemented methods. --
+
+ def _is_valid_slug( self, slug ):
+ """ Returns true if slug is valid. """
+ return _is_valid_slug( slug )
@web.expose
@web.require_login( "share Galaxy items" )
diff -r 757217fbae1981bc66d05076ab93dcbc92dca072 -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 lib/galaxy/webapps/galaxy/controllers/page.py
--- a/lib/galaxy/webapps/galaxy/controllers/page.py
+++ b/lib/galaxy/webapps/galaxy/controllers/page.py
@@ -2,7 +2,7 @@
from galaxy import model, web
from galaxy.web import error, url_for
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
-from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesHistoryMixin, UsesStoredWorkflowMixin, UsesVisualizationMixin, VALID_SLUG_RE
+from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesHistoryMixin, UsesStoredWorkflowMixin, UsesVisualizationMixin
from galaxy.web.framework.helpers import time_ago, grids
from galaxy import util
from galaxy.util.sanitize_html import sanitize_html, _BaseHTMLProcessor
@@ -342,7 +342,7 @@
page_title_err = "Page name is required"
elif not page_slug:
page_slug_err = "Page id is required"
- elif not VALID_SLUG_RE.match( page_slug ):
+ elif not self._is_valid_slug( page_slug ):
page_slug_err = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
elif trans.sa_session.query( model.Page ).filter_by( user=user, slug=page_slug, deleted=False ).first():
page_slug_err = "Page id must be unique"
@@ -398,7 +398,7 @@
page_title_err = "Page name is required"
elif not page_slug:
page_slug_err = "Page id is required"
- elif not VALID_SLUG_RE.match( page_slug ):
+ elif not self._is_valid_slug( page_slug ):
page_slug_err = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
elif page_slug != page.slug and trans.sa_session.query( model.Page ).filter_by( user=user, slug=page_slug, deleted=False ).first():
page_slug_err = "Page id must be unique"
diff -r 757217fbae1981bc66d05076ab93dcbc92dca072 -r b97bdb63115cf50562e423ac165cffcdd5bb64e1 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -3,7 +3,7 @@
from sqlalchemy import desc, or_, and_
from galaxy import model, web
from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings
-from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesVisualizationMixin, VALID_SLUG_RE
+from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesVisualizationMixin
from galaxy.web.framework.helpers import time_ago, grids, iff
from galaxy import util
from galaxy.util.json import from_json_string
@@ -636,7 +636,7 @@
visualization_title_err = "Visualization name is required"
elif not visualization_slug:
visualization_slug_err = "Visualization id is required"
- elif not VALID_SLUG_RE.match( visualization_slug ):
+ elif not self._is_valid_slug( visualization_slug ):
visualization_slug_err = "Visualization identifier must consist of only lowercase letters, numbers, and the '-' character"
elif visualization_slug != visualization.slug and trans.sa_session.query( model.Visualization ).filter_by( user=visualization.user, slug=visualization_slug, deleted=False ).first():
visualization_slug_err = "Visualization id must be unique"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1
0
commit/galaxy-central: jgoecks: Fix bug that prevented datasources from being returned correctly.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/757217fbae19/
changeset: 757217fbae19
user: jgoecks
date: 2013-01-09 17:15:11
summary: Fix bug that prevented datasources from being returned correctly.
affected #: 1 file
diff -r 6ef9b038e329b6ee4e71ef1d1b1a22b9b2000ca1 -r 757217fbae1981bc66d05076ab93dcbc92dca072 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1335,7 +1335,9 @@
# Loop through sources until viable one is found.
for source in source_list:
msg = self.convert_dataset( trans, source )
- if msg == self.conversion_messages.PENDING:
+ # No message or PENDING means that source is viable. No
+ # message indicates conversion was done and is successful.
+ if not msg or msg == self.conversion_messages.PENDING:
data_source = source
break
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0b2c782ca2d7/
changeset: 0b2c782ca2d7
user: inithello
date: 2013-01-09 15:51:53
summary: Fix colspan on installed repository dependency folder.
affected #: 1 file
diff -r 857a60e69c7aee92f67f37311a40a825b40a6481 -r 0b2c782ca2d7a62f6a4e0781c8b729225806b442 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -209,7 +209,7 @@
else:
folder_label = "%s<i> - this repository's tools require handling of these missing dependencies</i>" % folder_label
col_span_str = 'colspan="5"'
- elif folder.label in [ 'Repository dependencies', 'Missing repository dependencies' ]:
+ elif folder.label in [ 'Installed repository dependencies', 'Repository dependencies', 'Missing repository dependencies' ]:
if folder.description:
folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
else:
@@ -245,7 +245,7 @@
</a></div></span>
- <td>
+ </td></tr><%
my_row = row_counter.count
https://bitbucket.org/galaxy/galaxy-central/commits/6ef9b038e329/
changeset: 6ef9b038e329
user: inithello
date: 2013-01-09 15:52:46
summary: Tool shed functional test enhancements - circular repository dependencies, tool panel sections.
affected #: 5 files
diff -r 0b2c782ca2d7a62f6a4e0781c8b729225806b442 -r 6ef9b038e329b6ee4e71ef1d1b1a22b9b2000ca1 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -47,6 +47,9 @@
def browse_tool_shed( self, url, strings_displayed=[], strings_not_displayed=[] ):
self.visit_galaxy_url( '/admin_toolshed/browse_tool_shed?tool_shed_url=%s' % url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def check_count_of_metadata_revisions_associated_with_repository( self, repository, metadata_count ):
+ self.check_repository_changelog( repository )
+ self.check_string_count_in_page( 'Repository metadata is associated with this change set.', metadata_count )
def check_for_strings( self, strings_displayed=[], strings_not_displayed=[] ):
if strings_displayed:
for string in strings_displayed:
@@ -57,9 +60,24 @@
def check_for_valid_tools( self, repository, strings_displayed=[], strings_not_displayed=[] ):
strings_displayed.append( 'Valid tools' )
self.display_manage_repository_page( repository, strings_displayed, strings_not_displayed )
- def check_count_of_metadata_revisions_associated_with_repository( self, repository, metadata_count ):
- self.check_repository_changelog( repository )
- self.check_string_count_in_page( 'Repository metadata is associated with this change set.', metadata_count )
+ def check_galaxy_repository_tool_panel_section( self, repository, expected_tool_panel_section ):
+ metadata = repository.metadata
+ assert 'tools' in metadata, 'Tools not found in metadata: %s' % metadata
+ tool_metadata = metadata[ 'tools' ]
+ # If integrated_tool_panel.xml is to be tested, this test method will need to be enhanced to handle tools
+ # from the same repository in different tool panel sections. Getting the first tool guid is ok, because
+ # currently all tools contained in a single repository will be loaded into the same tool panel section.
+ tool_guid = tool_metadata[ 0 ][ 'guid' ]
+ assert 'tool_panel_section' in metadata, 'Tool panel section not found in metadata: %s' % metadata
+ tool_panel_section_metadata = metadata[ 'tool_panel_section' ]
+ # tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
+ # id=section_id,
+ # name=section_name,
+ # version=section_version )
+ # This dict is appended to tool_panel_section_metadata[ tool_guid ]
+ tool_panel_section = tool_panel_section_metadata[ tool_guid ][ 0 ][ 'name' ]
+ assert tool_panel_section == expected_tool_panel_section, 'Expected tool panel section %s, found %s\nMetadata: %s\n' % \
+ ( expected_tool_panel_section, tool_panel_section, metadata )
def check_installed_repository_tool_dependencies( self, installed_repository, dependencies_installed=False ):
# Tool dependencies are not being installed in these functional tests. If this is changed, the test method will also need to be updated.
strings_not_displayed = []
@@ -150,6 +168,12 @@
self.visit_url( '/admin/manage_categories?operation=create' )
self.submit_form( form_no=1, button="create_category_button", **kwd )
return test_db_util.get_category_by_name( kwd[ 'name' ] )
+ def create_checkbox_query_string( self, field_name, value ):
+ field_value = str( value ).lower()
+ if value:
+ return '%s=%s&%s=%s' % ( field_name, field_value, field_name, field_value )
+ else:
+ return '%s=%s' % ( field_name, field_value )
def create_user_in_galaxy( self, cntrller='user', email='test(a)bx.psu.edu', password='testuser', username='admin-user', redirect='' ):
self.visit_galaxy_url( "/user/create?cntrller=%s&use_panels=False" % cntrller )
tc.fv( '1', 'email', email )
@@ -306,7 +330,6 @@
self.visit_galaxy_url( "/user/logout" )
self.check_page_for_string( "You have been logged out" )
self.home()
-
def generate_repository_dependency_xml( self, repositories, xml_filename, dependency_description='' ):
file_path = os.path.split( xml_filename )[0]
if not os.path.exists( file_path ):
@@ -369,7 +392,6 @@
request_param_path = base_path
else:
request_param_path = os.path.join( base_path, current_path )
- #request_param_path = request_param_path.replace( '/', '%2f' )
# Get the current folder's contents.
url = '/repository/open_folder?folder_path=%s' % request_param_path
self.visit_url( url )
@@ -431,7 +453,11 @@
if workflow_name not in strings_displayed:
strings_displayed.append( workflow_name )
self.check_for_strings( strings_displayed, strings_not_displayed )
- def initiate_installation_process( self, install_tool_dependencies=False, install_repository_dependencies=True ):
+ def initiate_installation_process( self,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ no_changes=True,
+ new_tool_panel_section=None ):
html = self.last_page()
# Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
# installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
@@ -441,22 +467,21 @@
if install_parameters:
iri_ids = install_parameters.group(1)
# In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
- # This ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id will handle them correctly.
+ # This regex ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id
+ # will handle them correctly. It's safe to pass the cleaned list to manage_repositories, because it can parse
+ # comma-separated values.
repository_ids = str( iri_ids )
repository_ids = re.sub( '[^a-fA-F0-9,]+', '', repository_ids )
- decoded_kwd = tool_shed_decode( install_parameters.group(2) )
- if 'install_tool_dependencies' in decoded_kwd:
- decoded_kwd[ 'install_tool_dependencies' ] = install_tool_dependencies
- if 'install_repository_dependencies' in decoded_kwd:
- decoded_kwd[ 'install_repository_dependencies' ] = install_repository_dependencies
+ encoded_kwd = install_parameters.group(2)
reinstalling = install_parameters.group(3)
url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
- ( ','.join( util.listify( repository_ids ) ), tool_shed_encode( decoded_kwd ), reinstalling )
+ ( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
self.visit_galaxy_url( url )
return util.listify( repository_ids )
def install_repository( self, name, owner, category_name, install_tool_dependencies=False,
- changeset_revision=None, strings_displayed=[], strings_not_displayed=[],
- preview_strings_displayed=[], post_submit_strings_displayed=[], **kwd ):
+ install_repository_dependencies=True, changeset_revision=None,
+ strings_displayed=[], strings_not_displayed=[], preview_strings_displayed=[],
+ post_submit_strings_displayed=[], new_tool_panel_section=None, **kwd ):
self.browse_tool_shed( url=self.url )
self.browse_category( test_db_util.get_category_by_name( category_name ) )
self.preview_repository_in_tool_shed( name, owner, strings_displayed=preview_strings_displayed )
@@ -468,6 +493,8 @@
( changeset_revision, repository_id, self.galaxy_url )
self.visit_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ # This section is tricky, due to the way twill handles form submission. The tool dependency checkbox needs to
+ # be hacked in through tc.browser, putting the form field in kwd doesn't work.
if 'install_tool_dependencies' in self.last_page():
form = tc.browser.get_form( 'select_tool_panel_section' )
checkbox = form.find_control( id="install_tool_dependencies" )
@@ -476,11 +503,15 @@
checkbox.selected = True
else:
checkbox.selected = False
+ if 'install_repository_dependencies' in self.last_page():
+ kwd[ 'install_repository_dependencies' ] = str( install_repository_dependencies ).lower()
if 'shed_tool_conf' not in kwd:
kwd[ 'shed_tool_conf' ] = self.shed_tool_conf
+ if new_tool_panel_section:
+ kwd[ 'new_tool_panel_section' ] = new_tool_panel_section
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
- repository_ids = self.initiate_installation_process( install_tool_dependencies )
+ repository_ids = self.initiate_installation_process( new_tool_panel_section=new_tool_panel_section )
self.wait_for_repository_installation( repository_ids )
def load_invalid_tool_page( self, repository, tool_xml, changeset_revision, strings_displayed=[], strings_not_displayed=[] ):
url = '/repository/load_invalid_tool?repository_id=%s&tool_config=%s&changeset_revision=%s' % \
@@ -523,14 +554,40 @@
self.check_for_strings( strings_displayed, [] )
def reinstall_repository( self,
installed_repository,
- install_repository_dependencies='true',
- install_tool_dependencies='false' ):
+ install_repository_dependencies=True,
+ install_tool_dependencies=False,
+ no_changes=True,
+ new_tool_panel_section='' ):
url = '/admin_toolshed/reselect_tool_panel_section?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
- url = '/admin_toolshed/reinstall_repository?id=%s&install_repository_dependencies=%s&install_repository_dependencies=%s' % \
- ( self.security.encode_id( installed_repository.id ), install_repository_dependencies, install_repository_dependencies )
+ # From galaxy.web.form_builder.CheckboxField:
+ # The hidden field is necessary because if the check box is not checked on the form, it will
+ # not be included in the request params. The hidden field ensure that this will happen. When
+ # parsing the request, the value 'true' in the hidden field actually means it is NOT checked.
+ # See the is_checked() method below. The prefix is necessary in each case to ensure functional
+ # correctness when the param is inside a conditional.
+ #
+ # This may look strange upon initial inspection, but see the comments in the get_html() method
+ # above for clarification. Basically, if value is not True, then it will always be a list with
+ # 2 input fields ( a checkbox and a hidden field ) if the checkbox is checked. If it is not
+ # checked, then value will be only the hidden field.
+ #
+ # The create_checkbox_query_string method emulates the described behavior with URL query parameters.
+ # This is currently necessary because twill does not correctly parse the reselect tool panel section
+ # form, so the test method has to visit the intended form target "manually".
+ repo_dependencies = self.create_checkbox_query_string( field_name='install_repository_dependencies', value=install_repository_dependencies )
+ tool_dependencies = self.create_checkbox_query_string( field_name='install_tool_dependencies', value=install_tool_dependencies )
+ encoded_repository_id = self.security.encode_id( installed_repository.id )
+ url = '/admin_toolshed/reinstall_repository?id=%s&%s&%s&no_changes=%s&new_tool_panel_section=%s' % \
+ ( encoded_repository_id, repo_dependencies, tool_dependencies, str( no_changes ), new_tool_panel_section )
self.visit_galaxy_url( url )
- repository_ids = self.initiate_installation_process( install_tool_dependencies, install_repository_dependencies )
+ # Then manually initiate the install process, as with installing a repository. See comments in the
+ # initiate_installation_process method for details.
+ repository_ids = self.initiate_installation_process( install_tool_dependencies,
+ install_repository_dependencies,
+ no_changes,
+ new_tool_panel_section )
+ # Finally, wait until all repositories are in a final state (either Error or Installed) before returning.
self.wait_for_repository_installation( repository_ids )
def repository_is_new( self, repository ):
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
@@ -619,6 +676,14 @@
def verify_installed_repository_data_table_entries( self, data_tables=[] ):
data_table = util.parse_xml( self.shed_tool_data_table_conf )
found = False
+ # Tool data table xml structure:
+ # <tables>
+ # <!-- Locations of all fasta files under genome directory -->
+ # <table name="all_fasta" comment_char="#">
+ # <columns>value, dbkey, name, path</columns>
+ # <file path="tool-data/all_fasta.loc" />
+ # </table>
+ # </tables>
for table_elem in data_table.findall( 'table' ):
for data_table in data_tables:
if 'name' in table_elem.attrib and table_elem.attrib[ 'name' ] == data_table:
@@ -643,6 +708,8 @@
self.reset_repository_metadata( repository )
for metadata in self.get_repository_metadata( repository ):
new_metadata[ metadata.changeset_revision ] = metadata.metadata
+ # Python's dict comparison recursively compares sorted key => value pairs and returns true if any key or value differs,
+ # or if the number of keys differs.
assert old_metadata == new_metadata, 'Metadata changed after reset on repository %s.' % repository.name
def view_installed_workflow( self, repository, workflow_name, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/view_workflow?repository_id=%s&workflow_name=%s' % \
@@ -655,6 +722,8 @@
def wait_for_repository_installation( self, repository_ids ):
final_states = [ galaxy_model.ToolShedRepository.installation_status.ERROR,
galaxy_model.ToolShedRepository.installation_status.INSTALLED ]
+ # Wait until all repositories are in a final state before returning. This ensures that subsequent tests
+ # are running against an installed repository, and not one that is still in the process of installing.
if repository_ids:
for repository_id in repository_ids:
galaxy_repository = test_db_util.get_installed_repository_by_id( self.security.decode_id( repository_id ) )
@@ -662,6 +731,7 @@
while galaxy_repository.status not in final_states:
test_db_util.ga_refresh( galaxy_repository )
timeout_counter = timeout_counter + 1
+ # This timeout currently defaults to 180 seconds, or 3 minutes.
if timeout_counter > common.repository_installation_timeout:
raise AssertionError( 'Repository installation timed out, %d seconds elapsed, repository state is %s.' % \
( timeout_counter, repository.status ) )
diff -r 0b2c782ca2d7a62f6a4e0781c8b729225806b442 -r 6ef9b038e329b6ee4e71ef1d1b1a22b9b2000ca1 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -1,6 +1,10 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
+repository_name = 'freebayes_0010'
+repository_description = "Galaxy's freebayes tool"
+repository_long_description = "Long description of Galaxy's freebayes tool"
+
class ToolWithToolDependencies( ShedTwillTestCase ):
'''Test installing a repository with tool dependencies.'''
def test_0000_initiate_users( self ):
@@ -25,9 +29,9 @@
category = self.create_category( name='Test 0010 Repository With Tool Dependencies', description='Tests for a repository with tool dependencies.' )
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
- repository = self.get_or_create_repository( name='freebayes_0010',
- description="Galaxy's freebayes tool",
- long_description="Long description of Galaxy's freebayes tool",
+ repository = self.get_or_create_repository( name=repository_name,
+ description=repository_description,
+ long_description=repository_long_description,
owner=common.test_user_1_name,
category_id=self.security.encode_id( category.id ) )
if self.repository_is_new( repository ):
@@ -61,18 +65,18 @@
self.galaxy_login( email=common.admin_email, username=common.admin_username )
self.browse_tool_shed( url=self.url, strings_displayed=[ 'Test 0010 Repository With Tool Dependencies' ] )
category = test_db_util.get_category_by_name( 'Test 0010 Repository With Tool Dependencies' )
- self.browse_category( category, strings_displayed=[ 'freebayes_0010' ] )
- self.preview_repository_in_tool_shed( 'freebayes_0010', common.test_user_1_name, strings_displayed=[ 'freebayes_0010', 'Valid tools', 'Tool dependencies' ] )
+ self.browse_category( category, strings_displayed=[ repository_name ] )
+ self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=[ repository_name, 'Valid tools', 'Tool dependencies' ] )
def test_0015_install_freebayes_repository( self ):
'''Install the freebayes repository without installing tool dependencies.'''
strings_displayed=[ 'set your tool_dependency_dir', 'can be automatically installed', 'Set the tool_dependency_dir' ]
- self.install_repository( 'freebayes_0010',
+ self.install_repository( repository_name,
common.test_user_1_name,
'Test 0010 Repository With Tool Dependencies',
strings_displayed=strings_displayed,
install_tool_dependencies=False,
new_tool_panel_section='test_1010' )
- installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( repository_name, common.test_user_1_name )
strings_displayed = [ installed_repository.name,
installed_repository.description,
installed_repository.owner,
@@ -85,7 +89,7 @@
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0020_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
- self.verify_installed_repository_metadata_unchanged( 'freebayes_0010', common.test_user_1_name )
+ self.verify_installed_repository_metadata_unchanged( repository_name, common.test_user_1_name )
def test_0025_verify_sample_files( self ):
'''Verify that the installed repository populated shed_tool_data_table.xml and the sample files.'''
self.verify_installed_repository_data_table_entries( data_tables=[ 'sam_fa_indexes' ] )
diff -r 0b2c782ca2d7a62f6a4e0781c8b729225806b442 -r 6ef9b038e329b6ee4e71ef1d1b1a22b9b2000ca1 test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
--- a/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
+++ b/test/tool_shed/functional/test_1080_advanced_circular_dependency_installation.py
@@ -1,4 +1,4 @@
-from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
import tool_shed.base.test_db_util as test_db_util
column_repository_name = 'column_maker_0080'
@@ -12,6 +12,10 @@
category_name = 'Test 0080 Advanced Circular Dependencies'
category_description = 'Test circular dependency features'
+log = logging.getLogger( __name__ )
+
+running_standalone = False
+
class TestRepositoryDependencies( ShedTwillTestCase ):
'''Testing freebayes with tool data table entries, .loc files, and tool dependencies.'''
def test_0000_create_or_login_admin_user( self ):
@@ -31,9 +35,10 @@
admin_user = test_db_util.get_user( common.admin_email )
 assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
admin_user_private_role = test_db_util.get_private_role( admin_user )
- def test_0005_initiate_test_data( self ):
- """Create a category for this test suite and add repositories to it."""
+ def test_0005_create_and_populate_column_repository( self ):
+ """Create the category for this test suite, then create and populate column_maker."""
category = self.create_category( name=category_name, description=category_description )
+ global running_standalone
self.logout()
self.login( email=common.test_user_1_email, username=common.test_user_1_name )
repository = self.get_or_create_repository( name=column_repository_name,
@@ -47,16 +52,31 @@
'column_maker/column_maker.tar',
strings_displayed=[],
commit_message='Uploaded column_maker.tar.' )
- repository = self.get_or_create_repository( name=convert_repository_name,
- description=convert_repository_description,
- long_description=convert_repository_long_description,
- owner=common.test_user_1_name,
- category_id=self.security.encode_id( category.id ),
- strings_displayed=[] )
+ running_standalone = True
+ def test_0010_create_and_populate_convert_repository( self ):
+ '''Create and populate the convert_chars repository.'''
+ global running_standalone
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
self.upload_file( repository,
'convert_chars/convert_chars.tar',
strings_displayed=[],
commit_message='Uploaded convert_chars.tar.' )
+ running_standalone = True
+ def test_0015_upload_dependency_xml_if_needed( self ):
+ '''If this test is being run by itself, it will not have repository dependencies configured yet.'''
+ global running_standalone
+ if running_standalone:
convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
repository_dependencies_path = self.generate_temp_path( 'test_1080', additional_paths=[ 'convert' ] )
@@ -77,20 +97,20 @@
'repository_dependencies.xml',
filepath=repository_dependencies_path,
commit_message='Uploaded dependency on column' )
- def test_0010_install_repositories( self ):
- '''Install convert_chars with repository dependencies check box - this should install both convert_chars and column_maker.'''
+ def test_0020_install_convert_repository( self ):
+ '''Install convert_chars without repository dependencies into convert_chars tool panel section.'''
self.galaxy_logout()
self.galaxy_login( email=common.admin_email, username=common.admin_username )
self.install_repository( convert_repository_name,
common.test_user_1_name,
category_name,
install_tool_dependencies=False,
- install_repository_dependencies='Yes',
- new_tool_panel_section='test_1080' )
+ install_repository_dependencies=False,
+ new_tool_panel_section='convert_chars' )
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
- common.test_user_1_name )
+ common.test_user_1_name )
browse_strings_displayed = [ installed_convert_repository.name,
installed_convert_repository.description,
installed_convert_repository.tool_shed,
@@ -101,13 +121,41 @@
installed_convert_repository.installed_changeset_revision,
installed_column_repository.name,
installed_column_repository.installed_changeset_revision,
- 'Installed' ]
- self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
- strings_displayed.append( 'Installed repository dependencies' )
+ 'Missing repository dependencies' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
- def test_0015_deactivate_convert_repository( self ):
- '''Deactivate convert_chars - this should display column_maker as installed but missing repository dependencies'''
+ def test_0025_install_column_repository( self ):
+ '''Install column maker with repository dependencies into column_maker tool panel section.'''
+ self.install_repository( column_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_repository_dependencies=True,
+ new_tool_panel_section='column_maker',
+ strings_displayed=[ 'install_repository_dependencies' ] )
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ browse_strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.installed_changeset_revision ]
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision,
+ installed_convert_repository.name,
+ installed_convert_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0030_deactivate_convert_repository( self ):
+ '''Deactivate convert_chars, verify that column_maker is installed and missing repository dependencies.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -123,8 +171,8 @@
'Deactivated' ]
self.display_installed_repository_manage_page( installed_column_repository,
strings_displayed=strings_displayed )
- def test_0020_reactivate_convert_repository( self ):
- '''Activate convert_chars - this should display both convert_chars and column_maker as installed with a green box'''
+ def test_0035_reactivate_convert_repository( self ):
+ '''Reactivate convert_chars, both convert_chars and column_maker should now show as installed.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -137,8 +185,8 @@
installed_column_repository.name,
installed_column_repository.installed_changeset_revision,
'Installed repository dependencies' ]
- def test_0025_deactivate_column_repository( self ):
- '''Deactivate column_maker - this should display convert_chars installed but missing repository dependencies'''
+ def test_0040_deactivate_column_repository( self ):
+ '''Deactivate column_maker, verify that convert_chars is installed and missing repository dependencies.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -154,8 +202,8 @@
'Deactivated' ]
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
- def test_0030_deactivate_convert_repository( self ):
- '''Deactivate convert_chars - both convert_chars and column_maker are deactivated'''
+ def test_0045_deactivate_convert_repository( self ):
+ '''Deactivate convert_chars, verify that both convert_chars and column_maker are deactivated.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -168,8 +216,8 @@
installed_convert_repository.description,
installed_convert_repository.installed_changeset_revision ]
self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed )
- def test_0035_reactivate_column_repository( self ):
- '''Activate column_maker - this should not automatically activate convert_chars, so column_maker should be displayed as installed but missing repository dependencies'''
+ def test_0050_reactivate_column_repository( self ):
+ '''Reactivate column_maker. This should not automatically reactivate convert_chars, so column_maker should be displayed as installed but missing repository dependencies.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -185,8 +233,8 @@
'Deactivated' ]
self.display_installed_repository_manage_page( installed_column_repository,
strings_displayed=strings_displayed )
- def test_0040_reactivate_convert_repository( self ):
- '''Activate convert_chars - this should display both convert_chars and column_maker as installed with a green box'''
+ def test_0055_reactivate_convert_repository( self ):
+ '''Activate convert_chars. Both convert_chars and column_maker should now show as installed.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -210,8 +258,8 @@
'Installed repository dependencies' ]
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
- def test_0045_uninstall_column_repository( self ):
- '''Uninstall column_maker - this should display convert_chars installed but missing repository dependencies'''
+ def test_0060_uninstall_column_repository( self ):
+ '''Uninstall column_maker. Verify that convert_chars is installed and missing repository dependencies, and column_maker was in the right tool panel section.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -227,7 +275,8 @@
'Uninstalled' ]
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
- def test_0050_reinstall_column_repository( self ):
+ self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'column_maker' )
+ def test_0065_reinstall_column_repository( self ):
'''Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
@@ -252,8 +301,8 @@
'Installed repository dependencies' ]
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
- def test_0055_uninstall_convert_repository( self ):
- '''Uninstall convert_chars, verify column_maker installed but missing repository dependencies'''
+ def test_0070_uninstall_convert_repository( self ):
+ '''Uninstall convert_chars, verify column_maker installed but missing repository dependencies.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -269,8 +318,9 @@
'Uninstalled' ]
self.display_installed_repository_manage_page( installed_column_repository,
strings_displayed=strings_displayed )
- def test_0060_uninstall_column_repository( self ):
- '''Uninstall column_maker - both convert_chars and column_maker are uninstalled'''
+ self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'convert_chars' )
+ def test_0075_uninstall_column_repository( self ):
+ '''Uninstall column_maker, verify that both convert_chars and column_maker are uninstalled.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
@@ -287,13 +337,16 @@
'Uninstalled' ]
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
- def test_0065_reinstall_convert_repository( self ):
- '''Reinstall convert_chars and check the handle repository dependencies check box - this should install both convert_chars and column_maker ( make sure )'''
+ def test_0080_reinstall_convert_repository( self ):
+ '''Reinstall convert_chars with repository dependencies, verify that this installs both convert_chars and column_maker.'''
installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
common.test_user_1_name )
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
common.test_user_1_name )
- self.reinstall_repository( installed_convert_repository, install_repository_dependencies=True )
+ self.reinstall_repository( installed_convert_repository,
+ install_repository_dependencies=True,
+ no_changes=False,
+ new_tool_panel_section='convert_maker' )
strings_displayed = [ installed_column_repository.name,
installed_column_repository.description,
installed_column_repository.tool_shed,
@@ -312,3 +365,16 @@
'Installed repository dependencies' ]
self.display_installed_repository_manage_page( installed_convert_repository,
strings_displayed=strings_displayed )
+ def test_0085_uninstall_all_repositories( self ):
+ '''Uninstall convert_chars and column_maker to verify that they are in the right tool panel sections.'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_column_repository )
+ test_db_util.ga_refresh( installed_convert_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'convert_maker' )
+ self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'convert_maker' )
+
diff -r 0b2c782ca2d7a62f6a4e0781c8b729225806b442 -r 6ef9b038e329b6ee4e71ef1d1b1a22b9b2000ca1 test/tool_shed/functional/test_1085_repository_dependency_handling.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1085_repository_dependency_handling.py
@@ -0,0 +1,176 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
+import tool_shed.base.test_db_util as test_db_util
+
+column_repository_name = 'column_maker_1085'
+column_repository_description = "Add column"
+column_repository_long_description = "Compute an expression on every row"
+
+convert_repository_name = 'convert_chars_1085'
+convert_repository_description = "Convert delimiters"
+convert_repository_long_description = "Convert delimiters to tab"
+
+category_name = 'Test 1085 Advanced Circular Dependencies'
+category_description = 'Test circular dependency features'
+
+log = logging.getLogger( __name__ )
+
+class TestRepositoryDependencies( ShedTwillTestCase ):
+    '''Test repository dependency handling: installation, deactivation, reinstallation, and tool panel section placement.'''
+ def test_0000_create_or_login_admin_user( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ galaxy_admin_user = test_db_util.get_galaxy_user( common.admin_email )
+ assert galaxy_admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ galaxy_admin_user_private_role = test_db_util.get_galaxy_private_role( galaxy_admin_user )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+        assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+        assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_and_populate_column_repository( self ):
+ """Create a category for this test suite and add repositories to it."""
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=column_repository_name,
+ description=column_repository_description,
+ long_description=column_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'column_maker/column_maker.tar',
+ strings_displayed=[],
+ commit_message='Uploaded column_maker.tar.' )
+ def test_0010_create_and_populate_convert_repository( self ):
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=convert_repository_name,
+ description=convert_repository_description,
+ long_description=convert_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ self.upload_file( repository,
+ 'convert_chars/convert_chars.tar',
+ strings_displayed=[],
+ commit_message='Uploaded convert_chars.tar.' )
+ def test_0015_create_and_upload_dependency_files( self ):
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1085', additional_paths=[ 'column' ] )
+ self.generate_repository_dependency_xml( [ convert_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Column maker depends on the convert_chars repository.' )
+ self.upload_file( column_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on convert' )
+ convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
+ column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
+ repository_dependencies_path = self.generate_temp_path( 'test_1085', additional_paths=[ 'convert' ] )
+ self.generate_repository_dependency_xml( [ column_repository ],
+ self.get_filename( 'repository_dependencies.xml', filepath=repository_dependencies_path ),
+ dependency_description='Convert chars depends on the column_maker repository.' )
+ self.upload_file( convert_repository,
+ 'repository_dependencies.xml',
+ filepath=repository_dependencies_path,
+ commit_message='Uploaded dependency on column' )
+ def test_0020_install_repositories( self ):
+ '''Install column_maker into column_maker tool panel section and install repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ self.install_repository( column_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ new_tool_panel_section='column_maker',
+ strings_displayed=[ 'install_repository_dependencies' ] )
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ browse_strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.installed_changeset_revision ]
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision,
+ installed_column_repository.name,
+ installed_column_repository.installed_changeset_revision,
+ 'Installed repository dependencies' ]
+ self.display_galaxy_browse_repositories_page( strings_displayed=browse_strings_displayed )
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0025_uninstall_column_repository( self ):
+ '''uninstall column_maker, verify same section'''
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_column_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'column_maker' )
+ def test_0030_uninstall_convert_repository( self ):
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_convert_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'column_maker' )
+ def test_0035_reinstall_column_repository( self ):
+ '''reinstall column_maker into new section 'new_column_maker' (no_changes = false), no dependencies'''
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_column_repository,
+ install_tool_dependencies=False,
+ install_repository_dependencies=False,
+ new_tool_panel_section='new_column_maker',
+ no_changes=False )
+ strings_displayed = [ installed_column_repository.name,
+ installed_column_repository.description,
+ installed_column_repository.tool_shed,
+ installed_column_repository.installed_changeset_revision ]
+ self.display_installed_repository_manage_page( installed_column_repository,
+ strings_displayed=strings_displayed )
+ def test_0040_reinstall_convert_repository( self ):
+ '''reinstall convert_chars into new section 'new_convert_chars' (no_changes = false), no dependencies'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ self.reinstall_repository( installed_convert_repository,
+ install_tool_dependencies=False,
+ install_repository_dependencies=False,
+ new_tool_panel_section='new_convert_chars',
+ no_changes=False )
+ strings_displayed = [ installed_convert_repository.name,
+ installed_convert_repository.description,
+ installed_convert_repository.tool_shed,
+ installed_convert_repository.installed_changeset_revision ]
+ self.display_installed_repository_manage_page( installed_convert_repository,
+ strings_displayed=strings_displayed )
+ def test_0045_uninstall_and_verify_tool_panel_sections( self ):
+ '''uninstall both and verify tool panel sections'''
+ installed_convert_repository = test_db_util.get_installed_repository_by_name_owner( convert_repository_name,
+ common.test_user_1_name )
+ installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name,
+ common.test_user_1_name )
+ self.uninstall_repository( installed_column_repository, remove_from_disk=True )
+ self.uninstall_repository( installed_convert_repository, remove_from_disk=True )
+ test_db_util.ga_refresh( installed_convert_repository )
+ test_db_util.ga_refresh( installed_column_repository )
+ self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'new_column_maker' )
+ self.check_galaxy_repository_tool_panel_section( installed_convert_repository, 'new_convert_chars' )
diff -r 0b2c782ca2d7a62f6a4e0781c8b729225806b442 -r 6ef9b038e329b6ee4e71ef1d1b1a22b9b2000ca1 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -60,6 +60,7 @@
tool_sheds_conf_xml_template = '''<?xml version="1.0"?><tool_sheds><tool_shed name="Embedded tool shed for functional tests" url="http://${shed_url}:${shed_port}/"/>
+ <tool_shed name="Galaxy main tool shed" url="http://toolshed.g2.bx.psu.edu/"/></tool_sheds>
'''
@@ -123,7 +124,7 @@
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_DATA_TABLE_CONF', os.path.join( tool_shed_test_tmp_dir, 'tool_data_table_conf.xml' ) )
galaxy_tool_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_tool_conf.xml' ) )
galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_shed_tool_conf.xml' ) )
- galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOLS_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_sheds_conf.xml' ) )
+ galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_SHEDS_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_sheds_conf.xml' ) )
if 'GALAXY_TEST_TOOL_DATA_PATH' in os.environ:
tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for correctly positioning tool shed repository tools in the proper tool panel location when the repository is being installed or reinstalled.
by Bitbucket 09 Jan '13
by Bitbucket 09 Jan '13
09 Jan '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/857a60e69c7a/
changeset: 857a60e69c7a
user: greg
date: 2013-01-09 15:20:58
summary: Fix for correctly positioning tool shed repository tools in the proper tool panel location when the repository is being installed or reinstalled.
affected #: 1 file
diff -r be29d1334c08ec3e32290feab9a6da7bda9862ec -r 857a60e69c7aee92f67f37311a40a825b40a6481 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -184,17 +184,17 @@
instance and when uninstalled repositories are being reinstalled.
"""
message = ''
- # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository
- # dependencies that may not be installed.
+ # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository dependencies
+ # that may not be installed.
all_created_or_updated_tool_shed_repositories = []
- # There will be a one-to-one mapping between items in created_or_updated_tool_shed_repositories and tool_panel_section_keys. The following
- # list will filter out repository dependencies that are not to be installed.
+ # There will be a one-to-one mapping between items in 3 lists: created_or_updated_tool_shed_repositories, tool_panel_section_keys and filtered_repo_info_dicts.
+ # The 3 lists will filter out repository dependencies that are not to be installed.
created_or_updated_tool_shed_repositories = []
tool_panel_section_keys = []
- # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
+ # Repositories will be filtered (e.g., if already installed, if elected to not be installed, etc), so filter the associated repo_info_dicts accordingly.
filtered_repo_info_dicts = []
- # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies,
- # we have to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
+ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies we have
+ # to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
if not all_repo_info_dicts:
# No repository dependencies were discovered so process the received repositories.
@@ -215,9 +215,12 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
if reinstalling or install_repository_dependencies:
+ # If the user elected to install repository dependencies, all items in the all_repo_info_dicts list will be processed. However, if
+ # repository dependencies are not to be installed, only those items contained in the received repo_info_dicts list will be processed.
if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ # The current tool shed repository is not currently installed, so we can update its record in the database.
can_update = True
name = installed_tool_shed_repository.name
description = installed_tool_shed_repository.description
@@ -225,13 +228,19 @@
metadata_dict = installed_tool_shed_repository.metadata
dist_to_shed = installed_tool_shed_repository.dist_to_shed
else:
- # There is a repository already installed which is a dependency of the repository being reinstalled.
+ # The tool shed repository currently being processed is already installed or is in the process of being installed, so its record
+ # in the database cannot be updated.
can_update = False
else:
+ # This block will be reached only if reinstalling is True, install_repository_dependencies is False and is_in_repo_info_dicts is False.
+ # The tool shed repository currently being processed must be a repository dependency that the user elected to not install, so its
+ # record in the database cannot be updated.
can_update = False
else:
+ # This block will be reached only if reinstalling is False and install_repository_dependencies is False. This implies that the tool shed
+ # repository currently being processed has already been installed.
if len( all_repo_info_dicts ) == 1:
- # An attempt is being made to install a tool shed repository into a Galaxy instance when the same repository was previously installed.
+ # If only a single repository is being installed, return an informative message to the user.
message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner )
if installed_changeset_revision != changeset_revision:
message += "was previously installed using changeset revision <b>%s</b>. " % installed_changeset_revision
@@ -247,26 +256,24 @@
created_or_updated_tool_shed_repositories.append( installed_tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
- elif is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
- can_update = True
else:
+ # We're in the process of installing multiple tool shed repositories into Galaxy. Since the repository currently being processed
+ # has already been installed, skip it and process the next repository in the list.
can_update = False
else:
- # A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository
- # is being reinstalled where because the repository being newly installed here may be a dependency of the repository being reinstalled.
+ # A tool shed repository is being installed into a Galaxy instance for the first time, or a previous attempt to install or reinstall it resulted
+ # in an error. In the latter case, the repository record in the database has no metadata and its status has been set to 'New'. In either case,
+ # the repository's database record may be updated.
can_update = True
installed_changeset_revision = changeset_revision
metadata_dict = {}
dist_to_shed = False
if can_update:
+ # The database record for the tool shed repository currently being processed can be updated.
if reinstalling or install_repository_dependencies:
# Get the repository metadata to see where it was previously located in the tool panel.
- installed_tool_shed_repository = suc.get_repository_for_dependency_relationship( app=trans.app,
- tool_shed=tool_shed_url,
- name=name,
- owner=repository_owner,
- changeset_revision=changeset_revision )
if installed_tool_shed_repository:
+ # The tool shed repository status is one of 'New', 'Uninstalled', or 'Error'.
tool_section, new_tool_panel_section, tool_panel_section_key = \
handle_tool_panel_selection( trans=trans,
metadata=installed_tool_shed_repository.metadata,
@@ -274,6 +281,8 @@
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
else:
+ # We're installing a new tool shed repository that does not yet have a database record. This repository is a repository dependency
+ # of a different repository being installed.
if new_tool_panel_section:
section_id = new_tool_panel_section.lower().replace( ' ', '_' )
tool_panel_section_key = 'section_%s' % str( section_id )
@@ -282,6 +291,7 @@
else:
tool_panel_section_key = None
else:
+ # We're installing a new tool shed repository that does not yet have a database record.
if new_tool_panel_section:
section_id = new_tool_panel_section.lower().replace( ' ', '_' )
tool_panel_section_key = 'section_%s' % str( section_id )
@@ -300,9 +310,11 @@
current_changeset_revision=changeset_revision,
owner=repository_owner,
dist_to_shed=False )
+ # Add the processed tool shed repository to the list of all processed repositories maintained within this method.
all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- # Only append the tool_shed_repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
+ # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ # Keep the one-to-one mapping between items in 3 lists.
created_or_updated_tool_shed_repositories.append( tool_shed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0