galaxy-commits
Threads by month
- ----- 2025 -----
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
April 2012
- 1 participants
- 170 discussions

commit/galaxy-central: natefoo: Provide mercurial as an egg for tool shed operations.
by Bitbucket 23 Apr '12
by Bitbucket 23 Apr '12
23 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c8f4f73e9199/
changeset: c8f4f73e9199
user: natefoo
date: 2012-04-23 19:20:29
summary: Provide mercurial as an egg for tool shed operations.
affected #: 10 files
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 dist-eggs.ini
--- a/dist-eggs.ini
+++ b/dist-eggs.ini
@@ -7,105 +7,82 @@
;
[hosts]
-py2.4-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.4
-py2.4-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.4
py2.5-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.5
py2.5-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.5
py2.6-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.6
py2.6-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.6
py2.7-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.7
py2.7-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.7
-py2.4-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.4
-py2.4-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.4
py2.5-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.5
py2.5-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.5
py2.6-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.6
py2.6-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.6
py2.7-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.7
py2.7-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.7
-py2.4-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /usr/local/bin/python2.4
-py2.5-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /usr/local/bin/python2.5
-py2.6-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /usr/local/bin/python2.6
-py2.7-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /usr/local/bin/python2.7
+py2.5-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /Library/Frameworks/Python.framework/Versions/2.5/bin/python2.5
+py2.6-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /Library/Frameworks/Python.framework/Versions/2.6/bin/python2.6
+py2.7-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /Library/Frameworks/Python.framework/Versions/2.7/bin/python2.7
py2.6-macosx-10.6-universal-ucs2 = lion.bx.psu.edu /usr/bin/python2.6
py2.7-macosx-10.6-intel-ucs2 = lion.bx.psu.edu /usr/local/bin/python2.7
-py2.4-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.4
py2.5-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.5
py2.6-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.6
py2.7-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.7
-py2.4-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.4
py2.5-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.5
py2.6-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.6
py2.7-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.7
-py2.4-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.4
py2.5-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.5
py2.6-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.6
py2.7-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.7
-py2.4-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_64-ucs2/bin/python2.4
-py2.5-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_64-ucs2/bin/python2.5
-py2.6-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_64-ucs2/bin/python2.6
-py2.7-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_64-ucs2/bin/python2.7
+py2.5-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-sun4u_64-ucs2/bin/python2.5
+py2.6-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-sun4u_64-ucs2/bin/python2.6
+py2.7-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-sun4u_64-ucs2/bin/python2.7
; these hosts are used to build eggs with no C extensions
-py2.4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.4
py2.5 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.5
py2.6 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.6
py2.7 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.7
[groups]
-py2.4-linux-i686 = py2.4-linux-i686-ucs2 py2.4-linux-i686-ucs4
-py2.4-linux-x86_64 = py2.4-linux-x86_64-ucs2 py2.4-linux-x86_64-ucs4
py2.5-linux-i686 = py2.5-linux-i686-ucs2 py2.5-linux-i686-ucs4
py2.5-linux-x86_64 = py2.5-linux-x86_64-ucs2 py2.5-linux-x86_64-ucs4
py2.6-linux-i686 = py2.6-linux-i686-ucs2 py2.6-linux-i686-ucs4
py2.6-linux-x86_64 = py2.6-linux-x86_64-ucs2 py2.6-linux-x86_64-ucs4
py2.7-linux-i686 = py2.7-linux-i686-ucs2 py2.7-linux-i686-ucs4
py2.7-linux-x86_64 = py2.7-linux-x86_64-ucs2 py2.7-linux-x86_64-ucs4
-py2.4-linux = py2.4-linux-i686 py2.4-linux-x86_64
py2.5-linux = py2.5-linux-i686 py2.5-linux-x86_64
py2.6-linux = py2.6-linux-i686 py2.6-linux-x86_64
py2.7-linux = py2.7-linux-i686 py2.7-linux-x86_64
-linux-i686 = py2.4-linux-i686 py2.5-linux-i686 py2.6-linux-i686 py2.7-linux-i686
-linux-x86_64 = py2.4-linux-x86_64 py2.5-linux-x86_64 py2.6-linux-x86_64 py2.7-linux-x86_64
+linux-i686 = py2.5-linux-i686 py2.6-linux-i686 py2.7-linux-i686
+linux-x86_64 = py2.5-linux-x86_64 py2.6-linux-x86_64 py2.7-linux-x86_64
linux = linux-i686 linux-x86_64
-py2.4-macosx = py2.4-macosx-10.3-fat-ucs2
py2.5-macosx = py2.5-macosx-10.3-fat-ucs2
py2.6-macosx = py2.6-macosx-10.3-fat-ucs2 py2.6-macosx-10.6-universal-ucs2
py2.7-macosx = py2.7-macosx-10.3-fat-ucs2 py2.7-macosx-10.6-intel-ucs2
-macosx = py2.4-macosx py2.5-macosx py2.6-macosx py2.7-macosx
-py2.4-solaris-i86pc = py2.4-solaris-2.10-i86pc_32-ucs2 py2.4-solaris-2.10-i86pc_64-ucs2
+macosx = py2.5-macosx py2.6-macosx py2.7-macosx
py2.5-solaris-i86pc = py2.5-solaris-2.10-i86pc_32-ucs2 py2.5-solaris-2.10-i86pc_64-ucs2
py2.6-solaris-i86pc = py2.6-solaris-2.10-i86pc_32-ucs2 py2.6-solaris-2.10-i86pc_64-ucs2
py2.7-solaris-i86pc = py2.7-solaris-2.10-i86pc_32-ucs2 py2.7-solaris-2.10-i86pc_64-ucs2
-py2.4-solaris-sun4u = py2.4-solaris-2.10-sun4u_32-ucs2 py2.4-solaris-2.10-sun4u_64-ucs2
py2.5-solaris-sun4u = py2.5-solaris-2.10-sun4u_32-ucs2 py2.5-solaris-2.10-sun4u_64-ucs2
py2.6-solaris-sun4u = py2.6-solaris-2.10-sun4u_32-ucs2 py2.6-solaris-2.10-sun4u_64-ucs2
py2.7-solaris-sun4u = py2.7-solaris-2.10-sun4u_32-ucs2 py2.7-solaris-2.10-sun4u_64-ucs2
-py2.4-solaris = py2.4-solaris-i86pc py2.4-solaris-sun4u
py2.5-solaris = py2.5-solaris-i86pc py2.5-solaris-sun4u
py2.6-solaris = py2.6-solaris-i86pc py2.6-solaris-sun4u
py2.7-solaris = py2.7-solaris-i86pc py2.7-solaris-sun4u
-solaris-i86pc = py2.4-solaris-i86pc py2.5-solaris-i86pc py2.6-solaris-i86pc py2.7-solaris-i86pc
-solaris-sun4u = py2.4-solaris-sun4u py2.5-solaris-sun4u py2.6-solaris-sun4u py2.7-solaris-sun4u
+solaris-i86pc = py2.5-solaris-i86pc py2.6-solaris-i86pc py2.7-solaris-i86pc
+solaris-sun4u = py2.5-solaris-sun4u py2.6-solaris-sun4u py2.7-solaris-sun4u
solaris = solaris-i86pc solaris-sun4u
-py2.4-all = py2.4-linux py2.4-macosx py2.4-solaris
py2.5-all = py2.5-linux py2.5-macosx py2.5-solaris
py2.6-all = py2.6-linux py2.6-macosx py2.6-solaris
py2.7-all = py2.7-linux py2.7-macosx py2.7-solaris
-; group for building pysam on solaris 10 sparc
-;solaris-2.10-sun4u = py2.4-solaris-2.10-sun4u_32-ucs2 py2.5-solaris-2.10-sun4u_32-ucs2 py2.6-solaris-2.10-sun4u_32-ucs2 py2.4-solaris-2.10-sun4u_64-ucs2 py2.5-solaris-2.10-sun4u_64-ucs2 py2.6-solaris-2.10-sun4u_64-ucs2
-
; the 'all' key is used internally by the build system to specify which hosts
; to build on when no hosts are specified on the dist-eggs.py command line.
all = linux macosx solaris
; the 'noplatform' key, likewise, is for which build hosts should be used when
; building pure python (noplatform) eggs.
-noplatform = py2.4 py2.5 py2.6 py2.7
+noplatform = py2.5 py2.6 py2.7
; don't build these eggs on these platforms:
[ignore]
-GeneTrack = py2.4
-python-daemon = py2.4
ctypes = py2.5-linux-i686-ucs2 py2.5-linux-i686-ucs4 py2.6-linux-i686-ucs2 py2.6-linux-i686-ucs4 py2.7-linux-i686-ucs2 py2.7-linux-i686-ucs4 py2.5-linux-x86_64-ucs2 py2.5-linux-x86_64-ucs4 py2.6-linux-x86_64-ucs2 py2.6-linux-x86_64-ucs4 py2.7-linux-x86_64-ucs2 py2.7-linux-x86_64-ucs4 py2.5-macosx-10.3-fat-ucs2 py2.6-macosx-10.3-fat-ucs2 py2.6-macosx-10.6-universal-ucs2 py2.7-macosx-10.3-fat-ucs2 py2.5-solaris-2.10-i86pc_32-ucs2 py2.6-solaris-2.10-i86pc_32-ucs2 py2.7-solaris-2.10-i86pc_32-ucs2 py2.5-solaris-2.10-i86pc_64-ucs2 py2.6-solaris-2.10-i86pc_64-ucs2 py2.7-solaris-2.10-i86pc_64-ucs2 py2.5-solaris-2.10-sun4u_32-ucs2 py2.6-solaris-2.10-sun4u_32-ucs2 py2.7-solaris-2.10-sun4u_32-ucs2 py2.5-solaris-2.10-sun4u_64-ucs2 py2.6-solaris-2.10-sun4u_64-ucs2 py2.7-solaris-2.10-sun4u_64-ucs2
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -17,6 +17,7 @@
ctypes = 1.0.2
DRMAA_python = 0.2
MarkupSafe = 0.12
+mercurial = 2.1.2
MySQL_python = 1.2.3c1
numpy = 1.6.0
pbs_python = 4.1.0
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -6,6 +6,8 @@
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.model.orm import *
+
+pkg_resources.require( 'mercurial' )
from mercurial import ui, commands
pkg_resources.require( 'elementtree' )
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -2,7 +2,9 @@
from galaxy.web.controllers.admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
-from galaxy import tools
+from galaxy import eggs, tools
+
+eggs.require( 'mercurial' )
from mercurial import hg
log = logging.getLogger( __name__ )
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -6,7 +6,11 @@
from galaxy.util.shed_util import get_configured_ui
from common import *
from repository import RepositoryListGrid, CategoryListGrid
+
+from galaxy import eggs
+eggs.require('mercurial')
from mercurial import hg
+
import logging
log = logging.getLogger( __name__ )
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -11,6 +11,9 @@
from galaxy.webapps.community import model
from galaxy.model.orm import *
from galaxy.model.item_attrs import UsesItemRatings
+
+from galaxy import eggs
+eggs.require('mercurial')
from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -1,6 +1,9 @@
import os, logging
from galaxy.web.base.controller import *
from galaxy.webapps.community.controllers.common import *
+
+from galaxy import eggs
+eggs.require('mercurial')
from mercurial.hgweb.hgwebdir_mod import hgwebdir
from mercurial.hgweb.request import wsgiapplication
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -12,6 +12,9 @@
from galaxy.model.orm import *
from galaxy.util.shed_util import get_configured_ui
from common import *
+
+from galaxy import eggs
+eggs.require('mercurial')
from mercurial import hg, ui, patch, commands
log = logging.getLogger( __name__ )
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -4,6 +4,9 @@
from galaxy.datatypes.checkers import *
from common import *
from galaxy.util.shed_util import get_configured_ui, handle_sample_tool_data_table_conf_file
+
+from galaxy import eggs
+eggs.require('mercurial')
from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
diff -r 18c426204a337f94377074afa863261b6a4b10be -r c8f4f73e9199fc419a221647ccf2337e7e59ed10 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -9,7 +9,11 @@
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import *
from galaxy.web.form_builder import *
+
+from galaxy import eggs
+eggs.require('mercurial')
from mercurial import hg, ui
+
log = logging.getLogger( __name__ )
class User( object ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Better message when creating a custom build.
by Bitbucket 23 Apr '12
by Bitbucket 23 Apr '12
23 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/18c426204a33/
changeset: 18c426204a33
user: jgoecks
date: 2012-04-23 17:24:46
summary: Better message when creating a custom build.
affected #: 1 file
diff -r ebd5932ed7f1d0b434383995ea1338609b9814f4 -r 18c426204a337f94377074afa863261b6a4b10be templates/user/dbkeys.mako
--- a/templates/user/dbkeys.mako
+++ b/templates/user/dbkeys.mako
@@ -149,7 +149,7 @@
% if 'count' in dct:
${dct['count']}
% else:
- ?
+ Processing
% endif
</td><td><form action="dbkeys" method="post"><input type="hidden" name="key" value="${key}" /><input type="submit" name="delete" value="Delete" /></form></td>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c5da9bd8bbfc/
changeset: c5da9bd8bbfc
user: jgoecks
date: 2012-04-23 15:08:29
summary: Use tabix for indexing and getting data from GTF files. This is faster and more flexible than using interval index.
affected #: 3 files
diff -r 8b773372d0dce1d815b90d257efa073d5a039e72 -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -85,7 +85,11 @@
<datatype extension="gff3" type="galaxy.datatypes.interval:Gff3" display_in_upload="true"/><datatype extension="gif" type="galaxy.datatypes.images:Gif" mimetype="image/gif"/><datatype extension="gmaj.zip" type="galaxy.datatypes.images:Gmaj" mimetype="application/zip"/>
- <datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true"/>
+ <datatype extension="gtf" type="galaxy.datatypes.interval:Gtf" display_in_upload="true">
+ <converter file="gff_to_bgzip_converter.xml" target_datatype="bgzip"/>
+ <converter file="gff_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/>
+ <converter file="gff_to_summary_tree_converter.xml" target_datatype="summary_tree"/>
+ </datatype><datatype extension="h5" type="galaxy.datatypes.binary:Binary" mimetype="application/octet-stream" subclass="True" /><datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/><datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
diff -r 8b773372d0dce1d815b90d257efa073d5a039e72 -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -953,6 +953,9 @@
dataset.metadata.attribute_types = attribute_types
dataset.metadata.attributes = len( attribute_types )
Gff.set_meta( self, dataset, overwrite = overwrite, skip = i )
+
+ def get_track_type( self ):
+ return "FeatureTrack", {"data": "tabix", "index": "summary_tree"}
class Wiggle( Tabular, _RemoteCallMixin ):
diff -r 8b773372d0dce1d815b90d257efa073d5a039e72 -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -1024,7 +1024,7 @@
"""
start, end = int( start ), int( end )
source = open( self.original_dataset.file_name )
-
+
def features_in_region_iter():
offset = 0
for feature in GFFReaderWrapper( source, fix_strand=True ):
@@ -1034,7 +1034,7 @@
yield feature, offset
offset += feature.raw_size
return features_in_region_iter()
-
+
def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
"""
Process data from an iterator to a format that can be provided to client.
@@ -1057,6 +1057,47 @@
return { 'data': results, 'message': message }
+
+class GtfTabixDataProvider( TabixDataProvider ):
+
+ def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
+ # Loop through lines and group by transcript_id; each group is a feature.
+ features = {}
+ for count, line in enumerate( iterator ):
+ line_attrs = parse_gff_attributes( line.split('\t')[8] )
+ transcript_id = line_attrs[ 'transcript_id' ]
+ if transcript_id in features:
+ feature = features[ transcript_id ]
+ else:
+ feature = []
+ features[ transcript_id ] = feature
+ feature.append( line )
+
+ # Set up iterator for features.
+ def features_iterator():
+ for transcript_id, feature in features.items():
+ for line in feature:
+ yield line
+
+ # Process data.
+ filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
+ no_detail = ( "no_detail" in kwargs )
+ results = []
+ message = None
+
+ # TODO: remove reader-wrapper and create features directly.
+ for count, feature in enumerate( GFFReaderWrapper( features_iterator() ) ):
+ if count < start_val:
+ continue
+ if count-start_val >= max_vals:
+ message = ERROR_MAX_VALS % ( max_vals, "reads" )
+ break
+
+ payload = package_gff_feature( feature, no_detail=no_detail, filter_cols=filter_cols )
+ payload.insert( 0, feature.intervals[ 0 ].attributes[ 'transcript_id' ] )
+ results.append( payload )
+
+ return { 'data': results, 'message': message }
#
# -- ENCODE Peak data providers.
@@ -1189,6 +1230,7 @@
"tabix": {
Vcf: VcfTabixDataProvider,
Bed: BedTabixDataProvider,
+ Gtf: GtfTabixDataProvider,
ENCODEPeak: ENCODEPeakTabixDataProvider,
Interval: IntervalTabixDataProvider,
"default" : TabixDataProvider },
https://bitbucket.org/galaxy/galaxy-central/changeset/678ac108a4c4/
changeset: 678ac108a4c4
user: jgoecks
date: 2012-04-23 15:28:12
summary: Fix style conflict between base.css and trackster.css
affected #: 5 files
diff -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 static/june_2007_style/base_sprites.less.tmpl
--- a/static/june_2007_style/base_sprites.less.tmpl
+++ b/static/june_2007_style/base_sprites.less.tmpl
@@ -88,6 +88,10 @@
}
.icon-button.plus-button {
-sprite-group: fugue;
+ -sprite-image: fugue/plus-button-bw.png;
+}
+.icon-button.plus-button:hover {
+ -sprite-group: fugue;
-sprite-image: fugue/plus-button.png;
}
.icon-button.gear {
diff -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -751,9 +751,10 @@
.icon-button.go-to-full-screen{background:url(fugue.png) no-repeat 0px -286px;}
.icon-button.import{background:url(fugue.png) no-repeat 0px -312px;}
.icon-button.plus-button{background:url(fugue.png) no-repeat 0px -338px;}
-.icon-button.gear{background:url(fugue.png) no-repeat 0px -364px;}
-.icon-button.chart_curve{background:url(fugue.png) no-repeat 0px -390px;}
-.text-and-autocomplete-select{background:url(fugue.png) no-repeat right -416px;}
+.icon-button.plus-button:hover{background:url(fugue.png) no-repeat 0px -364px;}
+.icon-button.gear{background:url(fugue.png) no-repeat 0px -390px;}
+.icon-button.chart_curve{background:url(fugue.png) no-repeat 0px -416px;}
+.text-and-autocomplete-select{background:url(fugue.png) no-repeat right -442px;}
div.historyItem-error .state-icon{background:url(history-states.png) no-repeat 0px 0px;}
div.historyItem-empty .state-icon{background:url(history-states.png) no-repeat 0px -25px;}
div.historyItem-queued .state-icon{background:url(history-states.png) no-repeat 0px -50px;}
diff -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 static/june_2007_style/blue/fugue.png
Binary file static/june_2007_style/blue/fugue.png has changed
diff -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 static/june_2007_style/blue/trackster.css
--- a/static/june_2007_style/blue/trackster.css
+++ b/static/june_2007_style/blue/trackster.css
@@ -86,8 +86,6 @@
.icon-button.toggle-expand:hover{background:transparent url(../images/fugue/toggle-expand.png) no-repeat;}
.icon-button.cross-circle{background:transparent url(../images/fugue/cross-circle-bw.png) no-repeat;margin-right:0px;}
.icon-button.cross-circle:hover{background:transparent url(../images/fugue/cross-circle.png) no-repeat;}
-.icon-button.plus-button{background:transparent url(../images/fugue/plus-button-bw.png) no-repeat;}
-.icon-button.plus-button:hover{background:transparent url(../images/fugue/plus-button.png) no-repeat;}
.icon-button.block--plus{background:transparent url(../images/fugue/block--plus-bw.png) no-repeat;}
.icon-button.block--plus:hover{background:transparent url(../images/fugue/block--plus.png) no-repeat;}
.icon-button.bookmarks{background:transparent url(../images/fugue/bookmarks-bw.png) no-repeat;}
diff -r c5da9bd8bbfc1e2634b7981c836a62796f7b33f7 -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 static/june_2007_style/trackster.css.tmpl
--- a/static/june_2007_style/trackster.css.tmpl
+++ b/static/june_2007_style/trackster.css.tmpl
@@ -411,12 +411,6 @@
.icon-button.cross-circle:hover {
background: transparent url(../images/fugue/cross-circle.png) no-repeat;
}
-.icon-button.plus-button {
- background: transparent url(../images/fugue/plus-button-bw.png) no-repeat;
-}
-.icon-button.plus-button:hover {
- background: transparent url(../images/fugue/plus-button.png) no-repeat;
-}
.icon-button.block--plus {
background: transparent url(../images/fugue/block--plus-bw.png) no-repeat;
}
https://bitbucket.org/galaxy/galaxy-central/changeset/ebd5932ed7f1/
changeset: ebd5932ed7f1
user: jgoecks
date: 2012-04-23 17:14:27
summary: Create GTF features directly rather than parsing from lines. This is much faster than parsing.
affected #: 2 files
diff -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 -r ebd5932ed7f1d0b434383995ea1338609b9814f4 lib/galaxy/datatypes/util/gff_util.py
--- a/lib/galaxy/datatypes/util/gff_util.py
+++ b/lib/galaxy/datatypes/util/gff_util.py
@@ -10,8 +10,8 @@
A GFF interval, including attributes. If file is strictly a GFF file,
only attribute is 'group.'
"""
- def __init__( self, reader, fields, chrom_col, feature_col, start_col, end_col, \
- strand_col, score_col, default_strand, fix_strand=False ):
+ def __init__( self, reader, fields, chrom_col=0, feature_col=2, start_col=3, end_col=4, \
+ strand_col=6, score_col=5, default_strand='.', fix_strand=False ):
# HACK: GFF format allows '.' for strand but GenomicInterval does not. To get around this,
# temporarily set strand and then unset after initing GenomicInterval.
unknown_strand = False
@@ -45,8 +45,8 @@
"""
A GFF feature, which can include multiple intervals.
"""
- def __init__( self, reader, chrom_col, feature_col, start_col, end_col, \
- strand_col, score_col, default_strand, fix_strand=False, intervals=[], \
+ def __init__( self, reader, chrom_col=0, feature_col=2, start_col=3, end_col=4, \
+ strand_col=6, score_col=5, default_strand='.', fix_strand=False, intervals=[], \
raw_size=0 ):
GFFInterval.__init__( self, reader, intervals[0].fields, chrom_col, feature_col, \
start_col, end_col, strand_col, score_col, default_strand, \
diff -r 678ac108a4c44b06693a22f6d7d301986a3b30f3 -r ebd5932ed7f1d0b434383995ea1338609b9814f4 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -1071,28 +1071,22 @@
else:
feature = []
features[ transcript_id ] = feature
- feature.append( line )
-
- # Set up iterator for features.
- def features_iterator():
- for transcript_id, feature in features.items():
- for line in feature:
- yield line
-
+ feature.append( GFFInterval( None, line.split( '\t') ) )
+
# Process data.
filter_cols = from_json_string( kwargs.get( "filter_cols", "[]" ) )
no_detail = ( "no_detail" in kwargs )
results = []
message = None
- # TODO: remove reader-wrapper and create features directly.
- for count, feature in enumerate( GFFReaderWrapper( features_iterator() ) ):
+ for count, intervals in enumerate( features.values() ):
if count < start_val:
continue
if count-start_val >= max_vals:
message = ERROR_MAX_VALS % ( max_vals, "reads" )
break
-
+
+ feature = GFFFeature( None, intervals=intervals )
payload = package_gff_feature( feature, no_detail=no_detail, filter_cols=filter_cols )
payload.insert( 0, feature.intervals[ 0 ].attributes[ 'transcript_id' ] )
results.append( payload )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8b773372d0dc/
changeset: 8b773372d0dc
user: dan
date: 2012-04-23 12:42:19
summary: Partial revert for c32b71dcfc84.
affected #: 1 file
diff -r d7fecd64a8027af7872e125d6772d3a04f5e1973 -r 8b773372d0dce1d815b90d257efa073d5a039e72 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1125,7 +1125,7 @@
return ldda
def clear_associated_files( self, metadata_safe = False, purge = False ):
# metadata_safe = True means to only clear when assoc.metadata_safe == False
- for assoc in self.implicitly_converted_datasets + self.implicitly_converted_parent_datasets:
+ for assoc in self.implicitly_converted_datasets:
if not metadata_safe or not assoc.metadata_safe:
assoc.clear( purge = purge )
def get_display_name( self ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

22 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d7fecd64a802/
changeset: d7fecd64a802
user: jgoecks
date: 2012-04-22 17:58:24
summary: Refactor and simplify tophat wrapper.
affected #: 2 files
diff -r 47bcf28a53e982e5c3062218f0eb8a9d24c6cbd6 -r d7fecd64a8027af7872e125d6772d3a04f5e1973 tools/ngs_rna/tophat_wrapper.py
--- a/tools/ngs_rna/tophat_wrapper.py
+++ b/tools/ngs_rna/tophat_wrapper.py
@@ -171,7 +171,7 @@
opts += ' --no-coverage-search'
if options.microexon_search:
opts += ' --microexon-search'
- if options.single_paired == 'paired':
+ if options.single_paired == 'paired' and options.mate_std_dev:
opts += ' --mate-std-dev %s' % options.mate_std_dev
if options.seg_mismatches:
opts += ' --segment-mismatches %d' % int( options.seg_mismatches )
diff -r 47bcf28a53e982e5c3062218f0eb8a9d24c6cbd6 -r d7fecd64a8027af7872e125d6772d3a04f5e1973 tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -7,132 +7,100 @@
</requirements><command interpreter="python">
tophat_wrapper.py
- ## Change this to accommodate the number of threads you have available.
- --num-threads="4"
+
+ ## Change this to accommodate the number of threads you have available.
+ --num-threads="4"
- ## Provide outputs.
- --junctions-output=$junctions
- --hits-output=$accepted_hits
+ ## Provide outputs.
+ --junctions-output=$junctions
+ --hits-output=$accepted_hits
- ## Handle reference file.
- #if $refGenomeSource.genomeSource == "history":
- --own-file=$refGenomeSource.ownFile
+ ## Handle reference file.
+ #if $refGenomeSource.genomeSource == "history":
+ --own-file=$refGenomeSource.ownFile
+ #else:
+ --indexes-path="${refGenomeSource.index.fields.path}"
+ #end if
+
+ ## Are reads single-end or paired?
+ --single-paired=$singlePaired.sPaired
+
+ ## First input file always required.
+ --input1=$input1
+
+ ## Second input only if input is paired-end.
+ #if $singlePaired.sPaired == "paired"
+ --input2=$singlePaired.input2
+ -r $singlePaired.mate_inner_distance
+ --mate-std-dev=$singlePaired.mate_std_dev
+ #end if
+
+ ## Set params.
+ --settings=$params.settingsType
+ #if $params.settingsType == "full":
+ -a $params.anchor_length
+ -m $params.splice_mismatches
+ -i $params.min_intron_length
+ -I $params.max_intron_length
+ -g $params.max_multihits
+ --min-segment-intron $params.min_segment_intron
+ --max-segment-intron $params.max_segment_intron
+ --seg-mismatches=$params.seg_mismatches
+ --seg-length=$params.seg_length
+ --library-type=$params.library_type
+
+ ## Indel search.
+ #if $params.indel_search.allow_indel_search == "Yes":
+ ## --allow-indels
+ --max-insertion-length $params.indel_search.max_insertion_length
+ --max-deletion-length $params.indel_search.max_deletion_length
#else:
- --indexes-path="${refGenomeSource.index.fields.path}"
+ --no-novel-indels
#end if
- ## Are reads single-end or paired?
- --single-paired=$singlePaired.sPaired
-
- ## First input file always required.
- --input1=$input1
-
- ## Set params based on whether reads are single-end or paired.
- #if $singlePaired.sPaired == "single":
- --settings=$singlePaired.sParams.sSettingsType
- #if $singlePaired.sParams.sSettingsType == "full":
- -a $singlePaired.sParams.anchor_length
- -m $singlePaired.sParams.splice_mismatches
- -i $singlePaired.sParams.min_intron_length
- -I $singlePaired.sParams.max_intron_length
- -g $singlePaired.sParams.max_multihits
- --min-segment-intron $singlePaired.sParams.min_segment_intron
- --max-segment-intron $singlePaired.sParams.max_segment_intron
- --seg-mismatches=$singlePaired.sParams.seg_mismatches
- --seg-length=$singlePaired.sParams.seg_length
- --library-type=$singlePaired.sParams.library_type
-
- ## Indel search.
- #if $singlePaired.sParams.indel_search.allow_indel_search == "Yes":
- ## --allow-indels
- --max-insertion-length $singlePaired.sParams.indel_search.max_insertion_length
- --max-deletion-length $singlePaired.sParams.indel_search.max_deletion_length
- #else:
- --no-novel-indels
- #end if
-
- ## Supplying junctions parameters.
- #if $singlePaired.sParams.own_junctions.use_junctions == "Yes":
- #if $singlePaired.sParams.own_junctions.gene_model_ann.use_annotations == "Yes":
- -G $singlePaired.sParams.own_junctions.gene_model_ann.gene_annotation_model
- #end if
- #if $singlePaired.sParams.own_junctions.raw_juncs.use_juncs == "Yes":
- -j $singlePaired.sParams.own_junctions.raw_juncs.raw_juncs
- #end if
- ## TODO: No idea why a string cast is necessary, but it is:
- #if str($singlePaired.sParams.own_junctions.no_novel_juncs) == "Yes":
- --no-novel-juncs
- #end if
- #end if
-
- #if $singlePaired.sParams.coverage_search.use_search == "Yes":
- --coverage-search
- --min-coverage-intron $singlePaired.sParams.coverage_search.min_coverage_intron
- --max-coverage-intron $singlePaired.sParams.coverage_search.max_coverage_intron
- #else:
- --no-coverage-search
- #end if
- ## TODO: No idea why the type conversion is necessary, but it seems to be.
- #if str($singlePaired.sParams.microexon_search) == "Yes":
- --microexon-search
- #end if
+ ## Supplying junctions parameters.
+ #if $params.own_junctions.use_junctions == "Yes":
+ #if $params.own_junctions.gene_model_ann.use_annotations == "Yes":
+ -G $params.own_junctions.gene_model_ann.gene_annotation_model
#end if
- #else:
- --input2=$singlePaired.input2
- -r $singlePaired.mate_inner_distance
- --settings=$singlePaired.pParams.pSettingsType
- #if $singlePaired.pParams.pSettingsType == "full":
- --mate-std-dev=$singlePaired.pParams.mate_std_dev
- -a $singlePaired.pParams.anchor_length
- -m $singlePaired.pParams.splice_mismatches
- -i $singlePaired.pParams.min_intron_length
- -I $singlePaired.pParams.max_intron_length
- -g $singlePaired.pParams.max_multihits
- --min-segment-intron $singlePaired.pParams.min_segment_intron
- --max-segment-intron $singlePaired.pParams.max_segment_intron
- --seg-mismatches=$singlePaired.pParams.seg_mismatches
- --seg-length=$singlePaired.pParams.seg_length
- --library-type=$singlePaired.pParams.library_type
-
- ## Indel search.
- #if $singlePaired.pParams.indel_search.allow_indel_search == "Yes":
- ## --allow-indels
- --max-insertion-length $singlePaired.pParams.indel_search.max_insertion_length
- --max-deletion-length $singlePaired.pParams.indel_search.max_deletion_length
- #else:
- --no-novel-indels
- #end if
-
- ## Supplying junctions parameters.
- #if $singlePaired.pParams.own_junctions.use_junctions == "Yes":
- #if $singlePaired.pParams.own_junctions.gene_model_ann.use_annotations == "Yes":
- -G $singlePaired.pParams.own_junctions.gene_model_ann.gene_annotation_model
- #end if
- #if $singlePaired.pParams.own_junctions.raw_juncs.use_juncs == "Yes":
- -j $singlePaired.pParams.own_junctions.raw_juncs.raw_juncs
- #end if
- ## TODO: No idea why type cast is necessary, but it is:
- #if str($singlePaired.pParams.own_junctions.no_novel_juncs) == "Yes":
- --no-novel-juncs
- #end if
- #end if
-
- #if $singlePaired.pParams.coverage_search.use_search == "Yes":
- --coverage-search
- --min-coverage-intron $singlePaired.pParams.coverage_search.min_coverage_intron
- --max-coverage-intron $singlePaired.pParams.coverage_search.max_coverage_intron
- #else:
- --no-coverage-search
- #end if
- ## TODO: No idea why the type conversion is necessary, but it seems to be.
- #if str ($singlePaired.pParams.microexon_search) == "Yes":
- --microexon-search
- #end if
+ #if $params.own_junctions.raw_juncs.use_juncs == "Yes":
+ -j $params.own_junctions.raw_juncs.raw_juncs
+ #end if
+ ## TODO: No idea why a string cast is necessary, but it is:
+ #if str($params.own_junctions.no_novel_juncs) == "Yes":
+ --no-novel-juncs
#end if
#end if
+
+ #if $params.coverage_search.use_search == "Yes":
+ --coverage-search
+ --min-coverage-intron $params.coverage_search.min_coverage_intron
+ --max-coverage-intron $params.coverage_search.max_coverage_intron
+ #else:
+ --no-coverage-search
+ #end if
+ ## TODO: No idea why the type conversion is necessary, but it seems to be.
+ #if str($params.microexon_search) == "Yes":
+ --microexon-search
+ #end if
+ #end if
</command><inputs>
- <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
+ <conditional name="singlePaired">
+ <param name="sPaired" type="select" label="Is this library mate-paired?">
+ <option value="single">Single-end</option>
+ <option value="paired">Paired-end</option>
+ </param>
+ <when value="single">
+ <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33"/>
+ </when>
+ <when value="paired">
+ <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
+ <param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
+ <param name="mate_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs" />
+ <param name="mate_std_dev" type="integer" value="20" label="Std. Dev for Distance between Mate Pairs" help="The standard deviation for the distribution on inner distances between mate pairs."/>
+ </when>
+ </conditional><conditional name="refGenomeSource"><param name="genomeSource" type="select" label="Will you select a reference genome from your history or use a built-in index?" help="Built-ins were indexed using default options"><option value="indexed">Use a built-in index</option>
@@ -150,193 +118,94 @@
<param name="ownFile" type="data" format="fasta" metadata_name="dbkey" label="Select the reference genome" /></when><!-- history --></conditional><!-- refGenomeSource -->
- <conditional name="singlePaired">
- <param name="sPaired" type="select" label="Is this library mate-paired?">
- <option value="single">Single-end</option>
- <option value="paired">Paired-end</option>
+ <conditional name="params">
+ <param name="settingsType" type="select" label="TopHat settings to use" help="You can use the default settings or set custom values for any of Tophat's parameters.">
+ <option value="preSet">Use Defaults</option>
+ <option value="full">Full parameter list</option></param>
- <when value="single">
- <conditional name="sParams">
- <param name="sSettingsType" type="select" label="TopHat settings to use" help="You can use the default settings or set custom values for any of Tophat's parameters.">
- <option value="preSet">Use Defaults</option>
- <option value="full">Full parameter list</option>
- </param>
- <when value="preSet" />
- <!-- Full/advanced params. -->
- <when value="full">
- <param name="library_type" type="select" label="Library Type" help="TopHat will treat the reads as strand specific. Every read alignment will have an XS attribute tag. Consider supplying library type options below to select the correct RNA-seq protocol.">
- <option value="fr-unstranded">FR Unstranded</option>
- <option value="fr-firststrand">FR First Strand</option>
- <option value="fr-secondstrand">FR Second Strand</option>
+ <when value="preSet" />
+ <!-- Full/advanced params. -->
+ <when value="full">
+ <param name="library_type" type="select" label="Library Type" help="TopHat will treat the reads as strand specific. Every read alignment will have an XS attribute tag. Consider supplying library type options below to select the correct RNA-seq protocol.">
+ <option value="fr-unstranded">FR Unstranded</option>
+ <option value="fr-firststrand">FR First Strand</option>
+ <option value="fr-secondstrand">FR Second Strand</option>
+ </param>
+ <param name="anchor_length" type="integer" value="8" label="Anchor length (at least 3)" help="Report junctions spanned by reads with at least this many bases on each side of the junction." />
+ <param name="splice_mismatches" type="integer" value="0" label="Maximum number of mismatches that can appear in the anchor region of spliced alignment" />
+ <param name="min_intron_length" type="integer" value="70" label="The minimum intron length" help="TopHat will ignore donor/acceptor pairs closer than this many bases apart." />
+ <param name="max_intron_length" type="integer" value="500000" label="The maximum intron length" help="When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read." />
+ <conditional name="indel_search">
+ <param name="allow_indel_search" type="select" label="Allow indel search">
+ <option value="Yes">Yes</option>
+ <option value="No">No</option></param>
- <param name="anchor_length" type="integer" value="8" label="Anchor length (at least 3)" help="Report junctions spanned by reads with at least this many bases on each side of the junction." />
- <param name="splice_mismatches" type="integer" value="0" label="Maximum number of mismatches that can appear in the anchor region of spliced alignment" />
- <param name="min_intron_length" type="integer" value="70" label="The minimum intron length" help="TopHat will ignore donor/acceptor pairs closer than this many bases apart." />
- <param name="max_intron_length" type="integer" value="500000" label="The maximum intron length" help="When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read." />
- <conditional name="indel_search">
- <param name="allow_indel_search" type="select" label="Allow indel search">
- <option value="Yes">Yes</option>
- <option value="No">No</option>
- </param>
- <when value="No"/>
- <when value="Yes">
- <param name="max_insertion_length" type="integer" value="3" label="Max insertion length." help="The maximum insertion length." />
- <param name="max_deletion_length" type="integer" value="3" label="Max deletion length." help="The maximum deletion length." />
- </when>
- </conditional>
-alignments (number of reads divided by average depth of coverage)" help="0.0 to 1.0 (0 to turn off)" />
- <param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" />
- <param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" />
- <param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
- <param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" />
- <param name="seg_length" type="integer" value="25" label="Minimum length of read segments" />
-
- <!-- Options for supplying own junctions. -->
- <conditional name="own_junctions">
- <param name="use_junctions" type="select" label="Use Own Junctions">
+ <when value="No"/>
+ <when value="Yes">
+ <param name="max_insertion_length" type="integer" value="3" label="Max insertion length." help="The maximum insertion length." />
+ <param name="max_deletion_length" type="integer" value="3" label="Max deletion length." help="The maximum deletion length." />
+ </when>
+ </conditional>
+ alignments (number of reads divided by average depth of coverage)" help="0.0 to 1.0 (0 to turn off)" />
+ <param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" />
+ <param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" />
+ <param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
+ <param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" />
+ <param name="seg_length" type="integer" value="25" label="Minimum length of read segments" />
+
+ <!-- Options for supplying own junctions. -->
+ <conditional name="own_junctions">
+ <param name="use_junctions" type="select" label="Use Own Junctions">
+ <option value="No">No</option>
+ <option value="Yes">Yes</option>
+ </param>
+ <when value="Yes">
+ <conditional name="gene_model_ann">
+ <param name="use_annotations" type="select" label="Use Gene Annotation Model">
+ <option value="No">No</option>
+ <option value="Yes">Yes</option>
+ </param>
+ <when value="No" />
+ <when value="Yes">
+ <param format="gtf" name="gene_annotation_model" type="data" label="Gene Model Annotations" help="TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping."/>
+ </when>
+ </conditional>
+ <conditional name="raw_juncs">
+ <param name="use_juncs" type="select" label="Use Raw Junctions">
+ <option value="No">No</option>
+ <option value="Yes">Yes</option>
+ </param>
+ <when value="No" />
+ <when value="Yes">
+ <param format="interval" name="raw_juncs" type="data" label="Raw Junctions" help="Supply TopHat with a list of raw junctions. Junctions are specified one per line, in a tab-delimited format. Records look like: [chrom] [left] [right] [+/-] left and right are zero-based coordinates, and specify the last character of the left sequenced to be spliced to the first character of the right sequence, inclusive."/>
+ </when>
+ </conditional>
+ <param name="no_novel_juncs" type="select" label="Only look for supplied junctions"><option value="No">No</option><option value="Yes">Yes</option></param>
- <when value="Yes">
- <conditional name="gene_model_ann">
- <param name="use_annotations" type="select" label="Use Gene Annotation Model">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="No" />
- <when value="Yes">
- <param format="gtf" name="gene_annotation_model" type="data" label="Gene Model Annotations" help="TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping."/>
- </when>
- </conditional>
- <conditional name="raw_juncs">
- <param name="use_juncs" type="select" label="Use Raw Junctions">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="No" />
- <when value="Yes">
- <param format="interval" name="raw_juncs" type="data" label="Raw Junctions" help="Supply TopHat with a list of raw junctions. Junctions are specified one per line, in a tab-delimited format. Records look like: [chrom] [left] [right] [+/-] left and right are zero-based coordinates, and specify the last character of the left sequenced to be spliced to the first character of the right sequence, inclusive."/>
- </when>
- </conditional>
- <param name="no_novel_juncs" type="select" label="Only look for supplied junctions">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- </when>
- <when value="No" />
- </conditional><!-- /own_junctions -->
-
- <!-- Coverage search. -->
- <conditional name="coverage_search">
- <param name="use_search" type="select" label="Use Coverage Search">
- <option selected="true" value="Yes">Yes</option>
- <option value="No">No</option>
- </param>
- <when value="Yes">
- <param name="min_coverage_intron" type="integer" value="50" label="Minimum intron length that may be found during coverage search" />
- <param name="max_coverage_intron" type="integer" value="20000" label="Maximum intron length that may be found during coverage search" />
- </when>
- <when value="No" />
- </conditional>
- <param name="microexon_search" type="select" label="Use Microexon Search" help="With this option, the pipeline will attempt to find alignments incident to microexons. Works only for reads 50bp or longer.">
+ </when>
+ <when value="No" />
+ </conditional><!-- /own_junctions -->
+
+ <!-- Coverage search. -->
+ <conditional name="coverage_search">
+ <param name="use_search" type="select" label="Use Coverage Search">
+ <option selected="true" value="Yes">Yes</option><option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- </when><!-- full -->
- </conditional><!-- sParams -->
- </when><!-- single -->
- <when value="paired">
- <param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
- <param name="mate_inner_distance" type="integer" value="20" label="Mean Inner Distance between Mate Pairs" />
- <conditional name="pParams">
- <param name="pSettingsType" type="select" label="TopHat settings to use" help="For most mapping needs use Commonly used settings. If you want full control use Full parameter list">
- <option value="preSet">Commonly used</option>
- <option value="full">Full parameter list</option></param>
- <when value="preSet" />
- <!-- Full/advanced params. -->
- <when value="full">
- <param name="library_type" type="select" label="Library Type" help="TopHat will treat the reads as strand specific. Every read alignment will have an XS attribute tag. Consider supplying library type options below to select the correct RNA-seq protocol.">
- <option value="fr-unstranded">FR Unstranded</option>
- <option value="fr-firststrand">FR First Strand</option>
- <option value="fr-secondstrand">FR Second Strand</option>
- </param>
- <param name="mate_std_dev" type="integer" value="20" label="Std. Dev for Distance between Mate Pairs" help="The standard deviation for the distribution on inner distances between mate pairs."/>
- <param name="anchor_length" type="integer" value="8" label="Anchor length (at least 3)" help="Report junctions spanned by reads with at least this many bases on each side of the junction." />
- <param name="splice_mismatches" type="integer" value="0" label="Maximum number of mismatches that can appear in the anchor region of spliced alignment" />
- <param name="min_intron_length" type="integer" value="70" label="The minimum intron length" help="TopHat will ignore donor/acceptor pairs closer than this many bases apart." />
- <param name="max_intron_length" type="integer" value="500000" label="The maximum intron length" help="When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read." />
- <conditional name="indel_search">
- <param name="allow_indel_search" type="select" label="Allow indel search">
- <option value="Yes">Yes</option>
- <option value="No">No</option>
- </param>
- <when value="No"/>
- <when value="Yes">
- <param name="max_insertion_length" type="integer" value="3" label="Max insertion length." help="The maximum insertion length." />
- <param name="max_deletion_length" type="integer" value="3" label="Max deletion length." help="The maximum deletion length." />
- </when>
- </conditional>
- <param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" />
- <param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" />
- <param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
- <param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" />
- <param name="seg_length" type="integer" value="25" label="Minimum length of read segments" />
- <!-- Options for supplying own junctions. -->
- <conditional name="own_junctions">
- <param name="use_junctions" type="select" label="Use Own Junctions">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="Yes">
- <conditional name="gene_model_ann">
- <param name="use_annotations" type="select" label="Use Gene Annotation Model">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="No" />
- <when value="Yes">
- <param format="gtf" name="gene_annotation_model" type="data" label="Gene Model Annotations" help="TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping."/>
- </when>
- </conditional>
- <conditional name="raw_juncs">
- <param name="use_juncs" type="select" label="Use Raw Junctions">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="No" />
- <when value="Yes">
- <param format="interval" name="raw_juncs" type="data" label="Raw Junctions" help="Supply TopHat with a list of raw junctions. Junctions are specified one per line, in a tab-delimited format. Records look like: [chrom] [left] [right] [+/-] left and right are zero-based coordinates, and specify the last character of the left sequenced to be spliced to the first character of the right sequence, inclusive."/>
- </when>
- </conditional>
- <param name="no_novel_juncs" type="select" label="Only look for supplied junctions">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- </when>
- <when value="No" />
- </conditional><!-- /own_junctions -->
-
- <!-- Coverage search. -->
- <conditional name="coverage_search">
- <param name="use_search" type="select" label="Use Coverage Search">
- <option selected="true" value="Yes">Yes</option>
- <option value="No">No</option>
- </param>
- <when value="Yes">
- <param name="min_coverage_intron" type="integer" value="50" label="Minimum intron length that may be found during coverage search" />
- <param name="max_coverage_intron" type="integer" value="20000" label="Maximum intron length that may be found during coverage search" />
- </when>
- <when value="No" />
- </conditional>
- <param name="microexon_search" type="select" label="Use Microexon Search" help="With this option, the pipeline will attempt to find alignments incident to microexons. Works only for reads 50bp or longer.">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- </when><!-- full -->
- </conditional><!-- pParams -->
- </when><!-- paired -->
- </conditional>
+ <when value="Yes">
+ <param name="min_coverage_intron" type="integer" value="50" label="Minimum intron length that may be found during coverage search" />
+ <param name="max_coverage_intron" type="integer" value="20000" label="Maximum intron length that may be found during coverage search" />
+ </when>
+ <when value="No" />
+ </conditional>
+ <param name="microexon_search" type="select" label="Use Microexon Search" help="With this option, the pipeline will attempt to find alignments incident to microexons. Works only for reads 50bp or longer.">
+ <option value="No">No</option>
+ <option value="Yes">Yes</option>
+ </param>
+ </when><!-- full -->
+ </conditional><!-- params --></inputs><outputs>
@@ -425,11 +294,11 @@
tophat -o tmp_dir -p 1 tophat_in1 test-data/tophat_in2.fastqsanger
Rename the files in tmp_dir appropriately
-->
+ <param name="sPaired" value="single" /><param name="input1" ftype="fastqsanger" value="tophat_in2.fastqsanger" /><param name="genomeSource" value="indexed" /><param name="index" value="tophat_test" />
- <param name="sPaired" value="single" />
- <param name="sSettingsType" value="preSet" />
+ <param name="settingsType" value="preSet" /><output name="junctions" file="tophat_out1j.bed" /><output name="accepted_hits" file="tophat_out1h.bam" compare="sim_size" /></test>
@@ -440,13 +309,13 @@
tophat -o tmp_dir -p 1 -r 20 tophat_in1 test-data/tophat_in2.fastqsanger test-data/tophat_in3.fastqsanger
Rename the files in tmp_dir appropriately
-->
+ <param name="sPaired" value="paired" /><param name="input1" ftype="fastqsanger" value="tophat_in2.fastqsanger" />
+ <param name="input2" ftype="fastqsanger" value="tophat_in3.fastqsanger" /><param name="genomeSource" value="history" /><param name="ownFile" ftype="fasta" value="tophat_in1.fasta" />
- <param name="sPaired" value="paired" />
- <param name="input2" ftype="fastqsanger" value="tophat_in3.fastqsanger" /><param name="mate_inner_distance" value="20" />
- <param name="pSettingsType" value="preSet" />
+ <param name="settingsType" value="preSet" /><output name="junctions" file="tophat_out2j.bed" /><output name="accepted_hits" file="tophat_out2h.bam" compare="sim_size" /></test>
@@ -458,11 +327,11 @@
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
+ <param name="sPaired" value="single"/><param name="input1" ftype="fastqsanger" value="tophat_in2.fastqsanger"/><param name="genomeSource" value="history"/><param name="ownFile" value="tophat_in1.fasta"/>
- <param name="sPaired" value="single"/>
- <param name="sSettingsType" value="full"/>
+ <param name="settingsType" value="full"/><param name="library_type" value="FR Unstranded"/><param name="anchor_length" value="8"/><param name="splice_mismatches" value="0"/>
@@ -496,13 +365,13 @@
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
+ <param name="sPaired" value="paired"/><param name="input1" ftype="fastqsanger" value="tophat_in2.fastqsanger"/>
+ <param name="input2" ftype="fastqsanger" value="tophat_in3.fastqsanger"/><param name="genomeSource" value="indexed"/><param name="index" value="tophat_test"/>
- <param name="sPaired" value="paired"/>
- <param name="input2" ftype="fastqsanger" value="tophat_in3.fastqsanger"/><param name="mate_inner_distance" value="20"/>
- <param name="pSettingsType" value="full"/>
+ <param name="settingsType" value="full"/><param name="library_type" value="FR Unstranded"/><param name="mate_std_dev" value="20"/><param name="anchor_length" value="8"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: More fixes for setting tool shed repository metadata.
by Bitbucket 20 Apr '12
by Bitbucket 20 Apr '12
20 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/47bcf28a53e9/
changeset: 47bcf28a53e9
user: greg
date: 2012-04-21 03:45:30
summary: More fixes for setting tool shed repository metadata.
affected #: 3 files
diff -r 49195c2f37cedabd034984b08058b163302286f0 -r 47bcf28a53e982e5c3062218f0eb8a9d24c6cbd6 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -411,10 +411,15 @@
full_path = os.path.abspath( os.path.join( root, name ) )
if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- if element_tree_root.tag == 'tool':
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
+ is_tool = False
+ if is_tool:
try:
tool = toolbox.load_tool( full_path )
except Exception, e:
diff -r 49195c2f37cedabd034984b08058b163302286f0 -r 47bcf28a53e982e5c3062218f0eb8a9d24c6cbd6 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -323,10 +323,15 @@
full_path = os.path.abspath( os.path.join( root, name ) )
if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- if element_tree_root.tag == 'tool':
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
+ is_tool = False
+ if is_tool:
try:
tool = load_tool( trans, full_path )
valid = True
diff -r 49195c2f37cedabd034984b08058b163302286f0 -r 47bcf28a53e982e5c3062218f0eb8a9d24c6cbd6 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -924,7 +924,8 @@
element_tree = util.parse_xml( tmp_filename )
element_tree_root = element_tree.getroot()
is_tool = element_tree_root.tag == 'tool'
- except:
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( tmp_filename, str( e ) ) )
is_tool = False
if is_tool:
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Fixes for setting repository metadata on older change sets in a tool shed repository.
by Bitbucket 20 Apr '12
by Bitbucket 20 Apr '12
20 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/49195c2f37ce/
changeset: 49195c2f37ce
user: greg
date: 2012-04-21 03:27:34
summary: Fixes for setting repository metadata on older change sets in a tool shed repository.
affected #: 2 files
diff -r 6f7240344304a143a3d70182092c61931595b613 -r 49195c2f37cedabd034984b08058b163302286f0 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -401,10 +401,14 @@
fh.close()
if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ]
or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ):
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( tmp_filename )
- element_tree_root = element_tree.getroot()
- if element_tree_root.tag == 'tool':
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( tmp_filename )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except:
+ is_tool = False
+ if is_tool:
try:
tool = load_tool( trans, tmp_filename )
valid = True
diff -r 6f7240344304a143a3d70182092c61931595b613 -r 49195c2f37cedabd034984b08058b163302286f0 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -919,10 +919,14 @@
fh.close()
if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ]
or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ):
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( tmp_filename )
- element_tree_root = element_tree.getroot()
- if element_tree_root.tag == 'tool':
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( tmp_filename )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except:
+ is_tool = False
+ if is_tool:
try:
tool = load_tool( trans, tmp_filename )
valid = True
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6f7240344304/
changeset: 6f7240344304
user: jgoecks
date: 2012-04-20 23:12:48
summary: Merge
affected #: 13 files
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2629,8 +2629,8 @@
class ToolShedRepository( object ):
def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
- changeset_revision=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False, uninstalled=False,
- dist_to_shed=False ):
+ changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False,
+ uninstalled=False, dist_to_shed=False ):
self.id = id
self.create_time = create_time
self.tool_shed = tool_shed
@@ -2639,6 +2639,7 @@
self.owner = owner
self.installed_changeset_revision = installed_changeset_revision
self.changeset_revision = changeset_revision
+ self.ctx_rev = ctx_rev
self.metadata = metadata
self.includes_datatypes = includes_datatypes
self.update_available = update_available
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -382,6 +382,7 @@
Column( "owner", TrimmedString( 255 ), index=True ),
Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "ctx_rev", TrimmedString( 10 ) ),
Column( "metadata", JSONType, nullable=True ),
Column( "includes_datatypes", Boolean, index=True, default=False ),
Column( "update_available", Boolean, default=False ),
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
@@ -0,0 +1,43 @@
+"""
+Migration script to add the ctx_rev column to the tool_shed_repository table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ col = Column( "ctx_rev", TrimmedString( 10 ) )
+ try:
+ col.create( ToolShedRepository_table )
+ assert col is ToolShedRepository_table.c.ctx_rev
+ except Exception, e:
+ print "Adding ctx_rev column to the tool_shed_repository table failed: %s" % str( e )
+def downgrade():
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ try:
+ ToolShedRepository_table.c.ctx_rev.drop()
+ except Exception, e:
+ print "Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e )
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,6 +7,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
+
log = logging.getLogger( __name__ )
class InstallManager( object ):
@@ -119,7 +120,7 @@
is_displayed = True
return is_displayed, tool_sections
def handle_repository_contents( self, current_working_dir, repository_clone_url, relative_install_dir, repository_elem, repository_name, description,
- changeset_revision, tmp_name ):
+ changeset_revision, ctx_rev ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
# The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places
@@ -144,6 +145,7 @@
repository_name,
description,
changeset_revision,
+ ctx_rev,
repository_clone_url,
metadata_dict,
dist_to_shed=True )
@@ -166,11 +168,6 @@
self.migrated_tools_config,
tool_panel_dict=tool_panel_dict_for_display,
new_install=True )
- # Remove the temporary file
- try:
- os.unlink( tmp_name )
- except:
- pass
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
@@ -193,7 +190,7 @@
self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
return tool_shed_repository, metadata_dict
def install_repository( self, repository_elem ):
- # Install a single repository, loading contained tools into the tool config.
+ # Install a single repository, loading contained tools into the tool panel.
name = repository_elem.get( 'name' )
description = repository_elem.get( 'description' )
changeset_revision = repository_elem.get( 'changeset_revision' )
@@ -206,65 +203,55 @@
tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( clone_dir, name )
- returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
- if returncode == 0:
- tool_shed_repository, metadata_dict = self.handle_repository_contents( current_working_dir,
- repository_clone_url,
- relative_install_dir,
- repository_elem,
- name,
- description,
- changeset_revision,
- tmp_name )
- if 'tools' in metadata_dict:
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = from_json_string( text )
- handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
- else:
- # Set the tool versions since they seem to be missing for this repository in the tool shed.
- # CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
- for tool_dict in metadata_dict[ 'tools' ]:
- flush_needed = False
- tool_id = tool_dict[ 'guid' ]
- old_tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = get_tool_version( self.app, tool_id )
- if not tool_version_using_old_id:
- tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
- tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_old_id )
- self.app.sa_session.flush()
- if not tool_version_using_guid:
- tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
- tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_guid )
- self.app.sa_session.flush()
- # Associate the two versions as parent / child.
- tool_version_association = get_tool_version_association( self.app,
- tool_version_using_old_id,
- tool_version_using_guid )
- if not tool_version_association:
- tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
- parent_id=tool_version_using_old_id.id )
- self.app.sa_session.add( tool_version_association )
- self.app.sa_session.flush()
+ ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, changeset_revision )
+ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ tool_shed_repository, metadata_dict = self.handle_repository_contents( current_working_dir,
+ repository_clone_url,
+ relative_install_dir,
+ repository_elem,
+ name,
+ description,
+ changeset_revision,
+ ctx_rev )
+ if 'tools' in metadata_dict:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
- tmp_stderr = open( tmp_name, 'rb' )
- print "Error updating repository ', name, "': ', str( tmp_stderr.read() )
- tmp_stderr.close()
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- print "Error cloning repository '", name, "': ", str( tmp_stderr.read() )
- tmp_stderr.close()
+ # Set the tool versions since they seem to be missing for this repository in the tool shed.
+ # CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
+ for tool_dict in metadata_dict[ 'tools' ]:
+ flush_needed = False
+ tool_id = tool_dict[ 'guid' ]
+ old_tool_id = tool_dict[ 'id' ]
+ tool_version = tool_dict[ 'version' ]
+ tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
+ tool_version_using_guid = get_tool_version( self.app, tool_id )
+ if not tool_version_using_old_id:
+ tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
+ tool_shed_repository=tool_shed_repository )
+ self.app.sa_session.add( tool_version_using_old_id )
+ self.app.sa_session.flush()
+ if not tool_version_using_guid:
+ tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
+ tool_shed_repository=tool_shed_repository )
+ self.app.sa_session.add( tool_version_using_guid )
+ self.app.sa_session.flush()
+ # Associate the two versions as parent / child.
+ tool_version_association = get_tool_version_association( self.app,
+ tool_version_using_old_id,
+ tool_version_using_guid )
+ if not tool_version_association:
+ tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
+ parent_id=tool_version_using_old_id.id )
+ self.app.sa_session.add( tool_version_association )
+ self.app.sa_session.flush()
@property
def non_shed_tool_panel_configs( self ):
# Get the non-shed related tool panel config file names from the Galaxy config - the default is tool_conf.xml.
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,4 @@
-import os, tempfile, shutil, subprocess, logging, string
+import os, tempfile, shutil, subprocess, logging, string, urllib2
from datetime import date, datetime, timedelta
from time import strftime, gmtime
from galaxy import util
@@ -6,6 +6,7 @@
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.model.orm import *
+from mercurial import ui, commands
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree, ElementInclude
@@ -265,20 +266,17 @@
# Eliminate the port, if any, since it will result in an invalid directory name.
return tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
-def clone_repository( name, clone_dir, current_working_dir, repository_clone_url ):
- log.debug( "Installing repository '%s'" % name )
- if not os.path.exists( clone_dir ):
- os.makedirs( clone_dir )
- log.debug( 'Cloning %s' % repository_clone_url )
- cmd = 'hg clone %s' % repository_clone_url
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( clone_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- return returncode, tmp_name
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+ """
+ Clone the repository up to the specified changeset_revision. No subsequent revisions will be present
+ in the cloned repository.
+ """
+ commands.clone( get_configured_ui(),
+ repository_clone_url,
+ dest=repository_file_dir,
+ pull=True,
+ noupdate=False,
+ rev=[ ctx_rev ] )
def copy_sample_loc_file( app, filename ):
"""Copy xxx.loc.sample to ~/tool-data/xxx.loc.sample and ~/tool-data/xxx.loc"""
head, sample_loc_file = os.path.split( filename )
@@ -298,7 +296,8 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
-def create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='', dist_to_shed=False ):
+def create_or_update_tool_shed_repository( app, name, description, changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
+ owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
# to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
sa_session = app.model.context.current
@@ -311,6 +310,7 @@
if tool_shed_repository:
tool_shed_repository.description = description
tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.ctx_rev = ctx_rev
tool_shed_repository.metadata = metadata_dict
tool_shed_repository.includes_datatypes = includes_datatypes
tool_shed_repository.deleted = False
@@ -322,6 +322,7 @@
owner=owner,
installed_changeset_revision=changeset_revision,
changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
metadata=metadata_dict,
includes_datatypes=includes_datatypes,
dist_to_shed=dist_to_shed )
@@ -672,6 +673,15 @@
else:
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file' [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
def get_converter_and_display_paths( registration_elem, relative_install_dir ):
"""Find the relative path to data type converters and display applications included in installed tool shed repositories."""
converter_path = None
@@ -714,6 +724,12 @@
if converter_path and display_path:
break
return converter_path, display_path
+def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
+ url = '%s/repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ ctx_rev = response.read()
+ response.close()
+ return ctx_rev
def get_shed_tool_conf_dict( app, shed_tool_conf ):
"""
Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
@@ -1014,8 +1030,8 @@
if display_path:
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate )
-def load_repository_contents( trans, repository_name, description, owner, changeset_revision, tool_path, repository_clone_url,
- relative_install_dir, current_working_dir, tmp_name, tool_shed=None, tool_section=None, shed_tool_conf=None ):
+def load_repository_contents( trans, repository_name, description, owner, changeset_revision, ctx_rev, tool_path, repository_clone_url,
+ relative_install_dir, current_working_dir, tool_shed=None, tool_section=None, shed_tool_conf=None ):
"""Generate the metadata for the installed tool shed repository, among other things."""
# It is critical that the installed repository is updated to the desired changeset_revision before metadata is set because the
# process for setting metadata uses the repository files on disk. This method is called when an admin is installing a new repository
@@ -1028,6 +1044,7 @@
repository_name,
description,
changeset_revision,
+ ctx_rev,
repository_clone_url,
metadata_dict,
dist_to_shed=False )
@@ -1051,11 +1068,6 @@
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=True )
- # Remove the temporary file
- try:
- os.unlink( tmp_name )
- except:
- pass
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load data types required by tools.
@@ -1089,18 +1101,12 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def pull_repository( current_working_dir, repo_files_dir, name ):
- # Pull the latest possible contents to the repository.
- log.debug( "Pulling latest updates to the repository named '%s'" % name )
- cmd = 'hg pull'
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- return returncode, tmp_name
+def pull_repository( repo, repository_clone_url, ctx_rev ):
+ """Pull changes from a remote repository to a local one."""
+ commands.pull( get_configured_ui(),
+ repo,
+ source=repository_clone_url,
+ rev=ctx_rev )
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1226,17 +1232,22 @@
elif c not in [ '\r' ]:
translated.append( 'X' )
return ''.join( translated )
-def update_repository( current_working_dir, repo_files_dir, changeset_revision ):
- # Update the cloned repository to changeset_revision. It is imperative that the
- # installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
- log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision )
- cmd = 'hg update -r %s' % changeset_revision
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- return returncode, tmp_name
+def update_repository( repo, ctx_rev=None ):
+ """
+ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
+ changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+ """
+ # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ # The codes used to show the status of files are as follows.
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
+ # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
+ commands.update( get_configured_ui(),
+ repo,
+ rev=ctx_rev )
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -1325,8 +1325,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
if webapp == 'galaxy':
- cloned_repositories = trans.sa_session.query( trans.model.ToolShedRepository ) \
- .first()
+ cloned_repositories = trans.sa_session.query( trans.model.ToolShedRepository ).first()
return trans.fill_template( '/webapps/galaxy/admin/index.mako',
webapp=webapp,
cloned_repositories=cloned_repositories,
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -3,6 +3,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy import tools
+from mercurial import hg
log = logging.getLogger( __name__ )
@@ -290,58 +291,45 @@
current_working_dir = os.getcwd()
installed_repository_names = []
for name, repo_info_tuple in repo_info_dict.items():
- description, repository_clone_url, changeset_revision = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev = repo_info_tuple
clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, name )
if os.path.exists( clone_dir ):
# Repository and revision has already been cloned.
message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name )
else:
- returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
- if returncode == 0:
- owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
- tool_shed = clean_tool_shed_url( tool_shed_url )
- tool_shed_repository, metadata_dict = load_repository_contents( trans,
- repository_name=name,
- description=description,
- owner=owner,
- changeset_revision=changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf )
- if 'tools' in metadata_dict:
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, name, owner, changeset_revision )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
- else:
- message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
- message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
- message += "from the installed repository's <b>Repository Actions</b> menu. "
- status = 'error'
- installed_repository_names.append( name )
+ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
+ tool_shed = clean_tool_shed_url( tool_shed_url )
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=name,
+ description=description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tool_shed=tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
+ if 'tools' in metadata_dict:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
- tmp_stderr = open( tmp_name, 'rb' )
- message += '%s<br/>' % tmp_stderr.read()
- tmp_stderr.close()
+ message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
+ message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
+ message += "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message += '%s<br/>' % tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
+ installed_repository_names.append( name )
if installed_repository_names:
installed_repository_names.sort()
num_repositories_installed = len( installed_repository_names )
@@ -375,7 +363,7 @@
if len( decoded_repo_info_dict ) == 1:
name = decoded_repo_info_dict.keys()[ 0 ]
repo_info_tuple = decoded_repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev = repo_info_tuple
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
url = '%s/repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
response = urllib2.urlopen( url )
@@ -444,79 +432,82 @@
repository_clone_url = generate_clone_url( trans, repository )
clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, repository.name )
- returncode, tmp_name = clone_repository( repository.name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, repository.installed_changeset_revision )
- if returncode == 0:
- if repository.includes_tools:
- # Get the location in the tool panel in which each tool was originally loaded.
- metadata = repository.metadata
- if 'tool_panel_section' in metadata:
- tool_panel_dict = metadata[ 'tool_panel_section' ]
- if not tool_panel_dict:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ if not repository.ctx_rev:
+ # The ctx_rev column was introduced late, so may be null for some installed ToolShedRepositories.
+ ctx_rev = get_ctx_rev( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
+ else:
+ ctx_rev = repository.ctx_rev
+ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ if repository.includes_tools:
+ # Get the location in the tool panel in which each tool was originally loaded.
+ metadata = repository.metadata
+ if 'tool_panel_section' in metadata:
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ if not tool_panel_dict:
+ tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ else:
+ tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ # TODO: Fix this to handle the case where the tools are distributed across more than 1 ToolSection. The
+ # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+ tool_section_dict = tool_section_dicts[ 0 ]
+ original_section_id = tool_section_dict[ 'id' ]
+ original_section_name = tool_section_dict[ 'name' ]
+ if no_changes_checked:
+ if original_section_id in [ '' ]:
+ tool_section = None
+ else:
+ section_key = 'section_%s' % str( original_section_id )
+ if section_key in trans.app.toolbox.tool_panel:
+ tool_section = trans.app.toolbox.tool_panel[ section_key ]
else:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
- # TODO: Fix this to handle the case where the tools are distributed across in more than 1 ToolSection. The
- # following assumes everything was loaded into 1 section (or no section) in the tool panel.
- tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
- tool_section_dict = tool_section_dicts[ 0 ]
- original_section_id = tool_section_dict[ 'id' ]
- original_section_name = tool_section_dict[ 'name' ]
- if no_changes_checked:
- if original_section_id in [ '' ]:
- tool_section = None
- else:
- section_key = 'section_%s' % str( original_section_id )
- if section_key in trans.app.toolbox.tool_panel:
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- else:
- # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = original_section_name
- elem.attrib[ 'id' ] = original_section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ section_key ] = tool_section
+ # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = original_section_name
+ elem.attrib[ 'id' ] = original_section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ section_key ] = tool_section
+ else:
+ # The user elected to change the tool panel section to contain the tools.
+ new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
+ tool_panel_section = kwd.get( 'tool_panel_section', '' )
+ if new_tool_panel_section:
+ section_id = new_tool_panel_section.lower().replace( ' ', '_' )
+ new_section_key = 'section_%s' % str( section_id )
+ if new_section_key in trans.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in trans.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
+ tool_section = trans.app.toolbox.tool_panel[ new_section_key ]
else:
- # The user elected to change the tool panel section to contain the tools.
- new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
- tool_panel_section = kwd.get( 'tool_panel_section', '' )
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- new_section_key = 'section_%s' % str( section_id )
- if new_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ new_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ new_section_key ] = tool_section
- elif tool_panel_section:
- section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- else:
- tool_section = None
- tool_shed_repository, metadata_dict = load_repository_contents( trans,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf )
- repository.uninstalled = False
+ # Appending a new section to trans.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = new_tool_panel_section
+ elem.attrib[ 'id' ] = section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ new_section_key ] = tool_section
+ elif tool_panel_section:
+ section_key = 'section_%s' % tool_panel_section
+ tool_section = trans.app.toolbox.tool_panel[ section_key ]
+ else:
+ tool_section = None
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
+ repository.uninstalled = False
repository.deleted = False
trans.sa_session.add( repository )
trans.sa_session.flush()
@@ -606,41 +597,32 @@
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
+ latest_ctx_rev = params.get( 'latest_ctx_rev', None )
repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
- if changeset_revision and latest_changeset_revision:
+ if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name
else:
current_working_dir = os.getcwd()
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.join( relative_install_dir, name )
- returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, repo_files_dir, latest_changeset_revision )
- if returncode == 0:
- # Update the repository metadata.
- repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url )
- repository.metadata = metadata_dict
- # Update the repository changeset_revision in the database.
- repository.changeset_revision = latest_changeset_revision
- repository.update_available = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
- ( name, latest_changeset_revision )
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message = tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message = tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
+ repo = hg.repository( get_configured_ui(), path=repo_files_dir )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ pull_repository( repo, repository_clone_url, latest_ctx_rev )
+ update_repository( repo, latest_ctx_rev )
+ # Update the repository metadata.
+ tool_shed = clean_tool_shed_url( tool_shed_url )
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ repository.metadata = metadata_dict
+ # Update the repository changeset_revision in the database.
+ repository.changeset_revision = latest_changeset_revision
+ repository.ctx_rev = latest_ctx_rev
+ repository.update_available = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
+ ( name, latest_changeset_revision )
else:
message = "The directory containing the cloned repository named '%s' cannot be found." % name
status = 'error'
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -3,6 +3,7 @@
from galaxy.model.orm import *
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util import inflector
+from galaxy.util.shed_util import get_configured_ui
from common import *
from repository import RepositoryListGrid, CategoryListGrid
from mercurial import hg
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -5,7 +5,7 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util import copy_sample_loc_file, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
+from galaxy.util.shed_util import copy_sample_loc_file, get_configured_ui, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, to_html_escaped, to_html_str, update_repository
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
@@ -283,7 +283,7 @@
# The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
# record is not needed.
return False
-def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir ):
+def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo, repo_dir ):
"""
Browse the repository tip files on disk to generate metadata. This is faster than the
generate_metadata_for_changeset_revision() method below because fctx.data() does not have
@@ -291,8 +291,7 @@
invalid_tool_configs here, while they are ignored in older revisions.
"""
# If a push from the command line is occurring, update the repository files on disk before setting metadata.
- returncode, tmp_name = update_repository( os.getcwd(), os.path.abspath( repo_dir ), changeset_revision )
- # TODO: handle error if returncode is not 0?
+ update_repository( repo, str( ctx.rev() ) )
metadata_dict = {}
invalid_files = []
invalid_tool_configs = []
@@ -449,7 +448,7 @@
invalid_files = []
if ctx is not None:
if changeset_revision == repository.tip:
- metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir )
+ metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo, repo_dir )
else:
metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir )
if metadata_dict:
@@ -543,7 +542,7 @@
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = get_changectx_for_changeset( repo, current_changeset_revision )
if current_changeset_revision == repository.tip:
- current_metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, current_changeset_revision, repo_dir )
+ current_metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, current_changeset_revision, repo, repo_dir )
else:
current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir )
if current_metadata_dict:
@@ -780,15 +779,6 @@
if repository_metadata:
return repository_metadata.malicious
return False
-def get_configured_ui():
- # Configure any desired ui settings.
- _ui = ui.ui()
- # The following will suppress all messages. This is
- # the same as adding the following setting to the repo
- # hgrc file' [ui] section:
- # quiet = True
- _ui.setconfig( 'ui', 'quiet', True )
- return _ui
def get_user( trans, id ):
"""Get a user from the database by id"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
@@ -891,9 +881,10 @@
return True
return False
def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
- # Make a copy of a repository's files for browsing, remove from disk all files that
- # are not tracked, and commit all added, modified or removed files that have not yet
- # been committed.
+ # This method is deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge
+ # is not supported by the mercurial API.
+ # Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all
+ # added, modified or removed files that have not yet been committed.
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
# The following will delete the disk copy of only the files in the repository.
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -10,6 +10,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
+from galaxy.util.shed_util import get_configured_ui
from common import *
from mercurial import hg, ui, patch, commands
@@ -792,18 +793,33 @@
# Tell the caller if the repository includes Galaxy tools so the page
# enabling selection of the tool panel section can be displayed.
includes_tools = 'tools' in repository_metadata.metadata
+ # Get the changelog rev for this changeset_revision.
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
repo_info_dict = {}
- repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision )
+ repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision, str( ctx.rev() ) )
encoded_repo_info_dict = encode( repo_info_dict )
# Redirect back to local Galaxy to perform install.
url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
+ def get_ctx_rev( self, trans, **kwd ):
+ """Given a repository and changeset_revision, return the correct ctx.rev() value."""
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ return str( ctx.rev() )
+ return ''
+ @web.expose
def get_readme( self, trans, **kwd ):
- """
- If the received changeset_revision includes a file named readme (case ignored), return it's contents.
- """
+ """If the received changeset_revision includes a file named readme (case ignored), return its contents."""
repository_name = kwd[ 'name' ]
repository_owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
@@ -903,13 +919,17 @@
fh.close()
if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ]
or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ):
- try:
- tool = load_tool( trans, tmp_filename )
- valid = True
- except:
- valid = False
- if valid and tool is not None:
- tool_guids.append( generate_tool_guid( trans, repository, tool ) )
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( tmp_filename )
+ element_tree_root = element_tree.getroot()
+ if element_tree_root.tag == 'tool':
+ try:
+ tool = load_tool( trans, tmp_filename )
+ valid = True
+ except:
+ valid = False
+ if valid and tool is not None:
+ tool_guids.append( generate_tool_guid( trans, repository, tool ) )
try:
os.unlink( tmp_filename )
except:
@@ -927,11 +947,13 @@
metadata_tool_guids.append( tool_dict[ 'guid' ] )
metadata_tool_guids.sort()
if tool_guids == metadata_tool_guids:
- # We've found the repository_metadata record whose changeset_revision
- # value has been updated.
+ # We've found the repository_metadata record whose changeset_revision value has been updated.
if from_update_manager:
return update
url += repository_metadata.changeset_revision
+ # Get the ctx_rev for the discovered changeset_revision.
+ latest_ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+ url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
found = True
break
if not found:
@@ -941,7 +963,7 @@
return no_update
url += changeset_revision
else:
- # There are not tools in the changeset_revision, so no tool updates are possible.
+ # There are no tools in the changeset_revision, so no tool updates are possible.
if from_update_manager:
return no_update
url += changeset_revision
@@ -1168,13 +1190,10 @@
new_hgweb_config.flush()
shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
- # At this point, an entry for the repository is required to be in the hgweb.config
- # file so we can call repository.repo_path.
- # Since we support both http and https, we set push_ssl to False to override
- # the default (which is True) in the mercurial api.
- # The hg purge extension purges all files and directories not being tracked by
- # mercurial in the current repository. It'll remove unknown files and empty
- # directories. This is used in the update_for_browsing() method.
+ # At this point, an entry for the repository is required to be in the hgweb.config file so we can call repository.repo_path.
+ # Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api.
+ # The hg purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll
+ # remove unknown files and empty directories. This is not currently used because it is not supported in the mercurial API.
repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
@@ -1198,7 +1217,7 @@
repo = hg.repository( get_configured_ui(), repository.repo_path )
current_working_dir = os.getcwd()
# Update repository files for browsing.
- update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
+ update_repository( repo )
is_malicious = change_set_is_malicious( trans, id, repository.tip )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
@@ -1314,7 +1333,7 @@
repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
+ update_repository( repo )
# Get the new repository tip.
repo = hg.repository( get_configured_ui(), repo_dir )
if tip != repository.tip:
@@ -1868,7 +1887,7 @@
break
if found:
break
- metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, repository_id, ctx, changeset_revision, repo_dir )
+ metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, repository_id, ctx, changeset_revision, repo, repo_dir )
else:
for filename in ctx:
if filename == tool_config:
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import get_configured_ui, handle_sample_tool_data_table_conf_file
from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
@@ -144,7 +144,7 @@
handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
- update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
+ update_repository( repo )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/webapps/community/framework/middleware/hg.py
--- a/lib/galaxy/webapps/community/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/community/framework/middleware/hg.py
@@ -3,7 +3,6 @@
"""
import os, logging
from sqlalchemy import *
-from mercurial import ui, hg
from paste.auth.basic import AuthBasicAuthenticator
from paste.httpheaders import REMOTE_USER, AUTH_TYPE
diff -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 -r 6f7240344304a143a3d70182092c61931595b613 lib/galaxy/webapps/community/security/__init__.py
--- a/lib/galaxy/webapps/community/security/__init__.py
+++ b/lib/galaxy/webapps/community/security/__init__.py
@@ -6,7 +6,6 @@
from galaxy.util.bunch import Bunch
from galaxy.util import listify
from galaxy.model.orm import *
-from mercurial import hg, ui
log = logging.getLogger(__name__)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6af88237056f/
changeset: 6af88237056f
user: jgoecks
date: 2012-04-20 23:08:55
summary: Basic support for Tophat2.
affected #: 4 files
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 test-data/tophat_out2j.bed
--- a/test-data/tophat_out2j.bed
+++ b/test-data/tophat_out2j.bed
@@ -1,3 +1,3 @@
track name=junctions description="TopHat junctions"
-test_chromosome 179 400 JUNC00000001 38 + 179 400 255,0,0 2 71,50 0,171
-test_chromosome 350 549 JUNC00000002 30 + 350 549 255,0,0 2 50,49 0,150
+test_chromosome 179 400 JUNC00000001 45 + 179 400 255,0,0 2 71,50 0,171
+test_chromosome 350 550 JUNC00000002 38 + 350 550 255,0,0 2 50,50 0,150
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 test-data/tophat_out4j.bed
--- a/test-data/tophat_out4j.bed
+++ b/test-data/tophat_out4j.bed
@@ -1,3 +1,3 @@
track name=junctions description="TopHat junctions"
-test_chromosome 179 400 JUNC00000001 38 + 179 400 255,0,0 2 71,50 0,171
-test_chromosome 350 549 JUNC00000002 30 + 350 549 255,0,0 2 50,49 0,150
+test_chromosome 179 400 JUNC00000001 45 + 179 400 255,0,0 2 71,50 0,171
+test_chromosome 350 550 JUNC00000002 38 + 350 550 255,0,0 2 50,50 0,150
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 tools/ngs_rna/tophat_wrapper.py
--- a/tools/ngs_rna/tophat_wrapper.py
+++ b/tools/ngs_rna/tophat_wrapper.py
@@ -28,7 +28,6 @@
parser.add_option( '-I', '--max-intron-length', dest='max_intron_length',
help='The maximum intron length. When searching for junctions ab initio, TopHat will ignore donor/acceptor pairs farther than this many bases apart, except when such a pair is supported by a split segment alignment of a long read.' )
parser.add_option( '-g', '--max_multihits', dest='max_multihits', help='Maximum number of alignments to be allowed' )
- parser.add_option( '', '--initial-read-mismatches', dest='initial_read_mismatches', help='Number of mismatches allowed in the initial read mapping' )
parser.add_option( '', '--seg-mismatches', dest='seg_mismatches', help='Number of mismatches allowed in each segment alignment for reads mapped independently' )
parser.add_option( '', '--seg-length', dest='seg_length', help='Minimum length of read segments' )
parser.add_option( '', '--library-type', dest='library_type', help='TopHat will treat the reads as strand specific. Every read alignment will have an XS attribute tag. Consider supplying library type options below to select the correct RNA-seq protocol.' )
@@ -53,15 +52,10 @@
parser.add_option( '', '--no-novel-indels', action="store_true", dest='no_novel_indels', help="Skip indel search. Indel search is enabled by default.")
# Types of search.
parser.add_option( '', '--microexon-search', action="store_true", dest='microexon_search', help='With this option, the pipeline will attempt to find alignments incident to microexons. Works only for reads 50bp or longer.')
- parser.add_option( '', '--closure-search', action="store_true", dest='closure_search', help='Enables the mate pair closure-based search for junctions. Closure-based search should only be used when the expected inner distance between mates is small (<= 50bp)')
- parser.add_option( '', '--no-closure-search', action="store_false", dest='closure_search' )
parser.add_option( '', '--coverage-search', action="store_true", dest='coverage_search', help='Enables the coverage based search for junctions. Use when coverage search is disabled by default (such as for reads 75bp or longer), for maximum sensitivity.')
parser.add_option( '', '--no-coverage-search', action="store_false", dest='coverage_search' )
parser.add_option( '', '--min-segment-intron', dest='min_segment_intron', help='Minimum intron length that may be found during split-segment search' )
parser.add_option( '', '--max-segment-intron', dest='max_segment_intron', help='Maximum intron length that may be found during split-segment search' )
- parser.add_option( '', '--min-closure-exon', dest='min_closure_exon', help='Minimum length for exonic hops in potential splice graph' )
- parser.add_option( '', '--min-closure-intron', dest='min_closure_intron', help='Minimum intron length that may be found during closure search' )
- parser.add_option( '', '--max-closure-intron', dest='max_closure_intron', help='Maximum intron length that may be found during closure search' )
parser.add_option( '', '--min-coverage-intron', dest='min_coverage_intron', help='Minimum intron length that may be found during coverage search' )
parser.add_option( '', '--max-coverage-intron', dest='max_coverage_intron', help='Maximum intron length that may be found during coverage search' )
@@ -175,16 +169,10 @@
opts += ' --coverage-search --min-coverage-intron %s --max-coverage-intron %s' % ( options.min_coverage_intron, options.max_coverage_intron )
else:
opts += ' --no-coverage-search'
- if options.closure_search:
- opts += ' --closure-search --min-closure-exon %s --min-closure-intron %s --max-closure-intron %s' % ( options.min_closure_exon, options.min_closure_intron, options.max_closure_intron )
- else:
- opts += ' --no-closure-search'
if options.microexon_search:
opts += ' --microexon-search'
if options.single_paired == 'paired':
opts += ' --mate-std-dev %s' % options.mate_std_dev
- if options.initial_read_mismatches:
- opts += ' --initial-read-mismatches %d' % int( options.initial_read_mismatches )
if options.seg_mismatches:
opts += ' --segment-mismatches %d' % int( options.seg_mismatches )
if options.seg_length:
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r 6af88237056f0d4c7ae4c3dd5af647cde4830710 tools/ngs_rna/tophat_wrapper.xml
--- a/tools/ngs_rna/tophat_wrapper.xml
+++ b/tools/ngs_rna/tophat_wrapper.xml
@@ -1,5 +1,5 @@
<tool id="tophat" name="Tophat for Illumina" version="0.5">
- <!-- Wrapper compatible with Tophat versions 1.3.0 to 1.4.1 -->
+ <!-- Wrapper compatible with Tophat versions 1.3.0 to 2.0.0 --><description>Find splice junctions using RNA-seq data</description><version_command>tophat --version</version_command><requirements>
@@ -38,7 +38,6 @@
-g $singlePaired.sParams.max_multihits
--min-segment-intron $singlePaired.sParams.min_segment_intron
--max-segment-intron $singlePaired.sParams.max_segment_intron
- --initial-read-mismatches=$singlePaired.sParams.initial_read_mismatches
--seg-mismatches=$singlePaired.sParams.seg_mismatches
--seg-length=$singlePaired.sParams.seg_length
--library-type=$singlePaired.sParams.library_type
@@ -66,14 +65,6 @@
#end if
#end if
- #if $singlePaired.sParams.closure_search.use_search == "Yes":
- --closure-search
- --min-closure-exon $singlePaired.sParams.closure_search.min_closure_exon
- --min-closure-intron $singlePaired.sParams.closure_search.min_closure_intron
- --max-closure-intron $singlePaired.sParams.closure_search.max_closure_intron
- #else:
- --no-closure-search
- #end if
#if $singlePaired.sParams.coverage_search.use_search == "Yes":
--coverage-search
--min-coverage-intron $singlePaired.sParams.coverage_search.min_coverage_intron
@@ -99,7 +90,6 @@
-g $singlePaired.pParams.max_multihits
--min-segment-intron $singlePaired.pParams.min_segment_intron
--max-segment-intron $singlePaired.pParams.max_segment_intron
- --initial-read-mismatches=$singlePaired.pParams.initial_read_mismatches
--seg-mismatches=$singlePaired.pParams.seg_mismatches
--seg-length=$singlePaired.pParams.seg_length
--library-type=$singlePaired.pParams.library_type
@@ -127,14 +117,6 @@
#end if
#end if
- #if $singlePaired.pParams.closure_search.use_search == "Yes":
- --closure-search
- --min-closure-exon $singlePaired.pParams.closure_search.min_closure_exon
- --min-closure-intron $singlePaired.pParams.closure_search.min_closure_intron
- --max-closure-intron $singlePaired.pParams.closure_search.max_closure_intron
- #else:
- --no-closure-search
- #end if
#if $singlePaired.pParams.coverage_search.use_search == "Yes":
--coverage-search
--min-coverage-intron $singlePaired.pParams.coverage_search.min_coverage_intron
@@ -206,7 +188,6 @@
<param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" /><param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" /><param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
- <param name="initial_read_mismatches" type="integer" min="0" value="2" label="Number of mismatches allowed in the initial read mapping" /><param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" /><param name="seg_length" type="integer" value="25" label="Minimum length of read segments" />
@@ -245,19 +226,6 @@
<when value="No" /></conditional><!-- /own_junctions -->
- <!-- Closure search. -->
- <conditional name="closure_search">
- <param name="use_search" type="select" label="Use Closure Search">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="Yes">
- <param name="min_closure_exon" type="integer" value="50" label="During closure search for paired end reads, exonic hops in the potential splice graph must be at least this long. The default is 50." />
- <param name="min_closure_intron" type="integer" value="50" label="Minimum intron length that may be found during closure search" />
- <param name="max_closure_intron" type="integer" value="5000" label="Maximum intron length that may be found during closure search" />
- </when>
- <when value="No" />
- </conditional><!-- Coverage search. --><conditional name="coverage_search"><param name="use_search" type="select" label="Use Coverage Search">
@@ -312,7 +280,6 @@
<param name="max_multihits" type="integer" value="20" label="Maximum number of alignments to be allowed" /><param name="min_segment_intron" type="integer" value="50" label="Minimum intron length that may be found during split-segment (default) search" /><param name="max_segment_intron" type="integer" value="500000" label="Maximum intron length that may be found during split-segment (default) search" />
- <param name="initial_read_mismatches" type="integer" min="0" value="2" label="Number of mismatches allowed in the initial read mapping" /><param name="seg_mismatches" type="integer" min="0" max="3" value="2" label="Number of mismatches allowed in each segment alignment for reads mapped independently" /><param name="seg_length" type="integer" value="25" label="Minimum length of read segments" /><!-- Options for supplying own junctions. -->
@@ -350,19 +317,6 @@
<when value="No" /></conditional><!-- /own_junctions -->
- <!-- Closure search. -->
- <conditional name="closure_search">
- <param name="use_search" type="select" label="Use Closure Search">
- <option value="No">No</option>
- <option value="Yes">Yes</option>
- </param>
- <when value="Yes">
- <param name="min_closure_exon" type="integer" value="50" label="During closure search for paired end reads, exonic hops in the potential splice graph must be at least this long. The default is 50." />
- <param name="min_closure_intron" type="integer" value="50" label="Minimum intron length that may be found during closure search" />
- <param name="max_closure_intron" type="integer" value="5000" label="Maximum intron length that may be found during closure search" />
- </when>
- <when value="No" />
- </conditional><!-- Coverage search. --><conditional name="coverage_search"><param name="use_search" type="select" label="Use Coverage Search">
@@ -500,7 +454,7 @@
<test><!-- Tophat commands:
bowtie-build -f test-data/tophat_in1.fasta tophat_in1
- tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +closure-search +min-closure-exon 50 +min-closure-intron 50 +max-closure-intro 5000 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
+ tophat -o tmp_dir -p 1 -a 8 -m 0 -i 70 -I 500000 -F 0.15 -g 40 +coverage-search +min-coverage-intron 50 +max-coverage-intro 20000 +segment-mismatches 2 +segment-length 25 +microexon-search tophat_in1 test-data/tophat_in2.fastqsanger
Replace the + with double-dash
Rename the files in tmp_dir appropriately
-->
@@ -527,10 +481,6 @@
<param name="use_juncs" value="No" /><param name="no_novel_juncs" value="No" /><param name="use_search" value="Yes" />
- <param name="min_closure_exon" value="50" />
- <param name="min_closure_intron" value="50" />
- <param name="max_closure_intron" value="5000" />
- <param name="use_search" value="Yes" /><param name="min_coverage_intron" value="50" /><param name="max_coverage_intron" value="20000" /><param name="microexon_search" value="Yes" />
@@ -643,17 +593,11 @@
-G/--GTF [GTF 2.2 file] Supply TopHat with a list of gene model annotations. TopHat will use the exon records in this file to build a set of known splice junctions for each gene, and will attempt to align reads to these junctions even if they would not normally be covered by the initial mapping.
-j/--raw-juncs [juncs file] Supply TopHat with a list of raw junctions. Junctions are specified one per line, in a tab-delimited format. Records look like: [chrom] [left] [right] [+/-], left and right are zero-based coordinates, and specify the last character of the left sequenced to be spliced to the first character of the right sequence, inclusive.
-no-novel-juncs Only look for junctions indicated in the supplied GFF file. (ignored without -G)
- --no-closure-search Disables the mate pair closure-based search for junctions. Currently, has no effect - closure search is off by default.
- --closure-search Enables the mate pair closure-based search for junctions. Closure-based search should only be used when the expected inner distance between mates is small (about or less than 50bp)
--no-coverage-search Disables the coverage based search for junctions.
--coverage-search Enables the coverage based search for junctions. Use when coverage search is disabled by default (such as for reads 75bp or longer), for maximum sensitivity.
--microexon-search With this option, the pipeline will attempt to find alignments incident to microexons. Works only for reads 50bp or longer.
- --butterfly-search TopHat will use a slower but potentially more sensitive algorithm to find junctions in addition to its standard search. Consider using this if you expect that your experiment produced a lot of reads from pre-mRNA, that fall within the introns of your transcripts.
--segment-mismatches Read segments are mapped independently, allowing up to this many mismatches in each segment alignment. The default is 2.
--segment-length Each read is cut up into segments, each at least this long. These segments are mapped independently. The default is 25.
- --min-closure-exon During closure search for paired end reads, exonic hops in the potential splice graph must be at least this long. The default is 50.
- --min-closure-intron The minimum intron length that may be found during closure search. The default is 50.
- --max-closure-intron The maximum intron length that may be found during closure search. The default is 5000.
--min-coverage-intron The minimum intron length that may be found during coverage search. The default is 50.
--max-coverage-intron The maximum intron length that may be found during coverage search. The default is 20000.
--min-segment-intron The minimum intron length that may be found during split-segment search. The default is 50.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: greg: Use the mercurial api for all tool shed hg actions.
by Bitbucket 20 Apr '12
by Bitbucket 20 Apr '12
20 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ad5fbb3146b3/
changeset: ad5fbb3146b3
user: greg
date: 2012-04-20 20:27:16
summary: Use the mercurial api for all tool shed hg actions.
affected #: 13 files
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2629,8 +2629,8 @@
class ToolShedRepository( object ):
def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
- changeset_revision=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False, uninstalled=False,
- dist_to_shed=False ):
+ changeset_revision=None, ctx_rev=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False,
+ uninstalled=False, dist_to_shed=False ):
self.id = id
self.create_time = create_time
self.tool_shed = tool_shed
@@ -2639,6 +2639,7 @@
self.owner = owner
self.installed_changeset_revision = installed_changeset_revision
self.changeset_revision = changeset_revision
+ self.ctx_rev = ctx_rev
self.metadata = metadata
self.includes_datatypes = includes_datatypes
self.update_available = update_available
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -382,6 +382,7 @@
Column( "owner", TrimmedString( 255 ), index=True ),
Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "changeset_revision", TrimmedString( 255 ), index=True ),
+ Column( "ctx_rev", TrimmedString( 10 ) ),
Column( "metadata", JSONType, nullable=True ),
Column( "includes_datatypes", Boolean, index=True, default=False ),
Column( "update_available", Boolean, default=False ),
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0097_add_ctx_rev_column.py
@@ -0,0 +1,43 @@
+"""
+Migration script to add the ctx_rev column to the tool_shed_repository table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ col = Column( "ctx_rev", TrimmedString( 10 ) )
+ try:
+ col.create( ToolShedRepository_table )
+ assert col is ToolShedRepository_table.c.ctx_rev
+ except Exception, e:
+ print "Adding ctx_rev column to the tool_shed_repository table failed: %s" % str( e )
+def downgrade():
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ try:
+ ToolShedRepository_table.c.ctx_rev.drop()
+ except Exception, e:
+ print "Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e )
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,6 +7,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
+
log = logging.getLogger( __name__ )
class InstallManager( object ):
@@ -119,7 +120,7 @@
is_displayed = True
return is_displayed, tool_sections
def handle_repository_contents( self, current_working_dir, repository_clone_url, relative_install_dir, repository_elem, repository_name, description,
- changeset_revision, tmp_name ):
+ changeset_revision, ctx_rev ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
# The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places
@@ -144,6 +145,7 @@
repository_name,
description,
changeset_revision,
+ ctx_rev,
repository_clone_url,
metadata_dict,
dist_to_shed=True )
@@ -166,11 +168,6 @@
self.migrated_tools_config,
tool_panel_dict=tool_panel_dict_for_display,
new_install=True )
- # Remove the temporary file
- try:
- os.unlink( tmp_name )
- except:
- pass
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
@@ -193,7 +190,7 @@
self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
return tool_shed_repository, metadata_dict
def install_repository( self, repository_elem ):
- # Install a single repository, loading contained tools into the tool config.
+ # Install a single repository, loading contained tools into the tool panel.
name = repository_elem.get( 'name' )
description = repository_elem.get( 'description' )
changeset_revision = repository_elem.get( 'changeset_revision' )
@@ -206,65 +203,55 @@
tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( clone_dir, name )
- returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
- if returncode == 0:
- tool_shed_repository, metadata_dict = self.handle_repository_contents( current_working_dir,
- repository_clone_url,
- relative_install_dir,
- repository_elem,
- name,
- description,
- changeset_revision,
- tmp_name )
- if 'tools' in metadata_dict:
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = from_json_string( text )
- handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
- else:
- # Set the tool versions since they seem to be missing for this repository in the tool shed.
- # CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
- for tool_dict in metadata_dict[ 'tools' ]:
- flush_needed = False
- tool_id = tool_dict[ 'guid' ]
- old_tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
- tool_version_using_guid = get_tool_version( self.app, tool_id )
- if not tool_version_using_old_id:
- tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
- tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_old_id )
- self.app.sa_session.flush()
- if not tool_version_using_guid:
- tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
- tool_shed_repository=tool_shed_repository )
- self.app.sa_session.add( tool_version_using_guid )
- self.app.sa_session.flush()
- # Associate the two versions as parent / child.
- tool_version_association = get_tool_version_association( self.app,
- tool_version_using_old_id,
- tool_version_using_guid )
- if not tool_version_association:
- tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
- parent_id=tool_version_using_old_id.id )
- self.app.sa_session.add( tool_version_association )
- self.app.sa_session.flush()
+ ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, changeset_revision )
+ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ tool_shed_repository, metadata_dict = self.handle_repository_contents( current_working_dir,
+ repository_clone_url,
+ relative_install_dir,
+ repository_elem,
+ name,
+ description,
+ changeset_revision,
+ ctx_rev )
+ if 'tools' in metadata_dict:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( self.app, tool_version_dicts, tool_shed_repository )
else:
- tmp_stderr = open( tmp_name, 'rb' )
- print "Error updating repository ', name, "': ', str( tmp_stderr.read() )
- tmp_stderr.close()
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- print "Error cloning repository '", name, "': ", str( tmp_stderr.read() )
- tmp_stderr.close()
+ # Set the tool versions since they seem to be missing for this repository in the tool shed.
+ # CRITICAL NOTE: These default settings may not properly handle all parent/child associations.
+ for tool_dict in metadata_dict[ 'tools' ]:
+ flush_needed = False
+ tool_id = tool_dict[ 'guid' ]
+ old_tool_id = tool_dict[ 'id' ]
+ tool_version = tool_dict[ 'version' ]
+ tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
+ tool_version_using_guid = get_tool_version( self.app, tool_id )
+ if not tool_version_using_old_id:
+ tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
+ tool_shed_repository=tool_shed_repository )
+ self.app.sa_session.add( tool_version_using_old_id )
+ self.app.sa_session.flush()
+ if not tool_version_using_guid:
+ tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
+ tool_shed_repository=tool_shed_repository )
+ self.app.sa_session.add( tool_version_using_guid )
+ self.app.sa_session.flush()
+ # Associate the two versions as parent / child.
+ tool_version_association = get_tool_version_association( self.app,
+ tool_version_using_old_id,
+ tool_version_using_guid )
+ if not tool_version_association:
+ tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
+ parent_id=tool_version_using_old_id.id )
+ self.app.sa_session.add( tool_version_association )
+ self.app.sa_session.flush()
@property
def non_shed_tool_panel_configs( self ):
# Get the non-shed related tool panel config file names from the Galaxy config - the default is tool_conf.xml.
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,4 +1,4 @@
-import os, tempfile, shutil, subprocess, logging, string
+import os, tempfile, shutil, subprocess, logging, string, urllib2
from datetime import date, datetime, timedelta
from time import strftime, gmtime
from galaxy import util
@@ -6,6 +6,7 @@
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.model.orm import *
+from mercurial import ui, commands
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree, ElementInclude
@@ -265,20 +266,17 @@
# Eliminate the port, if any, since it will result in an invalid directory name.
return tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
-def clone_repository( name, clone_dir, current_working_dir, repository_clone_url ):
- log.debug( "Installing repository '%s'" % name )
- if not os.path.exists( clone_dir ):
- os.makedirs( clone_dir )
- log.debug( 'Cloning %s' % repository_clone_url )
- cmd = 'hg clone %s' % repository_clone_url
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( clone_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- return returncode, tmp_name
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+ """
+ Clone the repository up to the specified changeset_revision. No subsequent revisions will be present
+ in the cloned repository.
+ """
+ commands.clone( get_configured_ui(),
+ repository_clone_url,
+ dest=repository_file_dir,
+ pull=True,
+ noupdate=False,
+ rev=[ ctx_rev ] )
def copy_sample_loc_file( app, filename ):
"""Copy xxx.loc.sample to ~/tool-data/xxx.loc.sample and ~/tool-data/xxx.loc"""
head, sample_loc_file = os.path.split( filename )
@@ -298,7 +296,8 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
-def create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='', dist_to_shed=False ):
+def create_or_update_tool_shed_repository( app, name, description, changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
+ owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
# to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
sa_session = app.model.context.current
@@ -311,6 +310,7 @@
if tool_shed_repository:
tool_shed_repository.description = description
tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.ctx_rev = ctx_rev
tool_shed_repository.metadata = metadata_dict
tool_shed_repository.includes_datatypes = includes_datatypes
tool_shed_repository.deleted = False
@@ -322,6 +322,7 @@
owner=owner,
installed_changeset_revision=changeset_revision,
changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
metadata=metadata_dict,
includes_datatypes=includes_datatypes,
dist_to_shed=dist_to_shed )
@@ -672,6 +673,15 @@
else:
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file' [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
def get_converter_and_display_paths( registration_elem, relative_install_dir ):
"""Find the relative path to data type converters and display applications included in installed tool shed repositories."""
converter_path = None
@@ -714,6 +724,12 @@
if converter_path and display_path:
break
return converter_path, display_path
+def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
+ url = '%s/repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ ctx_rev = response.read()
+ response.close()
+ return ctx_rev
def get_shed_tool_conf_dict( app, shed_tool_conf ):
"""
Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
@@ -1014,8 +1030,8 @@
if display_path:
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate )
-def load_repository_contents( trans, repository_name, description, owner, changeset_revision, tool_path, repository_clone_url,
- relative_install_dir, current_working_dir, tmp_name, tool_shed=None, tool_section=None, shed_tool_conf=None ):
+def load_repository_contents( trans, repository_name, description, owner, changeset_revision, ctx_rev, tool_path, repository_clone_url,
+ relative_install_dir, current_working_dir, tool_shed=None, tool_section=None, shed_tool_conf=None ):
"""Generate the metadata for the installed tool shed repository, among other things."""
# It is critical that the installed repository is updated to the desired changeset_revision before metadata is set because the
# process for setting metadata uses the repository files on disk. This method is called when an admin is installing a new repository
@@ -1028,6 +1044,7 @@
repository_name,
description,
changeset_revision,
+ ctx_rev,
repository_clone_url,
metadata_dict,
dist_to_shed=False )
@@ -1051,11 +1068,6 @@
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=True )
- # Remove the temporary file
- try:
- os.unlink( tmp_name )
- except:
- pass
if 'datatypes_config' in metadata_dict:
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load data types required by tools.
@@ -1089,18 +1101,12 @@
if k not in [ 'id', 'version', 'name' ]:
return True
return False
-def pull_repository( current_working_dir, repo_files_dir, name ):
- # Pull the latest possible contents to the repository.
- log.debug( "Pulling latest updates to the repository named '%s'" % name )
- cmd = 'hg pull'
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- return returncode, tmp_name
+def pull_repository( repo, repository_clone_url, ctx_rev ):
+ """Pull changes from a remote repository to a local one."""
+ commands.pull( get_configured_ui(),
+ repo,
+ source=repository_clone_url,
+ rev=ctx_rev )
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1226,17 +1232,22 @@
elif c not in [ '\r' ]:
translated.append( 'X' )
return ''.join( translated )
-def update_repository( current_working_dir, repo_files_dir, changeset_revision ):
- # Update the cloned repository to changeset_revision. It is imperative that the
- # installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
- log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision )
- cmd = 'hg update -r %s' % changeset_revision
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- return returncode, tmp_name
+def update_repository( repo, ctx_rev=None ):
+ """
+ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
+ changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+ """
+ # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ # The codes used to show the status of files are as follows.
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
+ # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
+ commands.update( get_configured_ui(),
+ repo,
+ rev=ctx_rev )
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -1325,8 +1325,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
if webapp == 'galaxy':
- cloned_repositories = trans.sa_session.query( trans.model.ToolShedRepository ) \
- .first()
+ cloned_repositories = trans.sa_session.query( trans.model.ToolShedRepository ).first()
return trans.fill_template( '/webapps/galaxy/admin/index.mako',
webapp=webapp,
cloned_repositories=cloned_repositories,
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -3,6 +3,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy import tools
+from mercurial import hg
log = logging.getLogger( __name__ )
@@ -290,58 +291,45 @@
current_working_dir = os.getcwd()
installed_repository_names = []
for name, repo_info_tuple in repo_info_dict.items():
- description, repository_clone_url, changeset_revision = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev = repo_info_tuple
clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, name )
if os.path.exists( clone_dir ):
# Repository and revision has already been cloned.
message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name )
else:
- returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
- if returncode == 0:
- owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
- tool_shed = clean_tool_shed_url( tool_shed_url )
- tool_shed_repository, metadata_dict = load_repository_contents( trans,
- repository_name=name,
- description=description,
- owner=owner,
- changeset_revision=changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf )
- if 'tools' in metadata_dict:
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, name, owner, changeset_revision )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
- else:
- message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
- message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
- message += "from the installed repository's <b>Repository Actions</b> menu. "
- status = 'error'
- installed_repository_names.append( name )
+ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
+ tool_shed = clean_tool_shed_url( tool_shed_url )
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=name,
+ description=description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tool_shed=tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
+ if 'tools' in metadata_dict:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
else:
- tmp_stderr = open( tmp_name, 'rb' )
- message += '%s<br/>' % tmp_stderr.read()
- tmp_stderr.close()
+ message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
+ message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
+ message += "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message += '%s<br/>' % tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
+ installed_repository_names.append( name )
if installed_repository_names:
installed_repository_names.sort()
num_repositories_installed = len( installed_repository_names )
@@ -375,7 +363,7 @@
if len( decoded_repo_info_dict ) == 1:
name = decoded_repo_info_dict.keys()[ 0 ]
repo_info_tuple = decoded_repo_info_dict[ name ]
- description, repository_clone_url, changeset_revision = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev = repo_info_tuple
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
url = '%s/repository/get_readme?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % ( tool_shed_url, name, owner, changeset_revision )
response = urllib2.urlopen( url )
@@ -444,79 +432,82 @@
repository_clone_url = generate_clone_url( trans, repository )
clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, repository.name )
- returncode, tmp_name = clone_repository( repository.name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, repository.installed_changeset_revision )
- if returncode == 0:
- if repository.includes_tools:
- # Get the location in the tool panel in which each tool was originally loaded.
- metadata = repository.metadata
- if 'tool_panel_section' in metadata:
- tool_panel_dict = metadata[ 'tool_panel_section' ]
- if not tool_panel_dict:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ if not repository.ctx_rev:
+ # The ctx_rev column was introduced late, so may be null for some installed ToolShedRepositories.
+ ctx_rev = get_ctx_rev( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
+ else:
+ ctx_rev = repository.ctx_rev
+ clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ if repository.includes_tools:
+ # Get the location in the tool panel in which each tool was originally loaded.
+ metadata = repository.metadata
+ if 'tool_panel_section' in metadata:
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ if not tool_panel_dict:
+ tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+ else:
+ tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
+                # TODO: Fix this to handle the case where the tools are distributed across more than 1 ToolSection. The
+ # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+ tool_section_dict = tool_section_dicts[ 0 ]
+ original_section_id = tool_section_dict[ 'id' ]
+ original_section_name = tool_section_dict[ 'name' ]
+ if no_changes_checked:
+ if original_section_id in [ '' ]:
+ tool_section = None
+ else:
+ section_key = 'section_%s' % str( original_section_id )
+ if section_key in trans.app.toolbox.tool_panel:
+ tool_section = trans.app.toolbox.tool_panel[ section_key ]
else:
- tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
- # TODO: Fix this to handle the case where the tools are distributed across in more than 1 ToolSection. The
- # following assumes everything was loaded into 1 section (or no section) in the tool panel.
- tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
- tool_section_dict = tool_section_dicts[ 0 ]
- original_section_id = tool_section_dict[ 'id' ]
- original_section_name = tool_section_dict[ 'name' ]
- if no_changes_checked:
- if original_section_id in [ '' ]:
- tool_section = None
- else:
- section_key = 'section_%s' % str( original_section_id )
- if section_key in trans.app.toolbox.tool_panel:
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- else:
- # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = original_section_name
- elem.attrib[ 'id' ] = original_section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ section_key ] = tool_section
+ # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = original_section_name
+ elem.attrib[ 'id' ] = original_section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ section_key ] = tool_section
+ else:
+ # The user elected to change the tool panel section to contain the tools.
+ new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
+ tool_panel_section = kwd.get( 'tool_panel_section', '' )
+ if new_tool_panel_section:
+ section_id = new_tool_panel_section.lower().replace( ' ', '_' )
+ new_section_key = 'section_%s' % str( section_id )
+ if new_section_key in trans.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in trans.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
+ tool_section = trans.app.toolbox.tool_panel[ new_section_key ]
else:
- # The user elected to change the tool panel section to contain the tools.
- new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
- tool_panel_section = kwd.get( 'tool_panel_section', '' )
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- new_section_key = 'section_%s' % str( section_id )
- if new_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ new_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ new_section_key ] = tool_section
- elif tool_panel_section:
- section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- else:
- tool_section = None
- tool_shed_repository, metadata_dict = load_repository_contents( trans,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf )
- repository.uninstalled = False
+ # Appending a new section to trans.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = new_tool_panel_section
+ elem.attrib[ 'id' ] = section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ new_section_key ] = tool_section
+ elif tool_panel_section:
+ section_key = 'section_%s' % tool_panel_section
+ tool_section = trans.app.toolbox.tool_panel[ section_key ]
+ else:
+ tool_section = None
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf )
+ repository.uninstalled = False
repository.deleted = False
trans.sa_session.add( repository )
trans.sa_session.flush()
@@ -606,41 +597,32 @@
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
+ latest_ctx_rev = params.get( 'latest_ctx_rev', None )
repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
- if changeset_revision and latest_changeset_revision:
+ if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name
else:
current_working_dir = os.getcwd()
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.join( relative_install_dir, name )
- returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, repo_files_dir, latest_changeset_revision )
- if returncode == 0:
- # Update the repository metadata.
- repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
- tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url )
- repository.metadata = metadata_dict
- # Update the repository changeset_revision in the database.
- repository.changeset_revision = latest_changeset_revision
- repository.update_available = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
- ( name, latest_changeset_revision )
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message = tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message = tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
+ repo = hg.repository( get_configured_ui(), path=repo_files_dir )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ pull_repository( repo, repository_clone_url, latest_ctx_rev )
+ update_repository( repo, latest_ctx_rev )
+ # Update the repository metadata.
+ tool_shed = clean_tool_shed_url( tool_shed_url )
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ repository.metadata = metadata_dict
+ # Update the repository changeset_revision in the database.
+ repository.changeset_revision = latest_changeset_revision
+ repository.ctx_rev = latest_ctx_rev
+ repository.update_available = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
+ ( name, latest_changeset_revision )
else:
message = "The directory containing the cloned repository named '%s' cannot be found." % name
status = 'error'
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -3,6 +3,7 @@
from galaxy.model.orm import *
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util import inflector
+from galaxy.util.shed_util import get_configured_ui
from common import *
from repository import RepositoryListGrid, CategoryListGrid
from mercurial import hg
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -5,7 +5,7 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util import copy_sample_loc_file, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
+from galaxy.util.shed_util import copy_sample_loc_file, get_configured_ui, generate_datatypes_metadata, generate_tool_metadata, generate_workflow_metadata
from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, to_html_escaped, to_html_str, update_repository
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
@@ -283,7 +283,7 @@
# The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
# record is not needed.
return False
-def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir ):
+def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo, repo_dir ):
"""
Browse the repository tip files on disk to generate metadata. This is faster than the
generate_metadata_for_changeset_revision() method below because fctx.data() does not have
@@ -291,8 +291,7 @@
invalid_tool_configs here, while they are ignored in older revisions.
"""
# If a push from the command line is occurring, update the repository files on disk before setting metadata.
- returncode, tmp_name = update_repository( os.getcwd(), os.path.abspath( repo_dir ), changeset_revision )
- # TODO: handle error if returncode is not 0?
+ update_repository( repo, str( ctx.rev() ) )
metadata_dict = {}
invalid_files = []
invalid_tool_configs = []
@@ -449,7 +448,7 @@
invalid_files = []
if ctx is not None:
if changeset_revision == repository.tip:
- metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir )
+ metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo, repo_dir )
else:
metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir )
if metadata_dict:
@@ -543,7 +542,7 @@
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = get_changectx_for_changeset( repo, current_changeset_revision )
if current_changeset_revision == repository.tip:
- current_metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, current_changeset_revision, repo_dir )
+ current_metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, current_changeset_revision, repo, repo_dir )
else:
current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir )
if current_metadata_dict:
@@ -780,15 +779,6 @@
if repository_metadata:
return repository_metadata.malicious
return False
-def get_configured_ui():
- # Configure any desired ui settings.
- _ui = ui.ui()
- # The following will suppress all messages. This is
- # the same as adding the following setting to the repo
- # hgrc file' [ui] section:
- # quiet = True
- _ui.setconfig( 'ui', 'quiet', True )
- return _ui
def get_user( trans, id ):
"""Get a user from the database by id"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
@@ -891,9 +881,10 @@
return True
return False
def update_for_browsing( trans, repository, current_working_dir, commit_message='' ):
- # Make a copy of a repository's files for browsing, remove from disk all files that
- # are not tracked, and commit all added, modified or removed files that have not yet
- # been committed.
+    # This method is deprecated, but we'll keep it around for a while in case we need it. The problem is that hg purge
+ # is not supported by the mercurial API.
+ # Make a copy of a repository's files for browsing, remove from disk all files that are not tracked, and commit all
+ # added, modified or removed files that have not yet been committed.
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
# The following will delete the disk copy of only the files in the repository.
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -10,6 +10,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
+from galaxy.util.shed_util import get_configured_ui
from common import *
from mercurial import hg, ui, patch, commands
@@ -792,18 +793,33 @@
# Tell the caller if the repository includes Galaxy tools so the page
# enabling selection of the tool panel section can be displayed.
includes_tools = 'tools' in repository_metadata.metadata
+ # Get the changelog rev for this changeset_revision.
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
repo_info_dict = {}
- repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision )
+ repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision, str( ctx.rev() ) )
encoded_repo_info_dict = encode( repo_info_dict )
# Redirect back to local Galaxy to perform install.
url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
+ def get_ctx_rev( self, trans, **kwd ):
+ """Given a repository and changeset_revision, return the correct ctx.rev() value."""
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ return str( ctx.rev() )
+ return ''
+ @web.expose
def get_readme( self, trans, **kwd ):
- """
- If the received changeset_revision includes a file named readme (case ignored), return it's contents.
- """
+        """If the received changeset_revision includes a file named readme (case ignored), return its contents."""
repository_name = kwd[ 'name' ]
repository_owner = kwd[ 'owner' ]
changeset_revision = kwd[ 'changeset_revision' ]
@@ -903,13 +919,17 @@
fh.close()
if not ( check_binary( tmp_filename ) or check_image( tmp_filename ) or check_gzip( tmp_filename )[ 0 ]
or check_bz2( tmp_filename )[ 0 ] or check_zip( tmp_filename ) ):
- try:
- tool = load_tool( trans, tmp_filename )
- valid = True
- except:
- valid = False
- if valid and tool is not None:
- tool_guids.append( generate_tool_guid( trans, repository, tool ) )
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( tmp_filename )
+ element_tree_root = element_tree.getroot()
+ if element_tree_root.tag == 'tool':
+ try:
+ tool = load_tool( trans, tmp_filename )
+ valid = True
+ except:
+ valid = False
+ if valid and tool is not None:
+ tool_guids.append( generate_tool_guid( trans, repository, tool ) )
try:
os.unlink( tmp_filename )
except:
@@ -927,11 +947,13 @@
metadata_tool_guids.append( tool_dict[ 'guid' ] )
metadata_tool_guids.sort()
if tool_guids == metadata_tool_guids:
- # We've found the repository_metadata record whose changeset_revision
- # value has been updated.
+ # We've found the repository_metadata record whose changeset_revision value has been updated.
if from_update_manager:
return update
url += repository_metadata.changeset_revision
+ # Get the ctx_rev for the discovered changeset_revision.
+ latest_ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+ url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
found = True
break
if not found:
@@ -941,7 +963,7 @@
return no_update
url += changeset_revision
else:
- # There are not tools in the changeset_revision, so no tool updates are possible.
+ # There are no tools in the changeset_revision, so no tool updates are possible.
if from_update_manager:
return no_update
url += changeset_revision
@@ -1168,13 +1190,10 @@
new_hgweb_config.flush()
shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
- # At this point, an entry for the repository is required to be in the hgweb.config
- # file so we can call repository.repo_path.
- # Since we support both http and https, we set push_ssl to False to override
- # the default (which is True) in the mercurial api.
- # The hg purge extension purges all files and directories not being tracked by
- # mercurial in the current repository. It'll remove unknown files and empty
- # directories. This is used in the update_for_browsing() method.
+ # At this point, an entry for the repository is required to be in the hgweb.config file so we can call repository.repo_path.
+ # Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api.
+ # The hg purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll
+ # remove unknown files and empty directories. This is not currently used because it is not supported in the mercurial API.
repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
@@ -1198,7 +1217,7 @@
repo = hg.repository( get_configured_ui(), repository.repo_path )
current_working_dir = os.getcwd()
# Update repository files for browsing.
- update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
+ update_repository( repo )
is_malicious = change_set_is_malicious( trans, id, repository.tip )
return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
repo=repo,
@@ -1314,7 +1333,7 @@
repo.commit( user=trans.user.username, text=commit_message )
handle_email_alerts( trans, repository )
# Update the repository files for browsing.
- update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
+ update_repository( repo )
# Get the new repository tip.
repo = hg.repository( get_configured_ui(), repo_dir )
if tip != repository.tip:
@@ -1868,7 +1887,7 @@
break
if found:
break
- metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, repository_id, ctx, changeset_revision, repo_dir )
+ metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, repository_id, ctx, changeset_revision, repo, repo_dir )
else:
for filename in ctx:
if filename == tool_config:
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import get_configured_ui, handle_sample_tool_data_table_conf_file
from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
@@ -144,7 +144,7 @@
handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
if ok:
# Update the repository files for browsing.
- update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
+ update_repository( repo )
# Get the new repository tip.
if tip != repository.tip:
if ( isgzip or isbz2 ) and uncompress_file:
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/webapps/community/framework/middleware/hg.py
--- a/lib/galaxy/webapps/community/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/community/framework/middleware/hg.py
@@ -3,7 +3,6 @@
"""
import os, logging
from sqlalchemy import *
-from mercurial import ui, hg
from paste.auth.basic import AuthBasicAuthenticator
from paste.httpheaders import REMOTE_USER, AUTH_TYPE
diff -r 89786a8d42c1fbbfa84bfcbab07702a942d57d1b -r ad5fbb3146b31c13a30870fdd4cc701f6a7467ca lib/galaxy/webapps/community/security/__init__.py
--- a/lib/galaxy/webapps/community/security/__init__.py
+++ b/lib/galaxy/webapps/community/security/__init__.py
@@ -6,7 +6,6 @@
from galaxy.util.bunch import Bunch
from galaxy.util import listify
from galaxy.model.orm import *
-from mercurial import hg, ui
log = logging.getLogger(__name__)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0