galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
February 2012
- 2 participants
- 113 discussions
28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c754d8c07440/
changeset: c754d8c07440
user: jgoecks
date: 2012-02-28 23:50:26
summary: Parameterize per-tool job runners so that parameter name/value pairs can be used to define multiple runners per tool. Documentation is in sample universe file. Add 'params' column to jobs table to store job parameters, and add source parameter for all jobs initiated in Trackster.
affected #: 8 files
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -180,9 +180,38 @@
# Heartbeat log file name override
if global_conf is not None:
self.heartbeat_log = global_conf.get( 'heartbeat_log', 'heartbeat.log' )
- #Store per-tool runner config
+ #Store per-tool runner configs.
try:
- self.tool_runners = global_conf_parser.items("galaxy:tool_runners")
+ tool_runners_config = global_conf_parser.items("galaxy:tool_runners")
+
+ # Process config to group multiple configs for the same tool.
+ tool_runners = {}
+ for entry in tool_runners_config:
+ tool_config, url = entry
+ tool = None
+ runner_dict = {}
+ if tool_config.find("[") != -1:
+ # Found tool with additional params; put params in dict.
+ tool, params = tool_config[:-1].split( "[" )
+ param_dict = {}
+ for param in params.split( "," ):
+ name, value = param.split( "@" )
+ param_dict[ name ] = value
+ runner_dict[ 'params' ] = param_dict
+ else:
+ tool = tool_config
+
+ # Add runner URL.
+ runner_dict[ 'url' ] = url
+
+ # Create tool entry if necessary.
+ if tool not in tool_runners:
+ tool_runners[ tool ] = []
+
+ # Add entry to runners.
+ tool_runners[ tool ].append( runner_dict )
+
+ self.tool_runners = tool_runners
except ConfigParser.NoSectionError:
self.tool_runners = []
self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -336,12 +336,15 @@
self.tool_provided_job_metadata = None
# Wrapper holding the info required to restore and clean up from files used for setting metadata externally
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job )
+ self.params = None
+ if job.params:
+ self.params = from_json_string( job.params )
self.__user_system_pwent = None
self.__galaxy_system_pwent = None
def get_job_runner( self ):
- return self.tool.job_runner
+ return self.tool.get_job_runner( self.params )
def get_job( self ):
return self.sa_session.query( model.Job ).get( self.job_id )
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -420,7 +420,8 @@
Column( "job_runner_name", String( 255 ) ),
Column( "job_runner_external_id", String( 255 ) ),
Column( "object_store_id", TrimmedString( 255 ), index=True ),
- Column( "imported", Boolean, default=False, index=True ) )
+ Column( "imported", Boolean, default=False, index=True ),
+ Column( "params", TrimmedString(255), index=True ) )
JobParameter.table = Table( "job_parameter", metadata,
Column( "id", Integer, primary_key=True ),
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/model/migrate/versions/0093_add_job_params_col.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0093_add_job_params_col.py
@@ -0,0 +1,49 @@
+"""
+Migration script to create "params" column in job table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import logging
+log = logging.getLogger( __name__ )
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+# Column to add.
+params_col = Column( "params", TrimmedString(255), index=True )
+
+def display_migration_details():
+ print ""
+ print "This migration script adds a 'params' column to the Job table."
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+
+ # Add column to Job table.
+ try:
+ Job_table = Table( "job", metadata, autoload=True )
+ params_col.create( Job_table )
+ assert params_col is Job_table.c.params
+
+ except Exception, e:
+ print str(e)
+ log.debug( "Adding column 'params' to job table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+
+ # Drop column from Job table.
+ try:
+ Job_table = Table( "job", metadata, autoload=True )
+ params_col = Job_table.c.params
+ params_col.drop()
+ except Exception, e:
+ log.debug( "Dropping column 'params' from job table failed: %s" % ( str( e ) ) )
\ No newline at end of file
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -562,6 +562,31 @@
if tool_version:
return tool_version.get_version_ids( self.app )
return []
+ def get_job_runner( self, job_params=None ):
+ # Look through runners to find one with matching parameters.
+ selected_runner = None
+ if len( self.job_runners ) == 1:
+ # Most tools have a single runner.
+ selected_runner = self.job_runners[0]
+ elif job_params is None:
+ # Use job runner with no params
+ for runner in self.job_runners:
+ if "params" not in runner:
+ selected_runner = runner
+ else:
+ # Find runner with matching parameters.
+ for runner in self.job_runners:
+ if "params" in runner:
+ match = True
+ runner_params = runner[ "params" ]
+ for param, value in job_params.items():
+ if param not in runner_params or \
+ runner_params[ param ] != job_params[ param ]:
+ match = False
+ break
+ if match:
+ selected_runner = runner
+ return selected_runner[ "url" ]
def parse( self, root, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -626,17 +651,18 @@
self.parallelism = ToolParallelismInfo(parallelism)
else:
self.parallelism = None
+ # Set job runner(s). Each runner is a dict with 'url' and, optionally, 'params'.
if self.app.config.start_job_runners is None:
# Jobs are always local regardless of tool config if no additional
# runners are started
- self.job_runner = "local:///"
+ self.job_runners = [ { "url" : "local:///" } ]
else:
# Set job runner to the cluster default
- self.job_runner = self.app.config.default_cluster_job_runner
- for tup in self.app.config.tool_runners:
- if tup[0] == self.id.lower():
- self.job_runner = tup[1]
- break
+ self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
+ # Set custom runner(s) if they're defined.
+ self_id = self.id.lower()
+ if self_id in self.app.config.tool_runners:
+ self.job_runners = self.app.config.tool_runners[ self_id ]
# Is this a 'hidden' tool (hidden in tool menu)
self.hidden = util.xml_text(root, "hidden")
if self.hidden: self.hidden = util.string_as_bool(self.hidden)
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -1,6 +1,7 @@
from galaxy.model import LibraryDatasetDatasetAssociation
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
+from galaxy.util.json import to_json_string
from galaxy.tools.parameters import *
from galaxy.tools.parameters.grouping import *
from galaxy.util.template import fill_template
@@ -100,7 +101,7 @@
tool.visit_inputs( param_values, visitor )
return input_datasets
- def execute(self, tool, trans, incoming={}, return_job=False, set_output_hid=True, set_output_history=True, history=None ):
+ def execute(self, tool, trans, incoming={}, return_job=False, set_output_hid=True, set_output_history=True, history=None, job_params=None ):
"""
Executes a tool, creating job and tool outputs, associating them, and
submitting the job to the job queue. If history is not specified, use
@@ -389,6 +390,8 @@
for name, dataset in out_data.iteritems():
job.add_output_dataset( name, dataset )
job.object_store_id = object_store_id
+ if job_params:
+ job.params = to_json_string( job_params )
trans.sa_session.add( job )
trans.sa_session.flush()
# Some tools are not really executable, but jobs are still created for them ( for record keeping ).
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -991,7 +991,9 @@
# Execute tool and handle outputs.
#
try:
- subset_job, subset_job_outputs = tool.execute( trans, incoming=tool_params, history=target_history )
+ subset_job, subset_job_outputs = tool.execute( trans, incoming=tool_params,
+ history=target_history,
+ job_params={ "source" : "trackster" } )
except Exception, e:
# Lots of things can go wrong when trying to execute tool.
return to_json_string( { "error" : True, "message" : e.__class__.__name__ + ": " + str(e) } )
diff -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 -r c754d8c07440e4376853eb3b804af366b1442ff0 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -630,8 +630,13 @@
# ---- Tool Job Runners -----------------------------------------------------
-# Individual per-tool job runner overrides. If not listed here, a tool will
-# run with the runner defined with default_cluster_job_runner.
+# Individual per-tool job runner overrides. Parameters can be included to define
+# multiple runners per tool. E.g. to run Cufflinks jobs initiated from Trackster
+# differently than standard Cufflinks jobs:
+# cufflinks = local:///
+# cufflinks[source@trackster] = local:///
+# If not listed here, a tool will run with the runner defined with
+# default_cluster_job_runner.
[galaxy:tool_runners]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for installing a tool shed repository that includes tools.
by Bitbucket 28 Feb '12
by Bitbucket 28 Feb '12
28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c7ca68b62282/
changeset: c7ca68b62282
user: greg
date: 2012-02-28 21:52:03
summary: Fix for installing a tool shed repository that includes tools.
affected #: 1 file
diff -r 56e403f0bb140c27ee7c8df444da375e167d0f6e -r c7ca68b622823d146fac4db1cb0d5963e1daa1a9 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -19,7 +19,12 @@
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = shed_tool_conf_dict[ 'config_elems' ]
# Generate the list of ElementTree Element objects for each section or list of tools.
- elem_list = generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner=owner )
+ elem_list = generate_tool_panel_elem_list( repository_name,
+ repository_clone_url,
+ changeset_revision,
+ tool_panel_dict,
+ repository_tools_tups,
+ owner=owner )
# Load the tools into the tool panel outside of any sections.
for config_elem in elem_list:
if config_elem.tag == 'section':
@@ -785,6 +790,7 @@
# or outside of any sections in the tool panel. We cannot pass a specific tool_config since we do not yet have one.
tool_section_dict = generate_tool_section_dict( tool_config=None, tool_section=tool_section )
metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_section_dict )
+ tool_panel_dict = metadata_dict[ 'tool_panel_section' ]
# Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked deleted, undelete it. This
# must happen before the call to add_to_tool_panel() below because tools will not be properly loaded if the repository is marked deleted.
log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for vcf_bgzip_to_tabix converter that was broken in changeset ac77414506d4
by Bitbucket 28 Feb '12
by Bitbucket 28 Feb '12
28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/56e403f0bb14/
changeset: 56e403f0bb14
user: dan
date: 2012-02-28 20:51:22
summary: Fix for vcf_bgzip_to_tabix converter that was broken in changeset ac77414506d4
affected #: 1 file
diff -r e0a627d11af0aa21f7039529c3959e4806a0d4dd -r 56e403f0bb140c27ee7c8df444da375e167d0f6e lib/galaxy/datatypes/converters/vcf_bgzip_to_tabix_converter.xml
--- a/lib/galaxy/datatypes/converters/vcf_bgzip_to_tabix_converter.xml
+++ b/lib/galaxy/datatypes/converters/vcf_bgzip_to_tabix_converter.xml
@@ -1,6 +1,6 @@
<tool id="CONVERTER_vcf_bgzip_to_tabix_0" name="Convert BGZ VCF to tabix" version="1.0.0" hidden="true"><!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
- <command interpreter="python">interval_to_tabix_converter.py -P 'vcf' '$input1' '$output1'</command>
+ <command interpreter="python">interval_to_tabix_converter.py -P 'vcf' '' '$input1' '$output1'</command><inputs><page><param format="vcf_bgzip" name="input1" type="data" label="Choose BGZIP'd VCF file"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: File missed in the change set for the re-engineered InstallManager.
by Bitbucket 28 Feb '12
by Bitbucket 28 Feb '12
28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e0a627d11af0/
changeset: e0a627d11af0
user: greg
date: 2012-02-28 20:07:20
summary: File missed in the change set for the re-engineered InstallManager.
affected #: 1 file
diff -r 1a9ee49d66127b78aa5923dcbcdaed43b5c53b75 -r e0a627d11af0aa21f7039529c3959e4806a0d4dd manage_tools.sh
--- /dev/null
+++ b/manage_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`
+python ./scripts/manage_tools.py $@
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for generating metadata in a tool shed repository for a change set that contains data types but is not the repository tip.
by Bitbucket 28 Feb '12
by Bitbucket 28 Feb '12
28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1a9ee49d6612/
changeset: 1a9ee49d6612
user: greg
date: 2012-02-28 19:41:01
summary: Fix for generating metadata in a tool shed repository for a change set that contains data types but is not the repository tip.
affected #: 1 file
diff -r 7f320a462a92d4c97b710d84b6977f49c5c481a7 -r 1a9ee49d66127b78aa5923dcbcdaed43b5c53b75 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -346,15 +346,25 @@
metadata_dict = {}
invalid_files = []
sample_files = []
- datatypes_config = None
+ tmp_datatypes_config = None
# Find datatypes_conf.xml if it exists.
for filename in ctx:
if filename == 'datatypes_conf.xml':
fctx = ctx[ filename ]
- datatypes_config = fctx.data()
+ # Write the contents of datatypes_config.xml to a temporary file.
+ fh = tempfile.NamedTemporaryFile( 'w' )
+ tmp_datatypes_config = fh.name
+ fh.close()
+ fh = open( tmp_datatypes_config, 'w' )
+ fh.write( fctx.data() )
+ fh.close()
break
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ if tmp_datatypes_config:
+ metadata_dict = generate_datatypes_metadata( tmp_datatypes_config, metadata_dict )
+ try:
+ os.unlink( tmp_datatypes_config )
+ except:
+ pass
# Get all tool config file names from the hgweb url, something like:
# /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
for filename in ctx:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/314a10ae112d/
changeset: 314a10ae112d
branch: split_blast
user: dannon
date: 2012-02-28 18:32:54
summary: Close split_blast branch
affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
10 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/876baf3452a8/
changeset: 876baf3452a8
branch: split_blast
user: peterjc
date: 2012-02-16 13:14:54
summary: Enable splitting BLAST jobs up (doesn't work yet)
affected #: 5 files
diff -r 5ac62b79d6926ad0a9db8810dedf9fe1a1ed41fa -r 876baf3452a8ace8ac58deff41a695a6248794cb tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
@@ -1,5 +1,6 @@
<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.11"><description>Search nucleotide database with nucleotide query sequence(s)</description>
+ <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 5ac62b79d6926ad0a9db8810dedf9fe1a1ed41fa -r 876baf3452a8ace8ac58deff41a695a6248794cb tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
@@ -1,5 +1,6 @@
<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.11"><description>Search protein database with protein query sequence(s)</description>
+ <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastp -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 5ac62b79d6926ad0a9db8810dedf9fe1a1ed41fa -r 876baf3452a8ace8ac58deff41a695a6248794cb tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
@@ -1,5 +1,6 @@
<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.11"><description>Search protein database with translated nucleotide query sequence(s)</description>
+ <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 5ac62b79d6926ad0a9db8810dedf9fe1a1ed41fa -r 876baf3452a8ace8ac58deff41a695a6248794cb tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
@@ -1,5 +1,6 @@
<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.11"><description>Search translated nucleotide database with protein query sequence(s)</description>
+ <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>tblastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 5ac62b79d6926ad0a9db8810dedf9fe1a1ed41fa -r 876baf3452a8ace8ac58deff41a695a6248794cb tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
@@ -1,5 +1,6 @@
<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.11"><description>Search translated nucleotide database with translated nucleotide query sequence(s)</description>
+ <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>tblastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
https://bitbucket.org/galaxy/galaxy-central/changeset/762777618073/
changeset: 762777618073
branch: split_blast
user: peterjc
date: 2012-02-16 13:15:48
summary: Move FASTQ splitting from Sequence class to Fastq class
affected #: 1 file
diff -r 876baf3452a8ace8ac58deff41a695a6248794cb -r 76277761807306ec2be3f1e4059dd7cde6fd2dc6 lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py
+++ b/lib/galaxy/datatypes/sequence.py
@@ -190,143 +190,10 @@
write_split_files = classmethod(write_split_files)
def split( cls, input_datasets, subdir_generator_function, split_params):
- """
- FASTQ files are split on cluster boundaries, in increments of 4 lines
- """
+ """Split a generic sequence file (not sensible or possible, see subclasses)."""
if split_params is None:
return None
-
- # first, see if there are any associated FQTOC files that will give us the split locations
- # if so, we don't need to read the files to do the splitting
- toc_file_datasets = []
- for ds in input_datasets:
- tmp_ds = ds
- fqtoc_file = None
- while fqtoc_file is None and tmp_ds is not None:
- fqtoc_file = tmp_ds.get_converted_files_by_type('fqtoc')
- tmp_ds = tmp_ds.copied_from_library_dataset_dataset_association
-
- if fqtoc_file is not None:
- toc_file_datasets.append(fqtoc_file)
-
- if len(toc_file_datasets) == len(input_datasets):
- return cls.do_fast_split(input_datasets, toc_file_datasets, subdir_generator_function, split_params)
- return cls.do_slow_split(input_datasets, subdir_generator_function, split_params)
- split = classmethod(split)
-
- def process_split_file(data):
- """
- This is called in the context of an external process launched by a Task (possibly not on the Galaxy machine)
- to create the input files for the Task. The parameters:
- data - a dict containing the contents of the split file
- """
- args = data['args']
- input_name = data['input_name']
- output_name = data['output_name']
- start_sequence = long(args['start_sequence'])
- sequence_count = long(args['num_sequences'])
-
- if 'toc_file' in args:
- toc_file = simplejson.load(open(args['toc_file'], 'r'))
- commands = Sequence.get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count)
- else:
- commands = Sequence.get_split_commands_sequential(is_gzip(input_name), input_name, output_name, start_sequence, sequence_count)
- for cmd in commands:
- if 0 != os.system(cmd):
- raise Exception("Executing '%s' failed" % cmd)
- return True
- process_split_file = staticmethod(process_split_file)
-
- def get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count):
- """
- Uses a Table of Contents dict, parsed from an FQTOC file, to come up with a set of
- shell commands that will extract the parts necessary
- >>> three_sections=[dict(start=0, end=74, sequences=10), dict(start=74, end=148, sequences=10), dict(start=148, end=148+76, sequences=10)]
- >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=0, sequence_count=10)
- ['dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null >> ./output.gz']
- >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=1, sequence_count=5)
- ['(dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +5 2> /dev/null) | head -20 | gzip -c >> ./output.gz']
- >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=0, sequence_count=20)
- ['dd bs=1 skip=0 count=148 if=./input.gz 2> /dev/null >> ./output.gz']
- >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=5, sequence_count=10)
- ['(dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +21 2> /dev/null) | head -20 | gzip -c >> ./output.gz', '(dd bs=1 skip=74 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +1 2> /dev/null) | head -20 | gzip -c >> ./output.gz']
- >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=10, sequence_count=10)
- ['dd bs=1 skip=74 count=74 if=./input.gz 2> /dev/null >> ./output.gz']
- >>> Sequence.get_split_commands_with_toc('./input.gz', './output.gz', dict(sections=three_sections), start_sequence=5, sequence_count=20)
- ['(dd bs=1 skip=0 count=74 if=./input.gz 2> /dev/null )| zcat | ( tail -n +21 2> /dev/null) | head -20 | gzip -c >> ./output.gz', 'dd bs=1 skip=74 count=74 if=./input.gz 2> /dev/null >> ./output.gz', '(dd bs=1 skip=148 count=76 if=./input.gz 2> /dev/null )| zcat | ( tail -n +1 2> /dev/null) | head -20 | gzip -c >> ./output.gz']
- """
- sections = toc_file['sections']
- result = []
-
- current_sequence = long(0)
- i=0
- # skip to the section that contains my starting sequence
- while i < len(sections) and start_sequence >= current_sequence + long(sections[i]['sequences']):
- current_sequence += long(sections[i]['sequences'])
- i += 1
- if i == len(sections): # bad input data!
- raise Exception('No FQTOC section contains starting sequence %s' % start_sequence)
-
- # These two variables act as an accumulator for consecutive entire blocks that
- # can be copied verbatim (without decompressing)
- start_chunk = long(-1)
- end_chunk = long(-1)
- copy_chunk_cmd = 'dd bs=1 skip=%s count=%s if=%s 2> /dev/null >> %s'
-
- while sequence_count > 0 and i < len(sections):
- # we need to extract partial data. So, find the byte offsets of the chunks that contain the data we need
- # use a combination of dd (to pull just the right sections out) tail (to skip lines) and head (to get the
- # right number of lines
- sequences = long(sections[i]['sequences'])
- skip_sequences = start_sequence-current_sequence
- sequences_to_extract = min(sequence_count, sequences-skip_sequences)
- start_copy = long(sections[i]['start'])
- end_copy = long(sections[i]['end'])
- if sequences_to_extract < sequences:
- if start_chunk > -1:
- result.append(copy_chunk_cmd % (start_chunk, end_chunk-start_chunk, input_name, output_name))
- start_chunk = -1
- # extract, unzip, trim, recompress
- result.append('(dd bs=1 skip=%s count=%s if=%s 2> /dev/null )| zcat | ( tail -n +%s 2> /dev/null) | head -%s | gzip -c >> %s' %
- (start_copy, end_copy-start_copy, input_name, skip_sequences*4+1, sequences_to_extract*4, output_name))
- else: # whole section - add it to the start_chunk/end_chunk accumulator
- if start_chunk == -1:
- start_chunk = start_copy
- end_chunk = end_copy
- sequence_count -= sequences_to_extract
- start_sequence += sequences_to_extract
- current_sequence += sequences
- i += 1
- if start_chunk > -1:
- result.append(copy_chunk_cmd % (start_chunk, end_chunk-start_chunk, input_name, output_name))
-
- if sequence_count > 0:
- raise Exception('%s sequences not found in file' % sequence_count)
-
- return result
- get_split_commands_with_toc = staticmethod(get_split_commands_with_toc)
-
-
- def get_split_commands_sequential(is_compressed, input_name, output_name, start_sequence, sequence_count):
- """
- Does a brain-dead sequential scan & extract of certain sequences
- >>> Sequence.get_split_commands_sequential(True, './input.gz', './output.gz', start_sequence=0, sequence_count=10)
- ['zcat "./input.gz" | ( tail -n +1 2> /dev/null) | head -40 | gzip -c > "./output.gz"']
- >>> Sequence.get_split_commands_sequential(False, './input.fastq', './output.fastq', start_sequence=10, sequence_count=10)
- ['tail -n +41 "./input.fastq" 2> /dev/null | head -40 > "./output.fastq"']
- """
- start_line = start_sequence * 4
- line_count = sequence_count * 4
- # TODO: verify that tail can handle 64-bit numbers
- if is_compressed:
- cmd = 'zcat "%s" | ( tail -n +%s 2> /dev/null) | head -%s | gzip -c' % (input_name, start_line+1, line_count)
- else:
- cmd = 'tail -n +%s "%s" 2> /dev/null | head -%s' % (start_line+1, input_name, line_count)
- cmd += ' > "%s"' % output_name
-
- return [cmd]
- get_split_commands_sequential = staticmethod(get_split_commands_sequential)
-
+ raise NotImplementedError("Can't split generic sequence files")
class Alignment( data.Text ):
@@ -335,6 +202,13 @@
"""Add metadata elements"""
MetadataElement( name="species", desc="Species", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None )
+ def split( cls, input_datasets, subdir_generator_function, split_params):
+ """Split a generic alignment file (not sensible or possible, see subclasses)."""
+ if split_params is None:
+ return None
+ raise NotImplementedError("Can't split generic alignment files")
+
+
class Fasta( Sequence ):
"""Class representing a FASTA sequence"""
file_ext = "fasta"
@@ -502,6 +376,55 @@
except:
return False
+ def split( cls, input_datasets, subdir_generator_function, split_params):
+ """
+ FASTQ files are split on cluster boundaries, in increments of 4 lines
+ """
+ if split_params is None:
+ return None
+
+ # first, see if there are any associated FQTOC files that will give us the split locations
+ # if so, we don't need to read the files to do the splitting
+ toc_file_datasets = []
+ for ds in input_datasets:
+ tmp_ds = ds
+ fqtoc_file = None
+ while fqtoc_file is None and tmp_ds is not None:
+ fqtoc_file = tmp_ds.get_converted_files_by_type('fqtoc')
+ tmp_ds = tmp_ds.copied_from_library_dataset_dataset_association
+
+ if fqtoc_file is not None:
+ toc_file_datasets.append(fqtoc_file)
+
+ if len(toc_file_datasets) == len(input_datasets):
+ return cls.do_fast_split(input_datasets, toc_file_datasets, subdir_generator_function, split_params)
+ return cls.do_slow_split(input_datasets, subdir_generator_function, split_params)
+ split = classmethod(split)
+
+ def process_split_file(data):
+ """
+ This is called in the context of an external process launched by a Task (possibly not on the Galaxy machine)
+ to create the input files for the Task. The parameters:
+ data - a dict containing the contents of the split file
+ """
+ args = data['args']
+ input_name = data['input_name']
+ output_name = data['output_name']
+ start_sequence = long(args['start_sequence'])
+ sequence_count = long(args['num_sequences'])
+
+ if 'toc_file' in args:
+ toc_file = simplejson.load(open(args['toc_file'], 'r'))
+ commands = Sequence.get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count)
+ else:
+ commands = Sequence.get_split_commands_sequential(is_gzip(input_name), input_name, output_name, start_sequence, sequence_count)
+ for cmd in commands:
+ if 0 != os.system(cmd):
+ raise Exception("Executing '%s' failed" % cmd)
+ return True
+ process_split_file = staticmethod(process_split_file)
+
+
class FastqSanger( Fastq ):
"""Class representing a FASTQ sequence ( the Sanger variant )"""
file_ext = "fastqsanger"
https://bitbucket.org/galaxy/galaxy-central/changeset/ebe94a2c25c3/
changeset: ebe94a2c25c3
branch: split_blast
user: peterjc
date: 2012-02-16 17:22:58
summary: Not all datatype splitters write a JSON file
affected #: 1 file
diff -r 76277761807306ec2be3f1e4059dd7cde6fd2dc6 -r ebe94a2c25c365cce3058aa963717a088627a526 scripts/extract_dataset_part.py
--- a/scripts/extract_dataset_part.py
+++ b/scripts/extract_dataset_part.py
@@ -31,6 +31,9 @@
Argument: a JSON file
"""
file_path = sys.argv.pop( 1 )
+ if not os.path.isfile(file_path):
+ #Nothing to do - some splitters don't write a JSON file
+ sys.exit(0)
data = simplejson.load(open(file_path, 'r'))
try:
class_name_parts = data['class_name'].split('.')
https://bitbucket.org/galaxy/galaxy-central/changeset/416c961c0da9/
changeset: 416c961c0da9
branch: split_blast
user: peterjc
date: 2012-02-16 19:20:29
summary: Simple FASTA splitting (no JSON metadata files)
affected #: 1 file
diff -r ebe94a2c25c365cce3058aa963717a088627a526 -r 416c961c0da95ec92bcf47a3272bdb278c42d7c6 lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py
+++ b/lib/galaxy/datatypes/sequence.py
@@ -261,6 +261,66 @@
pass
return False
+ def split(cls, input_datasets, subdir_generator_function, split_params):
+ """Split a FASTA file sequence by sequence."""
+ if split_params is None:
+ return
+ if len(input_datasets) > 1:
+ raise Exception("FASTA file splitting does not support multiple files")
+ input_file = input_datasets[0].file_name
+
+ #Counting chunk size as number of sequences.
+ if 'split_mode' not in split_params:
+ raise Exception('Tool does not define a split mode')
+ elif split_params['split_mode'] == 'number_of_parts':
+ #if split_mode = number_of_parts, and split_size = 10, then
+ #we count the number of sequences (say 1234) and divide by
+ #by ten, giving ten files of approx 123 sequences each.
+ chunk_size = 123
+ elif split_params['split_mode'] == 'to_size':
+ #Split the input file into as many sub-files as required,
+ #each containing to_size many sequences
+ chunk_size = int(split_params['split_size'])
+ else:
+ raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+
+ log.debug("Attemping to split FASTA file %s into chunks of %i sequences" \
+ % (input_file, chunk_size))
+ f = open(input_file, "rU")
+ part_file = None
+ try:
+ #Note if the input FASTA file has no sequences, we will
+ #produce just one sub-file which will be a copy of it.
+ part_dir = subdir_generator_function()
+ part_path = os.path.join(part_dir, os.path.basename(input_file))
+ part_file = open(part_path, 'w')
+ log.debug("Writing %s part to %s" % (input_file, part_path))
+ rec_count = 0
+ while True:
+ line = f.readline()
+ if not line:
+ break
+ if line[0]==">":
+ rec_count += 1
+ if rec_count > chunk_size:
+ #Start a new sub-file
+ part_file.close()
+ part_dir = subdir_generator_function()
+ part_path = os.path.join(part_dir, os.path.basename(input_file))
+ part_file = open(part_path, 'w')
+ log.debug("Writing %s part to %s" % (input_file, part_path))
+ rec_count = 1
+ part_file.write(line)
+ part_file.close()
+ except Exception, e:
+ log.error('Unable to split FASTA file: %s' % str(e))
+ f.close()
+ if part_file is not None:
+ part_file.close()
+ raise
+ f.close()
+ split = classmethod(split)
+
class csFasta( Sequence ):
""" Class representing the SOLID Color-Space sequence ( csfasta ) """
file_ext = "csfasta"
https://bitbucket.org/galaxy/galaxy-central/changeset/44c2446e05f0/
changeset: 44c2446e05f0
branch: split_blast
user: peterjc
date: 2012-02-16 19:21:32
summary: Use FASTA splitting in BLAST wrappers
affected #: 5 files
diff -r 416c961c0da95ec92bcf47a3272bdb278c42d7c6 -r 44c2446e05f01665fa393caec44193004857d7b7 tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
@@ -1,6 +1,7 @@
<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.11"><description>Search nucleotide database with nucleotide query sequence(s)</description>
- <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
+ <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 416c961c0da95ec92bcf47a3272bdb278c42d7c6 -r 44c2446e05f01665fa393caec44193004857d7b7 tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
@@ -1,6 +1,7 @@
<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.11"><description>Search protein database with protein query sequence(s)</description>
- <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
+ <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastp -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 416c961c0da95ec92bcf47a3272bdb278c42d7c6 -r 44c2446e05f01665fa393caec44193004857d7b7 tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
@@ -1,6 +1,7 @@
<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.11"><description>Search protein database with translated nucleotide query sequence(s)</description>
- <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
+ <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 416c961c0da95ec92bcf47a3272bdb278c42d7c6 -r 44c2446e05f01665fa393caec44193004857d7b7 tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
@@ -1,6 +1,7 @@
<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.11"><description>Search translated nucleotide database with protein query sequence(s)</description>
- <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
+ <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>tblastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 416c961c0da95ec92bcf47a3272bdb278c42d7c6 -r 44c2446e05f01665fa393caec44193004857d7b7 tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
@@ -1,6 +1,7 @@
<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.11"><description>Search translated nucleotide database with translated nucleotide query sequence(s)</description>
- <parallelism method="multi" split_inputs="query" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
+ <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>tblastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
https://bitbucket.org/galaxy/galaxy-central/changeset/26a0c0aa776d/
changeset: 26a0c0aa776d
branch: split_blast
user: peterjc
date: 2012-02-17 13:05:15
summary: Size based FASTA splitting
affected #: 1 file
diff -r 44c2446e05f01665fa393caec44193004857d7b7 -r 26a0c0aa776d5ab557263e3704c2cb6e2fce7a30 lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py
+++ b/lib/galaxy/datatypes/sequence.py
@@ -262,7 +262,14 @@
return False
def split(cls, input_datasets, subdir_generator_function, split_params):
- """Split a FASTA file sequence by sequence."""
+ """Split a FASTA file sequence by sequence.
+
+ Note that even if split_mode="number_of_parts", the actual number of
+ sub-files produced may not match that requested by split_size.
+
+ If split_mode="to_size" then split_size is treated as the number of
+ FASTA records to put in each sub-file (not size in bytes).
+ """
if split_params is None:
return
if len(input_datasets) > 1:
@@ -273,17 +280,76 @@
if 'split_mode' not in split_params:
raise Exception('Tool does not define a split mode')
elif split_params['split_mode'] == 'number_of_parts':
- #if split_mode = number_of_parts, and split_size = 10, then
- #we count the number of sequences (say 1234) and divide by
+ split_size = int(split_params['split_size'])
+ log.debug("Split %s into %i parts..." % (input_file, split_size))
+ #if split_mode = number_of_parts, and split_size = 10, and
+ #we know the number of sequences (say 1234), then divide by
#by ten, giving ten files of approx 123 sequences each.
- chunk_size = 123
+ if input_datasets[0].metadata is not None \
+ and input_datasets[0].metadata.sequences:
+ #Galaxy has already counted/estimated the number
+ batch_size = 1 + input_datasets[0].metadata.sequences // split_size
+ cls._count_split(input_file, batch_size, subdir_generator_function)
+ else:
+ #OK, if Galaxy hasn't counted them, it may be a big file.
+ #We're not going to count the records which would be slow
+ #and a waste of disk IO time - instead we'll split using
+ #the file size.
+ chunk_size = os.path.getsize(input_file) // split_size
+ cls._size_split(input_file, chunk_size, subdir_generator_function)
elif split_params['split_mode'] == 'to_size':
#Split the input file into as many sub-files as required,
#each containing to_size many sequences
- chunk_size = int(split_params['split_size'])
+ batch_size = int(split_params['split_size'])
+ log.debug("Split %s into batches of %i records..." % (input_file, batch_size))
+ cls._count_split(input_file, batch_size, subdir_generator_function)
else:
raise Exception('Unsupported split mode %s' % split_params['split_mode'])
+ split = classmethod(split)
+ def _size_split(cls, input_file, chunk_size, subdir_generator_function):
+ """Split a FASTA file into chunks based on size on disk.
+
+ This does of course preserve complete records - it only splits at the
+ start of a new FASTQ sequence record.
+ """
+ log.debug("Attemping to split FASTA file %s into chunks of %i bytes" \
+ % (input_file, chunk_size))
+ f = open(input_file, "rU")
+ part_file = None
+ try:
+ #Note if the input FASTA file has no sequences, we will
+ #produce just one sub-file which will be a copy of it.
+ part_dir = subdir_generator_function()
+ part_path = os.path.join(part_dir, os.path.basename(input_file))
+ part_file = open(part_path, 'w')
+ log.debug("Writing %s part to %s" % (input_file, part_path))
+ start_offset = 0
+ while True:
+ offset = f.tell()
+ line = f.readline()
+ if not line:
+ break
+ if line[0]==">" and offset - start_offset >= chunk_size:
+ #Start a new sub-file
+ part_file.close()
+ part_dir = subdir_generator_function()
+ part_path = os.path.join(part_dir, os.path.basename(input_file))
+ part_file = open(part_path, 'w')
+ log.debug("Writing %s part to %s" % (input_file, part_path))
+ start_offset = f.tell()
+ part_file.write(line)
+ except Exception, e:
+ log.error('Unable to size split FASTA file: %s' % str(e))
+ f.close()
+ if part_file is not None:
+ part_file.close()
+ raise
+ f.close()
+ _size_split = classmethod(_size_split)
+
+ def _count_split(cls, input_file, chunk_size, subdir_generator_function):
+ """Split a FASTA file into chunks based on counting records."""
log.debug("Attemping to split FASTA file %s into chunks of %i sequences" \
% (input_file, chunk_size))
f = open(input_file, "rU")
@@ -313,13 +379,13 @@
part_file.write(line)
part_file.close()
except Exception, e:
- log.error('Unable to split FASTA file: %s' % str(e))
+ log.error('Unable to count split FASTA file: %s' % str(e))
f.close()
if part_file is not None:
part_file.close()
raise
f.close()
- split = classmethod(split)
+ _count_split = classmethod(_count_split)
class csFasta( Sequence ):
""" Class representing the SOLID Color-Space sequence ( csfasta ) """
https://bitbucket.org/galaxy/galaxy-central/changeset/1fb89ae798be/
changeset: 1fb89ae798be
branch: split_blast
user: peterjc
date: 2012-02-17 13:24:01
summary: BLAST wrappers: Split FASTA query into parts
affected #: 5 files
diff -r 26a0c0aa776d5ab557263e3704c2cb6e2fce7a30 -r 1fb89ae798bee0a08d4bb7b3ea08be87b4410531 tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastn_wrapper.xml
@@ -1,7 +1,7 @@
<tool id="ncbi_blastn_wrapper" name="NCBI BLAST+ blastn" version="0.0.11"><description>Search nucleotide database with nucleotide query sequence(s)</description><!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
- <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <parallelism method="multi" split_inputs="query" split_mode="number_of_parts" split_size="4" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 26a0c0aa776d5ab557263e3704c2cb6e2fce7a30 -r 1fb89ae798bee0a08d4bb7b3ea08be87b4410531 tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastp_wrapper.xml
@@ -1,7 +1,7 @@
<tool id="ncbi_blastp_wrapper" name="NCBI BLAST+ blastp" version="0.0.11"><description>Search protein database with protein query sequence(s)</description><!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
- <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <parallelism method="multi" split_inputs="query" split_mode="number_of_parts" split_size="4" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastp -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 26a0c0aa776d5ab557263e3704c2cb6e2fce7a30 -r 1fb89ae798bee0a08d4bb7b3ea08be87b4410531 tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_blastx_wrapper.xml
@@ -1,7 +1,7 @@
<tool id="ncbi_blastx_wrapper" name="NCBI BLAST+ blastx" version="0.0.11"><description>Search protein database with translated nucleotide query sequence(s)</description><!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
- <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <parallelism method="multi" split_inputs="query" split_mode="number_of_parts" split_size="4" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>blastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 26a0c0aa776d5ab557263e3704c2cb6e2fce7a30 -r 1fb89ae798bee0a08d4bb7b3ea08be87b4410531 tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastn_wrapper.xml
@@ -1,7 +1,7 @@
<tool id="ncbi_tblastn_wrapper" name="NCBI BLAST+ tblastn" version="0.0.11"><description>Search translated nucleotide database with protein query sequence(s)</description><!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
- <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <parallelism method="multi" split_inputs="query" split_mode="number_of_parts" split_size="4" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>tblastn -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
diff -r 26a0c0aa776d5ab557263e3704c2cb6e2fce7a30 -r 1fb89ae798bee0a08d4bb7b3ea08be87b4410531 tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
--- a/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
+++ b/tools/ncbi_blast_plus/ncbi_tblastx_wrapper.xml
@@ -1,7 +1,7 @@
<tool id="ncbi_tblastx_wrapper" name="NCBI BLAST+ tblastx" version="0.0.11"><description>Search translated nucleotide database with translated nucleotide query sequence(s)</description><!-- If job splitting is enabled, break up the query file into batches of 500 sequences -->
- <parallelism method="multi" split_inputs="query" split_mode="to_size" split_size="500" shared_inputs="subject" merge_outputs="output1"></parallelism>
+ <parallelism method="multi" split_inputs="query" split_mode="number_of_parts" split_size="4" shared_inputs="subject" merge_outputs="output1"></parallelism><version_command>tblastx -version</version_command><command interpreter="python">hide_stderr.py
## The command is a Cheetah template which allows some Python based syntax.
https://bitbucket.org/galaxy/galaxy-central/changeset/c2f30968b034/
changeset: c2f30968b034
branch: split_blast
user: peterjc
date: 2012-02-17 16:27:19
summary: Merged stdout/stderr was missing newline between tasks
affected #: 1 file
diff -r 1fb89ae798bee0a08d4bb7b3ea08be87b4410531 -r c2f30968b0343decb3dbc985d2fc948df489e453 lib/galaxy/jobs/splitters/multi.py
--- a/lib/galaxy/jobs/splitters/multi.py
+++ b/lib/galaxy/jobs/splitters/multi.py
@@ -152,8 +152,8 @@
out = tw.get_task().stdout.strip()
err = tw.get_task().stderr.strip()
if len(out) > 0:
- stdout += tw.working_directory + ':\n' + out
+ stdout += "\n" + tw.working_directory + ':\n' + out
if len(err) > 0:
- stderr += tw.working_directory + ':\n' + err
+ stderr += "\n" + tw.working_directory + ':\n' + err
return (stdout, stderr)
-
+
https://bitbucket.org/galaxy/galaxy-central/changeset/2d74c0a4e931/
changeset: 2d74c0a4e931
branch: split_blast
user: peterjc
date: 2012-02-17 17:03:24
summary: Explicit failure for merging XML files
affected #: 1 file
diff -r c2f30968b0343decb3dbc985d2fc948df489e453 -r 2d74c0a4e9314826efb02458980cc2d5e58678e7 lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -42,6 +42,13 @@
#TODO - Is there a more robust way to do this?
return line.startswith('<?xml ')
+ def merge(split_files, output_file):
+ """Merging multiple XML files is non-trivial and must be done in subclasses."""
+ if len(split_files) > 1:
+ raise NotImplementedError("Merging multiple XML files is non-trivial and must be implemented for each XML type")
+ #For one file only, use base class method (move/copy)
+ data.Text.merge(split_files, output_file)
+ merge = staticmethod(merge)
class BlastXml( GenericXml ):
"""NCBI Blast XML Output data"""
https://bitbucket.org/galaxy/galaxy-central/changeset/ebf65c0b1e26/
changeset: ebf65c0b1e26
branch: split_blast
user: peterjc
date: 2012-02-22 19:43:23
summary: Basic BLAST XML merge implementation
affected #: 1 file
diff -r 2d74c0a4e9314826efb02458980cc2d5e58678e7 -r ebf65c0b1e26a17d5d78f70ec19eccfc800d06fd lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -93,7 +93,51 @@
return False
handle.close()
return True
-
+
+ def merge(split_files, output_file):
+ """Merging multiple XML files is non-trivial and must be done in subclasses."""
+ if len(split_files) == 1:
+ #For one file only, use base class method (move/copy)
+ return data.Text.merge(split_files, output_file)
+ out = open(output_file, "w")
+ h = None
+ for f in split_files:
+ h = open(f)
+ body = False
+ header = []
+ while True:
+ line = h.readline()
+ header.append(line)
+ if "<Iteration>" in line:
+ break
+ header = "".join(header)
+ if "<BlastOutput>" not in header:
+ out.close()
+ h.close()
+ raise ValueError("%s is not a BLAST XML file:\n%s\n..." % (f, header))
+ if f == split_files[0]:
+ out.write(header)
+ old_header = header
+ elif old_header[:300] != header[:300]:
+ #Enough to check <BlastOutput_program> and <BlastOutput_version> match
+ out.close()
+ h.close()
+ raise ValueError("BLAST XML headers don't match for %s and %s - have:\n%s\n...\n\nAnd:\n%s\n...\n" \
+ % (split_files[0], f, old_header[:300], header[:300]))
+ else:
+ out.write(" <Iteration>\n")
+ for line in h:
+ if "</BlastOutput_iterations>" in line:
+ break
+ #TODO - Increment <Iteration_iter-num> and if required automatic query names
+ #like <Iteration_query-ID>Query_3</Iteration_query-ID> to be increasing?
+ out.write(line)
+ h.close()
+ out.write(" </BlastOutput_iterations>\n")
+ out.write("</BlastOutput>\n")
+ out.close()
+ merge = staticmethod(merge)
+
class MEMEXml( GenericXml ):
"""MEME XML Output data"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4d692c86cf6e/
changeset: 4d692c86cf6e
user: greg
date: 2012-02-28 17:38:08
summary: The InstallManager is now a stand-alone appication that attempts to load the tool panel as closely as possible to the way it was loaded before tools were migrated from the Galaxy distribution to the tool shed.
affected #: 27 files
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -18,6 +18,7 @@
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+ self.new_installation = False
# Read config file and check for errors
self.config = config.Configuration( **kwargs )
self.config.check()
@@ -27,9 +28,13 @@
db_url = self.config.database_connection
else:
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
- # Initialize database / check for appropriate schema version
+ # Initialize database / check for appropriate schema version. If this
+ # is a new installation, we'll restrict the tool migration messaging.
from galaxy.model.migrate.check import create_or_verify_database
- create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
+ create_or_verify_database( db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options, app=self )
+ # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
+ from galaxy.tool_shed.migrate.check import verify_tools
+ verify_tools( self, db_url, kwargs.get( 'global_conf', {} ).get( '__file__', None ), self.config.database_engine_options )
# Object store manager
self.object_store = build_object_store_from_config(self.config)
# Setup the database engine and ORM
@@ -65,15 +70,13 @@
self.tag_handler = GalaxyTagHandler()
# Tool data tables
self.tool_data_tables = galaxy.tools.data.ToolDataTableManager( self.config.tool_data_table_config_path )
- # Initialize the tools
- self.toolbox = tools.ToolBox( self.config.tool_configs, self.config.tool_path, self )
+ # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
+ tool_configs = self.config.tool_configs
+ if self.config.migrated_tools_config not in tool_configs:
+ tool_configs.append( self.config.migrated_tools_config )
+ self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
# Search support for tools
self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
- # If enabled, check for tools missing from the distribution because they
- # have been moved to the tool shed and install all such discovered tools.
- if self.config.get_bool( 'enable_tool_shed_install', False ):
- from tool_shed import install_manager
- self.install_manager = install_manager.InstallManager( self, self.config.tool_shed_install_config, self.config.install_tool_config )
# If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed import update_manager
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -52,9 +52,9 @@
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
self.len_file_path = kwargs.get( "len_file_path", resolve_path(os.path.join(self.tool_data_path, 'shared','ucsc','chrom'), self.root) )
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
- self.enable_tool_shed_install = string_as_bool( kwargs.get( 'enable_tool_shed_install', False ) )
- self.tool_shed_install_config = resolve_path( kwargs.get( "tool_shed_install_config_file", "tool_shed_install.xml" ), self.root )
- self.install_tool_config = resolve_path( kwargs.get( "install_tool_config_file", "shed_tool_conf.xml" ), self.root )
+ # The value of migrated_tools_config is the file reserved for containing only those tools that have been eliminated from the distribution
+ # and moved to the tool shed.
+ self.migrated_tools_config = resolve_path( "migrated_tools_conf.xml", self.root )
if 'tool_config_file' in kwargs:
tcf = kwargs[ 'tool_config_file' ]
elif 'tool_config_files' in kwargs:
@@ -210,7 +210,10 @@
def check( self ):
paths_to_check = [ self.root, self.tool_path, self.tool_data_path, self.template_path ]
# Look for any tool shed configs and retrieve the tool_path attribute from the <toolbox> tag.
- for config_filename in self.tool_configs:
+ tool_configs = self.tool_configs
+ if self.migrated_tools_config not in tool_configs:
+ tool_configs.append( self.migrated_tools_config )
+ for config_filename in tool_configs:
tree = parse_xml( config_filename )
root = tree.getroot()
tool_path = root.get( 'tool_path' )
@@ -241,10 +244,12 @@
os.makedirs( path )
except Exception, e:
raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
-
# Check that required files exist
- for path in self.tool_configs:
- if not os.path.isfile(path):
+ tool_configs = self.tool_configs
+ if self.migrated_tools_config not in tool_configs:
+ tool_configs.append( self.migrated_tools_config )
+ for path in tool_configs:
+ if not os.path.isfile( path ):
raise ConfigurationError("File not found: %s" % path )
if not os.path.isfile( self.datatypes_config ):
raise ConfigurationError("File not found: %s" % self.datatypes_config )
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2682,10 +2682,10 @@
self.dist_to_shed = dist_to_shed
@property
def includes_tools( self ):
- return 'tools' in self.metadata
+ return self.metadata and 'tools' in self.metadata
@property
def includes_workflows( self ):
- return 'workflows' in self.metadata
+ return self.metadata and 'workflows' in self.metadata
class ToolVersion( object ):
def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
@@ -2746,6 +2746,12 @@
self.tool_id = tool_id
self.parent_id = parent_id
+class MigrateTools( object ):
+ def __init__( self, repository_id=None, repository_path=None, version=None ):
+ self.repository_id = repository_id
+ self.repository_path = repository_path
+ self.version = version
+
## ---- Utility methods -------------------------------------------------------
def directory_hash_id( id ):
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -394,6 +394,11 @@
Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
+MigrateTools.table = Table( "migrate_tools", metadata,
+ Column( "repository_id", TrimmedString( 255 ) ),
+ Column( "repository_path", TEXT ),
+ Column( "version", Integer ) )
+
Job.table = Table( "job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/model/migrate/check.py
--- a/lib/galaxy/model/migrate/check.py
+++ b/lib/galaxy/model/migrate/check.py
@@ -1,7 +1,6 @@
import sys, os.path, logging
from galaxy import eggs
-
import pkg_resources
pkg_resources.require( "sqlalchemy-migrate" )
@@ -20,7 +19,7 @@
"mysql" : "MySQL_python"
}
-def create_or_verify_database( url, galaxy_config_file, engine_options={} ):
+def create_or_verify_database( url, galaxy_config_file, engine_options={}, app=None ):
"""
Check that the database is use-able, possibly creating it if empty (this is
the only time we automatically create tables, otherwise we force the
@@ -30,9 +29,7 @@
2) Database older than migration support --> fail and require manual update
3) Database at state where migrate support introduced --> add version control information but make no changes (might still require manual update)
4) Database versioned but out of date --> fail with informative message, user must run "sh manage_db.sh upgrade"
-
"""
-
dialect = ( url.split( ':', 1 ) )[0]
try:
egg = dialect_to_egg[dialect]
@@ -45,7 +42,6 @@
except KeyError:
# Let this go, it could possibly work with db's we don't support
log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
-
# Create engine and metadata
engine = create_engine( url, **engine_options )
meta = MetaData( bind=engine )
@@ -53,8 +49,10 @@
try:
dataset_table = Table( "dataset", meta, autoload=True )
except NoSuchTableError:
- # No 'dataset' table means a completely uninitialized database, which
- # is fine, init the database in a versioned state
+ # No 'dataset' table means a completely uninitialized database. If we have an app, we'll
+ # set its new_installation setting to True so the tool migration process will be skipped.
+ if app:
+ app.new_installation = True
log.info( "No database, initializing" )
# Database might or might not be versioned
try:
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0092_add_migrate_tools_table.py
@@ -0,0 +1,49 @@
+"""
+Migration script to create the migrate_tools table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+MigrateTools_table = Table( "migrate_tools", metadata,
+ Column( "repository_id", TrimmedString( 255 ) ),
+ Column( "repository_path", TEXT ),
+ Column( "version", Integer ) )
+
+def upgrade():
+ print __doc__
+
+ metadata.reflect()
+ # Create the table.
+ try:
+ MigrateTools_table.create()
+ cmd = "INSERT INTO migrate_tools VALUES ('GalaxyTools', 'lib/galaxy/tool_shed/migrate', %d)" % 1
+ db_session.execute( cmd )
+ except Exception, e:
+ log.debug( "Creating migrate_tools table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ try:
+ MigrateTools_table.drop()
+ except Exception, e:
+ log.debug( "Dropping migrate_tools table failed: %s" % str( e ) )
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -2,7 +2,7 @@
Classes encapsulating the management of repositories installed from Galaxy tool sheds.
"""
import os, logging
-from galaxy.util.shed_util import create_repository_dict_for_proprietary_datatypes, load_datatype_items
+from galaxy.util.shed_util import *
from galaxy.model.orm import *
log = logging.getLogger(__name__)
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -4,77 +4,197 @@
shed. Tools included in tool_shed_install.xml that have already been installed will not be
re-installed.
"""
-import urllib2
+import logging, urllib2
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
-
log = logging.getLogger( __name__ )
class InstallManager( object ):
- def __init__( self, app, tool_shed_install_config, install_tool_config ):
+ def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config ):
"""
- Check tool settings in tool_shed_install_config and install all tools that are
- not already installed. The tool panel configuration file is the received
- shed_tool_config, which defaults to shed_tool_conf.xml.
+ Check tool settings in tool_shed_install_config and install all tools that are not already installed. The tool panel
+ configuration file is the received migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
"""
self.app = app
- self.sa_session = self.app.model.context.current
- self.install_tool_config = install_tool_config
- # Parse shed_tool_config to get the install location (tool_path).
- tree = util.parse_xml( install_tool_config )
+ self.toolbox = self.app.toolbox
+ self.migrated_tools_config = migrated_tools_config
+ # Get the local non-shed related tool panel config (the default name is tool_conf.xml). If the user has more than 1
+ # non-shed tool panel config it could cause problems.
+ self.proprietary_tool_conf = self.non_shed_tool_panel_config
+ self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
+ # Set the location where the repositories will be installed by retrieving the tool_path setting from migrated_tools_config.
+ tree = util.parse_xml( migrated_tools_config )
root = tree.getroot()
self.tool_path = root.get( 'tool_path' )
- # Keep an in-memory list of xml elements to enable persistence of the changing tool config.
- config_elems = []
+ print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
# Parse tool_shed_install_config to check each of the tools.
- log.debug( "Parsing tool shed install configuration %s" % tool_shed_install_config )
self.tool_shed_install_config = tool_shed_install_config
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
- log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) )
self.repository_owner = 'devteam'
+ for repository_elem in root:
+ self.install_repository( repository_elem )
+ def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
+ found = False
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ if name == tool_config:
+ found = True
+ break
+ if found:
+ break
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ tool = self.toolbox.load_tool( full_path )
+ return generate_tool_guid( repository_clone_url, tool )
+ def get_proprietary_tool_panel_elems( self, latest_tool_migration_script_number ):
+ # Parse self.proprietary_tool_conf (the default is tool_conf.xml) and generate a list of Elements that are either ToolSection elements
+ # or Tool elements. These will be used to generate new entries in the migrated_tools_conf.xml file for the installed tools.
+ tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
+ # Parse the XML and load the file attributes for later checking against self.proprietary_tool_conf.
+ migrated_tool_configs = []
+ tree = util.parse_xml( tools_xml_file_path )
+ root = tree.getroot()
for elem in root:
- config_elems.append( elem )
if elem.tag == 'repository':
- self.install_repository( elem )
+ for tool_elem in elem:
+ migrated_tool_configs.append( tool_elem.get( 'file' ) )
+ # Parse self.proprietary_tool_conf and generate the list of tool panel Elements that contain them.
+ tool_panel_elems = []
+ tree = util.parse_xml( self.proprietary_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ # Tools outside of sections.
+ file_path = elem.get( 'file', None )
+ if file_path:
+ path, name = os.path.split( file_path )
+ if name in migrated_tool_configs:
+ if elem not in tool_panel_elems:
+ tool_panel_elems.append( elem )
elif elem.tag == 'section':
- self.install_section( elem )
- shed_tool_conf_dict = dict( config_filename=install_tool_config,
- tool_path=self.tool_path,
- config_elems=config_elems )
- self.app.toolbox.shed_tool_confs.append( shed_tool_conf_dict )
- def install_repository( self, elem, section_name='', section_id='' ):
- # Install a single repository into the tool config. If outside of any sections, the entry looks something like:
- # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" installed_changeset_revision="f3ed6cfe6402">
- # <tool id="Cut1" version="1.0.1" />
- # </repository>
- name = elem.get( 'name' )
- description = elem.get( 'description' )
- changeset_revision = elem.get( 'changeset_revision' )
+ # Tools contained in a section.
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ file_path = section_elem.get( 'file', None )
+ if file_path:
+ path, name = os.path.split( file_path )
+ if name in migrated_tool_configs:
+ # Append the section, not the tool.
+ if elem not in tool_panel_elems:
+ tool_panel_elems.append( elem )
+ return tool_panel_elems
+ def get_containing_tool_section( self, tool_config ):
+ """
+ If tool_config is defined somewhere in self.proprietary_tool_panel_elems, return True and the ToolSection in which the tool is
+ displayed or None if it is displayed outside of any sections.
+ """
+ for proprietary_tool_panel_elem in self.proprietary_tool_panel_elems:
+ if proprietary_tool_panel_elem.tag == 'tool':
+ # The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
+ proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
+ proprietary_path, proprietary_name = os.path.split( proprietary_tool_config )
+ if tool_config == proprietary_name:
+ # The tool is loaded outside of any sections.
+ return True, None
+ if proprietary_tool_panel_elem.tag == 'section':
+ # The proprietary_tool_panel_elem looks something like <section name="EMBOSS" id="EMBOSSLite">.
+ for section_elem in proprietary_tool_panel_elem:
+ if section_elem.tag == 'tool':
+ # The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
+ proprietary_tool_config = section_elem.get( 'file' )
+ proprietary_path, proprietary_name = os.path.split( proprietary_tool_config )
+ if tool_config == proprietary_name:
+ # The tool is loaded inside of the section_elem.
+ return True, ToolSection( proprietary_tool_panel_elem )
+ return False, None
+ def handle_repository_contents( self, current_working_dir, repository_clone_url, relative_install_dir, repository_elem, repository_name, description,
+ changeset_revision, tmp_name ):
+ # Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
+ # updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+ tool_panel_dict = {}
+ for tool_elem in repository_elem:
+ # The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
+ tool_config = tool_elem.get( 'file' )
+ # See if tool_config is defined somewhere in self.proprietary_tool_panel_elems.
+ is_loaded, tool_section = self.get_containing_tool_section( tool_config )
+ if is_loaded:
+ guid = self.get_guid( repository_clone_url, relative_install_dir, tool_config )
+ tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_section=tool_section )
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ for k, v in tool_panel_dict_for_tool_config.items():
+ tool_panel_dict[ k ] = v
+ metadata_dict = generate_metadata( self.toolbox, relative_install_dir, repository_clone_url, tool_panel_dict=tool_panel_dict )
+ # Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked
+ # deleted, undelete it. It is critical that this happens before the call to add_to_tool_panel() below because
+ # tools will not be properly loaded if the repository is marked deleted.
+ print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name
+ tool_shed_repository = create_or_update_tool_shed_repository( self.app,
+ repository_name,
+ description,
+ changeset_revision,
+ repository_clone_url,
+ metadata_dict,
+ dist_to_shed=True )
+ if 'tools' in metadata_dict:
+ repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
+ if repository_tools_tups:
+ sample_files = metadata_dict.get( 'sample_files', [] )
+ # Handle missing data table entries for tool parameters that are dynamically generated select lists.
+ repository_tools_tups = handle_missing_data_table_entry( self.app, self.tool_path, sample_files, repository_tools_tups )
+ # Handle missing index files for tool parameters that are dynamically generated select lists.
+ repository_tools_tups = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
+ # Handle tools that use fabric scripts to install dependencies.
+ handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
+ add_to_tool_panel( self.app,
+ repository_name,
+ repository_clone_url,
+ changeset_revision,
+ repository_tools_tups,
+ self.repository_owner,
+ self.migrated_tools_config,
+ tool_panel_dict,
+ new_install=True )
+ # Remove the temporary file
+ try:
+ os.unlink( tmp_name )
+ except:
+ pass
+ if 'datatypes_config' in metadata_dict:
+ datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
+ # Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
+ # after this installation completes.
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, relative_install_dir, override=False )
+ if converter_path or display_path:
+ # Create a dictionary of tool shed repository related information.
+ repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
+ name=repository_name,
+ owner=self.repository_owner,
+ installed_changeset_revision=changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
+ if converter_path:
+ # Load proprietary datatype converters
+ self.app.datatypes_registry.load_datatype_converters( self.toolbox, installed_repository_dict=repository_dict )
+ if display_path:
+ # Load proprietary datatype display applications
+ self.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
+ return tool_shed_repository, metadata_dict
+ def install_repository( self, repository_elem ):
+ # Install a single repository, loading contained tools into the tool config.
+ name = repository_elem.get( 'name' )
+ description = repository_elem.get( 'description' )
+ changeset_revision = repository_elem.get( 'changeset_revision' )
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
if self.__isinstalled( clone_dir ):
- log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
+ print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
else:
- if section_name and section_id:
- section_key = 'section_%s' % str( section_id )
- if section_key in self.app.toolbox.tool_panel:
- # Appending a tool to an existing section in self.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % section_name )
- tool_section = self.app.toolbox.tool_panel[ section_key ]
- else:
- # Appending a new section to self.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % section_name )
- new_section_elem = Element( 'section' )
- new_section_elem.attrib[ 'name' ] = section_name
- new_section_elem.attrib[ 'id' ] = section_id
- new_section_elem.attrib[ 'version' ] = ''
- tool_section = ToolSection( new_section_elem )
- self.app.toolbox.tool_panel[ section_key ] = tool_section
- else:
- tool_section = None
current_working_dir = os.getcwd()
tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
@@ -83,25 +203,18 @@
if returncode == 0:
returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
- tool_shed_repository, metadata_dict = load_repository_contents( app=self.app,
- repository_name=name,
- description=description,
- owner=self.repository_owner,
- changeset_revision=changeset_revision,
- tool_path=self.tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=self.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=self.install_tool_config,
- new_install=True,
- dist_to_shed=True )
+ tool_shed_repository, metadata_dict = self.handle_repository_contents( current_working_dir,
+ repository_clone_url,
+ relative_install_dir,
+ repository_elem,
+ name,
+ description,
+ changeset_revision,
+ tmp_name )
if 'tools' in metadata_dict:
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, name, self.repository_owner, changeset_revision )
+ ( tool_shed_url, tool_shed_repository.name, self.repository_owner, changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
response.close()
@@ -121,13 +234,13 @@
if not tool_version_using_old_id:
tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
tool_shed_repository=tool_shed_repository )
- self.sa_session.add( tool_version_using_old_id )
- self.sa_session.flush()
+ self.app.sa_session.add( tool_version_using_old_id )
+ self.app.sa_session.flush()
if not tool_version_using_guid:
tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
tool_shed_repository=tool_shed_repository )
- self.sa_session.add( tool_version_using_guid )
- self.sa_session.flush()
+ self.app.sa_session.add( tool_version_using_guid )
+ self.app.sa_session.flush()
# Associate the two versions as parent / child.
tool_version_association = get_tool_version_association( self.app,
tool_version_using_old_id,
@@ -135,28 +248,29 @@
if not tool_version_association:
tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
parent_id=tool_version_using_old_id.id )
- self.sa_session.add( tool_version_association )
- self.sa_session.flush()
+ self.app.sa_session.add( tool_version_association )
+ self.app.sa_session.flush()
else:
tmp_stderr = open( tmp_name, 'rb' )
- log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ print "Error updating repository ', name, "': ', str( tmp_stderr.read() )
tmp_stderr.close()
else:
tmp_stderr = open( tmp_name, 'rb' )
- log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ print "Error cloning repository '", name, "': ", str( tmp_stderr.read() )
tmp_stderr.close()
- def install_section( self, elem ):
- # Install 1 or more repositories into a section in the tool config. An entry looks something like:
- # <section name="EMBOSS" id="EMBOSSLite">
- # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" installed_changeset_revision="bdd88ae5d0ac">
- # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" />
- # ...
- # </repository>
- # </section>
- section_name = elem.get( 'name' )
- section_id = elem.get( 'id' )
- for repository_elem in elem:
- self.install_repository( repository_elem, section_name=section_name, section_id=section_id )
+ @property
+ def non_shed_tool_panel_config( self ):
+ # Get the non-shed related tool panel config value from the Galaxy config - the default is tool_conf.xml.
+ for config_filename in self.app.config.tool_configs:
+ # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
+ # <toolbox tool_path="../shed_tools">
+ tree = util.parse_xml( config_filename )
+ root = tree.getroot()
+ tool_path = root.get( 'tool_path', None )
+ if tool_path is None:
+ # There will be a problem here if the user has defined 2 non-shed related configs.
+ return config_filename
+ return None
def __get_url_from_tool_shed( self, tool_shed ):
# The value of tool_shed is something like: toolshed.g2.bx.psu.edu
# We need the URL to this tool shed, which is something like:
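For reference, a minimal sketch (the file name is an assumption) of the test the new non_shed_tool_panel_config property applies to each configured tool panel file:

    from galaxy import util

    root = util.parse_xml( "tool_conf.xml" ).getroot()
    # Shed-related configs carry a tool_path attribute on the <toolbox> root,
    # e.g. <toolbox tool_path="../shed_tools">; proprietary configs do not.
    if root.get( "tool_path", None ) is None:
        print "tool_conf.xml is the non-shed (proprietary) tool panel config"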
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/migrate/check.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -0,0 +1,119 @@
+import sys, os, logging, subprocess
+from galaxy import eggs
+import pkg_resources
+pkg_resources.require( "sqlalchemy-migrate" )
+
+from migrate.versioning import repository, schema
+from sqlalchemy import *
+from common import *
+
+log = logging.getLogger( __name__ )
+
+# Path relative to galaxy
+migrate_repository_directory = os.path.dirname( __file__ ).replace( os.getcwd() + os.path.sep, '', 1 )
+migrate_repository = repository.Repository( migrate_repository_directory )
+dialect_to_egg = {
+ "sqlite" : "pysqlite>=2",
+ "postgres" : "psycopg2",
+ "mysql" : "MySQL_python"
+}
+
+def verify_tools( app, url, galaxy_config_file, engine_options={} ):
+ # Check the value in the migrate_tools.version database table column to verify that the number is in
+ # sync with the number of version scripts in ~/lib/galaxy/tool_shed/migrate/versions.
+ dialect = ( url.split( ':', 1 ) )[0]
+ try:
+ egg = dialect_to_egg[ dialect ]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+ except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+ # Create engine and metadata
+ engine = create_engine( url, **engine_options )
+ meta = MetaData( bind=engine )
+ # The migrate_tools table was created in database version script 0092_add_migrate_tools_table.py.
+ version_table = Table( "migrate_tools", meta, autoload=True )
+ # Verify that the code and the database are in sync.
+ db_schema = schema.ControlledSchema( engine, migrate_repository )
+ latest_tool_migration_script_number = migrate_repository.versions.latest
+ if latest_tool_migration_script_number != db_schema.version:
+ if app.new_installation:
+ # New installations will not be missing tools, so we don't need to worry about them.
+ missing_tool_configs = []
+ else:
+ tool_panel_config = get_non_shed_tool_panel_config( app )
+ if tool_panel_config is None:
+ missing_tool_configs = []
+ else:
+ missing_tool_configs = check_for_missing_tools( tool_panel_config, latest_tool_migration_script_number )
+ config_arg = ''
+ if os.path.abspath( os.path.join( os.getcwd(), 'universe_wsgi.ini' ) ) != galaxy_config_file:
+ config_arg = ' -c %s' % galaxy_config_file.replace( os.path.abspath( os.getcwd() ), '.' )
+ # Automatically update the value of the migrate_tools.version database table column.
+ cmd = 'sh manage_tools.sh%s upgrade' % config_arg
+ proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+ return_code = proc.wait()
+ output = proc.stdout.read( 32768 )
+ if return_code != 0:
+ raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output )
+ elif missing_tool_configs:
+ msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
+ msg += "\n\nThe list of files at the end of this message refer to tools that are configured to load into the tool panel for\n"
+ msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools can be automatically installed\n"
+ msg += "from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
+ msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use). If you do this,\n"
+ msg += "be aware that these tools will no longer be available in your Galaxy tool panel, and entries for each of them should\n"
+ msg += "be removed from your file named %s.\n\n" % tool_panel_config
+ msg += "CRITICAL NOTE IF YOU PLAN TO INSTALL\n"
+ msg += "The location in which the tool repositories will be installed is the value of the 'tool_path' attribute in the <tool>\n"
+ msg += 'tag of the file named ./migrated_tool_conf.xml (i.e., <toolbox tool_path="../shed_tools">). The default location\n'
+ msg += "setting is '../shed_tools', which may be problematic for some cluster environments, so make sure to change it before\n"
+ msg += "you execute the installation process if appropriate. The configured location must be outside of the Galaxy installation\n"
+ msg += "directory or it must be in a sub-directory protected by a properly configured .hgignore file if the directory is within\n"
+ msg += "the Galaxy installation directory hierarchy. This is because tool shed repositories will be installed using mercurial's\n"
+ msg += "clone feature, which creates .hg directories and associated mercurial repository files. Not having .hgignore properly\n"
+ msg += "configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed\n"
+ msg += "repositories if they are in directories that pose conflicts. See mercurial's .hgignore documentation at the following\n"
+ msg += "URL for details.\n\nhttp://mercurial.selenic.com/wiki/.hgignore\n\n"
+ msg += output
+ msg += "After the installation process finishes, you can start your Galaxy server. As part of this installation process,\n"
+ msg += "entries for each of the following tool config files will be added to the file named ./migrated_tool_conf.xml, so these\n"
+ msg += "tools will continue to be loaded into your tool panel. Because of this, existing entries for these files should be\n"
+ msg += "removed from your file named %s, but only after the installation process finishes.\n\n" % tool_panel_config
+ for i, missing_tool_config in enumerate( missing_tool_configs ):
+ msg += "%s\n" % missing_tool_config
+ # Should we do the following?
+ #if i > 10:
+ # msg += "\n...and %d more tools...\n" % ( len( missing_tool_configs ) - ( i + 1 ) )
+ # break
+ msg += "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"
+ raise Exception( msg )
+ else:
+ log.info( "At migrate_tools version %d" % db_schema.version )
+
+def migrate_to_current_version( engine, schema ):
+ # Changes to get to current version.
+ changeset = schema.changeset( None )
+ for ver, change in changeset:
+ nextver = ver + changeset.step
+ log.info( 'Installing tools from version %s -> %s... ' % ( ver, nextver ) )
+ old_stdout = sys.stdout
+ class FakeStdout( object ):
+ def __init__( self ):
+ self.buffer = []
+ def write( self, s ):
+ self.buffer.append( s )
+ def flush( self ):
+ pass
+ sys.stdout = FakeStdout()
+ try:
+ schema.runchange( ver, change, changeset.step )
+ finally:
+ for message in "".join( sys.stdout.buffer ).split( "\n" ):
+ log.info( message )
+ sys.stdout = old_stdout
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/migrate/common.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -0,0 +1,112 @@
+import sys, os, ConfigParser
+import galaxy.config
+import galaxy.datatypes.registry
+from galaxy import util, tools
+import galaxy.model.mapping
+import galaxy.tools.search
+from galaxy.objectstore import build_object_store_from_config
+import galaxy.tool_shed.tool_shed_registry
+from galaxy.tool_shed import install_manager
+
+def check_for_missing_tools( tool_panel_config, latest_tool_migration_script_number ):
+ # Get the 000x_tools.xml file associated with the current migrate_tools version number.
+ tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
+ # Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
+ migrated_tool_configs = []
+ tree = util.parse_xml( tools_xml_file_path )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'repository':
+ for tool_elem in elem.findall( 'tool' ):
+ migrated_tool_configs.append( tool_elem.get( 'file' ) )
+ # Parse the proprietary tool_panel_config (the default is tool_conf.xml) and generate the list of missing tool config file names.
+ missing_tool_configs = []
+ tree = util.parse_xml( tool_panel_config )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ missing_tool_configs = check_tool_tag_set( elem, migrated_tool_configs, missing_tool_configs )
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ missing_tool_configs = check_tool_tag_set( section_elem, migrated_tool_configs, missing_tool_configs )
+ return missing_tool_configs
+def check_tool_tag_set( elem, migrated_tool_configs, missing_tool_configs ):
+ file_path = elem.get( 'file', None )
+ if file_path:
+ path, name = os.path.split( file_path )
+ if name in migrated_tool_configs:
+ missing_tool_configs.append( name )
+ return missing_tool_configs
+def get_non_shed_tool_panel_config( app ):
+ # Get the non-shed related tool panel config value from the Galaxy config - the default is tool_conf.xml.
+ for config_filename in app.config.tool_configs:
+ # Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
+ # <toolbox tool_path="../shed_tools">
+ tree = util.parse_xml( config_filename )
+ root = tree.getroot()
+ tool_path = root.get( 'tool_path', None )
+ if tool_path is None:
+ # There will be a problem here if the user has defined 2 non-shed related configs.
+ return config_filename
+ return None
+class MigrateToolsApplication( object ):
+ """Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
+ def __init__( self, tools_migration_config ):
+ galaxy_config_file = 'universe_wsgi.ini'
+ if '-c' in sys.argv:
+ pos = sys.argv.index( '-c' )
+ sys.argv.pop( pos )
+ galaxy_config_file = sys.argv.pop( pos )
+ if not os.path.exists( galaxy_config_file ):
+ print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % galaxy_config_file
+ sys.exit( 1 )
+ config_parser = ConfigParser.ConfigParser( { 'here':os.getcwd() } )
+ config_parser.read( galaxy_config_file )
+ galaxy_config_dict = {}
+ for key, value in config_parser.items( "app:main" ):
+ galaxy_config_dict[ key ] = value
+ self.config = galaxy.config.Configuration( **galaxy_config_dict )
+ if self.config.database_connection is None:
+ self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+ self.object_store = build_object_store_from_config( self.config )
+ # Setup the database engine and ORM
+ self.model = galaxy.model.mapping.init( self.config.file_path,
+ self.config.database_connection,
+ engine_options={},
+ create_tables=False,
+ object_store=self.object_store )
+ # Create an empty datatypes registry.
+ self.datatypes_registry = galaxy.datatypes.registry.Registry()
+ # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
+ self.datatypes_registry.load_datatypes( self.config.root, self.config.datatypes_config )
+ # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
+ tool_configs = self.config.tool_configs
+ if self.config.migrated_tools_config not in tool_configs:
+ tool_configs.append( self.config.migrated_tools_config )
+ self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
+ # Search support for tools
+ self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
+ # Set up the tool sheds registry.
+ if os.path.isfile( self.config.tool_sheds_config ):
+ self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ else:
+ self.tool_shed_registry = None
+ # Get the latest tool migration script number to send to the Install manager.
+ latest_migration_script_number = int( tools_migration_config.split( '_' )[ 0 ] )
+ # The value of migrated_tools_config is migrated_tools_conf.xml, and is reserved for containing only those tools that have been
+ # eliminated from the distribution and moved to the tool shed. A side-effect of instantiating the InstallManager is the automatic
+ # installation of all appropriate tool shed repositories.
+ self.install_manager = install_manager.InstallManager( app=self,
+ latest_migration_script_number=latest_migration_script_number,
+ tool_shed_install_config=os.path.join( self.config.root,
+ 'scripts',
+ 'migrate_tools',
+ tools_migration_config ),
+ migrated_tools_config=self.config.migrated_tools_config )
+ @property
+ def sa_session( self ):
+ return self.model.context.current
+ def shutdown( self ):
+ self.object_store.shutdown()
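A hedged sketch of how a driver script might use this class (run from the Galaxy root directory; the file name follows the 000x_tools.xml convention described above):

    from galaxy.tool_shed.migrate.common import MigrateToolsApplication

    # Instantiating the app triggers the InstallManager, which installs the
    # repositories listed in scripts/migrate_tools/0001_tools.xml.
    app = MigrateToolsApplication( '0001_tools.xml' )
    app.shutdown()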
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/migrate/migrate.cfg
--- /dev/null
+++ b/lib/galaxy/tool_shed/migrate/migrate.cfg
@@ -0,0 +1,20 @@
+[db_settings]
+# Used to identify which repository this database is versioned under.
+# You can use the name of your project.
+repository_id=GalaxyTools
+
+# The name of the database table used to track the schema version.
+# This name shouldn't already be used by your project.
+# If this is changed once a database is under version control, you'll need to
+# change the table name in each database too.
+version_table=migrate_tools
+
+# When committing a change script, Migrate will attempt to generate the
+# sql for all supported databases; normally, if one of them fails - probably
+# because you don't have that database installed - it is ignored and the
+# commit continues, perhaps ending successfully.
+# Databases in this list MUST compile successfully during a commit, or the
+# entire commit will fail. List the databases your application will actually
+# be using to ensure your updates to that database work properly.
+# This must be a list; example: ['postgres','sqlite']
+required_dbs=[]
\ No newline at end of file
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/migrate/versions/0001_tools.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/migrate/versions/0001_tools.py
@@ -0,0 +1,9 @@
+"""
+Initialize the version column of the migrate_tools database table to 1. No tool migrations are handled in this version.
+"""
+import sys
+
+def upgrade():
+ print __doc__
+def downgrade():
+ pass
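For context, sqlalchemy-migrate numbers these scripts by their filename prefix; a hedged sketch of how the latest script number used by verify_tools() is derived:

    from migrate.versioning import repository

    migrate_repository = repository.Repository( "lib/galaxy/tool_shed/migrate" )
    print migrate_repository.versions.latest  # 1 while 0001_tools.py is the only script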
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -9,15 +9,9 @@
class UpdateManager( object ):
def __init__( self, app ):
- """
- Check tool settings in tool_shed_install_config and install all tools that are
- not already installed. The tool panel configuration file is the received
- shed_tool_config, which defaults to shed_tool_conf.xml.
- """
self.app = app
self.sa_session = self.app.model.context.current
- # Ideally only one Galaxy server process
- # should be able to check for repository updates.
+ # Ideally only one Galaxy server process should be able to check for repository updates.
self.running = True
self.sleeper = Sleeper()
self.restarter = threading.Thread( target=self.__restarter )
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -30,6 +30,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
+from galaxy.util.shed_util import *
from galaxy.visualization.tracks.visual_analytics import TracksterConfig
@@ -178,38 +179,31 @@
metadata = tool_shed_repository.metadata
update_needed = False
if 'tool_panel_section' in metadata:
+ if panel_entry_per_tool( metadata[ 'tool_panel_section' ] ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ else:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ tool_section_dict = metadata[ 'tool_panel_section' ]
+ tool_section = generate_tool_section_element_from_dict( tool_section_dict )
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata, tool_section=tool_section )
if section:
- # If the tool_panel_section dictionary is included in the metadata, update it if necessary.
- tool_panel_section = metadata[ 'tool_panel_section' ]
- if tool_panel_section [ 'id' ] != section.id or \
- tool_panel_section [ 'version' ] != section.version or \
- tool_panel_section [ 'name' ] != section.name:
- tool_panel_section [ 'id' ] = section.id
- tool_panel_section [ 'version' ] = section.version
- tool_panel_section [ 'name' ] = section.name
- update_needed = True
+ # This means all tools are loaded into the same tool panel section or are all outside of any sections.
+ for guid, tool_section_dict in tool_panel_dict.items():
+ if tool_section_dict [ 'id' ] != section.id or \
+ tool_section_dict [ 'version' ] != section.version or \
+ tool_section_dict [ 'name' ] != section.name:
+ tool_section_dict [ 'id' ] = section.id
+ tool_section_dict [ 'version' ] = section.version
+ tool_section_dict [ 'name' ] = section.name
+ tool_panel_dict[ guid ] = tool_section_dict
+ update_needed = True
else:
# The tool_panel_section was introduced late, so set its value if it's missing in the metadata.
- if section:
- if section.id is None:
- section_id = ''
- else:
- section_id = section.id
- if section.version is None:
- section_version = ''
- else:
- section_version = section.version
- if section.name is None:
- section_name = ''
- else:
- section_name = section.name
- tool_panel_section = dict( id=section_id, version=section_version, name=section_name )
- update_needed = True
- else:
- tool_panel_section = dict( id='', version='', name='' )
- update_needed = True
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( tool_shed_repository.metadata, tool_section=section )
+ update_needed = True
if update_needed:
- metadata[ 'tool_panel_section' ] = tool_panel_section
+ metadata[ 'tool_panel_section' ] = tool_panel_dict
tool_shed_repository.metadata = metadata
self.sa_session.add( tool_shed_repository )
self.sa_session.flush()
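To make the dictionary shapes in the comments above concrete, a hypothetical tool_panel_dict for one EMBOSS tool (the guid and section values are illustrative only):

    tool_panel_dict = {
        'toolshed.g2.bx.psu.edu/repos/devteam/emboss_5/EMBOSS: antigenic1/5.0.0': {
            'tool_config': 'emboss_antigenic.xml',
            'id': 'EMBOSSLite',
            'version': '',
            'name': 'EMBOSS'
        }
    }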
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -12,6 +12,111 @@
log = logging.getLogger( __name__ )
+def add_to_tool_panel( app, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, owner, shed_tool_conf, tool_panel_dict, new_install=True ):
+ """A tool shed repository is being installed or updated so handle tool panel alterations accordingly."""
+ # We need to change the in-memory version and the file system version of the shed_tool_conf file.
+ index, shed_tool_conf_dict = get_shed_tool_conf_dict( app, shed_tool_conf )
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ config_elems = shed_tool_conf_dict[ 'config_elems' ]
+ # Generate the list of ElementTree Element objects for each section or list of tools.
+ elem_list = generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner=owner )
+ # Load the tools into the tool panel, inside or outside of sections as indicated by each element.
+ for config_elem in elem_list:
+ if config_elem.tag == 'section':
+ app.toolbox.load_section_tag_set( config_elem, app.toolbox.tool_panel, tool_path )
+ elif config_elem.tag == 'tool':
+ guid = config_elem.get( 'guid' )
+ app.toolbox.load_tool_tag_set( config_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
+ if new_install:
+ # Add the elements to the in-memory list of config_elems.
+ for elem_entry in elem_list:
+ config_elems.append( elem_entry )
+ # Replace the old list of config_elems with the new list.
+ shed_tool_conf_dict[ 'config_elems' ] = config_elems
+ app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
+ # Append the new entry (either section or list of tools) to the shed_tool_config file.
+ config_elems_to_xml_file( app, shed_tool_conf_dict )
+ if app.toolbox_search.enabled:
+ # If search support for tools is enabled, index the new installed tools.
+ app.toolbox_search = ToolBoxSearch( app.toolbox )
+def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
+ """
+ Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
+ add information to appropriate element attributes that will enable proprietary datatype class modules, datatypes converters
+ and display applications to be discovered and properly imported by the datatypes registry. The value of override will
+ be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
+ has been initialized, the registry's contents cannot be overridden by conflicting data types.
+ """
+ tree = util.parse_xml( datatypes_config )
+ datatypes_config_root = tree.getroot()
+ # Path to datatype converters
+ converter_path = None
+ # Path to datatype display applications
+ display_path = None
+ relative_path_to_datatype_file_name = None
+ datatype_files = datatypes_config_root.find( 'datatype_files' )
+ datatype_class_modules = []
+ if datatype_files:
+ # The <datatype_files> tag set contains any number of <datatype_file> tags.
+ # <datatype_files>
+ # <datatype_file name="gmap.py"/>
+ # <datatype_file name="metagenomics.py"/>
+ # </datatype_files>
+ # We'll add attributes to the datatype tag sets so that the modules can be properly imported by the datatypes registry.
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ datatype_class_modules.append( os.path.join( root, name ) )
+ break
+ break
+ if datatype_class_modules:
+ registration = datatypes_config_root.find( 'registration' )
+ converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
+ if converter_path:
+ registration.attrib[ 'proprietary_converter_path' ] = converter_path
+ if display_path:
+ registration.attrib[ 'proprietary_display_path' ] = display_path
+ for relative_path_to_datatype_file_name in datatype_class_modules:
+ relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
+ for elem in registration.findall( 'datatype' ):
+ # Handle 'type' attribute which should be something like one of the following:
+ # type="gmap:GmapDB"
+ # type="galaxy.datatypes.gmap:GmapDB"
+ dtype = elem.get( 'type', None )
+ if dtype:
+ fields = dtype.split( ':' )
+ proprietary_datatype_module = fields[ 0 ]
+ if proprietary_datatype_module.find( '.' ) >= 0:
+ # Handle the case where datatype_module is "galaxy.datatypes.gmap".
+ proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
+ # The value of proprietary_path must be an absolute path due to job_working_directory.
+ elem.attrib[ 'proprietary_path' ] = os.path.abspath( relative_head )
+ elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
+
+ sniffers = datatypes_config_root.find( 'sniffers' )
+ fd, proprietary_datatypes_config = tempfile.mkstemp()
+ os.write( fd, '<?xml version="1.0"?>\n' )
+ os.write( fd, '<datatypes>\n' )
+ os.write( fd, '%s' % util.xml_to_string( registration ) )
+ os.write( fd, '%s' % util.xml_to_string( sniffers ) )
+ os.write( fd, '</datatypes>\n' )
+ os.close( fd )
+ os.chmod( proprietary_datatypes_config, 0644 )
+ else:
+ proprietary_datatypes_config = datatypes_config
+ # Load proprietary datatypes
+ app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
+ if datatype_files:
+ try:
+ os.unlink( proprietary_datatypes_config )
+ except:
+ pass
+ return converter_path, display_path
def config_elems_to_xml_file( app, shed_tool_conf_dict ):
# Persist the current in-memory list of config_elems in the received shed_tool_conf_dict
# to a file named by the value of config_filename in the received shed_tool_conf_dict.
@@ -80,10 +185,8 @@
converter_path=converter_path,
display_path=display_path )
def create_or_update_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='', dist_to_shed=False ):
- # This method is used by the InstallManager, which does not have access to trans. The
- # received value for dist_to_shed will be True if the InstallManager is installing a repository
- # that contains tools or datatypes that used to be in the Galaxy distribution, but have been
- # moved to the main Galaxy tool shed.
+ # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
+ # to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
sa_session = app.model.context.current
tmp_url = clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
@@ -111,13 +214,12 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
+def generate_clone_url( trans, repository ):
+ """Generate the URL for cloning a repository."""
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received datatypes_config. This method is used by the InstallManager,
- which does not have access to trans.
- """
- # Parse datatypes_config.
+ """Update the received metadata_dict with changes that have been applied to the received datatypes_config."""
tree = ElementTree.parse( datatypes_config )
root = tree.getroot()
ElementInclude.include( root )
@@ -156,23 +258,33 @@
if datatypes:
metadata_dict[ 'datatypes' ] = datatypes
return metadata_dict
-def generate_metadata( toolbox, relative_install_dir, repository_clone_url, tool_section_dict=None ):
+def generate_metadata( toolbox, relative_install_dir, repository_clone_url, tool_section_dict=None, tool_panel_dict=None ):
"""
- Browse the repository files on disk to generate metadata. Since we are using disk files, it
- is imperative that the repository is updated to the desired change set revision before metadata
- is generated. This method is used by the InstallManager, which does not have access to trans.
+ Browse the repository files on disk to generate metadata. Since we are using disk files, it is imperative that the
+ repository is updated to the desired change set revision before metadata is generated.
"""
metadata_dict = {}
sample_files = []
datatypes_config = None
- # Keep track of the section in the tool panel in which this repository's tools will be contained.
+ new_tool_panel_dict = {}
+ # Keep track of the section in the tool panel in which this repository's tools will be contained by using the information in either the
+ # tool_section_dict or the tool_panel_dict (at least 1 of these 2 dictionaries should be None). The tool_section_dict is passed when the
+ # Admin is manually installing a repository into a single selected section. It looks something like this.
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ # The tool_panel_dict is fully populated with all tools in the repository that should be loaded into the tool panel. It is received when
+ # this method is called by the InstallManager or when metadata is being reset on an existing repository. This dictionary looks something
+ # like this.
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}}
+ # The tool_panel_dict enables each tool in the repository to be contained inside or outside a specified ToolSection in the tool panel. The
+ # new_tool_panel_dict with be populated with a subset of the items in the received tool_panel_dict. This will allow for the case where
+ # repository metadata is being updated where the previous change set revision included more tools than the current change set revision.
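+ # For illustration only, a hypothetical tool_panel_dict entry with made-up values might look like this.
+ # { 'toolshed.g2.bx.psu.edu/repos/some_owner/some_repo/some_tool/1.0.0' :
+ # { tool_config : 'some_tool.xml', id : 'filter_group', version : '', name : 'Filter and Sort' } }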
if tool_section_dict:
+ # The received tool_panel_dict must be None, so we'll populate it here. Currently tools must all be installed in a single
+ # section or outside any sections.
for k, v in tool_section_dict.items():
if v is None:
+ # Coerce None values into empty strings because ElementTree.tostring() throws exceptions on None values.
tool_section_dict[ k ] = ''
- metadata_dict[ 'tool_panel_section' ] = tool_section_dict
- else:
- metadata_dict[ 'tool_panel_section' ] = dict( id='', version='', name='' )
# Find datatypes_conf.xml if it exists.
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
@@ -207,7 +319,28 @@
tool = None
if tool is not None:
tool_config = os.path.join( root, name )
+ guid = generate_tool_guid( repository_clone_url, tool )
metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
+ # Populate the new_tool_panel_dict if necessary.
+ if tool_panel_dict and guid in tool_panel_dict:
+ # We're updating metadata on a previously installed repository.
+ old_tool_panel_dict = tool_panel_dict[ guid ]
+ if 'tool_config' not in old_tool_panel_dict or old_tool_panel_dict[ 'tool_config' ] in [ None, '' ]:
+ old_tool_panel_dict[ 'tool_config' ] = name
+ new_tool_panel_dict[ guid ] = old_tool_panel_dict
+ else:
+ # The admin is manually installing a new repository.
+ new_tool_section_dict = {}
+ if tool_section_dict:
+ for k, v in tool_section_dict.items():
+ new_tool_section_dict[ k ] = v
+ else:
+ new_tool_section_dict[ 'id' ] = ''
+ new_tool_section_dict[ 'name' ] = ''
+ new_tool_section_dict[ 'version' ] = ''
+ if 'tool_config' not in new_tool_section_dict or new_tool_section_dict[ 'tool_config' ] in [ None, '' ]:
+ new_tool_section_dict[ 'tool_config' ] = name
+ new_tool_panel_dict[ guid ] = new_tool_section_dict
# Find all exported workflows
elif name.endswith( '.ga' ):
relative_path = os.path.join( root, name )
@@ -217,6 +350,7 @@
exported_workflow_dict = from_json_string( workflow_text )
if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
+ metadata_dict[ 'tool_panel_section' ] = new_tool_panel_dict
return metadata_dict
def generate_tool_guid( repository_clone_url, tool ):
"""
@@ -227,11 +361,7 @@
tmp_url = clean_repository_clone_url( repository_clone_url )
return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been
- applied to the received tool. This method is used by the InstallManager,
- which does not have access to trans.
- """
+ """Update the received metadata_dict with changes that have been applied to the received tool."""
# Generate the guid
guid = generate_tool_guid( repository_clone_url, tool )
# Handle tool.requirements.
@@ -276,31 +406,38 @@
else:
metadata_dict[ 'tools' ] = [ tool_dict ]
return metadata_dict
-def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, repository_tools_tups, tool_section=None, owner='' ):
- """Generate a list of ElementTree Element objects for each section or list of tools."""
+def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner='' ):
+ """Generate a list of ElementTree Element objects for each section or tool."""
+ # Each entry in the tool_panel_dict looks like this.
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
elem_list = []
tool_elem = None
tmp_url = clean_repository_clone_url( repository_clone_url )
if not owner:
owner = get_repository_owner( tmp_url )
- if tool_section:
- root_elem = Element( 'section' )
- if tool_section.name is None:
- root_elem.attrib[ 'name' ] = ''
- else:
- root_elem.attrib[ 'name' ] = tool_section.name
- if tool_section.id is None:
- root_elem.attrib[ 'id' ] = ''
- else:
- root_elem.attrib[ 'id' ] = tool_section.id
- if tool_section.version is None:
- root_elem.attrib[ 'version' ] = ''
- else:
- root_elem.attrib[ 'version' ] = tool_section.version
- for repository_tool_tup in repository_tools_tups:
- tool_file_path, guid, tool = repository_tool_tup
- if tool_section:
- tool_elem = SubElement( root_elem, 'tool' )
+ for guid, tool_section_dict in tool_panel_dict.items():
+ tool_section = None
+ inside_section = False
+ section_in_elem_list = False
+ if tool_section_dict[ 'id' ]:
+ inside_section = True
+ # Create a new section element only if we haven't already created it.
+ for index, elem in enumerate( elem_list ):
+ if elem.tag == 'section':
+ section_id = elem.get( 'id', None )
+ if section_id == tool_section_dict[ 'id' ]:
+ section_in_elem_list = True
+ tool_section = elem
+ break
+ if tool_section is None:
+ tool_section = generate_tool_section_element_from_dict( tool_section_dict )
+ # Find the tuple containing the current guid from the list of repository_tools_tups.
+ for repository_tool_tup in repository_tools_tups:
+ tool_file_path, tup_guid, tool = repository_tool_tup
+ if tup_guid == guid:
+ break
+ if inside_section:
+ tool_elem = SubElement( tool_section, 'tool' )
else:
tool_elem = Element( 'tool' )
tool_elem.attrib[ 'file' ] = tool_file_path
@@ -317,24 +454,133 @@
id_elem.text = tool.id
version_elem = SubElement( tool_elem, 'version' )
version_elem.text = tool.version
- if not tool_section:
+ if inside_section:
+ if section_in_elem_list:
+ elem_list[ index ] = tool_section
+ else:
+ elem_list.append( tool_section )
+ else:
elem_list.append( tool_elem )
+ return elem_list
+def generate_tool_panel_dict_for_repository_tools( repository_metadata, tool_section=None, tool_section_dict=None ):
+ """
+ Create a dictionary of the following type for every tool in the repository where the tools are all
+ contained in the same tool section or no tool section. If tool_section is None, tools will be displayed
+ outside of any sections in the tool panel.
+ {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ """
+ tool_panel_dict = {}
+ tool_dicts = repository_metadata[ 'tools' ]
+ for tool_dict in tool_dicts:
+ guid = tool_dict[ 'guid' ]
+ tool_config = tool_dict[ 'tool_config' ]
+ new_tool_section_dict = {}
+ if tool_section_dict:
+ for k, v in tool_section_dict.items():
+ new_tool_section_dict[ k ] = v
+ file_path, file_name = os.path.split( tool_config )
+ new_tool_section_dict[ 'tool_config' ] = file_name
+ else:
+ new_tool_section_dict = generate_tool_section_dict( tool_config=tool_config, tool_section=tool_section )
+ tool_panel_dict[ guid ] = new_tool_section_dict
+ return tool_panel_dict
+def generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_section=None ):
+ """
+ Create a dictionary of the following type for a single tool config file name. The intent is to call this method for every tool config
+ in a repository and append each of these as entries to a tool panel dictionary for the repository. This allows for each tool to be
+ loaded into a different section in the tool panel.
+ {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ """
+ tool_panel_dict = {}
+ file_path, file_name = os.path.split( tool_config )
+ tool_section_dict = generate_tool_section_dict( tool_config=file_name, tool_section=tool_section )
+ tool_panel_dict[ guid ] = tool_section_dict
+ return tool_panel_dict
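+ # A minimal usage sketch with hypothetical values (guid and file name are made up):
+ # generate_tool_panel_dict_for_tool_config( '<shed>/repos/some_owner/some_repo/some_tool/1.0.0', 'tools/some_tool.xml' )
+ # would return
+ # { '<shed>/repos/some_owner/some_repo/some_tool/1.0.0' : { tool_config : 'some_tool.xml', id : '', version : '', name : '' } }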
+def generate_tool_section_dict( tool_config=None, tool_section=None ):
+ if tool_config is None:
+ tool_config = ''
if tool_section:
- elem_list.append( root_elem )
- return elem_list
+ section_id = tool_section.id or ''
+ section_version = tool_section.version or ''
+ section_name = tool_section.name or ''
+ tool_section_dict = dict( tool_config=tool_config, id=section_id, version=section_version, name=section_name )
+ else:
+ tool_section_dict = dict( tool_config=tool_config, id='', version='', name='' )
+ return tool_section_dict
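+ # For example (hypothetical file name), generate_tool_section_dict( tool_config='some_tool.xml', tool_section=None )
+ # returns { tool_config : 'some_tool.xml', id : '', version : '', name : '' }.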
+def generate_tool_section_element_from_dict( tool_section_dict ):
+ # The value of tool_section_dict looks like the following.
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ if tool_section_dict[ 'id' ]:
+ # Create a new tool section.
+ tool_section = Element( 'section' )
+ tool_section.attrib[ 'id' ] = tool_section_dict[ 'id' ]
+ tool_section.attrib[ 'name' ] = tool_section_dict[ 'name' ]
+ tool_section.attrib[ 'version' ] = tool_section_dict[ 'version' ]
+ else:
+ tool_section = None
+ return tool_section
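+ # For example (hypothetical section values), a tool_section_dict of
+ # { id : 'filter_group', version : '', name : 'Filter and Sort' }
+ # yields an element that serializes to roughly:
+ # <section id="filter_group" name="Filter and Sort" version="" />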
def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received exported_workflow_dict. Store everything in the database.
- This method is used by the InstallManager, which does not have access to trans.
- """
+ """Update the received metadata_dict with changes that have been applied to the received exported_workflow_dict."""
if 'workflows' in metadata_dict:
metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) )
else:
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
+def get_converter_and_display_paths( registration_elem, relative_install_dir ):
+ """Find the relative path to data type converters and display applications included in installed tool shed repositories."""
+ converter_path = None
+ display_path = None
+ for elem in registration_elem.findall( 'datatype' ):
+ if not converter_path:
+ # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
+ # if it is not already set. This requires developers to place all converters in the
+ # same subdirectory within the repository hierarchy.
+ for converter in elem.findall( 'converter' ):
+ converter_config = converter.get( 'file', None )
+ if converter_config:
+ relative_head, relative_tail = os.path.split( converter_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == relative_tail:
+ # The value of converter_path must be absolute due to job_working_directory.
+ converter_path = os.path.abspath( root )
+ break
+ if converter_path:
+ break
+ if not display_path:
+ # If any of the <datatype> tag sets contain <display> tags, set the display_path
+ # if it is not already set. This requires developers to place all display applications
+ # in the same subdirectory within the repository hierarchy.
+ for display_app in elem.findall( 'display' ):
+ display_config = display_app.get( 'file', None )
+ if display_config:
+ relative_head, relative_tail = os.path.split( display_config )
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == relative_tail:
+ # The value of display_path must be absolute due to job_working_directory.
+ display_path = os.path.abspath( root )
+ break
+ if display_path:
+ break
+ if converter_path and display_path:
+ break
+ return converter_path, display_path
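+ # For reference, the registration_elem inspected above comes from a datatypes_conf.xml fragment shaped
+ # roughly like this (tag names per the code above; attribute values are hypothetical):
+ # <registration>
+ # <datatype extension="gmap" type="gmap:GmapDB">
+ # <converter file="gmapdb_to_fasta.xml"/>
+ # <display file="gmapdb_display.xml"/>
+ # </datatype>
+ # </registration>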
+def get_shed_tool_conf_dict( app, shed_tool_conf ):
+ """
+ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
+ in the shed_tool_conf_dict associated with the file.
+ """
+ for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
+ if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
+ return index, shed_tool_conf_dict
+ else:
+ file_path, file_name = os.path.split( shed_tool_conf_dict[ 'config_filename' ] )
+ if shed_tool_conf == file_name:
+ return index, shed_tool_conf_dict
def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
- # This method is used by the InstallManager, which does not have access to trans.
sa_session = app.model.context.current
if tool_shed.find( '//' ) > 0:
tool_shed = tool_shed.split( '//' )[1]
@@ -361,7 +607,6 @@
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
def get_tool_version( app, tool_id ):
- # This method is used by the InstallManager, which does not have access to trans.
sa_session = app.model.context.current
return sa_session.query( app.model.ToolVersion ) \
.filter( app.model.ToolVersion.table.c.tool_id == tool_id ) \
@@ -375,7 +620,6 @@
.first()
def get_url_from_repository_tool_shed( app, repository ):
"""
- This method is used by the UpdateManager, which does not have access to trans.
The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu
We need the URL to this tool shed, which is something like: http://toolshed.g2.bx.psu.edu/
"""
@@ -384,14 +628,12 @@
if shed_url.endswith( '/' ):
shed_url = shed_url.rstrip( '/' )
return shed_url
- # The tool shed from which the repository was originally
- # installed must no longer be configured in tool_sheds_conf.xml.
+ # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ):
"""
- Inspect each tool to see if any have input parameters that are dynamically
- generated select lists that require entries in the tool_data_table_conf.xml file.
- This method is used by the InstallManager, which does not have access to trans.
+ Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
+ tool_data_table_conf.xml file.
"""
missing_data_table_entry = False
for index, repository_tools_tup in enumerate( repository_tools_tups ):
@@ -400,8 +642,7 @@
missing_data_table_entry = True
break
if missing_data_table_entry:
- # The repository must contain a tool_data_table_conf.xml.sample file that includes
- # all required entries for all tools in the repository.
+ # The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
for sample_file in sample_files:
head, tail = os.path.split( sample_file )
if tail == 'tool_data_table_conf.xml.sample':
@@ -415,11 +656,7 @@
repository_tools_tups[ index ] = ( tup_path, repository_tool )
return repository_tools_tups
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ):
- """
- Inspect each tool to see if it has any input parameters that
- are dynamically generated select lists that depend on a .loc file.
- This method is used by the InstallManager, which does not have access to trans.
- """
+ """Inspect each tool to see if it has any input parameters that are dynamically generated select lists that depend on a .loc file."""
missing_files_handled = []
for index, repository_tools_tup in enumerate( repository_tools_tups ):
tup_path, guid, repository_tool = repository_tools_tup
@@ -445,7 +682,6 @@
"""
Inspect each tool to see if it includes a "requirement" that refers to a fabric
script. For those that do, execute the fabric script to install tool dependencies.
- This method is used by the InstallManager, which does not have access to trans.
"""
for index, repository_tools_tup in enumerate( repository_tools_tups ):
tup_path, guid, repository_tool = repository_tools_tup
@@ -479,10 +715,8 @@
log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
- This method is used by the InstallManager, which does not have access to trans. Using the list
- of tool_version_dicts retrieved from the tool shed (one per chngeset revison up to the currently
- installed changeset revision), create the parent / child pairs of tool versions. Each dictionary
- contains { tool id : parent tool id } pairs.
+ Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revision up to the currently installed changeset revision),
+ create the parent / child pairs of tool versions. Each dictionary contains { tool id : parent tool id } pairs.
"""
sa_session = app.model.context.current
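+ # For example, a single tool_version_dict might look like this (guids are hypothetical; the key is the
+ # child tool id and the value is its parent tool id):
+ # { '<shed>/repos/some_owner/some_repo/some_tool/1.0.1' : '<shed>/repos/some_owner/some_repo/some_tool/1.0.0' }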
for tool_version_dict in tool_version_dicts:
@@ -527,201 +761,64 @@
if display_path:
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict, deactivate=deactivate )
-def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
+def load_repository_contents( trans, repository_name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir,
+ current_working_dir, tmp_name, tool_panel_dict=None, tool_shed=None, tool_section=None, shed_tool_conf=None,
+ new_install=True ):
"""
- Parse a proprietary datatypes config (a datatypes_conf.xml file included in an installed tool shed repository) and
- add information to appropriate elements that will enable proprietary datatype class modules, datatypes converters
- and display application to be discovered and properly imported by the datatypes registry. The value of override will
- be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
- has been initialized, its contents cannot be overridden by conflicting data types. This method is used by the InstallManager,
- which does not have access to trans.
+ Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository
+ is updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository
+ files on disk. If this method is called when a new repository is being installed, the value of tool_panel_dict will be None and
+ the value of tool_section (a ToolSection or None) will be used. This method is also called when updates have been pulled to a
+ previously installed repository, in which case the value of tool_panel_dict will be used and the value of new_install will be False.
"""
- tree = util.parse_xml( datatypes_config )
- datatypes_config_root = tree.getroot()
- # Path to datatype converters
- converter_path = None
- # Path to datatype display applications
- display_path = None
- relative_path_to_datatype_file_name = None
- datatype_files = datatypes_config_root.find( 'datatype_files' )
- datatype_class_modules = []
- if datatype_files:
- # The <datatype_files> tag set contains any number of <datatype_file> tags.
- # <datatype_files>
- # <datatype_file name="gmap.py"/>
- # <datatype_file name="metagenomics.py"/>
- # </datatype_files>
- # We'll add attributes to the datatype tag sets so that the modules can be properly imported by the datatypes registry.
- for elem in datatype_files.findall( 'datatype_file' ):
- datatype_file_name = elem.get( 'name', None )
- if datatype_file_name:
- # Find the file in the installed repository.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == datatype_file_name:
- datatype_class_modules.append( os.path.join( root, name ) )
- break
- break
- if datatype_class_modules:
- registration = datatypes_config_root.find( 'registration' )
- converter_path, display_path = get_converter_and_display_paths( registration, relative_install_dir )
- if converter_path:
- registration.attrib[ 'proprietary_converter_path' ] = converter_path
- if display_path:
- registration.attrib[ 'proprietary_display_path' ] = display_path
- for relative_path_to_datatype_file_name in datatype_class_modules:
- relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
- for elem in registration.findall( 'datatype' ):
- # Handle 'type' attribute which should be something like one of the following:
- # type="gmap:GmapDB"
- # type="galaxy.datatypes.gmap:GmapDB"
- dtype = elem.get( 'type', None )
- if dtype:
- fields = dtype.split( ':' )
- proprietary_datatype_module = fields[ 0 ]
- if proprietary_datatype_module.find( '.' ) >= 0:
- # Handle the case where datatype_module is "galaxy.datatypes.gmap".
- proprietary_datatype_module = proprietary_datatype_module.split( '.' )[ -1 ]
- # The value of proprietary_path must be an absolute path due to job_working_directory.
- elem.attrib[ 'proprietary_path' ] = os.path.abspath( relative_head )
- elem.attrib[ 'proprietary_datatype_module' ] = proprietary_datatype_module
-
- sniffers = datatypes_config_root.find( 'sniffers' )
- fd, proprietary_datatypes_config = tempfile.mkstemp()
- os.write( fd, '<?xml version="1.0"?>\n' )
- os.write( fd, '<datatypes>\n' )
- os.write( fd, '%s' % util.xml_to_string( registration ) )
- os.write( fd, '%s' % util.xml_to_string( sniffers ) )
- os.write( fd, '</datatypes>\n' )
- os.close( fd )
- os.chmod( proprietary_datatypes_config, 0644 )
+ if tool_panel_dict:
+ # We're resetting metadata on a previously installed repository. For backward compatibility we have to handle 2 types of dictionaries.
+ # In the past, all repository tools had to be installed into a single ToolSection (or outside of any sections) in the tool panel.
+ if panel_entry_per_tool( tool_panel_dict ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_panel_dict=tool_panel_dict )
+ else:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_panel_dict )
else:
- proprietary_datatypes_config = datatypes_config
- # Load proprietary datatypes
- app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=proprietary_datatypes_config, deactivate=deactivate, override=override )
- try:
- os.unlink( proprietary_datatypes_config )
- except:
- pass
- return converter_path, display_path
-def get_converter_and_display_paths( registration_elem, relative_install_dir ):
- """
- Find the relative path to data type converters and display
- applications included in installed tool shed repositories.
- """
- converter_path = None
- display_path = None
- for elem in registration_elem.findall( 'datatype' ):
- if not converter_path:
- # If any of the <datatype> tag sets contain <converter> tags, set the converter_path
- # if it is not already set. This requires developers to place all converters in the
- # same subdirectory within the repository hierarchy.
- for converter in elem.findall( 'converter' ):
- converter_config = converter.get( 'file', None )
- if converter_config:
- relative_head, relative_tail = os.path.split( converter_config )
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == relative_tail:
- # The value of converter_path must be absolute due to job_working_directory.
- converter_path = os.path.abspath( root )
- break
- if converter_path:
- break
- if not display_path:
- # If any of the <datatype> tag sets contain <display> tags, set the display_path
- # if it is not already set. This requires developers to place all display acpplications
- # in the same subdirectory within the repository hierarchy.
- for display_app in elem.findall( 'display' ):
- display_config = display_app.get( 'file', None )
- if display_config:
- relative_head, relative_tail = os.path.split( display_config )
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == relative_tail:
- # The value of display_path must be absolute due to job_working_directory.
- display_path = os.path.abspath( root )
- break
- if display_path:
- break
- if converter_path and display_path:
- break
- return converter_path, display_path
-def load_repository_contents( app, repository_name, description, owner, changeset_revision, tool_path, repository_clone_url, relative_install_dir,
- current_working_dir, tmp_name, tool_shed=None, tool_section=None, shed_tool_conf=None, new_install=True, dist_to_shed=False ):
- """
- This method is used by the InstallManager (which does not have access to trans), to generate the metadata
- for the installed tool shed repository, among other things. It is imperative that the installed repository
- is updated to the desired changeset_revision before metadata is set because the process for setting metadata
- uses the repository files on disk. This method is called when new tools have been installed (in which case
- values should be received for tool_section and shed_tool_conf, and new_install should be left at it's default
- value) and when updates have been pulled to previously installed repositories (in which case the default value
- None is set for tool_section and shed_tool_conf, and the value of new_install is passed as False). When a new
- install is being done by the InstallManager (and not a user manually installing a repository from the Admin
- perspective), the value of dist_to_shed will be set to True, enabling determinatin of which installed repositories
- resulted from the InstallManager installing a repository that contains tools that used to be in the Galaxy
- distribution but are now in the main Galaxy tool shed.
- """
- if tool_section:
- if tool_section.id is None:
- section_id = ''
- else:
- section_id = tool_section.id
- if tool_section.version is None:
- section_version = ''
- else:
- section_version = tool_section.version
- if tool_section.name is None:
- section_name = ''
- else:
- section_name = tool_section.name
- else:
- section_id = ''
- section_version = ''
- section_name = ''
- tool_section_dict = dict( id=section_id, version=section_version, name=section_name )
- metadata_dict = generate_metadata( app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_section_dict )
- # Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked
- # deleted, undelete it. It is imperative that this happens before the call to alter_tool_panel() below because
- # tools will not be properly loaded if the repository is marked deleted.
+ # We're installing a new repository or reinstalling an uninstalled repository where all tools are contained in the same tool panel section
+ # or outside of any sections in the tool panel. We cannot pass a specific tool_config since we do not yet have one.
+ tool_section_dict = generate_tool_section_dict( tool_config=None, tool_section=tool_section )
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_section_dict )
+ # Add a new record to the tool_shed_repository table if one doesn't already exist. If one exists but is marked deleted, undelete it. This
+ # must happen before the call to add_to_tool_panel() below because tools will not be properly loaded if the repository is marked deleted.
log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name )
- tool_shed_repository = create_or_update_tool_shed_repository( app,
+ tool_shed_repository = create_or_update_tool_shed_repository( trans.app,
repository_name,
description,
changeset_revision,
repository_clone_url,
metadata_dict,
- dist_to_shed=dist_to_shed )
+ dist_to_shed=False )
if 'tools' in metadata_dict:
- repository_tools_tups = get_repository_tools_tups( app, metadata_dict )
+ repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
sample_files = metadata_dict.get( 'sample_files', [] )
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups )
+ repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_path, sample_files, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups )
+ repository_tools_tups = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
# Handle tools that use fabric scripts to install dependencies.
handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
if new_install:
- alter_tool_panel( app=app,
- repository_name=repository_name,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- repository_tools_tups=repository_tools_tups,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- tool_path=tool_path,
- owner=owner,
- new_install=new_install,
- deactivate=False,
- uninstall=False )
- elif app.toolbox_search.enabled:
- # If search support for tools is enabled, index the new installed tools. In the
- # condition above, this happens in the alter_tool_panel() method.
- app.toolbox_search = ToolBoxSearch( app.toolbox )
+ add_to_tool_panel( app=trans.app,
+ repository_name=repository_name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=new_install )
+ elif trans.app.toolbox_search.enabled:
+ # If search support for tools is enabled, index the new installed tools. In the condition above, this happens in the
+ # add_to_tool_panel() method.
+ trans.app.toolbox_search = ToolBoxSearch( trans.app.toolbox )
# Remove the temporary file
try:
os.unlink( tmp_name )
@@ -731,7 +828,7 @@
datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
# Load data types required by tools.
override = not new_install
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, override=override )
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=override )
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
@@ -743,118 +840,22 @@
display_path=display_path )
if converter_path:
# Load proprietary datatype converters
- app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=repository_dict )
+ trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
if display_path:
# Load proprietary datatype display applications
- app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
+ trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
return tool_shed_repository, metadata_dict
-def alter_tool_panel( app, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, tool_section,
- shed_tool_conf, tool_path, owner, new_install=True, deactivate=False, uninstall=False ):
- """
- A tool shed repository is being installed / updated / deactivated / uninstalled,
- so handle tool panel alterations accordingly.
- """
- # We need to change the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
- # in the shed_tool_conf_dict associated with the file.
- for index, shed_tool_conf_dict in enumerate( app.toolbox.shed_tool_confs ):
- config_filename = shed_tool_conf_dict[ 'config_filename' ]
- if config_filename == shed_tool_conf:
- config_elems = shed_tool_conf_dict[ 'config_elems' ]
- break
- else:
- head, tail = os.path.split( config_filename )
- if tail == shed_tool_conf:
- config_elems = shed_tool_conf_dict[ 'config_elems' ]
- break
- # Geneate the list of ElementTree Element objects for each section or list of tools.
- elem_list = generate_tool_panel_elem_list( repository_name,
- repository_clone_url,
- changeset_revision,
- repository_tools_tups,
- tool_section=tool_section,
- owner=owner )
- if deactivate:
- # Remove appropriate entries from the shed_tool_conf file on disk. We first create an list of
- # guids for all tools that will be removed from the tool config.
- tool_elements_removed = 0
- guids_to_remove = [ repository_tool_tup[1] for repository_tool_tup in repository_tools_tups ]
- if tool_section:
- # Remove all appropriate tool sub-elements from the section element.
- for section_elem in elem_list:
- section_elem_id = section_elem.get( 'id' )
- section_elem_name = section_elem.get( 'name' )
- section_elem_version = section_elem.get( 'version' )
- for config_elem in config_elems:
- config_elems_to_remove = []
- if config_elem.tag == 'section' and \
- config_elem.get( 'id' ) == section_elem_id and \
- config_elem.get( 'name' ) == section_elem_name and \
- config_elem.get( 'version' ) == section_elem_version:
- # We've located the section element in the in-memory list of config_elems, so we can remove
- # all of the appropriate tools sub-elements from the section.
- tool_elems_to_remove = []
- for tool_elem in config_elem:
- tool_elem_guid = tool_elem.get( 'guid' )
- if tool_elem_guid in guids_to_remove:
- tool_elems_to_remove.append( tool_elem )
- for tool_elem in tool_elems_to_remove:
- # Remove all of the appropriate tool sub-elements from the section element.
- tool_elem_guid = tool_elem.get( 'guid' )
- config_elem.remove( tool_elem )
- log.debug( "Removed tool with guid '%s'." % str( tool_elem_guid ) )
- tool_elements_removed += 1
- if len( config_elem ) < 1:
- # Keep a list of all empty section elements so they can be removed.
- config_elems_to_remove.append( config_elem )
- if tool_elements_removed == len( guids_to_remove ):
- break
- for config_elem in config_elems_to_remove:
- # The section element includes no tool sub-elements, so remove it.
- config_elems.remove( config_elem )
- log.debug( "Removed section with id '%s'." % str( section_elem_id ) )
- if tool_elements_removed == len( guids_to_remove ):
- break
- else:
- # Remove all appropriate tool elements from the root (tools are outside of any sections).
- tool_elems_to_remove = []
- for tool_elem in elem_list:
- tool_elem_guid = tool_elem.get( 'guid' )
- for config_elem in config_elems:
- if config_elem.tag == 'tool' and config_elem.get( 'guid' ) == tool_elem_guid:
- tool_elems_to_remove.append( tool_elem )
- for config_elem in config_elems:
- for tool_elem in tool_elems_to_remove:
- try:
- # Remove the tool element from the in-memory list of config_elems.
- config_elem.remove( tool_elem )
- log.debug( "Removed tool with guid '%s'." % str( tool_elem_guid ) )
- except:
- # The tool_elem is not a sub-element of the current config_elem.
- pass
- else:
- # Generate a new entry for the tool config.
- if tool_section:
- for section_elem in elem_list:
- # Load the section into the tool panel.
- app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path )
- else:
- # Load the tools into the tool panel outside of any sections.
- for tool_elem in elem_list:
- guid = tool_elem.get( 'guid' )
- app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
- if new_install:
- # Append the new entry (either section or list of tools) to the shed_tool_config file,
- # and add the xml element to the in-memory list of config_elems.
- for elem_entry in elem_list:
- config_elems.append( elem_entry )
- shed_tool_conf_dict[ 'config_elems' ] = config_elems
- app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
- if uninstall or not deactivate:
- # Persist the altered in-memory version of the tool config.
- config_elems_to_xml_file( app, shed_tool_conf_dict )
- if app.toolbox_search.enabled:
- # If search support for tools is enabled, index the new installed tools.
- app.toolbox_search = ToolBoxSearch( app.toolbox )
+def panel_entry_per_tool( tool_section_dict ):
+ # Return True if tool_section_dict looks like this.
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ # But not like this.
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ if len( tool_section_dict ) != 3:
+ return True
+ for k, v in tool_section_dict.items():
+ if k not in [ 'id', 'version', 'name' ]:
+ return True
+ return False
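+ # For illustration (guid hypothetical):
+ # panel_entry_per_tool( { id : 's1', version : '', name : 'Some Section' } ) returns False, while
+ # panel_entry_per_tool( { '<guid>' : { tool_config : 't.xml', id : 's1', version : '', name : 'Some Section' } } ) returns True.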
def pull_repository( current_working_dir, repo_files_dir, name ):
# Pull the latest possible contents to the repository.
log.debug( "Pulling latest updates to the repository named '%s'" % name )
@@ -867,6 +868,60 @@
os.chdir( current_working_dir )
tmp_stderr.close()
return returncode, tmp_name
+def remove_from_tool_panel( app, shed_tool_conf, tool_panel_dict, uninstall=False ):
+ # A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly. We need to change the
+ # in-memory version and the file system version of the shed_tool_conf file. The value of tool_panel_dict is a dictionary of entries
+ # that look like this.
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ index, shed_tool_conf_dict = get_shed_tool_conf_dict( app, shed_tool_conf )
+ config_elems = shed_tool_conf_dict[ 'config_elems' ]
+ tool_elements_removed = 0
+ config_elems_to_remove = []
+ # Create a list of guids for all tools that will be removed from the tool panel config.
+ guids_to_remove = [ k for k in tool_panel_dict.keys() ]
+ for config_elem in config_elems:
+ if config_elem.tag == 'section':
+ tool_elems_to_remove = []
+ for tool_elem in config_elem:
+ if tool_elem.get( 'guid' ) in guids_to_remove:
+ tool_elems_to_remove.append( tool_elem )
+ for tool_elem in tool_elems_to_remove:
+ # Remove all of the appropriate tool sub-elements from the section element.
+ config_elem.remove( tool_elem )
+ log.debug( "Removed tool with guid '%s'." % str( tool_elem.get( 'guid' ) ) )
+ tool_elements_removed += 1
+ if len( config_elem ) < 1:
+ # Keep a list of all empty section elements so they can be removed.
+ config_elems_to_remove.append( config_elem )
+ if tool_elements_removed == len( guids_to_remove ):
+ break
+ elif config_elem.tag == 'tool':
+ if config_elem.get( 'guid' ) in guids_to_remove:
+ config_elems_to_remove.append( config_elem )
+ log.debug( "Removed tool with guid '%s'." % str( config_elem.get( 'guid' ) ) )
+ tool_elements_removed += 1
+ if tool_elements_removed == len( guids_to_remove ):
+ break
+ for config_elem in config_elems_to_remove:
+ # Remove the element from the in-memory tool panel.
+ if config_elem.tag == 'section':
+ key = 'section_%s' % str( config_elem.get( "id" ) )
+ del app.toolbox.tool_panel[ key ]
+ elif config_elem.tag == 'tool':
+ key = 'tool_%s' % str( config_elem.get( 'guid' ) )
+ del app.toolbox.tool_panel[ key ]
+ # Remove the element from the in-memory list of elements.
+ config_elems.remove( config_elem )
+ # Update the config_elems of the in-memory shed_tool_conf_dict.
+ shed_tool_conf_dict[ 'config_elems' ] = config_elems
+ app.toolbox.shed_tool_confs[ index ] = shed_tool_conf_dict
+ if uninstall:
+ # Persist the altered in-memory version of the tool config.
+ config_elems_to_xml_file( app, shed_tool_conf_dict )
+ if app.toolbox_search.enabled:
+ # If search support for tools is enabled, rebuild the index so the removed tools are no longer included.
+ app.toolbox_search = ToolBoxSearch( app.toolbox )
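+ # Note the tool panel key convention relied on above (guid hypothetical): a section element with id
+ # 'filter_group' is keyed as 'section_filter_group' in app.toolbox.tool_panel, while a tool loaded
+ # outside any section is keyed as 'tool_<guid>'.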
def update_repository( current_working_dir, repo_files_dir, changeset_revision ):
# Update the cloned repository to changeset_revision. It is imperative that the
# installed repository is updated to the desired changeset_revision before metadata
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -76,6 +76,95 @@
@web.expose
@web.require_admin
+ def activate_repository( self, trans, **kwd ):
+ """Activate a repository that was deactivated but not uninstalled."""
+ repository = get_repository( trans, kwd[ 'id' ] )
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_panel_config_tool_path_install_dir( trans, repository )
+ repository.deleted = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ if repository.includes_tools:
+ # Reload tools into the appropriate tool panel section.
+ metadata = repository.metadata
+ if 'tool_panel_section' in metadata:
+ if panel_entry_per_tool( metadata[ 'tool_panel_section' ] ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ # TODO: Fix this to handle the case where the tools are distributed across more than 1 section. The
+ # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ tool_section_dict = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+ else:
+ # The value of tool_panel_section is the old dictionary type shown below, so update it to the new dictionary type shown above.
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ tool_section_dict = metadata[ 'tool_panel_section' ]
+ original_section_id = tool_section_dict[ 'id' ]
+ else:
+ # Tools were loaded outside of any panel sections.
+ original_section_id = ''
+ repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
+ guids_to_activate = [ repository_tool_tup[1] for repository_tool_tup in repository_tools_tups ]
+ # Make sure we have a tool_version for each guid.
+ for guid_to_activate in guids_to_activate:
+ if not get_tool_version( trans.app, guid_to_activate ):
+ # We're somehow missing a tool_version, so create a new one.
+ tool_version = trans.model.ToolVersion( tool_id=guid_to_activate, tool_shed_repository=repository )
+ trans.sa_session.add( tool_version )
+ trans.sa_session.flush()
+ if original_section_id in [ '' ]:
+ # If the repository includes tools, reload them into the tool panel outside of any sections.
+ self.__add_tools_to_tool_panel( trans, repository, repository_tools_tups, tool_section=None, section_key=None )
+ else:
+ original_section_name = tool_section_dict[ 'name' ]
+ original_section_version = tool_section_dict[ 'version' ]
+ section_key = 'section_%s' % str( original_section_id )
+ if section_key in trans.app.toolbox.tool_panel:
+ # Load the repository tools into a section that still exists in the tool panel.
+ tool_section = trans.app.toolbox.tool_panel[ section_key ]
+ self.__add_tools_to_tool_panel( trans, repository, repository_tools_tups, tool_section=tool_section, section_key=section_key )
+ else:
+ # Load the repository tools into a section that no longer exists in the tool panel. The section must
+ # still exist in the tool config since the repository was only deactivated and not uninstalled.
+ sections_to_load = []
+ tool_elems_found = 0
+ # Only inspect tool configs that contain installed tool shed repositories.
+ for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
+ config_filename = shed_tool_conf_dict[ 'config_filename' ]
+ log.info( "Parsing the tool configuration %s" % config_filename )
+ tree = util.parse_xml( config_filename )
+ root = tree.getroot()
+ tool_path = root.get( 'tool_path' )
+ if tool_path is not None:
+ # Tool configs that contain tools installed from tool shed repositories must have a tool_path attribute.
+ for elem in root:
+ if elem.tag == 'section' and \
+ elem.get( 'id' ) == original_section_id and \
+ elem.get( 'name' ) == original_section_name and \
+ elem.get( 'version' ) == original_section_version:
+ # We've found the section, but we have to make sure it contains the
+ # correct tool tag set. This is necessary because the shed tool configs
+ # can include multiple sections of the same id, name and version, each
+ # containing one or more tool tag sets.
+ for tool_elem in elem:
+ if tool_elem.get( 'guid' ) in guids_to_activate:
+ tool_elems_found += 1
+ if elem not in sections_to_load:
+ sections_to_load.append( elem )
+ if tool_elems_found == len( guids_to_activate ):
+ break
+ if tool_elems_found == len( guids_to_activate ):
+ break
+ if tool_elems_found == len( guids_to_activate ):
+ break
+ for elem in sections_to_load:
+ trans.app.toolbox.load_section_tag_set( elem, trans.app.toolbox.tool_panel, tool_path )
+ message = 'The <b>%s</b> repository has been activated.' % repository.name
+ status = 'done'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repositories',
+ message=message,
+ status=status ) )
+ @web.expose
+ @web.require_admin
def browse_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -96,10 +185,14 @@
return self.check_for_updates( trans, **kwd )
if operation == "activate or reinstall":
repository = get_repository( trans, kwd[ 'id' ] )
- if repository.uninstalled and repository.includes_tools:
- # Only allow selecting a different section in the tool panel if the repository was uninstalled.
- return self.reselect_tool_panel_section( trans, **kwd )
- return self.activate_or_reinstall_repository( trans, **kwd )
+ if repository.uninstalled:
+ if repository.includes_tools:
+ # Only allow selecting a different section in the tool panel if the repository was uninstalled.
+ return self.reselect_tool_panel_section( trans, **kwd )
+ else:
+ return self.reinstall_repository( trans, **kwd )
+ else:
+ return self.activate_repository( trans, **kwd )
if operation == "deactivate or uninstall":
return self.deactivate_or_uninstall_repository( trans, **kwd )
if 'message' not in kwd or not kwd[ 'message' ]:
@@ -107,6 +200,13 @@
return self.repository_list_grid( trans, **kwd )
@web.expose
@web.require_admin
+ def browse_tool_shed( self, trans, **kwd ):
+ tool_shed_url = kwd[ 'tool_shed_url' ]
+ galaxy_url = url_for( '/', qualified=True )
+ url = '%s/repository/browse_valid_repositories?galaxy_url=%s&webapp=galaxy' % ( tool_shed_url, galaxy_url )
+ return trans.response.send_redirect( url )
+ @web.expose
+ @web.require_admin
def browse_tool_sheds( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -117,6 +217,88 @@
status='error' )
@web.expose
@web.require_admin
+ def check_for_updates( self, trans, **kwd ):
+ # Send a request to the relevant tool shed to see if there are any updates.
+ repository = get_repository( trans, kwd[ 'id' ] )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
+ return trans.response.send_redirect( url )
+ @web.expose
+ @web.require_admin
+ def deactivate_or_uninstall_repository( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ remove_from_disk = params.get( 'remove_from_disk', '' )
+ remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
+ repository = get_repository( trans, kwd[ 'id' ] )
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_panel_config_tool_path_install_dir( trans, repository )
+ if params.get( 'deactivate_or_uninstall_repository_button', False ):
+ metadata = repository.metadata
+ if repository.includes_tools:
+ repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
+ # Generate the list of tool panel keys derived from the tools included in the repository.
+ if repository_tools_tups:
+ repository_tool_panel_keys = [ 'tool_%s' % repository_tools_tup[ 1 ] for repository_tools_tup in repository_tools_tups ]
+ else:
+ repository_tool_panel_keys = []
+ if 'tool_panel_section' in metadata:
+ if panel_entry_per_tool( metadata[ 'tool_panel_section' ] ):
+ # The tool_panel_section dictionary contains entries that look like this.
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ else:
+ # The tool_panel_section dictionary looks like the following. This is the old definition of the tool_panel_section,
+ # so update it to the current dictionary like that above. All of the repository tools will be installed in the same
+ # section or outside of any sections.
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ tool_section_dict = metadata[ 'tool_panel_section' ]
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata, tool_section_dict=tool_section_dict )
+ repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
+ else:
+ # The tool_panel_section was introduced late, so set its value if it's missing in the metadata.
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata )
+ repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
+ repository_clone_url = generate_clone_url( trans, repository )
+ # The repository is either being deactivated or uninstalled, so handle tool panel alterations accordingly.
+ # If the repository is being uninstalled, the appropriate tools or tool sections will be removed from the
+ # appropriate shed-related tool config file on disk.
+ remove_from_tool_panel( app=trans.app,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ uninstall=remove_from_disk_checked )
+ if repository.includes_datatypes:
+ # Deactivate proprietary datatypes.
+ load_datatype_items( trans.app, repository, relative_install_dir, deactivate=True )
+ if remove_from_disk_checked:
+ # Remove the repository from disk.
+ try:
+ shutil.rmtree( relative_install_dir )
+ log.debug( "Removed repository installation directory: %s" % str( relative_install_dir ) )
+ except Exception, e:
+ log.debug( "Error removing repository installation directory %s: %s" % ( str( relative_install_dir ), str( e ) ) )
+ repository.uninstalled = True
+ repository.deleted = True
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ if remove_from_disk_checked:
+ message = 'The repository named <b>%s</b> has been uninstalled.' % repository.name
+ else:
+ message = 'The repository named <b>%s</b> has been deactivated.' % repository.name
+ status = 'done'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repositories',
+ message=message,
+ status=status ) )
+ remove_from_disk_check_box = CheckboxField( 'remove_from_disk', checked=remove_from_disk_checked )
+ return trans.fill_template( '/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako',
+ repository=repository,
+ remove_from_disk_check_box=remove_from_disk_check_box,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def find_tools_in_tool_shed( self, trans, **kwd ):
tool_shed_url = kwd[ 'tool_shed_url' ]
galaxy_url = url_for( '/', qualified=True )
@@ -131,13 +313,6 @@
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
- def browse_tool_shed( self, trans, **kwd ):
- tool_shed_url = kwd[ 'tool_shed_url' ]
- galaxy_url = url_for( '/', qualified=True )
- url = '%s/repository/browse_valid_repositories?galaxy_url=%s&webapp=galaxy' % ( tool_shed_url, galaxy_url )
- return trans.response.send_redirect( url )
- @web.expose
- @web.require_admin
def install_repository( self, trans, **kwd ):
if not trans.app.toolbox.shed_tool_confs:
message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one shed tool configuration file name with a '
@@ -159,10 +334,9 @@
if includes_tools:
shed_tool_conf = kwd[ 'shed_tool_conf' ]
else:
- # If installing a repository that includes no tools, get the relative
- # tool_path from the file to which the install_tool_config_file config
- # setting points.
- shed_tool_conf = trans.app.config.install_tool_config
+ # If installing a repository that includes no tools, get the relative tool_path from the file
+ # to which the migrated_tools_config setting points.
+ shed_tool_conf = trans.app.config.migrated_tools_config
# Get the tool path by searching the list of shed_tool_confs for the dictionary
# that contains the information about shed_tool_conf.
for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
@@ -208,7 +382,6 @@
relative_install_dir = os.path.join( clone_dir, name )
if os.path.exists( clone_dir ):
# Repository and revision has already been cloned.
- # TODO: implement the ability to re-install or revert an existing repository.
message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name )
else:
returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
@@ -217,7 +390,7 @@
if returncode == 0:
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
tool_shed = clean_tool_shed_url( tool_shed_url )
- tool_shed_repository, metadata_dict = load_repository_contents( app=trans.app,
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
repository_name=name,
description=description,
owner=owner,
@@ -227,11 +400,11 @@
relative_install_dir=relative_install_dir,
current_working_dir=current_working_dir,
tmp_name=tmp_name,
+ tool_panel_dict=None,
tool_shed=tool_shed,
tool_section=tool_section,
shed_tool_conf=shed_tool_conf,
- new_install=True,
- dist_to_shed=False )
+ new_install=True )
if 'tools' in metadata_dict:
@@ -299,140 +472,214 @@
status=status )
@web.expose
@web.require_admin
- def set_tool_versions( self, trans, **kwd ):
- # Get the tool_versions from the tool shed for each tool in the installed change set.
+ def manage_repository( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
repository = get_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = from_json_string( text )
- handle_tool_versions( trans.app, tool_version_dicts, repository )
- message = "Tool versions have been set for all included tools."
- status = 'done'
- else:
- message = "Version information for the tools included in the <b>%s</b> repository is missing. " % repository.name
- message += "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
- message ++ "from the installed repository's <b>Repository Actions</b> menu. "
- status = 'error'
- shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
+ description = util.restore_text( params.get( 'description', repository.description ) )
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_panel_config_tool_path_install_dir( trans, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ if params.get( 'edit_repository_button', False ):
+ if description != repository.description:
+ repository.description = description
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = "The repository information has been updated."
+ elif params.get( 'set_metadata_button', False ):
+ repository_clone_url = generate_clone_url( trans, repository )
+ # In case metadata was previously generated for this repository, we'll check to see if it has information needed for the tool_section_dict.
+ metadata = repository.metadata
+ if 'tool_panel_section' in metadata:
+ # For backward compatibility we have to handle 2 types of dictionaries. In the past, all repository tools had to be installed into
+ # a single ToolSection (or outside of any sections) in the tool panel. In this case, the dictionary looks like this:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ # Later, each tool from a repository could be installed into the tool panel inside or outside a specified ToolSection in the tool panel.
+ if panel_entry_per_tool( metadata[ 'tool_panel_section' ] ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_panel_dict=tool_panel_dict )
+ else:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ tool_section_dict = metadata[ 'tool_panel_section' ]
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_section_dict )
+ else:
+ # Not sure if we'll ever reach here, but just in case...
+ tool_section_dict = dict( id='', version='', name='' )
+ metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_section_dict )
+ if metadata_dict:
+ repository.metadata = metadata_dict
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = "Repository metadata has been reset."
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
- description=repository.description,
+ description=description,
repo_files_dir=repo_files_dir,
message=message,
status=status )
@web.expose
@web.require_admin
- def deactivate_or_uninstall_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- remove_from_disk = params.get( 'remove_from_disk', '' )
- remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
+ def reinstall_repository( self, trans, **kwd ):
repository = get_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
- if params.get( 'deactivate_or_uninstall_repository_button', False ):
- metadata = repository.metadata
- if repository.includes_tools:
- # Deactivate repository tools.
- repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
- # Generate the list of tool panel keys derived from the tools included in the repository.
- repository_tool_panel_keys = []
- if repository_tools_tups:
- repository_tool_panel_keys = [ 'tool_%s' % repository_tools_tup[ 1 ] for repository_tools_tup in repository_tools_tups ]
- tool_panel_section = metadata[ 'tool_panel_section' ]
- section_id = tool_panel_section[ 'id' ]
- if section_id in [ '' ]:
- # If the repository includes tools, they were loaded into the tool panel outside of any sections.
- tool_section = None
- self.__remove_tools_from_tool_panel( trans, repository_tool_panel_keys )
+ no_changes = kwd.get( 'no_changes', '' )
+ no_changes_checked = CheckboxField.is_checked( no_changes )
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_panel_config_tool_path_install_dir( trans, repository )
+ current_working_dir = os.getcwd()
+ repository_clone_url = generate_clone_url( trans, repository )
+ clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, repository.installed_changeset_revision ) )
+ relative_install_dir = os.path.join( clone_dir, repository.name )
+ returncode, tmp_name = clone_repository( repository.name, clone_dir, current_working_dir, repository_clone_url )
+ if returncode == 0:
+ returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, repository.installed_changeset_revision )
+ if returncode == 0:
+ # Get the location in the tool panel in which the tool was originally loaded.
+ metadata = repository.metadata
+ if 'tool_panel_section' in metadata:
+ if panel_entry_per_tool( metadata[ 'tool_panel_section' ] ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ # TODO: Fix this to handle the case where the tools are distributed across more than 1 ToolSection. The
+ # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ tool_section_dict = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+ else:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ tool_section_dict = metadata[ 'tool_panel_section' ]
+ tool_section = generate_tool_section_element_from_dict( tool_section_dict )
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata, tool_section=tool_section )
+ original_section_id = tool_section_dict[ 'id' ]
+ original_section_name = tool_section_dict[ 'name' ]
else:
- # If the repository includes tools, they were loaded into the tool panel inside a section.
- section_key = 'section_%s' % str( section_id )
- if section_key in trans.app.toolbox.tool_panel:
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata )
+ original_section_id = ''
+ original_section_name = ''
+ if no_changes_checked:
+ if original_section_id in [ '' ]:
+ tool_section = None
+ else:
+ section_key = 'section_%s' % str( original_section_id )
+ if section_key in trans.app.toolbox.tool_panel:
+ tool_section = trans.app.toolbox.tool_panel[ section_key ]
+ else:
+ # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = original_section_name
+ elem.attrib[ 'id' ] = original_section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ section_key ] = tool_section
+ else:
+ # The user elected to change the tool panel section to contain the tools.
+ new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
+ tool_panel_section = kwd.get( 'tool_panel_section', '' )
+ if new_tool_panel_section:
+ section_id = new_tool_panel_section.lower().replace( ' ', '_' )
+ # Update each tool_section dictionary in tool_panel_dict with the new section attributes.
+ for guid, tool_section_dict in tool_panel_dict.items():
+ tool_section_dict[ 'id' ] = section_id
+ tool_section_dict[ 'name' ] = new_tool_panel_section
+ tool_section_dict[ 'version' ] = ''
+ tool_panel_dict[ guid ] = tool_section_dict
+ new_section_key = 'section_%s' % str( section_id )
+ if new_section_key in trans.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in trans.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
+ tool_section = trans.app.toolbox.tool_panel[ new_section_key ]
+ else:
+ # Appending a new section to trans.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = new_tool_panel_section
+ elem.attrib[ 'id' ] = section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ new_section_key ] = tool_section
+ elif tool_panel_section:
+ section_key = 'section_%s' % tool_panel_section
tool_section = trans.app.toolbox.tool_panel[ section_key ]
- self.__remove_tools_from_tool_panel( trans, repository_tool_panel_keys, tool_section=tool_section, section_key=section_key )
else:
- # The tool panel section could not be found, so handle deactivating tools
- # as if they were loaded into the tool panel outside of any sections.
+ # Update each tool_section dictionary in tool_panel_dict in case the tools used to be contained in a panel section
+ # but are now being moved outside of any panel sections.
+ for guid, tool_section_dict in tool_panel_dict.items():
+ tool_section_dict[ 'id' ] = ''
+ tool_section_dict[ 'name' ] = ''
+ tool_section_dict[ 'version' ] = ''
+ tool_panel_dict[ guid ] = tool_section_dict
tool_section = None
- self.__remove_tools_from_tool_panel( trans, repository_tool_panel_keys )
- repository_clone_url = self.__generate_clone_url( trans, repository )
- # The repository is either being deactivated or uninstalled, so handle tool panel alterations accordingly.
- # If the repository is being uninstalled, the appropriate tools or tool sections will be removed from the
- # appropriate tool config file on disk.
- alter_tool_panel( app=trans.app,
- repository_name=repository.name,
- repository_clone_url=repository_clone_url,
- changeset_revision=repository.installed_changeset_revision,
- repository_tools_tups=repository_tools_tups,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- tool_path=tool_path,
- owner=repository.owner,
- new_install=False,
- deactivate=True,
- uninstall=remove_from_disk_checked )
- if repository.includes_datatypes:
- # Deactivate proprietary datatypes.
- load_datatype_items( trans.app, repository, relative_install_dir, deactivate=True )
- if remove_from_disk_checked:
- # Remove the repository from disk.
- try:
- shutil.rmtree( relative_install_dir )
- log.debug( "Removed repository installation directory: %s" % str( relative_install_dir ) )
- except Exception, e:
- log.debug( "Error removing repository installation directory %s: %s" % ( str( relative_install_dir ), str( e ) ) )
- repository.uninstalled = True
- repository.deleted = True
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- if remove_from_disk_checked:
- message = 'The repository named <b>%s</b> has been uninstalled.' % repository.name
- else:
- message = 'The repository named <b>%s</b> has been deactivated.' % repository.name
- status = 'done'
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='browse_repositories',
- message=message,
- status=status ) )
- remove_from_disk_check_box = CheckboxField( 'remove_from_disk', checked=remove_from_disk_checked )
- return trans.fill_template( '/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako',
- repository=repository,
- remove_from_disk_check_box=remove_from_disk_check_box,
- message=message,
- status=status )
- def __remove_tools_from_tool_panel( self, trans, repository_tool_panel_keys, tool_section=None, section_key=None ):
- # Delete tools loaded into the tool panel by altering the in-memory tool_panel dictionary appropriately. If the
- # tool_section is not received, the tools were loaded into the tool_panel dictionary outside of any sections.
- # Otherwise all tools were loaded into the received tool_section.
- if tool_section:
- # The tool_section.elems dictionary looks something like:
- # {'tool_gvk.bx.psu.edu:9009/repos/test/filter/Filter1/1.0.1': <galaxy.tools.Tool instance at 0x10769ae60>}
- for item_id, item in tool_section.elems.items():
- if item_id in repository_tool_panel_keys:
- del tool_section.elems[ item_id ]
- if not tool_section.elems:
- del trans.app.toolbox.tool_panel[ section_key ]
- else:
- # The tool panel looks something like:
- # {'tool_gvk.bx.psu.edu:9009/repos/test/blast2go/blast2go/0.0.2': <galaxy.tools.Tool instance at 0x1076a5f80>}
- for item_id, item in trans.app.toolbox.tool_panel.items():
- if item_id in repository_tool_panel_keys:
- del trans.app.toolbox.tool_panel[ item_id ]
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_panel_dict=tool_panel_dict,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ new_install=True )
+ repository.uninstalled = False
+ repository.deleted = False
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = 'The <b>%s</b> repository has been reinstalled.' % repository.name
+ status = 'done'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repositories',
+ message=message,
+ status=status ) )
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
+ """
+ INSIDE SECTION:
+ "{"tool_panel_section": {"gvk.bx.psu.edu:9009/repos/test/grouping/Grouping1/1.9.0": {"id": "group",
+ "name": "Group",
+ "tool_config": "grouping.xml",
+ "version": ""}},
+ "tools": [{"description": "data by a column and perform aggregate operation on other columns.",
+ "guid": "gvk.bx.psu.edu:9009/repos/test/grouping/Grouping1/1.9.0",
+ "id": "Grouping1",
+ "name": "Group",
+ "requirements": [{"fabfile": null, "method": null, "name": "rpy", "type": "python-module", "version": null}],
+ "tests": [{"inputs": [["input1", "1.bed", {"children": [], "value": "1.bed"}], ["groupcol", "1", {"children": [], "value": "1"}], ["ignorecase", "true", {"children": [], "value": "true"}], ["optype", "mean", {"children": [], "value": "mean"}], ["opcol", "2", {"children": [], "value": "2"}], ["opround", "no", {"children": [], "value": "no"}]], "name": "Test-1", "outputs": [["out_file1", "groupby_out1.dat", {"assert_list": null, "compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["1.bed", {"children": [], "value": "1.bed"}]]}, {"inputs": [["input1", "1.tabular", {"children": [], "value": "1.tabular"}], ["groupcol", "1", {"children": [], "value": "1"}], ["ignorecase", "true", {"children": [], "value": "true"}], ["optype", "mean", {"children": [], "value": "mean"}], ["opcol", "2", {"children": [], "value": "2"}], ["opround", "no", {"children": [], "value": "no"}]], "name": "Test-2", "outputs": [["out_file1", "groupby_out2.dat", {"assert_list": null, "compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["1.tabular", {"children": [], "value": "1.tabular"}]]}],
+ "tool_config": "../shed_tools/gvk.bx.psu.edu/repos/test/grouping/935f00105de8/grouping/grouping.xml",
+ "version": "1.9.0",
+ "version_string_cmd": null}]}"
+ OUTSIDE SECTION:
+ "{"tool_panel_section": {"gvk.bx.psu.edu:9009/repos/test/filter/Filter1/1.0.1": {"id": "",
+ "name": "",
+ "tool_config": "filtering.xml",
+ "version": ""}},
+ "tools": [{"description": "data on any column using simple expressions",
+ "guid": "gvk.bx.psu.edu:9009/repos/test/filter/Filter1/1.0.1",
+ "id": "Filter1",
+ "name": "Filter",
+ "requirements": [],
+ "tests": [{"inputs": [["input", "1.bed", {"children": [], "value": "1.bed"}], ["cond", "c1=='chr22'", {"children": [], "value": "c1=='chr22'"}]], "name": "Test-1", "outputs": [["out_file1", "filter1_test1.bed", {"assert_list": null, "compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["1.bed", {"children": [], "value": "1.bed"}]]}, {"inputs": [["input", "7.bed", {"children": [], "value": "7.bed"}], ["cond", "c1=='chr1' and c3-c2>=2000 and c6=='+'", {"children": [], "value": "c1=='chr1' and c3-c2>=2000 and c6=='+'"}]], "name": "Test-2", "outputs": [["out_file1", "filter1_test2.bed", {"assert_list": null, "compare": "diff", "delta": 10000, "extra_files": [], "lines_diff": 0, "sort": false}]], "required_files": [["7.bed", {"children": [], "value": "7.bed"}]]}],
+ "tool_config": "../shed_tools/gvk.bx.psu.edu/repos/test/filter/b2df18d723c5/filter/filtering.xml",
+ "version": "1.0.1",
+ "version_string_cmd": null}]}"
+ """
repository = get_repository( trans, kwd[ 'id' ] )
# Get the location in the tool panel in which the tool was originally loaded.
metadata = repository.metadata
- tool_panel_section = metadata[ 'tool_panel_section' ]
- original_section_name = tool_panel_section[ 'name' ]
+ if 'tool_panel_section' in metadata:
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ if panel_entry_per_tool( tool_panel_dict ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ # TODO: Fix this to handle the case where the tools are distributed across more than 1 ToolSection. The
+ # following assumes everything was loaded into 1 section (or no section) in the tool panel.
+ tool_panel_dict = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
+ original_section_name = tool_panel_dict[ 'name' ]
+ else:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ original_section_name = tool_panel_dict[ 'name' ]
+ else:
+ original_section_name = ''
tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
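
[Editor's note: the reselect_tool_panel_section docstring above quotes the two metadata shapes that panel_entry_per_tool() distinguishes. The implementation of that helper is not part of this diff, so the snippet below is only an illustrative sketch; looks_like_panel_entry_per_tool and its shape check are assumptions based on how the helper is called in this changeset.]

# Illustrative only -- not part of this changeset.
# Old-style metadata: a single section (or no section) shared by all repository tools.
old_style = { 'id': 'group', 'version': '', 'name': 'Group' }
# New-style metadata: one entry per tool guid, each carrying its own section info.
new_style = { 'gvk.bx.psu.edu:9009/repos/test/grouping/Grouping1/1.9.0':
                  { 'tool_config': 'grouping.xml', 'id': 'group', 'version': '', 'name': 'Group' } }

def looks_like_panel_entry_per_tool( tool_panel_section ):
    # Assumed semantics: only the new-style dictionary has dictionaries
    # (keyed by tool guid) as its values.
    return all( isinstance( value, dict ) for value in tool_panel_section.values() )

assert not looks_like_panel_entry_per_tool( old_style )
assert looks_like_panel_entry_per_tool( new_style )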
@@ -453,252 +700,35 @@
status=status )
@web.expose
@web.require_admin
- def activate_or_reinstall_repository( self, trans, **kwd ):
+ def set_tool_versions( self, trans, **kwd ):
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
repository = get_repository( trans, kwd[ 'id' ] )
- no_changes = kwd.get( 'no_changes', '' )
- no_changes_checked = CheckboxField.is_checked( no_changes )
- shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
- uninstalled = repository.uninstalled
- if uninstalled:
- if repository.dist_to_shed:
- href = '<a href="http://wiki.g2.bx.psu.edu/Tool%20Shed#Migrating_tools_from_the_Galaxy_distr…" '
- href += 'target="_blank">in this section of the Galaxy Tool Shed wiki</a>'
- message = "The <b>%s</b> repository should be reinstalled using the approach described %s. " % ( repository.name, href )
- message += "If <b>enable_tool_shed_install = True</b> and the contents of the file configured for the "
- message += "<b>tool_shed_install_config_file</b> setting in your universe_wsgi.ini file enable installation of the "
- message += "<b>%s</b> repository, then restarting your Galaxy server will reinstall the repository." % repository.name
- new_kwd = {}
- new_kwd[ 'sort' ] = 'name'
- new_kwd[ 'f-deleted' ] = 'True'
- new_kwd[ 'message' ] = message
- new_kwd[ 'status' ] = 'error'
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='browse_repositories',
- **new_kwd ) )
- else:
- current_working_dir = os.getcwd()
- repository_clone_url = self.__generate_clone_url( trans, repository )
- clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, repository.installed_changeset_revision ) )
- relative_install_dir = os.path.join( clone_dir, repository.name )
- returncode, tmp_name = clone_repository( repository.name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, repository.installed_changeset_revision )
- if returncode == 0:
- # Get the location in the tool panel in which the tool was originally loaded.
- metadata = repository.metadata
- tool_panel_section = metadata[ 'tool_panel_section' ]
- original_section_id = tool_panel_section[ 'id' ]
- original_section_name = tool_panel_section[ 'name' ]
- if no_changes_checked:
- if original_section_id in [ '' ]:
- tool_section = None
- else:
- section_key = 'section_%s' % str( original_section_id )
- if section_key in trans.app.toolbox.tool_panel:
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- else:
- # The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = original_section_name
- elem.attrib[ 'id' ] = original_section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ section_key ] = tool_section
- else:
- # The user elected to change the tool panel section to contain the tools.
- new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
- tool_panel_section = kwd.get( 'tool_panel_section', '' )
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- new_section_key = 'section_%s' % str( section_id )
- if new_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ new_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ new_section_key ] = tool_section
- elif tool_panel_section:
- section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- else:
- tool_section = None
- tool_shed_repository, metadata_dict = load_repository_contents( app=trans.app,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- new_install=True,
- dist_to_shed=False )
- repository.uninstalled = False
- repository.deleted = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = from_json_string( text )
+ handle_tool_versions( trans.app, tool_version_dicts, repository )
+ message = "Tool versions have been set for all included tools."
+ status = 'done'
else:
- # The repository was deactivated, but not uninstalled.
- repository.deleted = False
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- if repository.includes_tools:
- metadata = repository.metadata
- repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
- guids_to_activate = [ repository_tool_tup[1] for repository_tool_tup in repository_tools_tups ]
- # Make sure we have a tool_version for each guid.
- for guid_to_activate in guids_to_activate:
- if not get_tool_version( trans.app, guid_to_activate ):
- # We're somehow missing a tool_version, so create a new one.
- tool_version = trans.model.ToolVersion( tool_id=guid_to_activate, tool_shed_repository=repository )
- trans.sa_session.add( tool_version )
- trans.sa_session.flush()
- tool_panel_section = metadata[ 'tool_panel_section' ]
- original_section_id = tool_panel_section[ 'id' ]
- if original_section_id in [ '' ]:
- # If the repository includes tools, reload them into the tool panel outside of any sections.
- self.__add_tools_to_tool_panel( trans, repository, repository_tools_tups, tool_section=None, section_key=None )
- else:
- original_section_id = tool_panel_section[ 'id' ]
- original_section_name = tool_panel_section[ 'name' ]
- original_section_version = tool_panel_section[ 'version' ]
- # If the repository includes tools, reload them into the appropriate tool panel section.
- section_key = 'section_%s' % str( original_section_id )
- if section_key in trans.app.toolbox.tool_panel:
- # Load the repository tools into a section that still exists in the tool panel.
- tool_section = trans.app.toolbox.tool_panel[ section_key ]
- self.__add_tools_to_tool_panel( trans, repository, repository_tools_tups, tool_section=tool_section, section_key=section_key )
- else:
- # Load the repository tools into a section that no longer exists in the tool panel. The section must
- # still exist in the tool config since the repository was only deactivated and not uninstalled.
- sections_to_load = []
- tool_elems_found = 0
- # Only inspect tool configs that contain installed tool shed repositories.
- for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
- config_filename = shed_tool_conf_dict[ 'config_filename' ]
- log.info( "Parsing the tool configuration %s" % config_filename )
- tree = util.parse_xml( config_filename )
- root = tree.getroot()
- tool_path = root.get( 'tool_path' )
- if tool_path is not None:
- # Tool configs that contain tools installed from tool shed repositories
- # must have a tool_path attribute.
- for elem in root:
- if elem.tag == 'section' and \
- elem.get( 'id' ) == original_section_id and \
- elem.get( 'name' ) == original_section_name and \
- elem.get( 'version' ) == original_section_version:
- # We've found the section, but we have to make sure it contains the
- # correct tool tag set. This is necessary because the shed tool configs
- # can include multiple sections of the same id, name and version, each
- # containing one or more tool tag sets.
- for tool_elem in elem:
- if tool_elem.get( 'guid' ) in guids_to_activate:
- tool_elems_found += 1
- if elem not in sections_to_load:
- sections_to_load.append( elem )
- if tool_elems_found == len( guids_to_activate ):
- break
- if tool_elems_found == len( guids_to_activate ):
- break
- if tool_elems_found == len( guids_to_activate ):
- break
- for elem in sections_to_load:
- trans.app.toolbox.load_section_tag_set( elem, trans.app.toolbox.tool_panel, tool_path )
- if uninstalled:
- message = 'The <b>%s</b> repository has been reinstalled.' % repository.name
- else:
- message = 'The <b>%s</b> repository has been activated.' % repository.name
- status = 'done'
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='browse_repositories',
- message=message,
- status=status ) )
- def __add_tools_to_tool_panel( self, trans, repository, repository_tools_tups, tool_section=None, section_key=None ):
- # Load tools.
- if tool_section:
- elems = tool_section.elems
- for repository_tools_tup in repository_tools_tups:
- relative_path, guid, tool = repository_tools_tup
- tool.tool_shed = repository.tool_shed
- tool.repository_name = repository.name
- tool.repository_owner = repository.owner
- tool.installed_changeset_revision = repository.installed_changeset_revision
- tool.guid = guid
- # Set the tool's old_id to the id used before the tool shed existed.
- tool.old_id = tool.id
- # Set the tool's id to the tool shed guid.
- tool.id = guid
- if tool_section:
- if tool.id not in elems:
- elems[ 'tool_%s' % tool.id ] = tool
- log.debug( "Reactivated tool id: %s, version: %s" % ( tool.id, tool.version ) )
- else:
- if tool.id not in trans.app.toolbox.tools_by_id:
- # Allow for the same tool to be loaded into multiple places in the tool panel.
- trans.app.toolbox.tools_by_id[ tool.id ] = tool
- trans.app.toolbox.tool_panel[ 'tool_%s' % tool.id ] = tool
- log.debug( "Reactivated tool id: %s, version: %s" % ( tool.id, tool.version ) )
- if tool_section:
- trans.app.toolbox.tool_panel[ section_key ] = tool_section
- log.debug( "Appended reactivated tool to section: %s" % tool_section.name )
- @web.expose
- @web.require_admin
- def manage_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
- description = util.restore_text( params.get( 'description', repository.description ) )
- shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
+ message = "Version information for the tools included in the <b>%s</b> repository is missing. " % repository.name
+ message += "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
+ message ++ "from the installed repository's <b>Repository Actions</b> menu. "
+ status = 'error'
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_panel_config_tool_path_install_dir( trans, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- if params.get( 'edit_repository_button', False ):
- if description != repository.description:
- repository.description = description
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- message = "The repository information has been updated."
- elif params.get( 'set_metadata_button', False ):
- repository_clone_url = self.__generate_clone_url( trans, repository )
- # In case metadata was previously generated for this repository, we'll
- # check to see if it has information needed for the tool_section_dict.
- metadata = repository.metadata
- if 'tool_panel_section' in metadata:
- tool_section_dict = metadata[ 'tool_panel_section' ]
- metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url, tool_section_dict=tool_section_dict )
- if metadata_dict:
- repository.metadata = metadata_dict
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- message = "Repository metadata has been reset."
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
- description=description,
+ description=repository.description,
repo_files_dir=repo_files_dir,
message=message,
status=status )
@web.expose
@web.require_admin
- def check_for_updates( self, trans, **kwd ):
- # Send a request to the relevant tool shed to see if there are any updates.
- repository = get_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
- return trans.response.send_redirect( url )
- @web.expose
- @web.require_admin
def update_to_changeset_revision( self, trans, **kwd ):
"""Update a cloned repository to the latest revision possible."""
params = util.Params( kwd )
@@ -710,12 +740,25 @@
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
+ # Get the location in the tool panel in which the tool was originally loaded.
+ metadata = repository.metadata
+ if 'tool_panel_section' in metadata:
+ if panel_entry_per_tool( metadata[ 'tool_panel_section' ] ):
+ # {<Tool guid> : { tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}}
+ tool_panel_dict = metadata[ 'tool_panel_section' ]
+ else:
+ # { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
+ tool_section_dict = metadata[ 'tool_panel_section' ]
+ tool_section = generate_tool_section_element_from_dict( tool_section_dict )
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata, tool_section=tool_section )
+ else:
+ tool_panel_dict = generate_tool_panel_dict_for_repository_tools( metadata )
if changeset_revision and latest_changeset_revision:
if changeset_revision == latest_changeset_revision:
message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name
else:
current_working_dir = os.getcwd()
- shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_panel_config_tool_path_install_dir( trans, repository )
if relative_install_dir:
repo_files_dir = os.path.join( relative_install_dir, name )
returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
@@ -725,7 +768,7 @@
# Update the repository metadata.
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
tool_shed = clean_tool_shed_url( tool_shed_url )
- tool_shed_repository, metadata_dict = load_repository_contents( app=trans.app,
+ tool_shed_repository, metadata_dict = load_repository_contents( trans,
repository_name=name,
description=repository.description,
owner=owner,
@@ -735,11 +778,11 @@
relative_install_dir=relative_install_dir,
current_working_dir=current_working_dir,
tmp_name=tmp_name,
+ tool_panel_dict=tool_panel_dict,
tool_shed=tool_shed,
tool_section=None,
shed_tool_conf=None,
- new_install=False,
- dist_to_shed=False )
+ new_install=False )
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
repository.update_available = False
@@ -790,20 +833,38 @@
metadata=metadata,
message=message,
status=status )
- def __get_tool_path_and_relative_install_dir( self, trans, repository ):
- # Return both the tool_path configured in the relative shed_tool_conf and
- # the relative path to the directory where the repository is installed.
- tool_shed = clean_tool_shed_url( repository.tool_shed )
- partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
- # Get the relative tool installation paths from each of the shed tool configs.
- relative_install_dir = None
- for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
- shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if os.path.isdir( relative_install_dir ):
- break
- return shed_tool_conf, tool_path, relative_install_dir
+ def __add_tools_to_tool_panel( self, trans, repository, repository_tools_tups, tool_section=None, section_key=None ):
+ # Load tools.
+ if tool_section:
+ elems = tool_section.elems
+ for repository_tools_tup in repository_tools_tups:
+ relative_path, guid, tool = repository_tools_tup
+ tool.tool_shed = repository.tool_shed
+ tool.repository_name = repository.name
+ tool.repository_owner = repository.owner
+ tool.installed_changeset_revision = repository.installed_changeset_revision
+ tool.guid = guid
+ # Set the tool's old_id to the id used before the tool shed existed.
+ tool.old_id = tool.id
+ # Set the tool's id to the tool shed guid.
+ tool.id = guid
+ if tool_section:
+ if tool.id not in elems:
+ elems[ 'tool_%s' % tool.id ] = tool
+ log.debug( "Reactivated tool id: %s, version: %s" % ( tool.id, tool.version ) )
+ else:
+ if tool.id not in trans.app.toolbox.tools_by_id:
+ # Allow for the same tool to be loaded into multiple places in the tool panel.
+ trans.app.toolbox.tools_by_id[ tool.id ] = tool
+ trans.app.toolbox.tool_panel[ 'tool_%s' % tool.id ] = tool
+ log.debug( "Reactivated tool id: %s, version: %s" % ( tool.id, tool.version ) )
+ if tool_section:
+ trans.app.toolbox.tool_panel[ section_key ] = tool_section
+ log.debug( "Appended reactivated tool to section: %s" % tool_section.name )
+ def __generate_clone_url( self, trans, repository ):
+ """Generate the URL for cloning a repository."""
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
def __generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
Generate a tool path that guarantees repositories with the same name will always be installed
@@ -818,10 +879,77 @@
repo_path = items[ 1 ]
tool_shed_url = clean_tool_shed_url( tool_shed_url )
return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision )
- def __generate_clone_url( self, trans, repository ):
- """Generate the URL for cloning a repository."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
+ def __get_tool_panel_config_tool_path_install_dir( self, trans, repository ):
+ # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
+ # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
+ tool_shed = clean_tool_shed_url( repository.tool_shed )
+ partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
+ # Get the relative tool installation paths from each of the shed tool configs.
+ relative_install_dir = None
+ for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
+ shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
+ if repository.dist_to_shed:
+ # The repository is owned by devteam and contains tools migrated from the Galaxy distribution to the tool shed, so
+ # the reserved tool panel config is migrated_tools_conf.xml, to which trans.app.config.migrated_tools_config refers.
+ if shed_tool_conf == trans.app.config.migrated_tools_config:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_install_dir = os.path.join( tool_path, partial_install_dir )
+ if tool_path and relative_install_dir:
+ return shed_tool_conf, tool_path, relative_install_dir
+ elif repository.uninstalled:
+ # Since the repository is uninstalled we don't know what tool panel config was originally used to
+ # define the tools in the repository, so we'll just make sure not to use the reserved migrated_tools_conf.xml.
+ if shed_tool_conf != trans.app.config.migrated_tools_config:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_install_dir = os.path.join( tool_path, partial_install_dir )
+ if tool_path and relative_install_dir:
+ return shed_tool_conf, tool_path, relative_install_dir
+ else:
+ if repository.includes_tools:
+ # We'll check config_elems until we find an element that matches one of the tools in the repository's metadata.
+ metadata = repository.metadata
+ tool_dict = metadata[ 'tools' ][ 0 ]
+ config_elems = shed_tool_conf_dict[ 'config_elems' ]
+ if config_elems:
+ tool_path, relative_install_dir = self.__get_tool_path_install_dir( partial_install_dir,
+ shed_tool_conf_dict,
+ tool_dict,
+ config_elems )
+ if tool_path and relative_install_dir:
+ return shed_tool_conf, tool_path, relative_install_dir
+ else:
+ # Parse the tool panel config since we have no in-memory config_elems (not sure if this will ever occur).
+ tool_panel_config = shed_tool_conf_dict[ 'config_filename' ]
+ tree = util.parse_xml( tool_panel_config )
+ root = tree.getroot()
+ tool_path, relative_install_dir = self.__get_tool_path_install_dir( partial_install_dir,
+ shed_tool_conf_dict,
+ tool_dict,
+ root )
+ if tool_path and relative_install_dir:
+ return shed_tool_conf, tool_path, relative_install_dir
+ else:
+ # Nothing will be loaded into the tool panel, so look for the installed repository on disk.
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_install_dir = os.path.join( tool_path, partial_install_dir )
+ if tool_path and relative_install_dir and os.path.isdir( relative_install_dir ):
+ return shed_tool_conf, tool_path, relative_install_dir
+ return None, None, None
+ def __get_tool_path_install_dir( self, partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
+ for elem in config_elems:
+ if elem.tag == 'tool':
+ if elem.get( 'guid' ) == tool_dict[ 'guid' ]:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_install_dir = os.path.join( tool_path, partial_install_dir )
+ return tool_path, relative_install_dir
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ if section_elem.get( 'guid' ) == tool_dict[ 'guid' ]:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_install_dir = os.path.join( tool_path, partial_install_dir )
+ return tool_path, relative_install_dir
+ return None, None
## ---- Utility methods -------------------------------------------------------
@@ -830,11 +958,12 @@
options = []
for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
shed_tool_conf_filename = shed_tool_conf_dict[ 'config_filename' ]
- if shed_tool_conf_filename.startswith( './' ):
- option_label = shed_tool_conf_filename.replace( './', '', 1 )
- else:
- option_label = shed_tool_conf_filename
- options.append( ( option_label, shed_tool_conf_filename ) )
+ if shed_tool_conf_filename != trans.app.config.migrated_tools_config:
+ if shed_tool_conf_filename.startswith( './' ):
+ option_label = shed_tool_conf_filename.replace( './', '', 1 )
+ else:
+ option_label = shed_tool_conf_filename
+ options.append( ( option_label, shed_tool_conf_filename ) )
select_field = SelectField( name='shed_tool_conf' )
for option_tup in options:
select_field.add_option( option_tup[0], option_tup[1] )
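
[Editor's note: __generate_tool_path above guarantees that same-named repositories from different owners or tool sheds land in distinct directories. A standalone sketch of the path scheme follows; sketch_generate_tool_path is a hypothetical helper, and the assumption that the clone URL splits on '/repos' and that the port is stripped (as clean_tool_shed_url appears to do) is inferred from the partial body shown in this hunk.]

# Illustrative only -- a sketch of the generated path, under the assumptions above.
def sketch_generate_tool_path( repository_clone_url, changeset_revision ):
    base = repository_clone_url.split( '://' )[ -1 ]
    tool_shed_url, repo_path = base.split( '/repos', 1 )
    tool_shed_url = tool_shed_url.split( ':' )[ 0 ]  # drop any port
    return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision )

# Prints 'gvk.bx.psu.edu/repos/test/filter/b2df18d723c5', matching the
# ../shed_tools paths quoted in the reselect_tool_panel_section docstring.
print( sketch_generate_tool_path( 'http://gvk.bx.psu.edu:9009/repos/test/filter', 'b2df18d723c5' ) )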
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -151,8 +151,7 @@
Column( "name", TrimmedString(255) ),
UniqueConstraint( "name" ) )
-# With the tables defined we can define the mappers and setup the
-# relationships between the model objects.
+# With the tables defined we can define the mappers and setup the relationships between the model objects.
assign_mapper( context, User, User.table,
properties=dict( active_repositories=relation( Repository, primaryjoin=( ( Repository.table.c.user_id == User.table.c.id ) & ( not_( Repository.table.c.deleted ) ) ), order_by=( Repository.table.c.name ) ),
galaxy_sessions=relation( GalaxySession, order_by=desc( GalaxySession.table.c.update_time ) ) ) )
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 migrated_tools_conf.xml.sample
--- /dev/null
+++ b/migrated_tools_conf.xml.sample
@@ -0,0 +1,3 @@
+<?xml version="1.0"?>
+<toolbox tool_path="../shed_tools">
+</toolbox>
\ No newline at end of file
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 run.sh
--- a/run.sh
+++ b/run.sh
@@ -6,8 +6,10 @@
[ $? -ne 0 ] && exit 1
SAMPLES="
+ community_wsgi.ini.sample
datatypes_conf.xml.sample
external_service_types_conf.xml.sample
+ migrated_tools_conf.xml.sample
reports_wsgi.ini.sample
shed_tool_conf.xml.sample
tool_conf.xml.sample
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 scripts/manage_tools.py
--- /dev/null
+++ b/scripts/manage_tools.py
@@ -0,0 +1,58 @@
+import sys, os.path, logging
+
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+
+import pkg_resources
+pkg_resources.require( "sqlalchemy-migrate" )
+
+from migrate.versioning.shell import main
+from ConfigParser import SafeConfigParser
+
+log = logging.getLogger( __name__ )
+
+config_file = 'universe_wsgi.ini'
+if '-c' in sys.argv:
+ pos = sys.argv.index( '-c' )
+ sys.argv.pop( pos )
+ config_file = sys.argv.pop( pos )
+if not os.path.exists( config_file ):
+ print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
+ sys.exit( 1 )
+repo = 'lib/galaxy/tool_shed/migrate'
+
+cp = SafeConfigParser()
+cp.read( config_file )
+
+if config_file == 'universe_wsgi.ini.sample' and 'GALAXY_TEST_DBURI' in os.environ:
+ # Running functional tests.
+ db_url = os.environ[ 'GALAXY_TEST_DBURI' ]
+elif cp.has_option( "app:main", "database_connection" ):
+ db_url = cp.get( "app:main", "database_connection" )
+elif cp.has_option( "app:main", "database_file" ):
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
+else:
+ db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
+
+dialect_to_egg = {
+ "sqlite" : "pysqlite>=2",
+ "postgres" : "psycopg2",
+ "mysql" : "MySQL_python"
+}
+dialect = ( db_url.split( ':', 1 ) )[0]
+try:
+ egg = dialect_to_egg[dialect]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+
+main( repository=repo, url=db_url )
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 scripts/migrate_tools/0002_tools.sh
--- /dev/null
+++ b/scripts/migrate_tools/0002_tools.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/migrate_tools/migrate_tools.py 0002_tools.xml $@
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 scripts/migrate_tools/0002_tools.xml
--- /dev/null
+++ b/scripts/migrate_tools/0002_tools.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0"?>
+<toolshed name="toolshed.g2.bx.psu.edu">
+ <repository name="emboss_datatypes" description="Datatypes for Emboss tools" changeset_revision="a89163f31369" />
+ <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5.0.0 tools" changeset_revision="b94ca591877b">
+ <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
+ <tool id="EMBOSS: backtranseq2" version="5.0.0" file="emboss_backtranseq.xml" />
+ <tool id="EMBOSS: banana3" version="5.0.0" file="emboss_banana.xml" />
+ <tool id="EMBOSS: biosed4" version="5.0.0" file="emboss_biosed.xml" />
+ <tool id="EMBOSS: btwisted5" version="5.0.0" file="emboss_btwisted.xml" />
+ <tool id="EMBOSS: cai_custom6" version="5.0.0" file="emboss_cai_custom.xml" />
+ <tool id="EMBOSS: cai6" version="5.0.0" file="emboss_cai.xml" />
+ <tool id="EMBOSS: chaos7" version="5.0.0" file="emboss_chaos.xml" />
+ <tool id="EMBOSS: charge8" version="5.0.0" file="emboss_charge.xml" />
+ <tool id="EMBOSS: checktrans9" version="5.0.0" file="emboss_checktrans.xml" />
+ <tool id="EMBOSS: chips10" version="5.0.0" file="emboss_chips.xml" />
+ <tool id="EMBOSS: cirdna11" version="5.0.0" file="emboss_cirdna.xml" />
+ <tool id="EMBOSS: codcmp12" version="5.0.0" file="emboss_codcmp.xml" />
+ <tool id="EMBOSS: coderet13" version="5.0.0" file="emboss_coderet.xml" />
+ <tool id="EMBOSS: compseq14" version="5.0.0" file="emboss_compseq.xml" />
+ <tool id="EMBOSS: cpgplot15" version="5.0.0" file="emboss_cpgplot.xml" />
+ <tool id="EMBOSS: cpgreport16" version="5.0.0" file="emboss_cpgreport.xml" />
+ <tool id="EMBOSS: cusp17" version="5.0.0" file="emboss_cusp.xml" />
+ <tool id="EMBOSS: cutseq18" version="5.0.0" file="emboss_cutseq.xml" />
+ <tool id="EMBOSS: dan19" version="5.0.0" file="emboss_dan.xml" />
+ <tool id="EMBOSS: degapseq20" version="5.0.0" file="emboss_degapseq.xml" />
+ <tool id="EMBOSS: descseq21" version="5.0.0" file="emboss_descseq.xml" />
+ <tool id="EMBOSS: diffseq22" version="5.0.0" file="emboss_diffseq.xml" />
+ <tool id="EMBOSS: digest23" version="5.0.0" file="emboss_digest.xml" />
+ <tool id="EMBOSS: dotmatcher24" version="5.0.0" file="emboss_dotmatcher.xml" />
+ <tool id="EMBOSS: dotpath25" version="5.0.0" file="emboss_dotpath.xml" />
+ <tool id="EMBOSS: dottup26" version="5.0.0" file="emboss_dottup.xml" />
+ <tool id="EMBOSS: dreg27" version="5.0.0" file="emboss_dreg.xml" />
+ <tool id="EMBOSS: einverted28" version="5.0.0" file="emboss_einverted.xml" />
+ <tool id="EMBOSS: epestfind29" version="5.0.0" file="emboss_epestfind.xml" />
+ <tool id="EMBOSS: equicktandem31" version="5.0.0" file="emboss_equicktandem.xml" />
+ <tool id="EMBOSS: est2genome32" version="5.0.0" file="emboss_est2genome.xml" />
+ <tool id="EMBOSS: etandem33" version="5.0.0" file="emboss_etandem.xml" />
+ <tool id="EMBOSS: extractfeat34" version="5.0.0" file="emboss_extractfeat.xml" />
+ <tool id="EMBOSS: extractseq35" version="5.0.0" file="emboss_extractseq.xml" />
+ <tool id="EMBOSS: freak36" version="5.0.0" file="emboss_freak.xml" />
+ <tool id="EMBOSS: fuzznuc37" version="5.0.0" file="emboss_fuzznuc.xml" />
+ <tool id="EMBOSS: fuzzpro38" version="5.0.0" file="emboss_fuzzpro.xml" />
+ <tool id="EMBOSS: fuzztran39" version="5.0.0" file="emboss_fuzztran.xml" />
+ <tool id="EMBOSS: garnier40" version="5.0.0" file="emboss_garnier.xml" />
+ <tool id="EMBOSS: geecee41" version="5.0.0" file="emboss_geecee.xml" />
+ <tool id="EMBOSS: getorf42" version="5.0.0" file="emboss_getorf.xml" />
+ <tool id="EMBOSS: helixturnhelix43" version="5.0.0" file="emboss_helixturnhelix.xml" />
+ <tool id="EMBOSS: hmoment44" version="5.0.0" file="emboss_hmoment.xml" />
+ <tool id="EMBOSS: iep45" version="5.0.0" file="emboss_iep.xml" />
+ <tool id="EMBOSS: infoseq46" version="5.0.0" file="emboss_infoseq.xml" />
+ <tool id="EMBOSS: isochore47" version="5.0.0" file="emboss_isochore.xml" />
+ <tool id="EMBOSS: lindna48" version="5.0.0" file="emboss_lindna.xml" />
+ <tool id="EMBOSS: marscan49" version="5.0.0" file="emboss_marscan.xml" />
+ <tool id="EMBOSS: maskfeat50" version="5.0.0" file="emboss_maskfeat.xml" />
+ <tool id="EMBOSS: maskseq51" version="5.0.0" file="emboss_maskseq.xml" />
+ <tool id="EMBOSS: matcher52" version="5.0.0" file="emboss_matcher.xml" />
+ <tool id="EMBOSS: megamerger53" version="5.0.0" file="emboss_megamerger.xml" />
+ <tool id="EMBOSS: merger54" version="5.0.0" file="emboss_merger.xml" />
+ <tool id="EMBOSS: msbar55" version="5.0.0" file="emboss_msbar.xml" />
+ <tool id="EMBOSS: needle56" version="5.0.0" file="emboss_needle.xml" />
+ <tool id="EMBOSS: newcpgreport57" version="5.0.0" file="emboss_newcpgreport.xml" />
+ <tool id="EMBOSS: newcpgseek58" version="5.0.0" file="emboss_newcpgseek.xml" />
+ <tool id="EMBOSS: newseq59" version="5.0.0" file="emboss_newseq.xml" />
+ <tool id="EMBOSS: noreturn60" version="5.0.0" file="emboss_noreturn.xml" />
+ <tool id="EMBOSS: notseq61" version="5.0.0" file="emboss_notseq.xml" />
+ <tool id="EMBOSS: nthseq62" version="5.0.0" file="emboss_nthseq.xml" />
+ <tool id="EMBOSS: octanol63" version="5.0.0" file="emboss_octanol.xml" />
+ <tool id="EMBOSS: oddcomp64" version="5.0.0" file="emboss_oddcomp.xml" />
+ <tool id="EMBOSS: palindrome65" version="5.0.0" file="emboss_palindrome.xml" />
+ <tool id="EMBOSS: pasteseq66" version="5.0.0" file="emboss_pasteseq.xml" />
+ <tool id="EMBOSS: patmatdb67" version="5.0.0" file="emboss_patmatdb.xml" />
+ <tool id="EMBOSS: pepcoil68" version="5.0.0" file="emboss_pepcoil.xml" />
+ <tool id="EMBOSS: pepinfo69" version="5.0.0" file="emboss_pepinfo.xml" />
+ <tool id="EMBOSS: pepnet70" version="5.0.0" file="emboss_pepnet.xml" />
+ <tool id="EMBOSS: pepstats71" version="5.0.0" file="emboss_pepstats.xml" />
+ <tool id="EMBOSS: pepwheel72" version="5.0.0" file="emboss_pepwheel.xml" />
+ <tool id="EMBOSS: pepwindow73" version="5.0.0" file="emboss_pepwindow.xml" />
+ <tool id="EMBOSS: pepwindowall74" version="5.0.0" file="emboss_pepwindowall.xml" />
+ <tool id="EMBOSS: plotcon75" version="5.0.0" file="emboss_plotcon.xml" />
+ <tool id="EMBOSS: plotorf76" version="5.0.0" file="emboss_plotorf.xml" />
+ <tool id="EMBOSS: polydot77" version="5.0.0" file="emboss_polydot.xml" />
+ <tool id="EMBOSS: preg78" version="5.0.0" file="emboss_preg.xml" />
+ <tool id="EMBOSS: prettyplot79" version="5.0.0" file="emboss_prettyplot.xml" />
+ <tool id="EMBOSS: prettyseq80" version="5.0.0" file="emboss_prettyseq.xml" />
+ <tool id="EMBOSS: primersearch81" version="5.0.0" file="emboss_primersearch.xml" />
+ <tool id="EMBOSS: revseq82" version="5.0.0" file="emboss_revseq.xml" />
+ <tool id="EMBOSS: seqmatchall83" version="5.0.0" file="emboss_seqmatchall.xml" />
+ <tool id="EMBOSS: seqret84" version="5.0.0" file="emboss_seqret.xml" />
+ <tool id="EMBOSS: showfeat85" version="5.0.0" file="emboss_showfeat.xml" />
+ <tool id="EMBOSS: shuffleseq87" version="5.0.0" file="emboss_shuffleseq.xml" />
+ <tool id="EMBOSS: sigcleave88" version="5.0.0" file="emboss_sigcleave.xml" />
+ <tool id="EMBOSS: sirna89" version="5.0.0" file="emboss_sirna.xml" />
+ <tool id="EMBOSS: sixpack90" version="5.0.0" file="emboss_sixpack.xml" />
+ <tool id="EMBOSS: skipseq91" version="5.0.0" file="emboss_skipseq.xml" />
+ <tool id="EMBOSS: splitter92" version="5.0.0" file="emboss_splitter.xml" />
+ <tool id="EMBOSS: supermatcher95" version="5.0.0" file="emboss_supermatcher.xml" />
+ <tool id="EMBOSS: syco96" version="5.0.0" file="emboss_syco.xml" />
+ <tool id="EMBOSS: tcode97" version="5.0.0" file="emboss_tcode.xml" />
+ <tool id="EMBOSS: textsearch98" version="5.0.0" file="emboss_textsearch.xml" />
+ <tool id="EMBOSS: tmap99" version="5.0.0" file="emboss_tmap.xml" />
+ <tool id="EMBOSS: tranalign100" version="5.0.0" file="emboss_tranalign.xml" />
+ <tool id="EMBOSS: transeq101" version="5.0.0" file="emboss_transeq.xml" />
+ <tool id="EMBOSS: trimest102" version="5.0.0" file="emboss_trimest.xml" />
+ <tool id="EMBOSS: trimseq103" version="5.0.0" file="emboss_trimseq.xml" />
+ <tool id="EMBOSS: twofeat104" version="5.0.0" file="emboss_twofeat.xml" />
+ <tool id="EMBOSS: union105" version="5.0.0" file="emboss_union.xml" />
+ <tool id="EMBOSS: vectorstrip106" version="5.0.0" file="emboss_vectorstrip.xml" />
+ <tool id="EMBOSS: water107" version="5.0.0" file="emboss_water.xml" />
+ <tool id="EMBOSS: wobble108" version="5.0.0" file="emboss_wobble.xml" />
+ <tool id="EMBOSS: wordcount109" version="5.0.0" file="emboss_wordcount.xml" />
+ <tool id="EMBOSS: wordmatch110" version="5.0.0" file="emboss_wordmatch.xml" />
+ </repository>
+</toolshed>
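
[Editor's note: the manifest above follows a simple <toolshed>/<repository>/<tool> layout. A minimal sketch of reading it, assuming the script is run from the Galaxy root so the sample path resolves:]

# Illustrative only -- reading a migration manifest with the layout shown above.
import xml.etree.ElementTree as ElementTree

root = ElementTree.parse( 'scripts/migrate_tools/0002_tools.xml' ).getroot()
print( 'Tool shed: %s' % root.get( 'name' ) )
for repository in root.findall( 'repository' ):
    print( '%s (changeset %s): %d tool entries' %
           ( repository.get( 'name' ), repository.get( 'changeset_revision' ),
             len( repository.findall( 'tool' ) ) ) )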
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 scripts/migrate_tools/migrate_tools.py
--- /dev/null
+++ b/scripts/migrate_tools/migrate_tools.py
@@ -0,0 +1,26 @@
+
+"""
+This script will start up its own web application which includes an InstallManager (~/lib/galaxy/tool_shed/install_manager.py).
+For each missing tool that is discovered, the tool shed repository that contains it will be installed on disk, and a new entry will
+be created for it in the migrated_tools_conf.xml file. These entries are made so that the tool panel is displayed the same
+as it was before the tools were eliminated from the Galaxy distribution. The InstallManager will properly handle entries in
+migrated_tools_conf.xml for tools outside tool panel sections as well as tools inside tool panel sections, depending upon the
+layout of the local tool_conf.xml file. Entries will not be created in migrated_tools_conf.xml for tools included in the tool
+shed repository but not defined in tool_conf.xml.
+"""
+import sys, os
+
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+# Remove scripts/ from the path.
+new_path.extend( sys.path[1:] )
+sys.path = new_path
+
+from galaxy import eggs
+from galaxy.tool_shed.migrate.common import *
+
+app = MigrateToolsApplication( sys.argv[ 1 ] )
+non_shed_tool_conf = app.install_manager.proprietary_tool_conf
+print "\nThe installation process is finished. You should now remove entries for the installed tools from"
+print "your file named %s and start your Galaxy server." % non_shed_tool_conf
+app.shutdown()
+sys.exit( 0 )
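
[Editor's note: the docstring above says entries are created in migrated_tools_conf.xml only for tools actually defined in the local tool_conf.xml. The InstallManager itself is not part of this diff, so the following is only a minimal sketch of that filtering; tools_to_migrate is a hypothetical helper, and local_tool_ids stands in for however the real InstallManager collects the ids defined by the local tool panel config.]

# Illustrative only -- a sketch of the filtering the docstring describes.
import xml.etree.ElementTree as ElementTree

def tools_to_migrate( migration_xml, local_tool_ids ):
    # Yield (repository name, tool id, tool file) for each manifest tool that
    # is also defined in the local tool_conf.xml (represented by local_tool_ids).
    root = ElementTree.parse( migration_xml ).getroot()
    for repository in root.findall( 'repository' ):
        for tool in repository.findall( 'tool' ):
            if tool.get( 'id' ) in local_tool_ids:
                yield repository.get( 'name' ), tool.get( 'id' ), tool.get( 'file' )

for name, tool_id, tool_file in tools_to_migrate( 'scripts/migrate_tools/0002_tools.xml',
                                                  set( [ 'EMBOSS: antigenic1' ] ) ):
    print( '%s -> %s (%s)' % ( name, tool_id, tool_file ) )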
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -8,7 +8,7 @@
<div class="toolForm"><div class="toolFormTitle">Choose the tool panel section to contain the installed tools (optional)</div><div class="toolFormBody">
- <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='activate_or_reinstall_repository', id=trans.security.encode_id( repository.id ) )}" method="post" >
+ <form name="reselect_tool_panel_section" id="reselect_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='reinstall_repository', id=trans.security.encode_id( repository.id ) )}" method="post" ><div class="form-row">
${no_changes_check_box.get_html()}
<label style="display: inline;">No changes</label>
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 tool_shed_install.xml.sample
--- a/tool_shed_install.xml.sample
+++ /dev/null
@@ -1,116 +0,0 @@
-<?xml version="1.0"?>
-<toolshed name="toolshed.g2.bx.psu.edu">
- <!-- The following repository includes no tools, so nothing will be loaded into the tool panel. -->
- <repository name="emboss_datatypes" description="Datatypes for Emboss tools" changeset_revision="a89163f31369" />
- <section name="EMBOSS" id="EMBOSSLite">
- <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5.0.0 tools" changeset_revision="b94ca591877b">
- <tool id="EMBOSS: antigenic1" version="5.0.0" />
- <tool id="EMBOSS: backtranseq2" version="5.0.0" />
- <tool id="EMBOSS: banana3" version="5.0.0" />
- <tool id="EMBOSS: biosed4" version="5.0.0" />
- <tool id="EMBOSS: btwisted5" version="5.0.0" />
- <tool id="EMBOSS: cai_custom6" version="5.0.0" />
- <tool id="EMBOSS: cai6" version="5.0.0" />
- <tool id="EMBOSS: chaos7" version="5.0.0" />
- <tool id="EMBOSS: charge8" version="5.0.0" />
- <tool id="EMBOSS: checktrans9" version="5.0.0" />
- <tool id="EMBOSS: chips10" version="5.0.0" />
- <tool id="EMBOSS: cirdna11" version="5.0.0" />
- <tool id="EMBOSS: codcmp12" version="5.0.0" />
- <tool id="EMBOSS: coderet13" version="5.0.0" />
- <tool id="EMBOSS: compseq14" version="5.0.0" />
- <tool id="EMBOSS: cpgplot15" version="5.0.0" />
- <tool id="EMBOSS: cpgreport16" version="5.0.0" />
- <tool id="EMBOSS: cusp17" version="5.0.0" />
- <tool id="EMBOSS: cutseq18" version="5.0.0" />
- <tool id="EMBOSS: dan19" version="5.0.0" />
- <tool id="EMBOSS: degapseq20" version="5.0.0" />
- <tool id="EMBOSS: descseq21" version="5.0.0" />
- <tool id="EMBOSS: diffseq22" version="5.0.0" />
- <tool id="EMBOSS: digest23" version="5.0.0" />
- <tool id="EMBOSS: dotmatcher24" version="5.0.0" />
- <tool id="EMBOSS: dotpath25" version="5.0.0" />
- <tool id="EMBOSS: dottup26" version="5.0.0" />
- <tool id="EMBOSS: dreg27" version="5.0.0" />
- <tool id="EMBOSS: einverted28" version="5.0.0" />
- <tool id="EMBOSS: epestfind29" version="5.0.0" />
- <tool id="EMBOSS: equicktandem31" version="5.0.0" />
- <tool id="EMBOSS: est2genome32" version="5.0.0" />
- <tool id="EMBOSS: etandem33" version="5.0.0" />
- <tool id="EMBOSS: extractfeat34" version="5.0.0" />
- <tool id="EMBOSS: extractseq35" version="5.0.0" />
- <tool id="EMBOSS: freak36" version="5.0.0" />
- <tool id="EMBOSS: fuzznuc37" version="5.0.0" />
- <tool id="EMBOSS: fuzzpro38" version="5.0.0" />
- <tool id="EMBOSS: fuzztran39" version="5.0.0" />
- <tool id="EMBOSS: garnier40" version="5.0.0" />
- <tool id="EMBOSS: geecee41" version="5.0.0" />
- <tool id="EMBOSS: getorf42" version="5.0.0" />
- <tool id="EMBOSS: helixturnhelix43" version="5.0.0" />
- <tool id="EMBOSS: hmoment44" version="5.0.0" />
- <tool id="EMBOSS: iep45" version="5.0.0" />
- <tool id="EMBOSS: infoseq46" version="5.0.0" />
- <tool id="EMBOSS: isochore47" version="5.0.0" />
- <tool id="EMBOSS: lindna48" version="5.0.0" />
- <tool id="EMBOSS: marscan49" version="5.0.0" />
- <tool id="EMBOSS: maskfeat50" version="5.0.0" />
- <tool id="EMBOSS: maskseq51" version="5.0.0" />
- <tool id="EMBOSS: matcher52" version="5.0.0" />
- <tool id="EMBOSS: megamerger53" version="5.0.0" />
- <tool id="EMBOSS: merger54" version="5.0.0" />
- <tool id="EMBOSS: msbar55" version="5.0.0" />
- <tool id="EMBOSS: needle56" version="5.0.0" />
- <tool id="EMBOSS: newcpgreport57" version="5.0.0" />
- <tool id="EMBOSS: newcpgseek58" version="5.0.0" />
- <tool id="EMBOSS: newseq59" version="5.0.0" />
- <tool id="EMBOSS: noreturn60" version="5.0.0" />
- <tool id="EMBOSS: notseq61" version="5.0.0" />
- <tool id="EMBOSS: nthseq62" version="5.0.0" />
- <tool id="EMBOSS: octanol63" version="5.0.0" />
- <tool id="EMBOSS: oddcomp64" version="5.0.0" />
- <tool id="EMBOSS: palindrome65" version="5.0.0" />
- <tool id="EMBOSS: pasteseq66" version="5.0.0" />
- <tool id="EMBOSS: patmatdb67" version="5.0.0" />
- <tool id="EMBOSS: pepcoil68" version="5.0.0" />
- <tool id="EMBOSS: pepinfo69" version="5.0.0" />
- <tool id="EMBOSS: pepnet70" version="5.0.0" />
- <tool id="EMBOSS: pepstats71" version="5.0.0" />
- <tool id="EMBOSS: pepwheel72" version="5.0.0" />
- <tool id="EMBOSS: pepwindow73" version="5.0.0" />
- <tool id="EMBOSS: pepwindowall74" version="5.0.0" />
- <tool id="EMBOSS: plotcon75" version="5.0.0" />
- <tool id="EMBOSS: plotorf76" version="5.0.0" />
- <tool id="EMBOSS: polydot77" version="5.0.0" />
- <tool id="EMBOSS: preg78" version="5.0.0" />
- <tool id="EMBOSS: prettyplot79" version="5.0.0" />
- <tool id="EMBOSS: prettyseq80" version="5.0.0" />
- <tool id="EMBOSS: primersearch81" version="5.0.0" />
- <tool id="EMBOSS: revseq82" version="5.0.0" />
- <tool id="EMBOSS: seqmatchall83" version="5.0.0" />
- <tool id="EMBOSS: seqret84" version="5.0.0" />
- <tool id="EMBOSS: showfeat85" version="5.0.0" />
- <tool id="EMBOSS: shuffleseq87" version="5.0.0" />
- <tool id="EMBOSS: sigcleave88" version="5.0.0" />
- <tool id="EMBOSS: sirna89" version="5.0.0" />
- <tool id="EMBOSS: sixpack90" version="5.0.0" />
- <tool id="EMBOSS: skipseq91" version="5.0.0" />
- <tool id="EMBOSS: splitter92" version="5.0.0" />
- <tool id="EMBOSS: supermatcher95" version="5.0.0" />
- <tool id="EMBOSS: syco96" version="5.0.0" />
- <tool id="EMBOSS: tcode97" version="5.0.0" />
- <tool id="EMBOSS: textsearch98" version="5.0.0" />
- <tool id="EMBOSS: tmap99" version="5.0.0" />
- <tool id="EMBOSS: tranalign100" version="5.0.0" />
- <tool id="EMBOSS: transeq101" version="5.0.0" />
- <tool id="EMBOSS: trimest102" version="5.0.0" />
- <tool id="EMBOSS: trimseq103" version="5.0.0" />
- <tool id="EMBOSS: twofeat104" version="5.0.0" />
- <tool id="EMBOSS: union105" version="5.0.0" />
- <tool id="EMBOSS: vectorstrip106" version="5.0.0" />
- <tool id="EMBOSS: water107" version="5.0.0" />
- <tool id="EMBOSS: wobble108" version="5.0.0" />
- <tool id="EMBOSS: wordcount109" version="5.0.0" />
- <tool id="EMBOSS: wordmatch110" version="5.0.0" />
- </repository>
- </section>
-</toolshed>
diff -r 1ae7433d67d9a954a8f5043962b72becf4f55473 -r 4d692c86cf6ee977dabb1f0034b663c11d55e721 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -136,22 +136,6 @@
# http://wiki.g2.bx.psu.edu/Admin/Config/Tool%20Dependencies
#tool_dependency_dir = None
-# Enable automatic installation of tools that used to be in the Galaxy
-# distribution but are now in the main Galaxy tool shed. The tools
-# that will be installed are configured in the config file named
-# tool_shed_install.xml, which is located in the Galaxy install directory.
-# Tools already installed will not be re-installed even if they are
-# referenced in the tool_shed_install.xml file.
-#enable_tool_shed_install = False
-#tool_shed_install_config_file = tool_shed_install.xml
-
-# CRITICAL NOTE: the location in which the tools will be installed is the
-# location pointed to by the "tool_path" attribute in the following file.
-# The default location setting in shed_tool_conf.xml ("../shed_tools") may
-# be problematic for some cluster environments, so make sure to change it
-# if appropriate or use a different file name for the setting.
-#install_tool_config_file = shed_tool_conf.xml
-
# Enable automatic polling of relative tool sheds to see if any updates
# are available for installed repositories. Ideally only one Galaxy
# server process should be able to check for repository updates. The
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Workflows: Additional rename options available to Rename Datasets action -- from Dave Walton.
by Bitbucket 28 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1ae7433d67d9/
changeset: 1ae7433d67d9
user: dannon
date: 2012-02-28 17:00:16
summary: Workflows: Additional rename options available to Rename Datasets action -- from Dave Walton.
Syntax:
#{input_file_variable | option 1 | option n}
where:
input_file_variable = the name of a module input variable
| = the delimiter for added options. Optional if no options.
options = basename, upper, lower
basename = keep all of the file name except the extension
(everything before the final ".")
upper = force the file name to upper case
lower = force the file name to lower case
Example:
#{input1 | basename | upper}
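To make the behavior concrete, a minimal standalone Python sketch of how such a pattern could resolve (an illustration only, not the committed Galaxy code; the dataset name is invented):

def resolve_rename( pattern, input_names ):
    # input_names maps module input variable names to dataset names.
    start = pattern.find( "#{" ) + 2
    end = pattern.find( "}" )
    tokens = [ t.strip() for t in pattern[ start:end ].split( "|" ) ]
    name = input_names.get( tokens[ 0 ], "" )
    for op in tokens[ 1: ]:
        if op == "basename":
            # Keep everything before the final "."
            name = name.rsplit( ".", 1 )[ 0 ]
        elif op == "upper":
            name = name.upper()
        elif op == "lower":
            name = name.lower()
    return pattern[ :start - 2 ] + name + pattern[ end + 1: ]

# resolve_rename( "#{input1 | basename | upper}", { "input1": "reads.fastq" } )
# returns "READS".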
affected #: 1 file
diff -r c2cb11dd86d8b6fd8799a3b0fa6d685eae0c5e98 -r 1ae7433d67d9a954a8f5043962b72becf4f55473 lib/galaxy/jobs/actions/post.py
--- a/lib/galaxy/jobs/actions/post.py
+++ b/lib/galaxy/jobs/actions/post.py
@@ -126,10 +126,73 @@
verbose_name = "Rename Dataset"
@classmethod
- def execute(cls, app, sa_session, action, job, replacement_dict):
+ def execute(cls, app, sa_session, action, job, replacement_dict):
# Prevent renaming a dataset to the empty string.
if action.action_arguments and action.action_arguments.has_key('newname') and action.action_arguments['newname'] != '':
new_name = action.action_arguments['newname']
+
+ # TODO: Unify and simplify replacement options.
+ # Add interface through workflow editor UI
+
+ # The following if statement will process a request to rename
+ # using an input file name.
+ # TODO: Replace all matching code with regex
+ # Proper syntax is #{input_file_variable | option 1 | option n}
+ # where
+ # input_file_variable = the name of a module input variable
+ # | = the delimiter for added options. Optional if no options.
+ # options = basename, upper, lower
+ # basename = keep all of the file name except the extension
+ # (everything before the final ".")
+ # upper = force the file name to upper case
+ # lower = force the file name to lower case
+ # suggested additions:
+ # "replace" option so you can replace a portion of the name,
+ # support multiple #{name} in one rename action...
+
+ if new_name.find("#{") > -1:
+ to_be_replaced = ""
+ # This assumes a single instance of #{variable} will exist
+ start_pos = new_name.find("#{") + 2
+ end_pos = new_name.find("}")
+ to_be_replaced = new_name[start_pos:end_pos]
+ input_file_var = to_be_replaced
+ # Pull out the piped controls and store them for later
+ # parsing.
+ tokens = to_be_replaced.split("|")
+ operations = []
+ if len(tokens) > 1:
+ input_file_var = tokens[0].strip()
+ for i in range(1, len(tokens)):
+ operations.append(tokens[i].strip())
+
+ replacement = ""
+ # Loop through inputs to find the one with the "to_be_replaced" input
+ # variable name, and get the replacement name
+ for input_assoc in job.input_datasets:
+ if input_assoc.name == input_file_var:
+ replacement = input_assoc.dataset.name
+
+ # Do operations on replacement
+ # Any control that is not defined will be ignored.
+ # This should be moved out to a class or module function
+ for operation in operations:
+ # Basename returns everything prior to the final '.'
+ if operation == "basename":
+ fields = replacement.split(".")
+ replacement = fields[0]
+ if len(fields) > 1:
+ temp = ""
+ for i in range(1, len(fields) - 1):
+ temp += "." + fields[i]
+ replacement += temp
+ elif operation == "upper":
+ replacement = replacement.upper()
+ elif operation == "lower":
+ replacement = replacement.lower()
+
+ new_name = new_name.replace("#{%s}" % to_be_replaced, replacement)
+
if replacement_dict:
for k, v in replacement_dict.iteritems():
new_name = new_name.replace("${%s}" % k, v)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Trackster bug fixes: (a) prevent dragging tracks above reference track and (b) fix tile's low location when showing tile.
by Bitbucket 27 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c2cb11dd86d8/
changeset: c2cb11dd86d8
user: jgoecks
date: 2012-02-27 19:47:41
summary: Trackster bug fixes: (a) prevent dragging tracks above reference track and (b) fix tile's low location when showing tile.
affected #: 1 file
diff -r 167fddae9646f761a87c99a15984f52a605cc498 -r c2cb11dd86d8b6fd8799a3b0fa6d685eae0c5e98 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -328,11 +328,16 @@
// Handle sibling movement, aka sorting.
// Determine new position
+ var child;
for ( i = 0; i < children.length; i++ ) {
- if ( d.offsetY < $(children.get(i)).position().top ) {
+ child = $(children.get(i));
+ if ( d.offsetY < child.position().top &&
+ // Cannot move tracks above reference track or intro div.
+ !(child.hasClass("reference-track") || child.hasClass("intro")) ) {
break;
}
}
+
// If not already in the right place, move. Need
// to handle the end specially since we don't have
// insert at index
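The new ordering rule, stated generally: insert before the first sibling whose top edge lies below the drag position, but never ahead of a sibling that is pinned in place (here, the reference track and intro div). A rough Python analogue (the ( top, pinned ) representation is an assumption for illustration):

def find_insert_index( drag_y, siblings ):
    # siblings: ( top_position, is_pinned ) pairs in current display order;
    # pinned entries model the reference track and intro div.
    for i, ( top, is_pinned ) in enumerate( siblings ):
        if drag_y < top and not is_pinned:
            return i
    # No sibling qualifies, so the dragged item belongs at the end.
    return len( siblings )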
@@ -2893,9 +2898,9 @@
var Tile = function(track, index, resolution, canvas, data) {
this.track = track;
this.index = index;
- // FIXME: find better way to calculate low, high.
- this.low = index * TILE_SIZE * resolution
- this.high = (index + 1) * TILE_SIZE * resolution;
+ var tile_bounds = this.track._get_tile_bounds(index, resolution);
+ this.low = tile_bounds[0];
+ this.high = tile_bounds[1];
this.resolution = resolution;
// Wrap element in div for background.
this.html_elt = $("<div class='track-tile'/>").append(canvas);
@@ -4153,7 +4158,7 @@
* Draw ReferenceTrack tile.
*/
draw_tile: function(seq, ctx, mode, resolution, tile_index, w_scale) {
- var track = this;
+ var track = this;
if (w_scale > this.view.canvas_manager.char_width_px) {
if (seq.data === null) {
@@ -4165,7 +4170,7 @@
ctx.textAlign = "center";
seq = seq.data;
for (var c = 0, str_len = seq.length; c < str_len; c++) {
- var c_start = Math.round(c * w_scale);
+ var c_start = Math.floor(c * w_scale);
ctx.fillText(seq[c], c_start, 10);
}
return new Tile(track, tile_index, resolution, canvas, seq);
@@ -4331,7 +4336,7 @@
painter = new painters.LinePainter(result.data, tile_low, tile_high, this.prefs, mode);
painter.draw(ctx, canvas.width, canvas.height, w_scale);
- return new Tile(this.track, tile_index, resolution, canvas, result.data);
+ return new Tile(this, tile_index, resolution, canvas, result.data);
},
/**
* LineTrack data cannot currently be subsetted.
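On the Tile change above: a tile's bounds are a pure function of its index and resolution, and routing both call sites through _get_tile_bounds keeps them consistent. A schematic Python version of the arithmetic (TILE_SIZE is an assumed constant, and the real _get_tile_bounds may additionally clamp to the chromosome extent):

TILE_SIZE = 400  # assumed tile width; the actual value lives in trackster.js

def get_tile_bounds( index, resolution ):
    # Genomic interval [ low, high ) covered by the tile at this index.
    low = index * TILE_SIZE * resolution
    high = ( index + 1 ) * TILE_SIZE * resolution
    return low, high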
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.