1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e276278db3b0/
changeset: e276278db3b0
user: jgoecks
date: 2012-06-04 18:26:52
summary: Place all JavaScript visualization code under static/scripts/viz directory.
affected #: 14 files
Diff too large to display.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4b799028749a/
changeset: 4b799028749a
user: jgoecks
date: 2012-06-04 15:29:51
summary: Clarify inputs for forward, reverse reads in Tophat2 wrapper.
affected #: 1 file
diff -r 82f11c6b5da6339115dd4499ad317eb341835fa8 -r 4b799028749a3e8c0138a8a507c79f78e6edd349 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -124,8 +124,8 @@
<param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33"/></when><when value="paired">
- <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
- <param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
+ <param format="fastqsanger" name="input1" type="data" label="RNA-Seq FASTQ file, forward reads" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" />
+ <param format="fastqsanger" name="input2" type="data" label="RNA-Seq FASTQ file, reverse reads" help="Nucleotide-space: Must have Sanger-scaled quality values with ASCII offset 33" /><param name="mate_inner_distance" type="integer" value="300" label="Mean Inner Distance between Mate Pairs" /><param name="mate_std_dev" type="integer" value="20" label="Std. Dev for Distance between Mate Pairs" help="The standard deviation for the distribution on inner distances between mate pairs."/><!-- Discordant pairs. -->
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/82f11c6b5da6/
changeset: 82f11c6b5da6
user: jgoecks
date: 2012-06-04 15:07:19
summary: Add entry for Tophat2 indices to tool data table.
affected #: 1 file
diff -r 44cc39cf2bd41154aa1573e253b87f77ec97ae99 -r 82f11c6b5da6339115dd4499ad317eb341835fa8 tool_data_table_conf.xml.sample
--- a/tool_data_table_conf.xml.sample
+++ b/tool_data_table_conf.xml.sample
@@ -105,6 +105,11 @@
<columns>value, dbkey, name, path</columns><file path="tool-data/bowtie_indices_color.loc" /></table>
+ <!-- Locations of indexes in the Bowtie2 mapper format for TopHat2 to use -->
+ <table name="tophat2_indexes" comment_char="#">
+ <columns>value, dbkey, name, path</columns>
+ <file path="tool-data/bowtie2_indices.loc" />
+ </table><!-- Locations of configurations in the CCAT peak/region caller format --><table name="ccat_configurations" comment_char="#"><columns>value, name, path</columns>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/97338d705f3a/
changeset: 97338d705f3a
user: jgoecks
date: 2012-06-04 05:07:54
summary: Fix import to be compatible with previous changeset.
affected #: 1 file
diff -r 4051e35d4cfd4f0f692b392553d9d99c9f388724 -r 97338d705f3acb4baa4e2f4d460c74fd8dde5761 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -15,7 +15,7 @@
from galaxy.datatypes.interval import Gff, Bed
from galaxy.model import NoConverterException, ConverterDependencyException
from galaxy.visualization.tracks.data_providers import *
-from galaxy.visualization.tracks.genomes import decode_dbkey, Genomes
+from galaxy.visualization.genomes import decode_dbkey, Genomes
from galaxy.visualization.tracks.visual_analytics import get_tool_def, get_dataset_job
# Message strings returned to browser
https://bitbucket.org/galaxy/galaxy-central/changeset/44cc39cf2bd4/
changeset: 44cc39cf2bd4
user: jgoecks
date: 2012-06-04 05:09:08
summary: Merge
affected #: 1 file
diff -r 97338d705f3acb4baa4e2f4d460c74fd8dde5761 -r 44cc39cf2bd41154aa1573e253b87f77ec97ae99 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -32,47 +32,12 @@
self.key = key
self.len_file = len_file
self.twobit_file = twobit_file
-
-
-class Genomes( object ):
- """
- Provides information about available genome data and methods for manipulating that data.
- """
-
- def __init__( self, app ):
- # Create list of known genomes from len files.
- self.genomes = {}
- len_files = glob.glob( os.path.join( app.config.len_file_path, "*.len" ) )
- for f in len_files:
- key = os.path.split( f )[1].split( ".len" )[0]
- self.genomes[ key ] = Genome( key, len_file=f )
-
- # Add genome data (twobit files) to genomes.
- for line in open( os.path.join( app.config.tool_data_path, "twobit.loc" ) ):
- if line.startswith("#"): continue
- val = line.split()
- if len( val ) == 2:
- key, path = val
- if key in self.genomes:
- self.genomes[ key ].twobit_file = path
-
- def get_dbkeys_with_chrom_info( self, trans ):
- """ Returns all valid dbkeys that have chromosome information. """
-
- # All user keys have a len file.
- user_keys = {}
- user = trans.get_user()
- if 'dbkeys' in user.preferences:
- user_keys = from_json_string( user.preferences['dbkeys'] )
-
- dbkeys = [ (v, k) for k, v in trans.db_builds if ( ( k in self.genomes and self.genomes[ k ].len_file ) or k in user_keys ) ]
- return dbkeys
-
- def chroms( self, trans, dbkey=None, num=None, chrom=None, low=None ):
+
+ def to_dict( self, num=None, chrom=None, low=None ):
"""
- Returns a naturally sorted list of chroms/contigs for a given dbkey.
- Use either chrom or low to specify the starting chrom in the return list.
+ Returns representation of self as a dictionary.
"""
+
def check_int(s):
if s.isdigit():
return int(s)
@@ -97,47 +62,13 @@
else:
low = 0
- # If there is no dbkey owner, default to current user.
- dbkey_owner, dbkey = decode_dbkey( dbkey )
- if dbkey_owner:
- dbkey_user = trans.sa_session.query( trans.app.model.User ).filter_by( username=dbkey_owner ).first()
- else:
- dbkey_user = trans.user
-
- #
- # Get len file.
- #
-
- # Look first in user's custom builds.
- len_file = None
- len_ds = None
- user_keys = {}
- if dbkey_user and 'dbkeys' in dbkey_user.preferences:
- user_keys = from_json_string( dbkey_user.preferences['dbkeys'] )
- if dbkey in user_keys:
- dbkey_attributes = user_keys[ dbkey ]
- if 'fasta' in dbkey_attributes:
- build_fasta = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dbkey_attributes[ 'fasta' ] )
- len_file = build_fasta.get_converted_dataset( trans, 'len' ).file_name
- # Backwards compatibility: look for len file directly.
- elif 'len' in dbkey_attributes:
- len_file = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( user_keys[ dbkey ][ 'len' ] ).file_name
-
- # Look in system builds.
- if not len_file:
- len_ds = trans.db_dataset_for( dbkey )
- if not len_ds:
- len_file = self.genomes[ dbkey ].len_file
- else:
- len_file = len_ds.file_name
-
#
# Get chroms data:
# (a) chrom name, len;
# (b) whether there are previous, next chroms;
# (c) index of start chrom.
#
- len_file_enumerate = enumerate( open( len_file ) )
+ len_file_enumerate = enumerate( open( self.len_file ) )
chroms = {}
prev_chroms = False
start_index = 0
@@ -169,11 +100,6 @@
start_index = low
# Read chrom data from len file.
- # TODO: this may be too slow for very large numbers of chroms/contigs,
- # but try it out for now.
- if not os.path.exists( len_file ):
- return None
-
for line_num, line in len_file_enumerate:
if line_num < low:
continue
@@ -197,9 +123,99 @@
to_sort = [{ 'chrom': chrom, 'len': length } for chrom, length in chroms.iteritems()]
to_sort.sort(lambda a,b: cmp( split_by_number(a['chrom']), split_by_number(b['chrom']) ))
- return { 'reference': self.has_reference_data( trans, dbkey, dbkey_user ), 'chrom_info': to_sort,
- 'prev_chroms' : prev_chroms, 'next_chroms' : next_chroms, 'start_index' : start_index }
+ return {
+ 'id': self.key,
+ 'reference': self.twobit_file is not None,
+ 'chrom_info': to_sort,
+ 'prev_chroms' : prev_chroms,
+ 'next_chroms' : next_chroms,
+ 'start_index' : start_index
+ }
+
+class Genomes( object ):
+ """
+ Provides information about available genome data and methods for manipulating that data.
+ """
+
+ def __init__( self, app ):
+ # Create list of known genomes from len files.
+ self.genomes = {}
+ len_files = glob.glob( os.path.join( app.config.len_file_path, "*.len" ) )
+ for f in len_files:
+ key = os.path.split( f )[1].split( ".len" )[0]
+ self.genomes[ key ] = Genome( key, len_file=f )
+
+ # Add genome data (twobit files) to genomes.
+ for line in open( os.path.join( app.config.tool_data_path, "twobit.loc" ) ):
+ if line.startswith("#"): continue
+ val = line.split()
+ if len( val ) == 2:
+ key, path = val
+ if key in self.genomes:
+ self.genomes[ key ].twobit_file = path
+
+ def get_build( self, dbkey ):
+ """ Returns build for the given key. """
+ rval = None
+ if dbkey in self.genomes:
+ rval = self.genomes[ dbkey ]
+ return rval
+
+ def get_dbkeys_with_chrom_info( self, trans ):
+ """ Returns all valid dbkeys that have chromosome information. """
+ # All user keys have a len file.
+ user_keys = {}
+ user = trans.get_user()
+ if 'dbkeys' in user.preferences:
+ user_keys = from_json_string( user.preferences['dbkeys'] )
+
+ dbkeys = [ (v, k) for k, v in trans.db_builds if ( ( k in self.genomes and self.genomes[ k ].len_file ) or k in user_keys ) ]
+ return dbkeys
+
+ def chroms( self, trans, dbkey=None, num=None, chrom=None, low=None ):
+ """
+ Returns a naturally sorted list of chroms/contigs for a given dbkey.
+ Use either chrom or low to specify the starting chrom in the return list.
+ """
+
+ # If there is no dbkey owner, default to current user.
+ dbkey_owner, dbkey = decode_dbkey( dbkey )
+ if dbkey_owner:
+ dbkey_user = trans.sa_session.query( trans.app.model.User ).filter_by( username=dbkey_owner ).first()
+ else:
+ dbkey_user = trans.user
+
+ #
+ # Get/create genome object.
+ #
+ genome = None
+
+ # Look first in user's custom builds.
+ if dbkey_user and 'dbkeys' in dbkey_user.preferences:
+ user_keys = from_json_string( dbkey_user.preferences['dbkeys'] )
+ if dbkey in user_keys:
+ dbkey_attributes = user_keys[ dbkey ]
+ if 'fasta' in dbkey_attributes:
+ build_fasta = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dbkey_attributes[ 'fasta' ] )
+ len_file = build_fasta.get_converted_dataset( trans, 'len' ).file_name
+ # Backwards compatibility: look for len file directly.
+ elif 'len' in dbkey_attributes:
+ len_file = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( user_keys[ dbkey ][ 'len' ] ).file_name
+ if len_file:
+ genome = Genome( dbkey, len_file=len_file )
+
+
+ # Look in system builds.
+ if not genome:
+ len_ds = trans.db_dataset_for( dbkey )
+ if not len_ds:
+ genome = self.genomes[ dbkey ]
+ else:
+ genome = Genome( dbkey, len_file=len_ds.file_name )
+
+ return genome.to_dict( num=num, chrom=chrom, low=low )
+
def has_reference_data( self, trans, dbkey, dbkey_owner=None ):
"""
Returns true if there is reference data for the specified dbkey. If dbkey is custom,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1890cb0d1cfb/
changeset: 1890cb0d1cfb
user: greg
date: 2012-06-01 21:37:38
summary: Fix for finding the desired file within a specified change set of a tool shed repository manifest.
affected #: 1 file
diff -r 018179ad4c9bcec30baea5aee0918f45d254deb2 -r 1890cb0d1cfbb3ef5a09affcdd18d2b8acf7d811 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -360,20 +360,32 @@
"""Copy a file named filename from somewhere in the repository manifest to the directory to which dir refers."""
filename = strip_path( filename )
fctx = None
- # First see if the file is in ctx.
+ found = False
+ # First see if the file is in ctx. We have to be careful in determining if we found the correct file because multiple files
+ # with the same name may be in different directories within ctx if the repository owner moved the files as part of the change set.
+ # For example, in the following ctx.files() list, the former may have been moved to the latter:
+ # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']
for ctx_file in ctx.files():
ctx_file_name = strip_path( ctx_file )
if filename == ctx_file_name:
- fctx = ctx[ ctx_file ]
- else:
+ try:
+ fctx = ctx[ ctx_file ]
+ found = True
+ break
+ except:
+ continue
+ if not found:
# Find the file in the repository manifest.
for changeset in repo.changelog:
prev_ctx = repo.changectx( changeset )
for ctx_file in prev_ctx.files():
ctx_file_name = strip_path( ctx_file )
if filename == ctx_file_name:
- fctx = prev_ctx[ ctx_file ]
- break
+ try:
+ fctx = prev_ctx[ ctx_file ]
+ break
+ except:
+ continue
if fctx:
file_path = os.path.join( dir, filename )
fh = open( file_path, 'wb' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/018179ad4c9b/
changeset: 018179ad4c9b
user: greg
date: 2012-06-01 20:51:41
summary: 1) Enhance the InstallManager to provide the installer the ability to choose to install tool dependencies (or not) rather than automatically installing them.
2) Persist changes to the tool_data_table_conf.xml file only if within the Galaxy webapp.
3) Apply the 1-liner fix from change set 9ffef0de07f5 in Peter van Heusden's pull request which fixes a problem where the __init__ tests if config.database_connection is None.
affected #: 8 files
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -31,6 +31,11 @@
db_url = self.config.database_connection
else:
db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
+ # Set up the tool sheds registry
+ if os.path.isfile( self.config.tool_sheds_config ):
+ self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ else:
+ self.tool_shed_registry = None
# Initialize database / check for appropriate schema version. # If this
# is a new installation, we'll restrict the tool migration messaging.
from galaxy.model.migrate.check import create_or_verify_database
@@ -47,11 +52,6 @@
self.config.database_engine_options,
database_query_profiling_proxy = self.config.database_query_profiling_proxy,
object_store = self.object_store )
- # Set up the tool sheds registry
- if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
- else:
- self.tool_shed_registry = None
# Manage installed tool shed repositories.
self.installed_repository_manager = galaxy.tool_shed.InstalledRepositoryManager( self )
# Create an empty datatypes registry.
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -7,9 +7,12 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy.util.odict import odict
+from galaxy.tool_shed.migrate.common import *
+
+REPOSITORY_OWNER = 'devteam'
class InstallManager( object ):
- def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config ):
+ def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
"""
Check tool settings in tool_shed_install_config and install all repositories that are not already installed. The tool
panel configuration file is the received migrated_tools_config, which is the reserved file named migrated_tools_conf.xml.
@@ -30,9 +33,9 @@
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = 'devteam'
+ self.repository_owner = REPOSITORY_OWNER
for repository_elem in root:
- self.install_repository( repository_elem )
+ self.install_repository( repository_elem, install_dependencies )
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
found = False
for root, dirs, files in os.walk( relative_install_dir ):
@@ -117,7 +120,8 @@
if not is_displayed:
is_displayed = True
return is_displayed, tool_sections
- def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision, ctx_rev ):
+ def handle_repository_contents( self, repository_clone_url, relative_install_dir, repository_elem, repository_name, description, changeset_revision,
+ ctx_rev, install_dependencies ):
# Generate the metadata for the installed tool shed repository, among other things. It is critical that the installed repository is
# updated to the desired changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
# The values for the keys in each of the following dictionaries will be a list to allow for the same tool to be displayed in multiple places
@@ -162,7 +166,7 @@
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied )
- if 'tool_dependencies' in metadata_dict:
+ if install_dependencies and 'tool_dependencies' in metadata_dict:
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( self.app,
'tool_dependencies.xml',
@@ -220,7 +224,7 @@
except:
pass
return tool_shed_repository, metadata_dict
- def install_repository( self, repository_elem ):
+ def install_repository( self, repository_elem, install_dependencies ):
# Install a single repository, loading contained tools into the tool panel.
name = repository_elem.get( 'name' )
description = repository_elem.get( 'description' )
@@ -241,7 +245,8 @@
name,
description,
changeset_revision,
- ctx_rev )
+ ctx_rev,
+ install_dependencies )
if 'tools' in metadata_dict:
# Get the tool_versions from the tool shed for each tool in the installed change set.
url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy&no_reset=true' % \
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -6,6 +6,7 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
from common import *
+from galaxy.util.odict import odict
log = logging.getLogger( __name__ )
@@ -44,13 +45,20 @@
if latest_tool_migration_script_number != db_schema.version:
if app.new_installation:
# New installations will not be missing tools, so we don't need to worry about them.
- missing_tool_configs = []
+ missing_tool_configs_dict = odict()
else:
tool_panel_configs = get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
- missing_tool_configs = check_for_missing_tools( tool_panel_configs, latest_tool_migration_script_number )
+ # The missing_tool_configs_dict contents are something like:
+ # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+ missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
else:
- missing_tool_configs = []
+ missing_tool_configs_dict = odict()
+ have_tool_dependencies = False
+ for k, v in missing_tool_configs_dict.items():
+ if v:
+ have_tool_dependencies = True
+ break
config_arg = ''
if os.path.abspath( os.path.join( os.getcwd(), 'universe_wsgi.ini' ) ) != galaxy_config_file:
config_arg = ' -c %s' % galaxy_config_file.replace( os.path.abspath( os.getcwd() ), '.' )
@@ -62,7 +70,7 @@
output = proc.stdout.read( 32768 )
if return_code != 0:
raise Exception( "Error attempting to update the value of migrate_tools.version: %s" % output )
- elif missing_tool_configs:
+ elif missing_tool_configs_dict:
if len( tool_panel_configs ) == 1:
plural = ''
tool_panel_config_file_names = tool_panel_configs[ 0 ]
@@ -71,8 +79,8 @@
tool_panel_config_file_names = ', '.join( tool_panel_configs )
msg = "\n>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
msg += "\n\nThe list of files at the end of this message refers to tools that are configured to load into the tool panel for\n"
- msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools can be automatically installed\n"
- msg += "from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
+ msg += "this Galaxy instance, but have been removed from the Galaxy distribution. These tools and their dependencies can be\n"
+ msg += "automatically installed from the Galaxy tool shed at http://toolshed.g2.bx.psu.edu.\n\n"
msg += "To skip this process, attempt to start your Galaxy server again (e.g., sh run.sh or whatever you use). If you do this,\n"
msg += "be aware that these tools will no longer be available in your Galaxy tool panel, and entries for each of them should\n"
msg += "be removed from your file%s named %s.\n\n" % ( plural, tool_panel_config_file_names )
@@ -87,17 +95,45 @@
msg += "configured could result in undesired behavior when modifying or updating your local Galaxy instance or the tool shed\n"
msg += "repositories if they are in directories that pose conflicts. See mercurial's .hgignore documentation at the following\n"
msg += "URL for details.\n\nhttp://mercurial.selenic.com/wiki/.hgignore\n\n"
- msg += output
+ if have_tool_dependencies:
+ msg += "The following tool dependencies can also optionally be installed (see the option flag in the command below). If you\n"
+ msg += "choose to install them (recommended), they will be installed within the location specified by the 'tool_dependency_dir'\n"
+ msg += "setting in your main Galaxy configuration file (e.g., universe_wsgi.ini).\n"
+ processed_tool_dependencies = []
+ for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
+ for tool_dependencies_tup in tool_dependencies:
+ if tool_dependencies_tup not in processed_tool_dependencies:
+ msg += "------------------------------------\n"
+ msg += "Tool Dependency\n"
+ msg += "------------------------------------\n"
+ msg += "Name: %s, Version: %s, Type: %s\n" % ( tool_dependencies_tup[ 0 ],
+ tool_dependencies_tup[ 1 ],
+ tool_dependencies_tup[ 2 ] )
+ if tool_dependencies_tup[ 3 ]:
+ msg += "Requirements and installation information:\n"
+ msg += "%s\n" % tool_dependencies_tup[ 3 ]
+ else:
+ msg += "\n"
+ msg += "------------------------------------\n"
+ processed_tool_dependencies.append( tool_dependencies_tup )
+ msg += "\n"
+ msg += "%s" % output.replace( 'done', '' )
+ msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
+ msg += "sh ./scripts/migrate_tools/%04d_tools.sh\n" % latest_tool_migration_script_number
+ msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
+ if have_tool_dependencies:
+ msg += "The tool dependencies listed above will be installed along with the repositories if you add the 'install_dependencies'\n"
+ msg += "option to the above command like this:\n\n"
+ msg += "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv\n"
+ msg += "sh ./scripts/migrate_tools/%04d_tools.sh install_dependencies\n" % latest_tool_migration_script_number
+ msg += "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n"
+ msg += "Tool dependencies can be installed after the repositories have been installed, but installing them now is better.\n\n"
msg += "After the installation process finishes, you can start your Galaxy server. As part of this installation process,\n"
msg += "entries for each of the following tool config files will be added to the file named ./migrated_tool_conf.xml, so these\n"
msg += "tools will continue to be loaded into your tool panel. Because of this, existing entries for these files should be\n"
msg += "removed from your file%s named %s, but only after the installation process finishes.\n\n" % ( plural, tool_panel_config_file_names )
- for i, missing_tool_config in enumerate( missing_tool_configs ):
+ for missing_tool_config, tool_dependencies in missing_tool_configs_dict.items():
msg += "%s\n" % missing_tool_config
- # Should we do the following?
- #if i > 10:
- # msg += "\n...and %d more tools...\n" % ( len( missing_tool_configs ) - ( i + 1 ) )
- # break
msg += "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n"
raise Exception( msg )
else:
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/migrate/common.py
--- a/lib/galaxy/tool_shed/migrate/common.py
+++ b/lib/galaxy/tool_shed/migrate/common.py
@@ -1,4 +1,4 @@
-import sys, os, ConfigParser
+import sys, os, ConfigParser, urllib2
import galaxy.config
import galaxy.datatypes.registry
from galaxy import util, tools
@@ -7,39 +7,63 @@
from galaxy.objectstore import build_object_store_from_config
import galaxy.tool_shed.tool_shed_registry
from galaxy.tool_shed import install_manager
-from galaxy.tool_shed.migrate.common import *
+from galaxy.tool_shed.encoding_util import *
+from galaxy.util.odict import odict
-def check_for_missing_tools( tool_panel_configs, latest_tool_migration_script_number ):
+REPOSITORY_OWNER = 'devteam'
+
+def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
# Get the 000x_tools.xml file associated with the current migrate_tools version number.
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
# Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
- migrated_tool_configs = []
+ migrated_tool_configs_dict = odict()
tree = util.parse_xml( tools_xml_file_path )
root = tree.getroot()
+ tool_shed = root.get( 'name' )
+ tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
for elem in root:
if elem.tag == 'repository':
+ tool_dependencies = []
+ tool_dependencies_dict = {}
+ repository_name = elem.get( 'name' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&webapp=install_manager&no_reset=true' % \
+ ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_dependencies_dict = tool_shed_decode( text )
+ for dependency_key, requirements_dict in tool_dependencies_dict.items():
+ tool_dependency_name = requirements_dict[ 'name' ]
+ tool_dependency_version = requirements_dict[ 'version' ]
+ tool_dependency_type = requirements_dict[ 'type' ]
+ tool_dependency_readme = requirements_dict.get( 'readme', '' )
+ tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
for tool_elem in elem.findall( 'tool' ):
- migrated_tool_configs.append( tool_elem.get( 'file' ) )
+ migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
# Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
- missing_tool_configs = []
+ missing_tool_configs_dict = odict()
for tool_panel_config in tool_panel_configs:
tree = util.parse_xml( tool_panel_config )
root = tree.getroot()
for elem in root:
+ missing_tool_dependencies = []
if elem.tag == 'tool':
- missing_tool_configs = check_tool_tag_set( elem, migrated_tool_configs, missing_tool_configs )
+ missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
elif elem.tag == 'section':
for section_elem in elem:
if section_elem.tag == 'tool':
- missing_tool_configs = check_tool_tag_set( section_elem, migrated_tool_configs, missing_tool_configs )
- return missing_tool_configs
-def check_tool_tag_set( elem, migrated_tool_configs, missing_tool_configs ):
+ missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ return missing_tool_configs_dict
+def check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict ):
file_path = elem.get( 'file', None )
if file_path:
path, name = os.path.split( file_path )
- if name in migrated_tool_configs:
- missing_tool_configs.append( name )
- return missing_tool_configs
+ if name in migrated_tool_configs_dict:
+ tool_dependencies = migrated_tool_configs_dict[ name ]
+ missing_tool_configs_dict[ name ] = tool_dependencies
+ return missing_tool_configs_dict
def get_non_shed_tool_panel_configs( app ):
# Get the non-shed related tool panel configs - there can be more than one, and the default is tool_conf.xml.
config_filenames = []
@@ -52,9 +76,18 @@
if tool_path is None:
config_filenames.append( config_filename )
return config_filenames
+def get_tool_shed_url_from_tools_xml_file_path( app, tool_shed ):
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ return None
+
class MigrateToolsApplication( object ):
"""Encapsulates the state of a basic Galaxy Universe application in order to initiate the Install Manager"""
def __init__( self, tools_migration_config ):
+ install_dependencies = 'install_dependencies' in sys.argv
galaxy_config_file = 'universe_wsgi.ini'
if '-c' in sys.argv:
pos = sys.argv.index( '-c' )
@@ -69,7 +102,7 @@
for key, value in config_parser.items( "app:main" ):
galaxy_config_dict[ key ] = value
self.config = galaxy.config.Configuration( **galaxy_config_dict )
- if self.config.database_connection is None:
+ if not self.config.database_connection:
self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
self.config.update_integrated_tool_panel = True
self.object_store = build_object_store_from_config( self.config )
@@ -106,7 +139,8 @@
'scripts',
'migrate_tools',
tools_migration_config ),
- migrated_tools_config=self.config.migrated_tools_config )
+ migrated_tools_config=self.config.migrated_tools_config,
+ install_dependencies=install_dependencies )
@property
def sa_session( self ):
return self.model.context.current
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tool_shed/migrate/versions/0002_tools.py
--- a/lib/galaxy/tool_shed/migrate/versions/0002_tools.py
+++ b/lib/galaxy/tool_shed/migrate/versions/0002_tools.py
@@ -3,10 +3,6 @@
datatypes_conf.xml.sample. You should remove the Emboss datatypes from your version of datatypes_conf.xml. The
repositories named emboss_5 and emboss_datatypes from the main Galaxy tool shed at http://toolshed.g2.bx.psu.edu
will be installed into your local Galaxy instance at the location discussed above by running the following command.
-
-vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv
-sh ./scripts/migrate_tools/0002_tools.sh
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
"""
import sys
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -41,7 +41,7 @@
self.data_tables[ table.name ] = table
log.debug( "Loaded tool data table '%s'", table.name )
return table_elems
- def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path ):
+ def add_new_entries_from_config_file( self, config_filename, tool_data_table_config_path, persist=False ):
"""
This method is called when a tool shed repository that includes a tool_data_table_conf.xml.sample file is being
installed into a local galaxy instance. We have 2 cases to handle, files whose root tag is <tables>, for example:
@@ -78,7 +78,7 @@
if table.name not in self.data_tables:
self.data_tables[ table.name ] = table
log.debug( "Added new tool data table '%s'", table.name )
- if self.data_table_elem_names != original_data_table_elem_names:
+ if persist and self.data_table_elem_names != original_data_table_elem_names:
# Persist Galaxy's version of the changed tool_data_table_conf.xml file.
self.to_xml_file( tool_data_table_config_path )
return table_elems
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -993,8 +993,8 @@
.first()
def get_url_from_repository_tool_shed( app, repository ):
"""
- The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu
- We need the URL to this tool shed, which is something like: http://toolshed.g2.bx.psu.edu/
+ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is
+ something like: http://toolshed.g2.bx.psu.edu/.
"""
for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
if shed_url.find( repository.tool_shed ) >= 0:
@@ -1019,7 +1019,7 @@
sample_tool_data_table_conf = get_config_from_repository( app, 'tool_data_table_conf.xml.sample', repository, changeset_revision, dir )
# Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
# data_table_elem_names.
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
if error:
# TODO: Do more here than logging an exception.
log.debug( correction_msg )
@@ -1055,15 +1055,15 @@
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
return repository_tools_tups, sample_files_copied
-def handle_sample_tool_data_table_conf_file( app, filename ):
+def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
"""
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary as well as appending them to
- Galaxy's tool_data_table_conf.xml file on disk.
+    Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (which should only
+    occur if the call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's tool_data_table_conf.xml file on disk.
"""
error = False
message = ''
try:
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename, app.config.tool_data_table_config_path )
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( filename, app.config.tool_data_table_config_path, persist=persist )
except Exception, e:
message = str( e )
error = True
diff -r 9ae91fb49fdd5762dc9a0374dfc471733da47143 -r 018179ad4c9bcec30baea5aee0918f45d254deb2 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -11,6 +11,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
+from galaxy.tool_shed.encoding_util import *
from common import *
from galaxy import eggs
@@ -995,6 +996,30 @@
url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
return trans.response.send_redirect( url )
@web.expose
+ def get_tool_dependencies( self, trans, **kwd ):
+        # Handle a request from a local Galaxy instance. If the request originated with the Galaxy instance's InstallManager, the value of 'webapp'
+        # will be 'install_manager'.
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ # If the request originated with the UpdateManager, it will not include a galaxy_url.
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ webapp = params.get( 'webapp', 'community' )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ for downloadable_revision in repository.downloadable_revisions:
+ if downloadable_revision.changeset_revision == changeset_revision:
+ break
+ metadata = downloadable_revision.metadata
+ tool_dependencies = metadata.get( 'tool_dependencies', '' )
+ if webapp == 'install_manager':
+ if tool_dependencies:
+ return tool_shed_encode( tool_dependencies )
+ return ''
+ # TODO: future handler where request comes from some Galaxy admin feature.
+ @web.expose
def browse_repositories( self, trans, **kwd ):
# We add params to the keyword dict in this method in order to rename the param
# with an "f-" prefix, simulating filtering by clicking a search link. We have
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.