galaxy-commits
Threads by month
- ----- 2025 -----
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions

commit/galaxy-central: jgoecks: Trackster: make FeatureTracks compatible with 09c6c980e463 in preparation for enabling FeatureTracks in composite tracks.
by Bitbucket 14 Dec '11
by Bitbucket 14 Dec '11
14 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/13ba6909faae/
changeset: 13ba6909faae
user: jgoecks
date: 2011-12-14 23:36:18
summary: Trackster: make FeatureTracks compatible with 09c6c980e463 in preparation for enabling FeatureTracks in composite tracks.
affected #: 1 file
diff -r 501ef487e37f0f6ff2e6def235f3878a88d0a7c0 -r 13ba6909faae12b0a7efcdc76e0797c5e7903929 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -3111,17 +3111,27 @@
if ( can_draw_now ) {
// Set up and draw tile.
extend(tile_data, more_tile_data);
+
+ // HACK: this is FeatureTrack-specific.
+ // If track mode is Auto, determine mode and update.
+ var mode = track.mode;
+ if (mode === "Auto") {
+ mode = track.get_mode(tile_data);
+ track.update_auto_mode(mode);
+ }
+
+ // Draw canvas.
var
canvas = track.view.canvas_manager.new_canvas(),
tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
width = Math.ceil( (tile_high - tile_low) * w_scale ) + track.left_offset,
- height = track.get_canvas_height(tile_data);
+ height = track.get_canvas_height(tile_data, mode, w_scale, width);
canvas.width = width;
canvas.height = height;
- var tile = track.draw_tile(tile_data, canvas, track.mode, resolution, tile_index, w_scale, seq_data);
+ var tile = track.draw_tile(tile_data, canvas, mode, resolution, tile_index, w_scale, seq_data);
// Don't cache, show if no tile.
if (tile !== undefined) {
@@ -3145,7 +3155,7 @@
* Returns canvas height needed to display data; return value is an integer that denotes the
* number of pixels required.
*/
- get_canvas_height: function(data) {
+ get_canvas_height: function(result, mode, w_scale, canvas_width) {
return this.height_px;
},
/**
@@ -3416,13 +3426,14 @@
if ( can_draw_now ) {
// Set up and draw tile.
extend(tile_data, more_tile_data);
+
var
canvas = track.view.canvas_manager.new_canvas(),
tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
width = Math.ceil( (tile_high - tile_low) * w_scale ),
- height = track.get_canvas_height(tile_data);
+ height = track.get_canvas_height(tile_data, mode, w_scale, width);
// FIXME:
// (a) right now, only LineTracks respect width/height setting and do not set it in draw_tile;
@@ -3914,6 +3925,58 @@
return {max: max_count, delta: bin_size, data: bins};
},
/**
+ * Returns appropriate display mode based on data.
+ */
+ get_mode: function(data) {
+ if (data.dataset_type === "summary_tree") {
+ mode = "summary_tree";
+ }
+ // HACK: use no_detail mode track is in overview to prevent overview from being too large.
+ else if (data.extra_info === "no_detail" || this.is_overview) {
+ mode = "no_detail";
+ }
+ else {
+ // Choose b/t Squish and Pack.
+ // Proxy measures for using Squish:
+ // (a) error message re: limiting number of features shown;
+ // (b) X number of features shown;
+ // (c) size of view shown.
+ // TODO: cannot use (a) and (b) because it requires coordinating mode across tiles;
+ // fix this so that tiles are redrawn as necessary to use the same mode.
+ //if ( (result.message && result.message.match(/^Only the first [\d]+/)) ||
+ // (result.data && result.data.length > 2000) ||
+ //var data = result.data;
+ // if ( (data.length && data.length < 4) ||
+ // (this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH) ) {
+ if ( this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH ) {
+ mode = "Squish";
+ } else {
+ mode = "Pack";
+ }
+ }
+ return mode;
+ },
+ /**
+ * Returns canvas height needed to display data; return value is an integer that denotes the
+ * number of pixels required.
+ */
+ get_canvas_height: function(result, mode, w_scale, canvas_width) {
+ if (mode === "summary_tree" || mode === "Histogram") {
+ // Extra padding at top of summary tree so label does not overlap data.
+ return this.summary_draw_height + SUMMARY_TREE_TOP_PADDING;
+ }
+ else {
+ var rows_required = 1;
+ if (mode === "no_detail" || mode === "Squish" || mode === "Pack") {
+ var rows_required = this.incremental_slots(w_scale, result.data, mode);
+ }
+ // HACK: use dummy painter to get required height. Painter should be extended so that get_required_height
+ // works as a static function.
+ var dummy_painter = new (this.painter)(null, null, null, this.prefs, mode);
+ return Math.max(MIN_TRACK_HEIGHT, dummy_painter.get_required_height(rows_required, canvas_width) );
+ }
+ },
+ /**
* Draw FeatureTrack tile.
* @param result result from server
* @param canvas canvas to draw on
@@ -3925,50 +3988,14 @@
*/
draw_tile: function(result, canvas, mode, resolution, tile_index, w_scale, ref_seq) {
var track = this,
- tile_bounds = track._get_tile_bounds(tile_index, resolution),
+ tile_bounds = this._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
- tile_span = tile_high - tile_low,
- width = Math.ceil(tile_span * w_scale),
min_height = 25,
- left_offset = this.left_offset,
- slots,
- required_height;
-
- // Set display mode if Auto.
- if (mode === "Auto") {
- if (result.dataset_type === "summary_tree") {
- mode = result.dataset_type;
- }
- // HACK: use no_detail mode track is in overview to prevent overview from being too large.
- else if (result.extra_info === "no_detail" || track.is_overview) {
- mode = "no_detail";
- }
- else {
- // Choose b/t Squish and Pack.
- // Proxy measures for using Squish:
- // (a) error message re: limiting number of features shown;
- // (b) X number of features shown;
- // (c) size of view shown.
- // TODO: cannot use (a) and (b) because it requires coordinating mode across tiles;
- // fix this so that tiles are redrawn as necessary to use the same mode.
- //if ( (result.message && result.message.match(/^Only the first [\d]+/)) ||
- // (result.data && result.data.length > 2000) ||
- var data = result.data;
- // if ( (data.length && data.length < 4) ||
- // (this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH) ) {
- if ( this.view.high - this.view.low > MIN_SQUISH_VIEW_WIDTH ) {
- mode = "Squish";
- } else {
- mode = "Pack";
- }
- }
- this.update_auto_mode( mode );
- }
+ left_offset = this.left_offset;
// Drawing the summary tree (feature coverage histogram)
if (mode === "summary_tree" || mode === "Histogram") {
- required_height = this.summary_draw_height;
// Add label to container div showing maximum count
// TODO: this shouldn't be done at the tile level
this.container_div.find(".yaxislabel").remove();
@@ -3976,9 +4003,6 @@
max_label.text(result.max);
max_label.css({ position: "absolute", top: "24px", left: "10px", color: this.prefs.label_color });
max_label.prependTo(this.container_div);
- canvas.width = width + left_offset;
- // Extra padding at top of summary tree
- canvas.height = required_height + SUMMARY_TREE_TOP_PADDING;
// Get summary tree data if necessary and set max if there is one.
if (result.dataset_type != "summary_tree") {
@@ -3993,18 +4017,11 @@
var ctx = canvas.getContext("2d");
// Deal with left_offset by translating.
ctx.translate(left_offset, SUMMARY_TREE_TOP_PADDING);
- painter.draw(ctx, width, required_height);
+ painter.draw(ctx, canvas.width, canvas.height);
return new SummaryTreeTile(track, tile_index, resolution, canvas, result.data, result.max);
}
- // Start dealing with row-by-row tracks
-
- // If working with a mode where slotting is necessary, update the incremental slotting
- var slots, slots_required = 1;
- if ( mode === "no_detail" || mode === "Squish" || mode === "Pack" ) {
- slots_required = this.incremental_slots(w_scale, result.data, mode);
- slots = this.inc_slots[w_scale].slots;
- }
+ // Handle row-by-row tracks
// Filter features.
var filtered = [];
@@ -4028,16 +4045,12 @@
}
}
- // Create painter, and canvas of sufficient size to contain all features.
+ // Create painter.
var filter_alpha_scaler = (this.filters_manager.alpha_filter ? new FilterScaler(this.filters_manager.alpha_filter) : null);
var filter_height_scaler = (this.filters_manager.height_filter ? new FilterScaler(this.filters_manager.height_filter) : null);
// HACK: ref_seq will only be defined for ReadTracks, and only the ReadPainter accepts that argument
var painter = new (this.painter)(filtered, tile_low, tile_high, this.prefs, mode, filter_alpha_scaler, filter_height_scaler, ref_seq);
- var required_height = Math.max(MIN_TRACK_HEIGHT, painter.get_required_height(slots_required,width));
var feature_mapper = null;
-
- canvas.width = width + left_offset;
- canvas.height = required_height;
// console.log(( tile_low - this.view.low ) * w_scale, tile_index, w_scale);
var ctx = canvas.getContext("2d");
@@ -4048,8 +4061,9 @@
if (result.data) {
// Draw features.
+ slots = this.inc_slots[w_scale].slots;
ctx.translate(left_offset, 0);
- feature_mapper = painter.draw(ctx, width, required_height, slots);
+ feature_mapper = painter.draw(ctx, canvas.width, canvas.height, slots);
feature_mapper.translation = -left_offset;
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Trackster: small fixes so that ReferenceTracks are compatible with changes in 09c6c980e463
by Bitbucket 14 Dec '11
by Bitbucket 14 Dec '11
14 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/501ef487e37f/
changeset: 501ef487e37f
user: jgoecks
date: 2011-12-14 21:25:03
summary: Trackster: small fixes so that ReferenceTracks are compatible with changes in 09c6c980e463
affected #: 1 file
diff -r 0c804033ae5a766f1d12a2030b73fe306c27f660 -r 501ef487e37f0f6ff2e6def235f3878a88d0a7c0 static/scripts/trackster.js
--- a/static/scripts/trackster.js
+++ b/static/scripts/trackster.js
@@ -3017,7 +3017,7 @@
var all_tiles_drawn = true;
var drawn_tiles = [];
var tile_count = 0;
- var is_tile = function(o) { return ('track' in o) };
+ var is_tile = function(o) { return (o && 'track' in o) };
// Draw or fetch and show tiles.
while ( ( tile_index * DENSITY * resolution ) < high ) {
var draw_result = this.draw_helper( force, width, tile_index, resolution, parent_element, w_scale );
@@ -3116,7 +3116,7 @@
tile_bounds = track._get_tile_bounds(tile_index, resolution),
tile_low = tile_bounds[0],
tile_high = tile_bounds[1],
- width = Math.ceil( (tile_high - tile_low) * w_scale ),
+ width = Math.ceil( (tile_high - tile_low) * w_scale ) + track.left_offset,
height = track.get_canvas_height(tile_data);
canvas.width = width;
@@ -3483,17 +3483,14 @@
* Draw ReferenceTrack tile.
*/
draw_tile: function(seq, canvas, mode, resolution, tile_index, w_scale) {
- var track = this,
- tile_length = DENSITY * resolution;
+ var track = this;
if (w_scale > this.view.canvas_manager.char_width_px) {
if (seq.data === null) {
track.content_div.css("height", "0px");
return;
}
- var ctx = canvas.getContext("2d");
- canvas.width = Math.ceil(tile_length * w_scale + track.left_offset);
- canvas.height = track.height_px;
+ var ctx = canvas.getContext("2d");
ctx.font = ctx.canvas.manager.default_font;
ctx.textAlign = "center";
seq = seq.data;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0c804033ae5a/
changeset: 0c804033ae5a
user: greg
date: 2011-12-14 16:18:43
summary: 1. Add a new UpdateManager for use with tool shed repositories installed into a local Galaxy instance. The UpdateManager will poll all appropriate tool sheds to see if updates are available for each of the installed repositories. Polling occurs when the Galaxy server is started. In addition, a config setting tells the UpdateManager to poll after the configured number of hours have passed. If updates are available for an installed repository, a table column is updated, and the repository name is highlighted in red, alerting the Galaxy admin that updates are available for that repository.
2. Add a new ToolIdGuidMap grid that displays all of the mappings between tool ids whose tools used to be in the distribution and guids, which is the new tool id for tools that are installed with repositories from tool sheds.
3. Add a new column named installed_changeset_revsion to the tool_shed_repository table. This column is set when the repository is installed and remains static thereafter.
4. Move several tool shed related components to a new ~/lib/galaxy/tool_shed directory in the code base.
affected #: 15 files
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -3,7 +3,7 @@
from galaxy import config, jobs, util, tools, web
import galaxy.tools.search
import galaxy.tools.data
-import galaxy.tools.tool_shed_registry
+import galaxy.tool_shed.tool_shed_registry
from galaxy.web import security
import galaxy.model
import galaxy.datatypes.registry
@@ -28,7 +28,7 @@
galaxy.model.set_datatypes_registry( self.datatypes_registry )
# Set up the tool sheds registry
if os.path.isfile( self.config.tool_sheds_config ):
- self.tool_shed_registry = galaxy.tools.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
+ self.tool_shed_registry = galaxy.tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config )
else:
self.tool_shed_registry = None
# Determine the database url
@@ -61,8 +61,13 @@
# If enabled, check for tools missing from the distribution because they
# have been moved to the tool shed and install all such discovered tools.
if self.config.get_bool( 'enable_tool_shed_install', False ):
- from tools import install_manager
+ from tool_shed import install_manager
self.install_manager = install_manager.InstallManager( self, self.config.tool_shed_install_config, self.config.install_tool_config )
+ # If enabled, poll respective tool sheds to see if updates are
+ # available for any installed tool shed repositories.
+ if self.config.get_bool( 'enable_tool_shed_check', False ):
+ from tool_shed import update_manager
+ self.update_manager = update_manager.UpdateManager( self )
# Load datatype converters
self.datatypes_registry.load_datatype_converters( self.toolbox )
# Load history import/export tools
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -47,12 +47,12 @@
self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
self.tool_sheds_config = kwargs.get( 'tool_sheds_config_file', 'tool_sheds_conf.xml' )
- self.enable_unique_workflow_defaults = string_as_bool( kwargs.get ( 'enable_unique_workflow_defaults', False ) )
+ self.enable_unique_workflow_defaults = string_as_bool( kwargs.get( 'enable_unique_workflow_defaults', False ) )
self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
self.len_file_path = kwargs.get( "len_file_path", resolve_path(os.path.join(self.tool_data_path, 'shared','ucsc','chrom'), self.root) )
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
- self.enable_tool_shed_install = string_as_bool( kwargs.get ( 'enable_tool_shed_install', False ) )
+ self.enable_tool_shed_install = string_as_bool( kwargs.get( 'enable_tool_shed_install', False ) )
self.tool_shed_install_config = resolve_path( kwargs.get( "tool_shed_install_config_file", "tool_shed_install.xml" ), self.root )
self.install_tool_config = resolve_path( kwargs.get( "install_tool_config_file", "shed_tool_conf.xml" ), self.root )
if 'tool_config_file' in kwargs:
@@ -63,6 +63,13 @@
tcf = 'tool_conf.xml'
self.tool_configs = [ resolve_path( p, self.root ) for p in listify( tcf ) ]
self.tool_data_table_config_path = resolve_path( kwargs.get( 'tool_data_table_config_path', 'tool_data_table_conf.xml' ), self.root )
+ self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
+ try:
+ self.hours_between_check = int( kwargs.get( 'hours_between_check', 12 ) )
+ if self.hours_between_check < 1 or self.hours_between_check > 24:
+ self.hours_between_check = 12
+ except:
+ self.hours_between_check = 12
self.tool_secret = kwargs.get( "tool_secret", "" )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2660,7 +2660,7 @@
pass
class ToolShedRepository( object ):
- def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None,
+ def __init__( self, id=None, create_time=None, tool_shed=None, name=None, description=None, owner=None, installed_changeset_revision=None,
changeset_revision=None, metadata=None, includes_datatypes=False, update_available=False, deleted=False ):
self.id = id
self.create_time = create_time
@@ -2668,6 +2668,7 @@
self.name = name
self.description = description
self.owner = owner
+ self.installed_changeset_revision = installed_changeset_revision
self.changeset_revision = changeset_revision
self.metadata = metadata
self.includes_datatypes = includes_datatypes
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -372,6 +372,7 @@
Column( "name", TrimmedString( 255 ), index=True ),
Column( "description" , TEXT ),
Column( "owner", TrimmedString( 255 ), index=True ),
+ Column( "installed_changeset_revision", TrimmedString( 255 ) ),
Column( "changeset_revision", TrimmedString( 255 ), index=True ),
Column( "metadata", JSONType, nullable=True ),
Column( "includes_datatypes", Boolean, index=True, default=False ),
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0088_add_installed_changeset_revison_column.py
@@ -0,0 +1,63 @@
+"""
+Migration script to add the installed_changeset_revision column to the tool_shed_repository table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ col = Column( "installed_changeset_revision", TrimmedString( 255 ) )
+ try:
+ col.create( ToolShedRepository_table )
+ assert col is ToolShedRepository_table.c.installed_changeset_revision
+ except Exception, e:
+ print "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e )
+ log.debug( "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) )
+ # Update each row by setting the value of installed_changeset_revison to be the value of changeset_revision.
+ # This will be problematic if the value of changeset_revision was updated to something other than the value
+ # that it was when the repository was installed (because the install path determined in real time will attempt to
+ # find the repository using the updated changeset_revison instead of the required installed_changeset_revision),
+ # but at the time this script was written, this scenario is extremely unlikely.
+ cmd = "SELECT id AS id, " \
+ + "installed_changeset_revision AS installed_changeset_revision, " \
+ + "changeset_revision AS changeset_revision " \
+ + "FROM tool_shed_repository;"
+ tool_shed_repositories = db_session.execute( cmd ).fetchall()
+ update_count = 0
+ for row in tool_shed_repositories:
+ cmd = "UPDATE tool_shed_repository " \
+ + "SET installed_changeset_revision = '%s' " % row.changeset_revision \
+ + "WHERE changeset_revision = '%s';" % row.changeset_revision
+ db_session.execute( cmd )
+ update_count += 1
+ print "Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table. "
+def downgrade():
+ metadata.reflect()
+ ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
+ try:
+ ToolShedRepository_table.c.installed_changeset_revision.drop()
+ except Exception, e:
+ print "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e )
+ log.debug( "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/__init__.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -0,0 +1,3 @@
+"""
+Classes encapsulating the relationships between Galaxy and Galaxy tool sheds.
+"""
\ No newline at end of file
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/install_manager.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -0,0 +1,160 @@
+"""
+Manage automatic installation of tools configured in tool_shed_install.xml, all of which were
+at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool
+shed. Tools included in tool_shed_install.xml that have already been installed will not be
+re-installed.
+"""
+from galaxy.util.shed_util import *
+
+log = logging.getLogger( __name__ )
+
+class InstallManager( object ):
+ def __init__( self, app, tool_shed_install_config, install_tool_config ):
+ """
+ Check tool settings in tool_shed_install_config and install all tools that are
+ not already installed. The tool panel configuration file is the received
+ shed_tool_config, which defaults to shed_tool_conf.xml.
+ """
+ self.app = app
+ self.sa_session = self.app.model.context.current
+ self.install_tool_config = install_tool_config
+ # Parse shed_tool_config to get the install location (tool_path).
+ tree = util.parse_xml( install_tool_config )
+ root = tree.getroot()
+ self.tool_path = root.get( 'tool_path' )
+ self.app.toolbox.shed_tool_confs[ install_tool_config ] = self.tool_path
+ # Parse tool_shed_install_config to check each of the tools.
+ log.debug( "Parsing tool shed install configuration %s" % tool_shed_install_config )
+ self.tool_shed_install_config = tool_shed_install_config
+ tree = util.parse_xml( tool_shed_install_config )
+ root = tree.getroot()
+ self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
+ log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) )
+ self.repository_owner = 'devteam'
+ for elem in root:
+ if elem.tag == 'repository':
+ self.install_repository( elem )
+ elif elem.tag == 'section':
+ self.install_section( elem )
+ def install_repository( self, elem, section_name='', section_id='' ):
+ # Install a single repository into the tool config. If outside of any sections, the entry looks something like:
+ # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" changeset_revision="f3ed6cfe6402">
+ # <tool id="Cut1" version="1.0.1" />
+ # </repository>
+ name = elem.get( 'name' )
+ description = elem.get( 'description' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
+ clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
+ if self.__isinstalled( elem, clone_dir ):
+ log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
+ else:
+ if section_name and section_id:
+ section_key = 'section_%s' % str( section_id )
+ if section_key in self.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in self.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % section_name )
+ tool_section = self.app.toolbox.tool_panel[ section_key ]
+ else:
+ # Appending a new section to self.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % section_name )
+ new_section_elem = Element( 'section' )
+ new_section_elem.attrib[ 'name' ] = section_name
+ new_section_elem.attrib[ 'id' ] = section_id
+ tool_section = ToolSection( new_section_elem )
+ self.app.toolbox.tool_panel[ section_key ] = tool_section
+ else:
+ tool_section = None
+ current_working_dir = os.getcwd()
+ tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
+ relative_install_dir = os.path.join( clone_dir, name )
+ returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
+ if returncode == 0:
+ returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
+ if returncode == 0:
+ metadata_dict = load_repository_contents( self.app,
+ name,
+ description,
+ self.repository_owner,
+ changeset_revision,
+ repository_clone_url,
+ self.install_tool_config,
+ self.tool_path,
+ tool_section,
+ relative_install_dir,
+ current_working_dir,
+ tmp_name )
+ # Add a new record to the tool_id_guid_map table for each
+ # tool in the repository if one doesn't already exist.
+ if 'tools' in metadata_dict:
+ tools_mapped = 0
+ for tool_dict in metadata_dict[ 'tools' ]:
+ flush_needed = False
+ tool_id = tool_dict[ 'id' ]
+ tool_version = tool_dict[ 'version' ]
+ guid = tool_dict[ 'guid' ]
+ tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
+ if tool_id_guid_map:
+ if tool_id_guid_map.guid != guid:
+ tool_id_guid_map.guid = guid
+ flush_needed = True
+ else:
+ tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id,
+ tool_version=tool_version,
+ tool_shed=self.tool_shed,
+ repository_owner=self.repository_owner,
+ repository_name=name,
+ guid=guid )
+ flush_needed = True
+ if flush_needed:
+ self.sa_session.add( tool_id_guid_map )
+ self.sa_session.flush()
+ tools_mapped += 1
+ log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ tmp_stderr.close()
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ tmp_stderr.close()
+ def install_section( self, elem ):
+ # Install 1 or more repositories into a section in the tool config. An entry looks something like:
+ # <section name="EMBOSS" id="EMBOSSLite">
+ # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" changeset_revision="bdd88ae5d0ac">
+ # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" />
+ # ...
+ # </repository>
+ # </section>
+ section_name = elem.get( 'name' )
+ section_id = elem.get( 'id' )
+ for repository_elem in elem:
+ self.install_repository( repository_elem, section_name=section_name, section_id=section_id )
+ def __get_url_from_tool_shed( self, tool_shed ):
+ # The value of tool_shed is something like: toolshed.g2.bx.psu.edu
+ # We need the URL to this tool shed, which is something like:
+ # http://toolshed.g2.bx.psu.edu/
+ for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally
+ # installed must no longer be configured in tool_sheds_conf.xml.
+ return None
+ def __isinstalled( self, repository_elem, clone_dir ):
+ name = repository_elem.get( 'name' )
+ installed = False
+ for tool_elem in repository_elem:
+ tool_config = tool_elem.get( 'file' )
+ tool_id = tool_elem.get( 'id' )
+ tool_version = tool_elem.get( 'version' )
+ tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
+ if tigm:
+ # A record exists in the tool_id_guid_map table, so see if the repository is installed.
+ if os.path.exists( clone_dir ):
+ installed = True
+ break
+ return installed
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/tool_shed_registry.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/tool_shed_registry.py
@@ -0,0 +1,31 @@
+import sys, logging
+from galaxy.util import parse_xml
+from galaxy.util.odict import odict
+
+log = logging.getLogger( __name__ )
+
+if sys.version_info[:2] == ( 2, 4 ):
+ from galaxy import eggs
+ eggs.require( 'ElementTree' )
+ from elementtree import ElementTree
+else:
+ from xml.etree import ElementTree
+
+class Registry( object ):
+ def __init__( self, root_dir=None, config=None ):
+ self.tool_sheds = odict()
+ if root_dir and config:
+ # Parse datatypes_conf.xml
+ tree = parse_xml( config )
+ root = tree.getroot()
+ # Load datatypes and converters from config
+ log.debug( 'Loading references to tool sheds from %s' % config )
+ for elem in root.findall( 'tool_shed' ):
+ try:
+ name = elem.get( 'name', None )
+ url = elem.get( 'url', None )
+ if name and url:
+ self.tool_sheds[ name ] = url
+ log.debug( 'Loaded reference to tool shed: %s' % name )
+ except Exception, e:
+ log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tool_shed/update_manager.py
--- /dev/null
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -0,0 +1,67 @@
+"""
+Determine if installed tool shed repositories have updates available in their respective tool sheds.
+"""
+import threading, urllib2, logging
+from galaxy.util import string_as_bool
+from galaxy.util.shed_util import *
+
+log = logging.getLogger( __name__ )
+
+class UpdateManager( object ):
+ def __init__( self, app ):
+ """
+ Check tool settings in tool_shed_install_config and install all tools that are
+ not already installed. The tool panel configuration file is the received
+ shed_tool_config, which defaults to shed_tool_conf.xml.
+ """
+ self.app = app
+ self.sa_session = self.app.model.context.current
+ # Ideally only one Galaxy server process
+ # should be able to check for repository updates.
+ self.running = True
+ self.sleeper = Sleeper()
+ self.restarter = threading.Thread( target=self.__restarter )
+ self.restarter.start()
+ self.seconds_to_sleep = app.config.hours_between_check * 3600
+ def __restarter( self ):
+ log.info( 'Update manager restarter starting up...' )
+ while self.running:
+ flush_needed = False
+ for repository in self.sa_session.query( self.app.model.ToolShedRepository ) \
+ .filter( and_( self.app.model.ToolShedRepository.table.c.update_available == False,
+ self.app.model.ToolShedRepository.table.c.deleted == False ) ):
+ if self.check_for_update( repository ):
+ repository.update_available = True
+ self.sa_session.add( repository )
+ flush_needed = True
+ if flush_needed:
+ self.sa_session.flush()
+ self.sleeper.sleep( self.seconds_to_sleep )
+ log.info( 'Transfer job restarter shutting down...' )
+ def check_for_update( self, repository ):
+ tool_shed_url = get_url_from_repository_tool_shed( self.app, repository )
+ url = '%s/repository/check_for_updates?name=%s&owner=%s&changeset_revision=%s&webapp=update_manager' % \
+ ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ return string_as_bool( text )
+ def shutdown( self ):
+ self.running = False
+ self.sleeper.wake()
+
+class Sleeper( object ):
+ """
+ Provides a 'sleep' method that sleeps for a number of seconds *unless*
+ the notify method is called (from a different thread).
+ """
+ def __init__( self ):
+ self.condition = threading.Condition()
+ def sleep( self, seconds ):
+ self.condition.acquire()
+ self.condition.wait( seconds )
+ self.condition.release()
+ def wake( self ):
+ self.condition.acquire()
+ self.condition.notify()
+ self.condition.release()
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tools/install_manager.py
--- a/lib/galaxy/tools/install_manager.py
+++ /dev/null
@@ -1,160 +0,0 @@
-"""
-Manage automatic installation of tools configured in tool_shed_install.xml, all of which were
-at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool
-shed. Tools included in tool_shed_install.xml that have already been installed will not be
-re-installed.
-"""
-from galaxy.util.shed_util import *
-
-log = logging.getLogger( __name__ )
-
-class InstallManager( object ):
- def __init__( self, app, tool_shed_install_config, install_tool_config ):
- """
- Check tool settings in tool_shed_install_config and install all tools that are
- not already installed. The tool panel configuration file is the received
- shed_tool_config, which defaults to shed_tool_conf.xml.
- """
- self.app = app
- self.sa_session = self.app.model.context.current
- self.install_tool_config = install_tool_config
- # Parse shed_tool_config to get the install location (tool_path).
- tree = util.parse_xml( install_tool_config )
- root = tree.getroot()
- self.tool_path = root.get( 'tool_path' )
- self.app.toolbox.shed_tool_confs[ install_tool_config ] = self.tool_path
- # Parse tool_shed_install_config to check each of the tools.
- log.debug( "Parsing tool shed install configuration %s" % tool_shed_install_config )
- self.tool_shed_install_config = tool_shed_install_config
- tree = util.parse_xml( tool_shed_install_config )
- root = tree.getroot()
- self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
- log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) )
- self.repository_owner = 'devteam'
- for elem in root:
- if elem.tag == 'repository':
- self.install_repository( elem )
- elif elem.tag == 'section':
- self.install_section( elem )
- def install_repository( self, elem, section_name='', section_id='' ):
- # Install a single repository into the tool config. If outside of any sections, the entry looks something like:
- # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" changeset_revision="f3ed6cfe6402">
- # <tool id="Cut1" version="1.0.1" />
- # </repository>
- name = elem.get( 'name' )
- description = elem.get( 'description' )
- changeset_revision = elem.get( 'changeset_revision' )
- # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
- clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
- if self.__isinstalled( elem, clone_dir ):
- log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
- else:
- if section_name and section_id:
- section_key = 'section_%s' % str( section_id )
- if section_key in self.app.toolbox.tool_panel:
- # Appending a tool to an existing section in self.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % section_name )
- tool_section = self.app.toolbox.tool_panel[ section_key ]
- else:
- # Appending a new section to self.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % section_name )
- new_section_elem = Element( 'section' )
- new_section_elem.attrib[ 'name' ] = section_name
- new_section_elem.attrib[ 'id' ] = section_id
- tool_section = ToolSection( new_section_elem )
- self.app.toolbox.tool_panel[ section_key ] = tool_section
- else:
- tool_section = None
- current_working_dir = os.getcwd()
- tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
- repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
- relative_install_dir = os.path.join( clone_dir, name )
- returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
- if returncode == 0:
- returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
- if returncode == 0:
- metadata_dict = load_repository_contents( self.app,
- name,
- description,
- self.repository_owner,
- changeset_revision,
- repository_clone_url,
- self.install_tool_config,
- self.tool_path,
- tool_section,
- relative_install_dir,
- current_working_dir,
- tmp_name )
- # Add a new record to the tool_id_guid_map table for each
- # tool in the repository if one doesn't already exist.
- if 'tools' in metadata_dict:
- tools_mapped = 0
- for tool_dict in metadata_dict[ 'tools' ]:
- flush_needed = False
- tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- guid = tool_dict[ 'guid' ]
- tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
- if tool_id_guid_map:
- if tool_id_guid_map.guid != guid:
- tool_id_guid_map.guid = guid
- flush_needed = True
- else:
- tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id,
- tool_version=tool_version,
- tool_shed=self.tool_shed,
- repository_owner=self.repository_owner,
- repository_name=name,
- guid=guid )
- flush_needed = True
- if flush_needed:
- self.sa_session.add( tool_id_guid_map )
- self.sa_session.flush()
- tools_mapped += 1
- log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
- tmp_stderr.close()
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) )
- tmp_stderr.close()
- def install_section( self, elem ):
- # Install 1 or more repositories into a section in the tool config. An entry looks something like:
- # <section name="EMBOSS" id="EMBOSSLite">
- # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" changeset_revision="bdd88ae5d0ac">
- # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" />
- # ...
- # </repository>
- # </section>
- section_name = elem.get( 'name' )
- section_id = elem.get( 'id' )
- for repository_elem in elem:
- self.install_repository( repository_elem, section_name=section_name, section_id=section_id )
- def __get_url_from_tool_shed( self, tool_shed ):
- # The value of tool_shed is something like: toolshed.g2.bx.psu.edu
- # We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in self.app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally
- # installed must no longer be configured in tool_sheds_conf.xml.
- return None
- def __isinstalled( self, repository_elem, clone_dir ):
- name = repository_elem.get( 'name' )
- installed = False
- for tool_elem in repository_elem:
- tool_config = tool_elem.get( 'file' )
- tool_id = tool_elem.get( 'id' )
- tool_version = tool_elem.get( 'version' )
- tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
- if tigm:
- # A record exists in the tool_id_guid_map table, so see if the repository is installed.
- if os.path.exists( clone_dir ):
- installed = True
- break
- return installed
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/tools/tool_shed_registry.py
--- a/lib/galaxy/tools/tool_shed_registry.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import sys, logging
-from galaxy.util import parse_xml
-from galaxy.util.odict import odict
-
-log = logging.getLogger( __name__ )
-
-if sys.version_info[:2] == ( 2, 4 ):
- from galaxy import eggs
- eggs.require( 'ElementTree' )
- from elementtree import ElementTree
-else:
- from xml.etree import ElementTree
-
-class Registry( object ):
- def __init__( self, root_dir=None, config=None ):
- self.tool_sheds = odict()
- if root_dir and config:
- # Parse datatypes_conf.xml
- tree = parse_xml( config )
- root = tree.getroot()
- # Load datatypes and converters from config
- log.debug( 'Loading references to tool sheds from %s' % config )
- for elem in root.findall( 'tool_shed' ):
- try:
- name = elem.get( 'name', None )
- url = elem.get( 'url', None )
- if name and url:
- self.tool_sheds[ name ] = url
- log.debug( 'Loaded reference to tool shed: %s' % name )
- except Exception, e:
- log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -65,7 +65,8 @@
return tool_shed_url.rstrip( '/' )
def clone_repository( name, clone_dir, current_working_dir, repository_clone_url ):
log.debug( "Installing repository '%s'" % name )
- os.makedirs( clone_dir )
+ if not os.path.exists( clone_dir ):
+ os.makedirs( clone_dir )
log.debug( 'Cloning %s' % repository_clone_url )
cmd = 'hg clone %s' % repository_clone_url
tmp_name = tempfile.NamedTemporaryFile().name
@@ -88,15 +89,18 @@
tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
if tool_shed_repository:
if tool_shed_repository.deleted:
+ tool_shed_repository.description = description
+ tool_shed_repository.changeset_revision = changeset_revision
+ tool_shed_repository.metadata = metadata_dict
+ tool_shed_repository.includes_datatypes = includes_datatypes
tool_shed_repository.deleted = False
- # Reset includes_datatypes in case metadata changed since last installed.
- tool_shed_repository.includes_datatypes = includes_datatypes
flush_needed = True
else:
tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
name=name,
description=description,
owner=owner,
+ installed_changeset_revision=changeset_revision,
changeset_revision=changeset_revision,
metadata=metadata_dict,
includes_datatypes=includes_datatypes )
@@ -318,6 +322,20 @@
app.model.ToolIdGuidMap.table.c.repository_owner == repository_owner,
app.model.ToolIdGuidMap.table.c.repository_name == repository_name ) ) \
.first()
+def get_url_from_repository_tool_shed( app, repository ):
+ """
+ This method is used by the UpdateManager, which does not have access to trans.
+ The stored value of repository.tool_shed is something like: toolshed.g2.bx.psu.edu
+ We need the URL to this tool shed, which is something like: http://toolshed.g2.bx.psu.edu/
+ """
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( repository.tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally
+ # installed must no longer be configured in tool_sheds_conf.xml.
+ return None
def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically
@@ -532,7 +550,19 @@
if level and ( not elem.tail or not elem.tail.strip() ):
elem.tail = i + pad
return elem
-def update_repository( current_working_dir, relative_install_dir, changeset_revision ):
+def pull_repository( current_working_dir, repo_files_dir, name ):
+ # Pull the latest possible contents to the repository.
+ log.debug( "Pulling latest updates to the repository named '%s'" % name )
+ cmd = 'hg pull'
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stderr = open( tmp_name, 'wb' )
+ os.chdir( repo_files_dir )
+ proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stderr.close()
+ return returncode, tmp_name
+def update_repository( current_working_dir, repo_files_dir, changeset_revision ):
# Update the cloned repository to changeset_revision. It is imperative that the
# installed repository is updated to the desired changeset_revision before metadata
# is set because the process for setting metadata uses the repository files on disk.
@@ -540,7 +570,7 @@
cmd = 'hg update -r %s' % changeset_revision
tmp_name = tempfile.NamedTemporaryFile().name
tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( relative_install_dir )
+ os.chdir( repo_files_dir )
proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
returncode = proc.wait()
os.chdir( current_working_dir )
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -3,6 +3,55 @@
log = logging.getLogger( __name__ )
+class ToolIdGuidMapGrid( grids.Grid ):
+ class ToolIdColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.tool_id
+ class ToolVersionColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.tool_version
+ class ToolGuidColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.guid
+ class ToolShedColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.tool_shed
+ class RepositoryNameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.repository_name
+ class RepositoryOwnerColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, tool_id_guid_map ):
+ return tool_id_guid_map.repository_owner
+ # Grid definition
+ title = "Map tool id to guid"
+ model_class = model.ToolIdGuidMap
+ template='/admin/tool_shed_repository/grid.mako'
+ default_sort_key = "tool_id"
+ columns = [
+ ToolIdColumn( "Tool id" ),
+ ToolVersionColumn( "Version" ),
+ ToolGuidColumn( "Guid" ),
+ ToolShedColumn( "Tool shed" ),
+ RepositoryNameColumn( "Repository name" ),
+ RepositoryOwnerColumn( "Repository owner" )
+ ]
+ columns.append( grids.MulticolFilterColumn( "Search repository name",
+ cols_to_filter=[ columns[0], columns[2], columns[4], columns[5] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "Manage installed tool shed repositories", dict( controller='admin_toolshed', action='browse_repositories' ) )
+ ]
+ operations = []
+ standard_filters = []
+ default_filter = {}
+ num_rows_per_page = 50
+ preserve_state = False
+ use_paging = True
+ def build_initial_query( self, trans, **kwd ):
+ return trans.sa_session.query( self.model_class )
+
class RepositoryListGrid( grids.Grid ):
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
@@ -46,6 +95,9 @@
key="free-text-search",
visible=False,
filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "View tool id guid map", dict( controller='admin_toolshed', action='browse_tool_id_guid_map' ) )
+ ]
operations = [ grids.GridOperation( "Get updates",
allow_multiple=False,
condition=( lambda item: not item.deleted ),
@@ -62,9 +114,14 @@
class AdminToolshed( AdminGalaxy ):
repository_list_grid = RepositoryListGrid()
+ tool_id_guid_map_grid = ToolIdGuidMapGrid()
@web.expose
@web.require_admin
+ def browse_tool_id_guid_map( self, trans, **kwd ):
+ return self.tool_id_guid_map_grid( trans, **kwd )
+ @web.expose
+ @web.require_admin
def browse_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -272,7 +329,7 @@
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_repository( trans, kwd[ 'id' ] )
- tool_shed_url = get_url_from_repository_tool_shed( trans, repository )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
( tool_shed_url, url_for( '', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
return trans.response.send_redirect( url )
@@ -296,30 +353,14 @@
current_working_dir = os.getcwd()
relative_install_dir = self.__get_relative_install_dir( trans, repository )
if relative_install_dir:
- # Update the cloned repository to changeset_revision.
repo_files_dir = os.path.join( relative_install_dir, name )
- log.debug( "Updating cloned repository named '%s' from revision '%s' to revision '%s'..." % \
- ( name, changeset_revision, latest_changeset_revision ) )
- cmd = 'hg pull'
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
+ returncode, tmp_name = pull_repository( current_working_dir, repo_files_dir, name )
if returncode == 0:
- cmd = 'hg update -r %s' % latest_changeset_revision
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
+ returncode, tmp_name = update_repository( current_working_dir, repo_files_dir, latest_changeset_revision )
if returncode == 0:
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
+ repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
@@ -370,7 +411,7 @@
def __get_relative_install_dir( self, trans, repository ):
# Get the directory where the repository is install.
tool_shed = clean_tool_shed_url( repository.tool_shed )
- partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.changeset_revision )
+ partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
shed_tool_confs = trans.app.toolbox.shed_tool_confs
relative_install_dir = None
@@ -396,7 +437,7 @@
return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision )
def __generate_clone_url( self, trans, repository ):
"""Generate the URL for cloning a repository."""
- tool_shed_url = get_url_from_repository_tool_shed( trans, repository )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
return '%s/repos/%s/%s' % ( tool_shed_url, repository.owner, repository.name )
## ---- Utility methods -------------------------------------------------------
@@ -426,23 +467,3 @@
def get_repository( trans, id ):
"""Get a tool_shed_repository from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
-def get_repository_by_name_owner_changeset_revision( trans, name, owner, changeset_revision ):
- """Get a repository from the database via name owner and changeset_revision"""
- return trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( and_( trans.model.ToolShedRepository.table.c.name == name,
- trans.model.ToolShedRepository.table.c.owner == owner,
- trans.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
-def get_url_from_repository_tool_shed( trans, repository ):
- # The stored value of repository.tool_shed is something like:
- # toolshed.g2.bx.psu.edu
- # We need the URL to this tool shed, which is something like:
- # http://toolshed.g2.bx.psu.edu/
- for shed_name, shed_url in trans.app.tool_shed_registry.tool_sheds.items():
- if shed_url.find( repository.tool_shed ) >= 0:
- if shed_url.endswith( '/' ):
- shed_url = shed_url.rstrip( '/' )
- return shed_url
- # The tool shed from which the repository was originally
- # installed must no longer be configured in tool_sheds_conf.xml.
- return None
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -764,22 +764,32 @@
return trans.response.send_redirect( url )
@web.expose
def check_for_updates( self, trans, **kwd ):
+ # Handle a request from a local Galaxy instance. If the request originated with the
+ # Galaxy instances' UpdateManager, the value of 'webapp' will be 'update_manager'.
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- galaxy_url = kwd[ 'galaxy_url' ]
+ # If the request originated with the UpdateManager, it will not include a galaxy_url.
+ galaxy_url = kwd.get( 'galaxy_url', '' )
name = params.get( 'name', None )
owner = params.get( 'owner', None )
changeset_revision = params.get( 'changeset_revision', None )
webapp = params.get( 'webapp', 'community' )
- # Start building up the url to redirect back to the calling Galaxy instance.
- url = '%s/admin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '', qualified=True ) )
repository = get_repository_by_name_and_owner( trans, name, owner )
- url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( repository.name, repository.user.username, changeset_revision )
+ from_update_manager = webapp == 'update_manager'
+ if from_update_manager:
+ update = 'true'
+ no_update = 'false'
+ else:
+ # Start building up the url to redirect back to the calling Galaxy instance.
+ url = '%s/admin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '', qualified=True ) )
+ url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( repository.name, repository.user.username, changeset_revision )
if changeset_revision == repository.tip:
# If changeset_revision is the repository tip, then
# we know there are no additional updates for the tools.
+ if from_update_manager:
+ return no_update
url += repository.tip
else:
repository_metadata = get_repository_metadata_by_changeset_revision( trans,
@@ -788,6 +798,8 @@
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this
# repository, then we know there are no additional updates for the tools.
+ if from_update_manager:
+ return no_update
url += changeset_revision
else:
# The changeset_revision column in the repository_metadata table has been
@@ -836,15 +848,21 @@
if tool_guids == metadata_tool_guids:
# We've found the repository_metadata record whose changeset_revision
# value has been updated.
+ if from_update_manager:
+ return update
url += repository_metadata.changeset_revision
found = True
break
if not found:
# There must be a problem in the data, so we'll just send back the received changeset_revision.
log.debug( "Possible data corruption - updated repository_metadata cannot be found for repository id %d." % repository.id )
+ if from_update_manager:
+ return no_update
url += changeset_revision
else:
# There are not tools in the changeset_revision, so no tool updates are possible.
+ if from_update_manager:
+ return no_update
url += changeset_revision
return trans.response.send_redirect( url )
@web.expose
diff -r 4a39bc2094875a9878f07f27f7307976e05e8b87 -r 0c804033ae5a766f1d12a2030b73fe306c27f660 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -147,6 +147,13 @@
# if appropriate or use a different file name for the setting.
#install_tool_config_file = shed_tool_conf.xml
+# Enable automatic polling of relative tool sheds to see if any updates
+# are available for installed repositories. Ideally only one Galaxy
+# server process should be able to check for repository updates. The
+# setting for hours_between_check should be an integer between 1 and 24.
+#enable_tool_shed_check = False
+#hours_between_check = 12
+
# Directory where data used by tools is located, see the samples in that
# directory and the wiki for help:
# http://wiki.g2.bx.psu.edu/Admin/Data%20Integration
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4a39bc209487/
changeset: 4a39bc209487
user: dan
date: 2011-12-14 15:37:45
summary: Update EBI SRA tool's input action.
affected #: 1 file
diff -r 66a7957ed97403fb4e17c9c747c4624e1c1b2f3d -r 4a39bc2094875a9878f07f27f7307976e05e8b87 tools/data_source/ebi_sra.xml
--- a/tools/data_source/ebi_sra.xml
+++ b/tools/data_source/ebi_sra.xml
@@ -1,10 +1,10 @@
<?xml version="1.0"?>
-<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.0">
+<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.1"><description>ENA SRA</description><!-- This paython script imports the file into Galaxy --><command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
- <!-- The URL where Galaxy will forwars the user when this tool is accessed from the Get Data menu -->
- <inputs action="http://www.ebi.ac.uk/ena/" check_values="false" method="get">
+ <!-- The URL where Galaxy will forwards the user when this tool is accessed from the Get Data menu -->
+ <inputs action="http://www.ebi.ac.uk/ena/data/search" check_values="false" method="get"><display>go to EBI SRA server $GALAXY_URL</display></inputs><uihints minwidth="800"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dannon: Provide default for robots.txt being pulled from config.
by Bitbucket 13 Dec '11
by Bitbucket 13 Dec '11
13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/66a7957ed974/
changeset: 66a7957ed974
user: dannon
date: 2011-12-13 17:04:07
summary: Provide default for robots.txt being pulled from config.
affected #: 1 file
diff -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 -r 66a7957ed97403fb4e17c9c747c4624e1c1b2f3d lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -251,7 +251,7 @@
urlmap["/static/scripts"] = Static( conf.get( "static_scripts_dir" ), cache_time )
urlmap["/static/style"] = Static( conf.get( "static_style_dir" ), cache_time )
urlmap["/favicon.ico"] = Static( conf.get( "static_favicon_dir" ), cache_time )
- urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt" ), cache_time )
+ urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt", 'static/robots.txt'), cache_time )
# URL mapper becomes the root webapp
return urlmap
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dannon: Fix default static wrapping to work with favicon.ico and robots.txt (and other static files that may need to be mapped to / in the future)
by Bitbucket 13 Dec '11
by Bitbucket 13 Dec '11
13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ad9a6d8afded/
changeset: ad9a6d8afded
user: dannon
date: 2011-12-13 16:42:38
summary: Fix default static wrapping to work with favicon.ico and robots.txt (and other static files that may need to be mapped to / in the future)
affected #: 3 files
diff -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -251,6 +251,7 @@
urlmap["/static/scripts"] = Static( conf.get( "static_scripts_dir" ), cache_time )
urlmap["/static/style"] = Static( conf.get( "static_style_dir" ), cache_time )
urlmap["/favicon.ico"] = Static( conf.get( "static_favicon_dir" ), cache_time )
+ urlmap["/robots.txt"] = Static( conf.get( "static_robots_txt" ), cache_time )
# URL mapper becomes the root webapp
return urlmap
diff -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 lib/galaxy/web/framework/middleware/static.py
--- a/lib/galaxy/web/framework/middleware/static.py
+++ b/lib/galaxy/web/framework/middleware/static.py
@@ -19,6 +19,12 @@
def __call__( self, environ, start_response ):
path_info = environ.get('PATH_INFO', '')
if not path_info:
+ #See if this is a static file hackishly mapped.
+ if os.path.exists(self.directory) and os.path.isfile(self.directory):
+ app = fileapp.FileApp(self.directory)
+ if self.cache_seconds:
+ app.cache_control( max_age = int( self.cache_seconds ) )
+ return app(environ, start_response)
return self.add_slash(environ, start_response)
if path_info == '/':
# @@: This should obviously be configurable
@@ -45,6 +51,6 @@
if self.cache_seconds:
app.cache_control( max_age = int( self.cache_seconds ) )
return app(environ, start_response)
-
+
def make_static( global_conf, document_root, cache_seconds=None ):
- return CacheableStaticURLParser( document_root, cache_seconds )
\ No newline at end of file
+ return CacheableStaticURLParser( document_root, cache_seconds )
diff -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 -r ad9a6d8afded63469e07cd5ef89098c9c6195f22 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -247,6 +247,7 @@
static_favicon_dir = %(here)s/static/favicon.ico
static_scripts_dir = %(here)s/static/scripts/
static_style_dir = %(here)s/static/june_2007_style/blue
+static_robots_txt = %(here)s/static/robots.txt
# -- Advanced proxy features
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Add EBI ENA SRA tool contributed by Iain Cleland.
by Bitbucket 13 Dec '11
by Bitbucket 13 Dec '11
13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d00337a396ff/
changeset: d00337a396ff
user: dan
date: 2011-12-13 15:45:49
summary: Add EBI ENA SRA tool contributed by Iain Cleland.
affected #: 3 files
diff -r 7dd3a089101138a4796eb73a5f1391d2f436723e -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -5,6 +5,7 @@
<tool file="data_source/ucsc_tablebrowser.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" /><tool file="data_source/bx_browser.xml" />
+ <tool file="data_source/ebi_sra.xml"/><tool file="data_source/biomart.xml" /><tool file="data_source/gramene_mart.xml" /><tool file="data_source/flymine.xml" />
diff -r 7dd3a089101138a4796eb73a5f1391d2f436723e -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 tool_conf.xml.sample
--- a/tool_conf.xml.sample
+++ b/tool_conf.xml.sample
@@ -6,6 +6,7 @@
<tool file="data_source/ucsc_tablebrowser_test.xml" /><tool file="data_source/ucsc_tablebrowser_archaea.xml" /><tool file="data_source/bx_browser.xml" />
+ <tool file="data_source/ebi_sra.xml"/><tool file="data_source/microbial_import.xml" /><tool file="data_source/biomart.xml" /><tool file="data_source/biomart_test.xml" />
diff -r 7dd3a089101138a4796eb73a5f1391d2f436723e -r d00337a396ffd79e78ed38b9365e66d055b1cfc4 tools/data_source/ebi_sra.xml
--- /dev/null
+++ b/tools/data_source/ebi_sra.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<tool name="EBI SRA" id="ebi_sra_main" tool_type="data_source" version="1.0.0">
+ <description>ENA SRA</description>
+ <!-- This python script imports the file into Galaxy -->
+ <command interpreter="python">data_source.py $output $__app__.config.output_size_limit</command>
+ <!-- The URL where Galaxy will forward the user when this tool is accessed from the Get Data menu -->
+ <inputs action="http://www.ebi.ac.uk/ena/" check_values="false" method="get">
+ <display>go to EBI SRA server $GALAXY_URL</display>
+ </inputs>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="fastq"/>
+ </outputs>
+ <options sanitize="False" refresh="True"/>
+</tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7dd3a0891011/
changeset: 7dd3a0891011
user: greg
date: 2011-12-13 15:44:50
summary: Enhance the tool shed repository installation process so that repository tools can be loaded into the tool panel outside of any sections. Enhance the install manager to use this enhancement. Create a new shed_util module that contains common methods used between the install manager and the admin_toolshed controller, and modify these components to import these common methods. Significant code cleanup and miscellaneous bug fixes included as well.
affected #: 5 files
diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/tools/install_manager.py
--- a/lib/galaxy/tools/install_manager.py
+++ b/lib/galaxy/tools/install_manager.py
@@ -4,18 +4,7 @@
shed. Tools included in tool_shed_install.xml that have already been installed will not be
re-installed.
"""
-from galaxy import util
-from galaxy.tools import ToolSection
-from galaxy.tools.search import ToolBoxSearch
-from galaxy import model
-from galaxy.web.controllers.admin_toolshed import generate_metadata, generate_tool_panel_section, add_shed_tool_conf_entry, create_or_undelete_tool_shed_repository
-from galaxy.web.controllers.admin_toolshed import handle_missing_data_table_entry, handle_missing_index_file, handle_tool_dependencies
-from galaxy.model.orm import *
-import os, subprocess, tempfile, logging
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree, ElementInclude
-from elementtree.ElementTree import Element
+from galaxy.util.shed_util import *
log = logging.getLogger( __name__ )
@@ -39,192 +28,110 @@
self.tool_shed_install_config = tool_shed_install_config
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
- self.tool_shed = root.get( 'name' )
+ self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
log.debug( "Repositories will be installed from tool shed '%s' into configured tool_path location '%s'" % ( str( self.tool_shed ), str( self.tool_path ) ) )
self.repository_owner = 'devteam'
for elem in root:
- if elem.tag == 'tool':
- self.check_tool( elem )
+ if elem.tag == 'repository':
+ self.install_repository( elem )
elif elem.tag == 'section':
- self.check_section( elem )
- def check_tool( self, elem ):
- # TODO: write this method.
- pass
- def check_section( self, elem ):
+ self.install_section( elem )
+ def install_repository( self, elem, section_name='', section_id='' ):
+ # Install a single repository into the tool config. If outside of any sections, the entry looks something like:
+ # <repository name="cut_wrapper" description="Galaxy wrapper for the Cut tool" changeset_revision="f3ed6cfe6402">
+ # <tool id="Cut1" version="1.0.1" />
+ # </repository>
+ name = elem.get( 'name' )
+ description = elem.get( 'description' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ # Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
+ clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
+ if self.__isinstalled( elem, clone_dir ):
+ log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
+ else:
+ if section_name and section_id:
+ section_key = 'section_%s' % str( section_id )
+ if section_key in self.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in self.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % section_name )
+ tool_section = self.app.toolbox.tool_panel[ section_key ]
+ else:
+ # Appending a new section to self.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % section_name )
+ new_section_elem = Element( 'section' )
+ new_section_elem.attrib[ 'name' ] = section_name
+ new_section_elem.attrib[ 'id' ] = section_id
+ tool_section = ToolSection( new_section_elem )
+ self.app.toolbox.tool_panel[ section_key ] = tool_section
+ else:
+ tool_section = None
+ current_working_dir = os.getcwd()
+ tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
+ relative_install_dir = os.path.join( clone_dir, name )
+ returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
+ if returncode == 0:
+ returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
+ if returncode == 0:
+ metadata_dict = load_repository_contents( self.app,
+ name,
+ description,
+ self.repository_owner,
+ changeset_revision,
+ repository_clone_url,
+ self.install_tool_config,
+ self.tool_path,
+ tool_section,
+ relative_install_dir,
+ current_working_dir,
+ tmp_name )
+ # Add a new record to the tool_id_guid_map table for each
+ # tool in the repository if one doesn't already exist.
+ if 'tools' in metadata_dict:
+ tools_mapped = 0
+ for tool_dict in metadata_dict[ 'tools' ]:
+ flush_needed = False
+ tool_id = tool_dict[ 'id' ]
+ tool_version = tool_dict[ 'version' ]
+ guid = tool_dict[ 'guid' ]
+ tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
+ if tool_id_guid_map:
+ if tool_id_guid_map.guid != guid:
+ tool_id_guid_map.guid = guid
+ flush_needed = True
+ else:
+ tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id,
+ tool_version=tool_version,
+ tool_shed=self.tool_shed,
+ repository_owner=self.repository_owner,
+ repository_name=name,
+ guid=guid )
+ flush_needed = True
+ if flush_needed:
+ self.sa_session.add( tool_id_guid_map )
+ self.sa_session.flush()
+ tools_mapped += 1
+ log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ tmp_stderr.close()
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ log.debug( "Error cloning repository '%s': %s" % ( name, tmp_stderr.read() ) )
+ tmp_stderr.close()
+ def install_section( self, elem ):
+ # Install 1 or more repositories into a section in the tool config. An entry looks something like:
+ # <section name="EMBOSS" id="EMBOSSLite">
+ # <repository name="emboss_5" description="Galaxy wrappers for EMBOSS version 5 tools" changeset_revision="bdd88ae5d0ac">
+ # <tool file="emboss_5/emboss_antigenic.xml" id="EMBOSS: antigenic1" version="5.0.0" />
+ # ...
+ # </repository>
+ # </section>
section_name = elem.get( 'name' )
section_id = elem.get( 'id' )
for repository_elem in elem:
- name = repository_elem.get( 'name' )
- description = repository_elem.get( 'description' )
- changeset_revision = repository_elem.get( 'changeset_revision' )
- installed = False
- for tool_elem in repository_elem:
- tool_config = tool_elem.get( 'file' )
- tool_id = tool_elem.get( 'id' )
- tool_version = tool_elem.get( 'version' )
- tigm = self.__get_tool_id_guid_map_by_id_version( tool_id, tool_version )
- if tigm:
- # A record exists in the tool_id_guid_map
- # table, so see if the tool is still installed.
- install_path = self.__generate_install_path( tigm )
- if os.path.exists( install_path ):
- message = "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % \
- ( name, install_path )
- log.debug( message )
- installed = True
- break
- if not installed:
- log.debug( "Installing repository '%s' from tool shed '%s'" % ( name, self.tool_shed ) )
- current_working_dir = os.getcwd()
- tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
- repository_clone_url = '%s/repos/devteam/%s' % ( tool_shed_url, name )
- # Install path is of the form: <tool path><tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
- clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos/devteam', name, changeset_revision )
- if not os.path.isdir( clone_dir ):
- os.makedirs( clone_dir )
- log.debug( 'Cloning %s...' % repository_clone_url )
- cmd = 'hg clone %s' % repository_clone_url
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( clone_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- if returncode == 0:
- # Update the cloned repository to changeset_revision. It is imperative that the
- # installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
- relative_install_dir = os.path.join( clone_dir, name )
- log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision )
- cmd = 'hg update -r %s' % changeset_revision
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( relative_install_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- if returncode == 0:
- # Generate the metadata for the installed tool shed repository. It is imperative that
- # the installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
- metadata_dict = generate_metadata( self.app.toolbox, relative_install_dir, repository_clone_url )
- if 'datatypes_config' in metadata_dict:
- datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
- # Load data types required by tools.
- self.__load_datatypes( trans, datatypes_config, relative_install_dir )
- if 'tools' in metadata_dict:
- repository_tools_tups = []
- for tool_dict in metadata_dict[ 'tools' ]:
- relative_path = tool_dict[ 'tool_config' ]
- guid = tool_dict[ 'guid' ]
- tool = self.app.toolbox.load_tool( os.path.abspath( relative_path ) )
- repository_tools_tups.append( ( relative_path, guid, tool ) )
- if repository_tools_tups:
- sample_files = metadata_dict.get( 'sample_files', [] )
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( self.app, self.tool_path, sample_files, repository_tools_tups )
- # Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
- # Handle tools that use fabric scripts to install dependencies.
- handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
- section_key = 'section_%s' % str( section_id )
- if section_key in self.app.toolbox.tool_panel:
- # Appending a tool to an existing section in self.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % section_name )
- tool_section = self.app.toolbox.tool_panel[ section_key ]
- else:
- # Appending a new section to self.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % section_name )
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = section_name
- elem.attrib[ 'id' ] = section_id
- tool_section = ToolSection( elem )
- self.app.toolbox.tool_panel[ section_key ] = tool_section
- # Generate an in-memory tool conf section that includes the new tools.
- new_tool_section = generate_tool_panel_section( name,
- repository_clone_url,
- changeset_revision,
- tool_section,
- repository_tools_tups,
- owner=self.repository_owner )
- # Create a temporary file to persist the in-memory tool section
- # TODO: Figure out how to do this in-memory using xml.etree.
- tmp_name = tempfile.NamedTemporaryFile().name
- persisted_new_tool_section = open( tmp_name, 'wb' )
- persisted_new_tool_section.write( new_tool_section )
- persisted_new_tool_section.close()
- # Parse the persisted tool panel section
- tree = util.parse_xml( tmp_name )
- root = tree.getroot()
- # Load the tools in the section into the tool panel.
- self.app.toolbox.load_section_tag_set( root, self.app.toolbox.tool_panel, self.tool_path )
- # Remove the temporary file
- try:
- os.unlink( tmp_name )
- except:
- pass
- # Append the new section to the shed_tool_config file.
- add_shed_tool_conf_entry( self.app, self.install_tool_config, new_tool_section )
- if self.app.toolbox_search.enabled:
- # If search support for tools is enabled, index the new installed tools.
- self.app.toolbox_search = ToolBoxSearch( self.app.toolbox )
- # Add a new record to the tool_shed_repository table if one doesn't
- # already exist. If one exists but is marked deleted, undelete it.
- log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name )
- create_or_undelete_tool_shed_repository( self.app,
- name,
- description,
- changeset_revision,
- repository_clone_url,
- metadata_dict,
- owner=self.repository_owner )
- # Add a new record to the tool_id_guid_map table for each
- # tool in the repository if one doesn't already exist.
- if 'tools' in metadata_dict:
- tools_mapped = 0
- for tool_dict in metadata_dict[ 'tools' ]:
- tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- guid = tool_dict[ 'guid' ]
- tool_id_guid_map = model.ToolIdGuidMap( tool_id=tool_id,
- tool_version=tool_version,
- tool_shed=self.tool_shed,
- repository_owner=self.repository_owner,
- repository_name=name,
- guid=guid )
- self.sa_session.add( tool_id_guid_map )
- self.sa_session.flush()
- tools_mapped += 1
- log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
- def __generate_install_path( self, tool_id_guid_map ):
- """
- Generate a tool path in which a tool is or will be installed. The tool path will be of the form:
- <tool shed>/repos/<repository owner>/<repository name>/<changeset revision>
- """
- tool_shed = tool_id_guid_map.tool_shed
- repository_name = tool_id_guid_map.repository_name
- tool_shed_repository = self.__get_repository_by_tool_shed_name_owner( tool_shed, repository_name, self.repository_owner )
- changeset_revision = tool_shed_repository.changeset_revision
- return '%s/repos%s/%s/%s/%s' % ( tool_shed, self.repository_owner, repository_name, changeset_revision )
- def __get_repository_by_tool_shed_name_owner( tool_shed, name, owner ):
- """Get a repository from the database via tool_shed, name and owner."""
- # CRITICAL: this assumes that a single changeset_revision exists for each repository
- # in the tool shed. In other words, if a repository has multiple changset_revisions
- # there will be problems. We're probably safe here because only a single changeset_revision
- # for each tool shed repository will be installed using this installation process.
- return self.sa_session.query( self.app.model.ToolShedRepository ) \
- .filter( and_( self.app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- self.app.model.ToolShedRepository.table.c.name == name,
- self.app.model.ToolShedRepository.table.c.owner == owner ) ) \
- .first()
- def __get_tool_id_guid_map_by_id_version( self, tool_id, tool_version ):
- """Get a tool_id_guid_map from the database via tool_id and tool_version."""
- return self.sa_session.query( self.app.model.ToolIdGuidMap ) \
- .filter( and_( self.app.model.ToolIdGuidMap.table.c.tool_id == tool_id,
- self.app.model.ToolIdGuidMap.table.c.tool_version == tool_version ) ) \
- .first()
+ self.install_repository( repository_elem, section_name=section_name, section_id=section_id )
def __get_url_from_tool_shed( self, tool_shed ):
# The value of tool_shed is something like: toolshed.g2.bx.psu.edu
# We need the URL to this tool shed, which is something like:
@@ -237,3 +144,17 @@
# The tool shed from which the repository was originally
# installed must no longer be configured in tool_sheds_conf.xml.
return None
+ def __isinstalled( self, repository_elem, clone_dir ):
+ name = repository_elem.get( 'name' )
+ installed = False
+ for tool_elem in repository_elem:
+ tool_config = tool_elem.get( 'file' )
+ tool_id = tool_elem.get( 'id' )
+ tool_version = tool_elem.get( 'version' )
+ tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
+ if tigm:
+ # A record exists in the tool_id_guid_map table, so see if the repository is installed.
+ if os.path.exists( clone_dir ):
+ installed = True
+ break
+ return installed
diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/util/shed_util.py
--- /dev/null
+++ b/lib/galaxy/util/shed_util.py
@@ -0,0 +1,548 @@
+import os, tempfile, shutil, subprocess, logging
+from datetime import date, datetime, timedelta
+from time import strftime
+from galaxy import util
+from galaxy.tools import ToolSection
+from galaxy.tools.search import ToolBoxSearch
+from galaxy.model.orm import *
+
+pkg_resources.require( 'elementtree' )
+from elementtree import ElementTree, ElementInclude
+from elementtree.ElementTree import Element, SubElement, tostring
+
+log = logging.getLogger( __name__ )
+
+def add_shed_tool_conf_entry( app, shed_tool_conf, tool_panel_entry ):
+ """
+ Add an entry in the shed_tool_conf file. An entry looks something like:
+ <section name="Filter and Sort" id="filter">
+ <tool file="filter/filtering.xml" guid="toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2"/>
+ </section>
+ This method is used by the InstallManager, which does not have access to trans.
+ """
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ if not os.path.exists( shed_tool_conf ):
+ output = open( shed_tool_conf, 'w' )
+ output.write( '<?xml version="1.0"?>\n' )
+ output.write( '<toolbox tool_path="%s">\n' % tool_path )
+ output.write( '</toolbox>\n' )
+ output.close()
+ # Make a backup of the shed_tool_conf file.
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ shed_tool_conf_copy = '%s/%s_%s_backup' % ( app.config.root, shed_tool_conf, backup_date )
+ shutil.copy( os.path.abspath( shed_tool_conf ), os.path.abspath( shed_tool_conf_copy ) )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ new_shed_tool_conf = open( tmp_fname, 'wb' )
+ for i, line in enumerate( open( shed_tool_conf ) ):
+ if line.startswith( '</toolbox>' ):
+ # We're at the end of the original config file, so add our entry.
+ new_shed_tool_conf.write( ' ' )
+ new_shed_tool_conf.write( tostring( pretty_print_xml( tool_panel_entry ) ) )
+ new_shed_tool_conf.write( line )
+ else:
+ new_shed_tool_conf.write( line )
+ new_shed_tool_conf.close()
+ shutil.move( tmp_fname, os.path.abspath( shed_tool_conf ) )
+def clean_repository_clone_url( repository_clone_url ):
+ if repository_clone_url.find( '@' ) > 0:
+ # We have an url that includes an authenticated user, something like:
+ # http://test@bx.psu.edu:9009/repos/some_username/column
+ items = repository_clone_url.split( '@' )
+ tmp_url = items[ 1 ]
+ elif repository_clone_url.find( '//' ) > 0:
+ # We have an url that includes only a protocol, something like:
+ # http://bx.psu.edu:9009/repos/some_username/column
+ items = repository_clone_url.split( '//' )
+ tmp_url = items[ 1 ]
+ else:
+ tmp_url = repository_clone_url
+ return tmp_url
+def clean_tool_shed_url( tool_shed_url ):
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ return tool_shed_url.split( ':' )[ 0 ]
+ return tool_shed_url.rstrip( '/' )
+def clone_repository( name, clone_dir, current_working_dir, repository_clone_url ):
+ log.debug( "Installing repository '%s'" % name )
+ os.makedirs( clone_dir )
+ log.debug( 'Cloning %s' % repository_clone_url )
+ cmd = 'hg clone %s' % repository_clone_url
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stderr = open( tmp_name, 'wb' )
+ os.chdir( clone_dir )
+ proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stderr.close()
+ return returncode, tmp_name
+def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
+ # This method is used by the InstallManager, which does not have access to trans.
+ sa_session = app.model.context.current
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
+ if not owner:
+ owner = get_repository_owner( tmp_url )
+ includes_datatypes = 'datatypes_config' in metadata_dict
+ flush_needed = False
+ tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
+ if tool_shed_repository:
+ if tool_shed_repository.deleted:
+ tool_shed_repository.deleted = False
+ # Reset includes_datatypes in case metadata changed since last installed.
+ tool_shed_repository.includes_datatypes = includes_datatypes
+ flush_needed = True
+ else:
+ tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
+ name=name,
+ description=description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ includes_datatypes=includes_datatypes )
+ flush_needed = True
+ if flush_needed:
+ sa_session.add( tool_shed_repository )
+ sa_session.flush()
+def generate_datatypes_metadata( datatypes_config, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been applied
+ to the received datatypes_config. This method is used by the InstallManager,
+ which does not have access to trans.
+ """
+ # Parse datatypes_config.
+ tree = ElementTree.parse( datatypes_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ repository_datatype_code_files = []
+ datatype_files = root.find( 'datatype_files' )
+ if datatype_files:
+ for elem in datatype_files.findall( 'datatype_file' ):
+ name = elem.get( 'name', None )
+ repository_datatype_code_files.append( name )
+ metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
+ datatypes = []
+ registration = root.find( 'registration' )
+ if registration:
+ for elem in registration.findall( 'datatype' ):
+ extension = elem.get( 'extension', None )
+ dtype = elem.get( 'type', None )
+ mimetype = elem.get( 'mimetype', None )
+ datatypes.append( dict( extension=extension,
+ dtype=dtype,
+ mimetype=mimetype ) )
+ metadata_dict[ 'datatypes' ] = datatypes
+ return metadata_dict
+def generate_metadata( toolbox, relative_install_dir, repository_clone_url ):
+ """
+ Browse the repository files on disk to generate metadata. Since we are using disk files, it
+ is imperative that the repository is updated to the desired change set revision before metadata
+ is generated. This method is used by the InstallManager, which does not have access to trans.
+ """
+ metadata_dict = {}
+ sample_files = []
+ datatypes_config = None
+ # Find datatypes_conf.xml if it exists.
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == 'datatypes_conf.xml':
+ relative_path = os.path.join( root, name )
+ datatypes_config = os.path.abspath( relative_path )
+ break
+ if datatypes_config:
+ metadata_dict[ 'datatypes_config' ] = relative_path
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ # Find all special .sample files.
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name.endswith( '.sample' ):
+ sample_files.append( os.path.join( root, name ) )
+ if sample_files:
+ metadata_dict[ 'sample_files' ] = sample_files
+ # Find all tool configs and exported workflows.
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # Find all tool configs.
+ if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ try:
+ tool = toolbox.load_tool( full_path )
+ except Exception, e:
+ tool = None
+ if tool is not None:
+ tool_config = os.path.join( root, name )
+ metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ relative_path = os.path.join( root, name )
+ fp = open( relative_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
+ return metadata_dict
+def generate_tool_guid( repository_clone_url, tool ):
+ """
+ Generate a guid for the installed tool. It is critical that this guid matches the guid for
+ the tool in the Galaxy tool shed from which it is being installed. The form of the guid is
+ <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version>
+ """
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
+def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been
+ applied to the received tool. This method is used by the InstallManager,
+ which does not have access to trans.
+ """
+ # Generate the guid
+ guid = generate_tool_guid( repository_clone_url, tool )
+ # Handle tool.requirements.
+ tool_requirements = []
+ for tr in tool.requirements:
+ name=tr.name
+ type=tr.type
+ if type == 'fabfile':
+ version = None
+ fabfile = tr.fabfile
+ method = tr.method
+ else:
+ version = tr.version
+ fabfile = None
+ method = None
+ requirement_dict = dict( name=name,
+ type=type,
+ version=version,
+ fabfile=fabfile,
+ method=method )
+ tool_requirements.append( requirement_dict )
+ # Handle tool.tests.
+ tool_tests = []
+ if tool.tests:
+ for ttb in tool.tests:
+ test_dict = dict( name=ttb.name,
+ required_files=ttb.required_files,
+ inputs=ttb.inputs,
+ outputs=ttb.outputs )
+ tool_tests.append( test_dict )
+ tool_dict = dict( id=tool.id,
+ guid=guid,
+ name=tool.name,
+ version=tool.version,
+ description=tool.description,
+ version_string_cmd = tool.version_string_cmd,
+ tool_config=tool_config,
+ requirements=tool_requirements,
+ tests=tool_tests )
+ if 'tools' in metadata_dict:
+ metadata_dict[ 'tools' ].append( tool_dict )
+ else:
+ metadata_dict[ 'tools' ] = [ tool_dict ]
+ return metadata_dict
+def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, repository_tools_tups, tool_section=None, owner='' ):
+ """Generate a list of ElementTree Element objects for each section or list of tools."""
+ elem_list = []
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ if not owner:
+ # Derive the owner from the cleaned clone URL when the caller did not supply one.
+ owner = get_repository_owner( tmp_url )
+ if tool_section:
+ # When a section is supplied, all <tool> elements are nested inside one <section>.
+ root_elem = Element( 'section' )
+ root_elem.attrib[ 'name' ] = tool_section.name
+ root_elem.attrib[ 'id' ] = tool_section.id
+ for repository_tool_tup in repository_tools_tups:
+ tool_file_path, guid, tool = repository_tool_tup
+ if tool_section:
+ tool_elem = SubElement( root_elem, 'tool' )
+ else:
+ tool_elem = Element( 'tool' )
+ tool_elem.attrib[ 'file' ] = tool_file_path
+ tool_elem.attrib[ 'guid' ] = guid
+ # Record tool shed host, repository name / owner and revision so the
+ # installed tool's origin can be traced back to the tool shed.
+ tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem.text = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
+ repository_name_elem = SubElement( tool_elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = SubElement( tool_elem, 'changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ id_elem = SubElement( tool_elem, 'id' )
+ id_elem.text = tool.id
+ version_elem = SubElement( tool_elem, 'version' )
+ version_elem.text = tool.version
+ if tool_section:
+ elem_list.append( root_elem )
+ else:
+ elem_list.append( tool_elem )
+ return elem_list
+def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been applied
+ to the received exported_workflow_dict. Store everything in the database.
+ This method is used by the InstallManager, which does not have access to trans.
+ """
+ # Each workflow is recorded as a ( relative_path, exported_workflow_dict ) tuple
+ # under the 'workflows' key, creating the list on first use.
+ if 'workflows' in metadata_dict:
+ metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) )
+ else:
+ metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
+ return metadata_dict
+def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
+ # Return the first ToolShedRepository record matching the received tool shed,
+ # repository name, owner and changeset revision, or None if no row matches.
+ # This method is used by the InstallManager, which does not have access to trans.
+ sa_session = app.model.context.current
+ if tool_shed.find( '//' ) > 0:
+ # Strip the protocol prefix (e.g. http://) since only the host is stored in the table.
+ tool_shed = tool_shed.split( '//' )[1]
+ return sa_session.query( app.model.ToolShedRepository ) \
+ .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.model.ToolShedRepository.table.c.name == name,
+ app.model.ToolShedRepository.table.c.owner == owner,
+ app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+def get_repository_owner( cleaned_repository_url ):
+ # Parse the repository owner — the first path component following 'repos' —
+ # from a clone URL previously cleaned by clean_repository_clone_url().
+ items = cleaned_repository_url.split( 'repos' )
+ repo_path = items[ 1 ]
+ if repo_path.startswith( '/' ):
+ # Drop the single leading slash left over from the split.
+ repo_path = repo_path.replace( '/', '', 1 )
+ return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
+def get_tool_id_guid_map( app, tool_id, version, tool_shed, repository_owner, repository_name ):
+ # Return the first ToolIdGuidMap record matching all received values, or None.
+ # This method is used by the InstallManager, which does not have access to trans.
+ sa_session = app.model.context.current
+ return sa_session.query( app.model.ToolIdGuidMap ) \
+ .filter( and_( app.model.ToolIdGuidMap.table.c.tool_id == tool_id,
+ app.model.ToolIdGuidMap.table.c.tool_version == version,
+ app.model.ToolIdGuidMap.table.c.tool_shed == tool_shed,
+ app.model.ToolIdGuidMap.table.c.repository_owner == repository_owner,
+ app.model.ToolIdGuidMap.table.c.repository_name == repository_name ) ) \
+ .first()
+def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ):
+ """
+ Inspect each tool to see if any have input parameters that are dynamically
+ generated select lists that require entries in the tool_data_table_conf.xml file.
+ This method is used by the InstallManager, which does not have access to trans.
+ """
+ missing_data_table_entry = False
+ for index, repository_tools_tup in enumerate( repository_tools_tups ):
+ tup_path, guid, repository_tool = repository_tools_tup
+ if repository_tool.params_with_missing_data_table_entry:
+ missing_data_table_entry = True
+ break
+ if missing_data_table_entry:
+ # The repository must contain a tool_data_table_conf.xml.sample file that includes
+ # all required entries for all tools in the repository.
+ for sample_file in sample_files:
+ head, tail = os.path.split( sample_file )
+ if tail == 'tool_data_table_conf.xml.sample':
+ break
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file )
+ if error:
+ # TODO: Do more here than logging an exception.
+ log.debug( exception_msg )
+ # Reload the tool into the local list of repository_tools_tups.
+ repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) )
+ repository_tools_tups[ index ] = ( tup_path, repository_tool )
+ return repository_tools_tups
+def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ):
+ """
+ Inspect each tool to see if it has any input parameters that
+ are dynamically generated select lists that depend on a .loc file.
+ This method is used by the InstallManager, which does not have access to trans.
+ """
+ missing_files_handled = []
+ for index, repository_tools_tup in enumerate( repository_tools_tups ):
+ tup_path, guid, repository_tool = repository_tools_tup
+ params_with_missing_index_file = repository_tool.params_with_missing_index_file
+ for param in params_with_missing_index_file:
+ options = param.options
+ missing_head, missing_tail = os.path.split( options.missing_index_file )
+ if missing_tail not in missing_files_handled:
+ # The repository must contain the required xxx.loc.sample file.
+ for sample_file in sample_files:
+ sample_head, sample_tail = os.path.split( sample_file )
+ if sample_tail == '%s.sample' % missing_tail:
+ # Copy the sample into place so the tool has a usable .loc file.
+ copy_sample_loc_file( app, sample_file )
+ if options.tool_data_table and options.tool_data_table.missing_index_file:
+ options.tool_data_table.handle_found_index_file( options.missing_index_file )
+ # Remember this file name so other tools needing it are not re-handled.
+ missing_files_handled.append( missing_tail )
+ break
+ # Reload the tool into the local list of repository_tools_tups.
+ repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) )
+ repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
+ return repository_tools_tups
+def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ):
+ """
+ Inspect each tool to see if it includes a "requirement" that refers to a fabric
+ script. For those that do, execute the fabric script to install tool dependencies.
+ This method is used by the InstallManager, which does not have access to trans.
+ """
+ for index, repository_tools_tup in enumerate( repository_tools_tups ):
+ tup_path, guid, repository_tool = repository_tools_tup
+ for requirement in repository_tool.requirements:
+ if requirement.type == 'fabfile':
+ log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name )
+ fabfile = requirement.fabfile
+ method = requirement.method
+ # Find the relative path to the fabfile.
+ relative_fabfile_path = None
+ for root, dirs, files in os.walk( repo_files_dir ):
+ for name in files:
+ if name == fabfile:
+ relative_fabfile_path = os.path.join( root, name )
+ break
+ if relative_fabfile_path:
+ # cmd will look something like: fab -f fabfile.py install_bowtie
+ cmd = 'fab -f %s %s' % ( relative_fabfile_path, method )
+ # NOTE(review): reusing NamedTemporaryFile().name after the object is
+ # discarded is racy; tempfile.mkstemp() would be safer — TODO confirm.
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stderr = open( tmp_name, 'wb' )
+ # Run fabric from inside the repository so relative paths in the fabfile resolve.
+ os.chdir( repo_files_dir )
+ proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stderr.close()
+ if returncode != 0:
+ # TODO: do something more here than logging the problem.
+ tmp_stderr = open( tmp_name, 'rb' )
+ error = tmp_stderr.read()
+ tmp_stderr.close()
+ log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
+def load_datatypes( app, datatypes_config, relative_intall_dir ):
+ # Load custom datatypes shipped with an installed repository into the app's
+ # datatypes registry. The parameter name 'relative_intall_dir' is misspelled
+ # but kept as-is since it is part of the committed interface.
+ # This method is used by the InstallManager, which does not have access to trans.
+ imported_module = None
+ # Parse datatypes_config.
+ tree = parse_xml( datatypes_config )
+ datatypes_config_root = tree.getroot()
+ relative_path_to_datatype_file_name = None
+ datatype_files = datatypes_config_root.find( 'datatype_files' )
+ # Currently only a single datatype_file is supported. For example:
+ # <datatype_files>
+ # <datatype_file name="gmap.py"/>
+ # </datatype_files>
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ for root, dirs, files in os.walk( relative_intall_dir ):
+ # Skip paths inside the .hg metadata directory.
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ relative_path_to_datatype_file_name = os.path.join( root, name )
+ break
+ break
+ if relative_path_to_datatype_file_name:
+ relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
+ registration = datatypes_config_root.find( 'registration' )
+ # Get the module by parsing the <datatype> tag.
+ for elem in registration.findall( 'datatype' ):
+ # A 'type' attribute is currently required. The attribute
+ # should be something like: type="gmap:GmapDB".
+ dtype = elem.get( 'type', None )
+ if dtype:
+ fields = dtype.split( ':' )
+ datatype_module = fields[0]
+ datatype_class_name = fields[1]
+ # Since we currently support only a single datatype_file,
+ # we have what we need.
+ break
+ try:
+ # Temporarily prepend the module's directory so __import__ can find it.
+ # NOTE(review): if no <datatype> tag had a 'type' attribute, datatype_module
+ # is unbound here; the resulting NameError is caught and logged below.
+ sys.path.insert( 0, relative_head )
+ imported_module = __import__( datatype_module )
+ sys.path.pop( 0 )
+ except Exception, e:
+ log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) )
+ app.datatypes_registry.load_datatypes( root_dir=app.config.root, config=datatypes_config, imported_module=imported_module )
+def load_repository_contents( app, name, description, owner, changeset_revision, repository_clone_url, shed_tool_conf,
+ tool_path, tool_section, relative_install_dir, current_working_dir, tmp_name ):
+ # Orchestrate loading an installed repository's datatypes and tools into the
+ # running Galaxy instance and record the installation in the database.
+ # Returns the generated metadata_dict.
+ # This method is used by the InstallManager, which does not have access to trans.
+ # Generate the metadata for the installed tool shed repository. It is imperative that
+ # the installed repository is updated to the desired changeset_revision before metadata
+ # is set because the process for setting metadata uses the repository files on disk.
+ metadata_dict = generate_metadata( app.toolbox, relative_install_dir, repository_clone_url )
+ if 'datatypes_config' in metadata_dict:
+ datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
+ # Load data types required by tools.
+ load_datatypes( app, datatypes_config, relative_install_dir )
+ if 'tools' in metadata_dict:
+ # Build ( relative_path, guid, tool ) tuples for every tool in the repository.
+ repository_tools_tups = []
+ for tool_dict in metadata_dict[ 'tools' ]:
+ relative_path = tool_dict[ 'tool_config' ]
+ guid = tool_dict[ 'guid' ]
+ tool = app.toolbox.load_tool( os.path.abspath( relative_path ) )
+ repository_tools_tups.append( ( relative_path, guid, tool ) )
+ if repository_tools_tups:
+ sample_files = metadata_dict.get( 'sample_files', [] )
+ # Handle missing data table entries for tool parameters that are dynamically generated select lists.
+ repository_tools_tups = handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups )
+ # Handle missing index files for tool parameters that are dynamically generated select lists.
+ repository_tools_tups = handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups )
+ # Handle tools that use fabric scripts to install dependencies.
+ handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
+ # Generate a new entry for the tool config.
+ elem_list = generate_tool_panel_elem_list( name,
+ repository_clone_url,
+ changeset_revision,
+ repository_tools_tups,
+ tool_section=tool_section,
+ owner=owner )
+ if tool_section:
+ for section_elem in elem_list:
+ # Load the section into the tool panel.
+ app.toolbox.load_section_tag_set( section_elem, app.toolbox.tool_panel, tool_path )
+ else:
+ # Load the tools into the tool panel outside of any sections.
+ for tool_elem in elem_list:
+ guid = tool_elem.get( 'guid' )
+ app.toolbox.load_tool_tag_set( tool_elem, app.toolbox.tool_panel, tool_path=tool_path, guid=guid )
+ # Remove the temporary file
+ # (tmp_name is the stderr capture file created by the caller's hg commands;
+ # best-effort cleanup, so failures are deliberately ignored).
+ try:
+ os.unlink( tmp_name )
+ except:
+ pass
+ for elem_entry in elem_list:
+ # Append the new entry (either section or list of tools) to the shed_tool_config file.
+ add_shed_tool_conf_entry( app, shed_tool_conf, elem_entry )
+ if app.toolbox_search.enabled:
+ # If search support for tools is enabled, index the new installed tools.
+ app.toolbox_search = ToolBoxSearch( app.toolbox )
+ # Add a new record to the tool_shed_repository table if one doesn't
+ # already exist. If one exists but is marked deleted, undelete it.
+ log.debug( "Adding new row to tool_shed_repository table for repository '%s'" % name )
+ create_or_undelete_tool_shed_repository( app,
+ name,
+ description,
+ changeset_revision,
+ repository_clone_url,
+ metadata_dict )
+ return metadata_dict
+def pretty_print_xml( elem, level=0 ):
+ # Recursively add indentation and newlines (via .text and .tail) to an
+ # ElementTree element in place so that serializing it yields human-readable
+ # XML. Returns the (mutated) element. 'level' is the current nesting depth.
+ pad = ' '
+ i = "\n" + level * pad
+ if len( elem ):
+ # Element has children: indent its text and each child's tail.
+ if not elem.text or not elem.text.strip():
+ elem.text = i + pad + pad
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ for e in elem:
+ pretty_print_xml( e, level + 1 )
+ if not elem.tail or not elem.tail.strip():
+ elem.tail = i
+ else:
+ # Leaf element: indent its tail so a following sibling lines up.
+ if level and ( not elem.tail or not elem.tail.strip() ):
+ elem.tail = i + pad
+ return elem
+def update_repository( current_working_dir, relative_install_dir, changeset_revision ):
+ # Update the cloned repository to changeset_revision. It is imperative that the
+ # installed repository is updated to the desired changeset_revision before metadata
+ # is set because the process for setting metadata uses the repository files on disk.
+ # Returns ( returncode, tmp_name ), where tmp_name is the path of a temporary
+ # file holding hg's stderr output; the caller is responsible for removing it.
+ log.debug( 'Updating cloned repository to revision "%s"' % changeset_revision )
+ cmd = 'hg update -r %s' % changeset_revision
+ # NOTE(review): reusing NamedTemporaryFile().name after the object is discarded
+ # is racy; tempfile.mkstemp() would be safer — TODO confirm before changing.
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stderr = open( tmp_name, 'wb' )
+ # hg update must run from within the cloned repository's directory.
+ os.chdir( relative_install_dir )
+ proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stderr.close()
+ return returncode, tmp_name
diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -24,7 +24,7 @@
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree, ElementInclude
-from elementtree.ElementTree import Element
+from elementtree.ElementTree import Element, SubElement, tostring
log = logging.getLogger( __name__ )
diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -1,5 +1,5 @@
from galaxy.web.controllers.admin import *
-import logging
+from galaxy.util.shed_util import *
log = logging.getLogger( __name__ )
@@ -159,131 +159,68 @@
else:
section_key = 'section_%s' % tool_panel_section
tool_section = trans.app.toolbox.tool_panel[ section_key ]
- # Decode the encoded repo_info_dict param value.
- repo_info_dict = tool_shed_decode( repo_info_dict )
- # Clone the repository to the configured location.
- current_working_dir = os.getcwd()
- installed_repository_names = []
- for name, repo_info_tuple in repo_info_dict.items():
- metadata_dict = None
- description, repository_clone_url, changeset_revision = repo_info_tuple
- clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, changeset_revision ) )
- if os.path.exists( clone_dir ):
- # Repository and revision has already been cloned.
- # TODO: implement the ability to re-install or revert an existing repository.
- message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name )
- else:
- os.makedirs( clone_dir )
- log.debug( 'Cloning %s...' % repository_clone_url )
- cmd = 'hg clone %s' % repository_clone_url
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( clone_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
+ else:
+ tool_section = None
+ # Decode the encoded repo_info_dict param value.
+ repo_info_dict = tool_shed_decode( repo_info_dict )
+ # Clone the repository to the configured location.
+ current_working_dir = os.getcwd()
+ installed_repository_names = []
+ for name, repo_info_tuple in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision = repo_info_tuple
+ clone_dir = os.path.join( tool_path, self.__generate_tool_path( repository_clone_url, changeset_revision ) )
+ relative_install_dir = os.path.join( clone_dir, name )
+ if os.path.exists( clone_dir ):
+ # Repository and revision has already been cloned.
+ # TODO: implement the ability to re-install or revert an existing repository.
+ message += 'Revision <b>%s</b> of repository <b>%s</b> was previously installed.<br/>' % ( changeset_revision, name )
+ else:
+ returncode, tmp_name = clone_repository( name, clone_dir, current_working_dir, repository_clone_url )
+ if returncode == 0:
+ returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
- # Update the cloned repository to changeset_revision. It is imperative that the
- # installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
- relative_install_dir = os.path.join( clone_dir, name )
- log.debug( 'Updating cloned repository to revision "%s"...' % changeset_revision )
- cmd = 'hg update -r %s' % changeset_revision
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( relative_install_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- if returncode == 0:
- # Generate the metadata for the installed tool shed repository. It is imperative that
- # the installed repository is updated to the desired changeset_revision before metadata
- # is set because the process for setting metadata uses the repository files on disk.
- metadata_dict = generate_metadata( trans.app.toolbox, relative_install_dir, repository_clone_url )
- if 'datatypes_config' in metadata_dict:
- datatypes_config = os.path.abspath( metadata_dict[ 'datatypes_config' ] )
- # Load data types required by tools.
- self.__load_datatypes( trans, datatypes_config, relative_install_dir )
- if 'tools' in metadata_dict:
- repository_tools_tups = []
- for tool_dict in metadata_dict[ 'tools' ]:
- relative_path = tool_dict[ 'tool_config' ]
- guid = tool_dict[ 'guid' ]
- tool = trans.app.toolbox.load_tool( os.path.abspath( relative_path ) )
- repository_tools_tups.append( ( relative_path, guid, tool ) )
- if repository_tools_tups:
- sample_files = metadata_dict.get( 'sample_files', [] )
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( trans.app, tool_path, sample_files, repository_tools_tups )
- # Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
- # Handle tools that use fabric scripts to install dependencies.
- handle_tool_dependencies( current_working_dir, relative_install_dir, repository_tools_tups )
- # Generate an in-memory tool conf section that includes the new tools.
- new_tool_section = generate_tool_panel_section( name,
- repository_clone_url,
- changeset_revision,
- tool_section,
- repository_tools_tups )
- # Create a temporary file to persist the in-memory tool section
- # TODO: Figure out how to do this in-memory using xml.etree.
- tmp_name = tempfile.NamedTemporaryFile().name
- persisted_new_tool_section = open( tmp_name, 'wb' )
- persisted_new_tool_section.write( new_tool_section )
- persisted_new_tool_section.close()
- # Parse the persisted tool panel section
- tree = parse_xml( tmp_name )
- root = tree.getroot()
- # Load the tools in the section into the tool panel.
- trans.app.toolbox.load_section_tag_set( root, trans.app.toolbox.tool_panel, tool_path )
- # Remove the temporary file
- try:
- os.unlink( tmp_name )
- except:
- pass
- # Append the new section to the shed_tool_config file.
- add_shed_tool_conf_entry( trans.app, shed_tool_conf, new_tool_section )
- if trans.app.toolbox_search.enabled:
- # If search support for tools is enabled, index the new installed tools.
- trans.app.toolbox_search = ToolBoxSearch( trans.app.toolbox )
- # Add a new record to the tool_shed_repository table if one doesn't
- # already exist. If one exists but is marked deleted, undelete it.
- create_or_undelete_tool_shed_repository( trans.app,
- name,
- description,
- changeset_revision,
- repository_clone_url,
- metadata_dict )
- installed_repository_names.append( name )
- else:
- tmp_stderr = open( tmp_name, 'rb' )
- message += '%s<br/>' % tmp_stderr.read()
- tmp_stderr.close()
- status = 'error'
+ owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
+ metadata_dict = load_repository_contents( trans.app,
+ name,
+ description,
+ owner,
+ changeset_revision,
+ repository_clone_url,
+ shed_tool_conf,
+ tool_path,
+ tool_section,
+ relative_install_dir,
+ current_working_dir,
+ tmp_name )
+ installed_repository_names.append( name )
else:
tmp_stderr = open( tmp_name, 'rb' )
message += '%s<br/>' % tmp_stderr.read()
tmp_stderr.close()
status = 'error'
- if installed_repository_names:
- installed_repository_names.sort()
- num_repositories_installed = len( installed_repository_names )
+ else:
+ tmp_stderr = open( tmp_name, 'rb' )
+ message += '%s<br/>' % tmp_stderr.read()
+ tmp_stderr.close()
+ status = 'error'
+ if installed_repository_names:
+ installed_repository_names.sort()
+ num_repositories_installed = len( installed_repository_names )
+ if tool_section:
message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ), tool_section.name )
- for i, repo_name in enumerate( installed_repository_names ):
- if i == len( installed_repository_names ) -1:
- message += '%s.<br/>' % repo_name
- else:
- message += '%s, ' % repo_name
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='browse_repositories',
- message=message,
- status=status ) )
- else:
- message = 'Choose the section in your tool panel to contain the installed tools.'
- status = 'error'
+ else:
+ message += 'Installed %d %s and all tools were loaded into the tool panel outside of any sections.<br/>Installed repositories: ' % \
+ ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ) )
+ for i, repo_name in enumerate( installed_repository_names ):
+ if i == len( installed_repository_names ) -1:
+ message += '%s.<br/>' % repo_name
+ else:
+ message += '%s, ' % repo_name
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repositories',
+ message=message,
+ status=status ) )
if len( trans.app.toolbox.shed_tool_confs.keys() ) > 1:
shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans )
shed_tool_conf = None
@@ -432,7 +369,7 @@
status=status )
def __get_relative_install_dir( self, trans, repository ):
# Get the directory where the repository is install.
- tool_shed = self.__clean_tool_shed_url( repository.tool_shed )
+ tool_shed = clean_tool_shed_url( repository.tool_shed )
partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
shed_tool_confs = trans.app.toolbox.shed_tool_confs
@@ -443,55 +380,6 @@
if os.path.isdir( relative_install_dir ):
break
return relative_install_dir
- def __load_datatypes( self, trans, datatypes_config, relative_intall_dir ):
- imported_module = None
- # Parse datatypes_config.
- tree = parse_xml( datatypes_config )
- datatypes_config_root = tree.getroot()
- relative_path_to_datatype_file_name = None
- datatype_files = datatypes_config_root.find( 'datatype_files' )
- # Currently only a single datatype_file is supported. For example:
- # <datatype_files>
- # <datatype_file name="gmap.py"/>
- # </datatype_files>
- for elem in datatype_files.findall( 'datatype_file' ):
- datatype_file_name = elem.get( 'name', None )
- if datatype_file_name:
- # Find the file in the installed repository.
- for root, dirs, files in os.walk( relative_intall_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == datatype_file_name:
- relative_path_to_datatype_file_name = os.path.join( root, name )
- break
- break
- if relative_path_to_datatype_file_name:
- relative_head, relative_tail = os.path.split( relative_path_to_datatype_file_name )
- registration = datatypes_config_root.find( 'registration' )
- # Get the module by parsing the <datatype> tag.
- for elem in registration.findall( 'datatype' ):
- # A 'type' attribute is currently required. The attribute
- # should be something like: type="gmap:GmapDB".
- dtype = elem.get( 'type', None )
- if dtype:
- fields = dtype.split( ':' )
- datatype_module = fields[0]
- datatype_class_name = fields[1]
- # Since we currently support only a single datatype_file,
- # we have what we need.
- break
- try:
- sys.path.insert( 0, relative_head )
- imported_module = __import__( datatype_module )
- sys.path.pop( 0 )
- except Exception, e:
- log.debug( "Exception importing datatypes code file included in installed repository: %s" % str( e ) )
- trans.app.datatypes_registry.load_datatypes( root_dir=trans.app.config.root, config=datatypes_config, imported_module=imported_module )
- def __clean_tool_shed_url( self, tool_shed_url ):
- if tool_shed_url.find( ':' ) > 0:
- # Eliminate the port, if any, since it will result in an invalid directory name.
- return tool_shed_url.split( ':' )[ 0 ]
- return tool_shed_url.rstrip( '/' )
def __generate_tool_path( self, repository_clone_url, changeset_revision ):
"""
Generate a tool path that guarantees repositories with the same name will always be installed
@@ -504,7 +392,7 @@
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
- tool_shed_url = self.__clean_tool_shed_url( tool_shed_url )
+ tool_shed_url = clean_tool_shed_url( tool_shed_url )
return '%s/repos%s/%s' % ( tool_shed_url, repo_path, changeset_revision )
def __generate_clone_url( self, trans, repository ):
"""Generate the URL for cloning a repository."""
@@ -545,17 +433,6 @@
trans.model.ToolShedRepository.table.c.owner == owner,
trans.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
.first()
-def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
- # This method is used by the InstallManager, which does not have access to trans.
- sa_session = app.model.context.current
- if tool_shed.find( '//' ) > 0:
- tool_shed = tool_shed.split( '//' )[1]
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
def get_url_from_repository_tool_shed( trans, repository ):
# The stored value of repository.tool_shed is something like:
# toolshed.g2.bx.psu.edu
@@ -569,347 +446,3 @@
# The tool shed from which the repository was originally
# installed must no longer be configured in tool_sheds_conf.xml.
return None
-def generate_tool_panel_section( repository_name, repository_clone_url, changeset_revision, tool_section, repository_tools_tups, owner='' ):
- """
- Write an in-memory tool panel section so we can load it into the tool panel and then
- append it to the appropriate shed tool config.
- TODO: re-write using ElementTree.
- """
- tmp_url = clean_repository_clone_url( repository_clone_url )
- if not owner:
- owner = get_repository_owner( tmp_url )
- section_str = ''
- section_str += ' <section name="%s" id="%s">\n' % ( tool_section.name, tool_section.id )
- for repository_tool_tup in repository_tools_tups:
- tool_file_path, guid, tool = repository_tool_tup
- section_str += ' <tool file="%s" guid="%s">\n' % ( tool_file_path, guid )
- section_str += ' <tool_shed>%s</tool_shed>\n' % tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
- section_str += ' <repository_name>%s</repository_name>\n' % repository_name
- section_str += ' <repository_owner>%s</repository_owner>\n' % owner
- section_str += ' <changeset_revision>%s</changeset_revision>\n' % changeset_revision
- section_str += ' <id>%s</id>\n' % tool.id
- section_str += ' <version>%s</version>\n' % tool.version
- section_str += ' </tool>\n'
- section_str += ' </section>\n'
- return section_str
-def get_repository_owner( cleaned_repository_url ):
- items = cleaned_repository_url.split( 'repos' )
- repo_path = items[ 1 ]
- if repo_path.startswith( '/' ):
- repo_path = repo_path.replace( '/', '', 1 )
- return repo_path.lstrip( '/' ).split( '/' )[ 0 ]
-def generate_tool_guid( repository_clone_url, tool ):
- """
- Generate a guid for the installed tool. It is critical that this guid matches the guid for
- the tool in the Galaxy tool shed from which it is being installed. The form of the guid is
- <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version>
- """
- tmp_url = clean_repository_clone_url( repository_clone_url )
- return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
-def clean_repository_clone_url( repository_clone_url ):
- if repository_clone_url.find( '@' ) > 0:
- # We have an url that includes an authenticated user, something like:
- # http://test@bx.psu.edu:9009/repos/some_username/column
- items = repository_clone_url.split( '@' )
- tmp_url = items[ 1 ]
- elif repository_clone_url.find( '//' ) > 0:
- # We have an url that includes only a protocol, something like:
- # http://bx.psu.edu:9009/repos/some_username/column
- items = repository_clone_url.split( '//' )
- tmp_url = items[ 1 ]
- else:
- tmp_url = repository_clone_url
- return tmp_url
-def generate_metadata( toolbox, relative_install_dir, repository_clone_url ):
- """
- Browse the repository files on disk to generate metadata. Since we are using disk files, it
- is imperative that the repository is updated to the desired change set revision before metadata
- is generated. This method is used by the InstallManager, which does not have access to trans.
- """
- metadata_dict = {}
- sample_files = []
- datatypes_config = None
- # Find datatypes_conf.xml if it exists.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == 'datatypes_conf.xml':
- relative_path = os.path.join( root, name )
- datatypes_config = os.path.abspath( relative_path )
- break
- if datatypes_config:
- metadata_dict[ 'datatypes_config' ] = relative_path
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- # Find all special .sample files.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ):
- sample_files.append( os.path.join( root, name ) )
- if sample_files:
- metadata_dict[ 'sample_files' ] = sample_files
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # Find all tool configs.
- if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
- full_path = os.path.abspath( os.path.join( root, name ) )
- try:
- tool = toolbox.load_tool( full_path )
- except Exception, e:
- tool = None
- if tool is not None:
- tool_config = os.path.join( root, name )
- metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
- # Find all exported workflows
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- return metadata_dict
-def generate_datatypes_metadata( datatypes_config, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received datatypes_config. This method is used by the InstallManager,
- which does not have access to trans.
- """
- # Parse datatypes_config.
- tree = ElementTree.parse( datatypes_config )
- root = tree.getroot()
- ElementInclude.include( root )
- repository_datatype_code_files = []
- datatype_files = root.find( 'datatype_files' )
- if datatype_files:
- for elem in datatype_files.findall( 'datatype_file' ):
- name = elem.get( 'name', None )
- repository_datatype_code_files.append( name )
- metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
- datatypes = []
- registration = root.find( 'registration' )
- if registration:
- for elem in registration.findall( 'datatype' ):
- extension = elem.get( 'extension', None )
- dtype = elem.get( 'type', None )
- mimetype = elem.get( 'mimetype', None )
- datatypes.append( dict( extension=extension,
- dtype=dtype,
- mimetype=mimetype ) )
- metadata_dict[ 'datatypes' ] = datatypes
- return metadata_dict
-def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been
- applied to the received tool. This method is used by the InstallManager,
- which does not have access to trans.
- """
- # Generate the guid
- guid = generate_tool_guid( repository_clone_url, tool )
- # Handle tool.requirements.
- tool_requirements = []
- for tr in tool.requirements:
- name=tr.name
- type=tr.type
- if type == 'fabfile':
- version = None
- fabfile = tr.fabfile
- method = tr.method
- else:
- version = tr.version
- fabfile = None
- method = None
- requirement_dict = dict( name=name,
- type=type,
- version=version,
- fabfile=fabfile,
- method=method )
- tool_requirements.append( requirement_dict )
- # Handle tool.tests.
- tool_tests = []
- if tool.tests:
- for ttb in tool.tests:
- test_dict = dict( name=ttb.name,
- required_files=ttb.required_files,
- inputs=ttb.inputs,
- outputs=ttb.outputs )
- tool_tests.append( test_dict )
- tool_dict = dict( id=tool.id,
- guid=guid,
- name=tool.name,
- version=tool.version,
- description=tool.description,
- version_string_cmd = tool.version_string_cmd,
- tool_config=tool_config,
- requirements=tool_requirements,
- tests=tool_tests )
- if 'tools' in metadata_dict:
- metadata_dict[ 'tools' ].append( tool_dict )
- else:
- metadata_dict[ 'tools' ] = [ tool_dict ]
- return metadata_dict
-def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received exported_workflow_dict. Store everything in the database.
- This method is used by the InstallManager, which does not have access to trans.
- """
- if 'workflows' in metadata_dict:
- metadata_dict[ 'workflows' ].append( ( relative_path, exported_workflow_dict ) )
- else:
- metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
- return metadata_dict
-def handle_missing_data_table_entry( app, tool_path, sample_files, repository_tools_tups ):
- """
- Inspect each tool to see if any have input parameters that are dynamically
- generated select lists that require entries in the tool_data_table_conf.xml file.
- This method is used by the InstallManager, which does not have access to trans.
- """
- missing_data_table_entry = False
- for index, repository_tools_tup in enumerate( repository_tools_tups ):
- tup_path, guid, repository_tool = repository_tools_tup
- if repository_tool.params_with_missing_data_table_entry:
- missing_data_table_entry = True
- break
- if missing_data_table_entry:
- # The repository must contain a tool_data_table_conf.xml.sample file that includes
- # all required entries for all tools in the repository.
- for sample_file in sample_files:
- head, tail = os.path.split( sample_file )
- if tail == 'tool_data_table_conf.xml.sample':
- break
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_file )
- if error:
- # TODO: Do more here than logging an exception.
- log.debug( exception_msg )
- # Reload the tool into the local list of repository_tools_tups.
- repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) )
- repository_tools_tups[ index ] = ( tup_path, repository_tool )
- return repository_tools_tups
-def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups ):
- """
- Inspect each tool to see if it has any input parameters that
- are dynamically generated select lists that depend on a .loc file.
- This method is used by the InstallManager, which does not have access to trans.
- """
- missing_files_handled = []
- for index, repository_tools_tup in enumerate( repository_tools_tups ):
- tup_path, guid, repository_tool = repository_tools_tup
- params_with_missing_index_file = repository_tool.params_with_missing_index_file
- for param in params_with_missing_index_file:
- options = param.options
- missing_head, missing_tail = os.path.split( options.missing_index_file )
- if missing_tail not in missing_files_handled:
- # The repository must contain the required xxx.loc.sample file.
- for sample_file in sample_files:
- sample_head, sample_tail = os.path.split( sample_file )
- if sample_tail == '%s.sample' % missing_tail:
- copy_sample_loc_file( app, sample_file )
- if options.tool_data_table and options.tool_data_table.missing_index_file:
- options.tool_data_table.handle_found_index_file( options.missing_index_file )
- missing_files_handled.append( missing_tail )
- break
- # Reload the tool into the local list of repository_tools_tups.
- repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ) )
- repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
- return repository_tools_tups
-def handle_tool_dependencies( current_working_dir, repo_files_dir, repository_tools_tups ):
- """
- Inspect each tool to see if it includes a "requirement" that refers to a fabric
- script. For those that do, execute the fabric script to install tool dependencies.
- This method is used by the InstallManager, which does not have access to trans.
- """
- for index, repository_tools_tup in enumerate( repository_tools_tups ):
- tup_path, guid, repository_tool = repository_tools_tup
- for requirement in repository_tool.requirements:
- if requirement.type == 'fabfile':
- log.debug( 'Executing fabric script to install dependencies for tool "%s"...' % repository_tool.name )
- fabfile = requirement.fabfile
- method = requirement.method
- # Find the relative path to the fabfile.
- relative_fabfile_path = None
- for root, dirs, files in os.walk( repo_files_dir ):
- for name in files:
- if name == fabfile:
- relative_fabfile_path = os.path.join( root, name )
- break
- if relative_fabfile_path:
- # cmd will look something like: fab -f fabfile.py install_bowtie
- cmd = 'fab -f %s %s' % ( relative_fabfile_path, method )
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stderr = open( tmp_name, 'wb' )
- os.chdir( repo_files_dir )
- proc = subprocess.Popen( cmd, shell=True, stderr=tmp_stderr.fileno() )
- returncode = proc.wait()
- os.chdir( current_working_dir )
- tmp_stderr.close()
- if returncode != 0:
- # TODO: do something more here than logging the problem.
- tmp_stderr = open( tmp_name, 'rb' )
- error = tmp_stderr.read()
- tmp_stderr.close()
- log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
-def add_shed_tool_conf_entry( app, shed_tool_conf, new_tool_section ):
- """
- Add an entry in the shed_tool_conf file. An entry looks something like:
- <section name="Filter and Sort" id="filter">
- <tool file="filter/filtering.xml" guid="toolshed.g2.bx.psu.edu/repos/test/filter/1.0.2"/>
- </section>
- This method is used by the InstallManager, which does not have access to trans.
- """
- # Make a backup of the hgweb.config file since we're going to be changing it.
- if not os.path.exists( shed_tool_conf ):
- output = open( shed_tool_conf, 'w' )
- output.write( '<?xml version="1.0"?>\n' )
- output.write( '<toolbox tool_path="%s">\n' % tool_path )
- output.write( '</toolbox>\n' )
- output.close()
- # Make a backup of the shed_tool_conf file.
- today = date.today()
- backup_date = today.strftime( "%Y_%m_%d" )
- shed_tool_conf_copy = '%s/%s_%s_backup' % ( app.config.root, shed_tool_conf, backup_date )
- shutil.copy( os.path.abspath( shed_tool_conf ), os.path.abspath( shed_tool_conf_copy ) )
- tmp_fd, tmp_fname = tempfile.mkstemp()
- new_shed_tool_conf = open( tmp_fname, 'wb' )
- for i, line in enumerate( open( shed_tool_conf ) ):
- if line.startswith( '</toolbox>' ):
- # We're at the end of the original config file, so add our entry.
- new_shed_tool_conf.write( new_tool_section )
- new_shed_tool_conf.write( line )
- else:
- new_shed_tool_conf.write( line )
- new_shed_tool_conf.close()
- shutil.move( tmp_fname, os.path.abspath( shed_tool_conf ) )
-def create_or_undelete_tool_shed_repository( app, name, description, changeset_revision, repository_clone_url, metadata_dict, owner='' ):
- # This method is used by the InstallManager, which does not have access to trans.
- sa_session = app.model.context.current
- tmp_url = clean_repository_clone_url( repository_clone_url )
- tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
- if not owner:
- owner = get_repository_owner( tmp_url )
- includes_datatypes = 'datatypes_config' in metadata_dict
- flush_needed = False
- tool_shed_repository = get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision )
- if tool_shed_repository:
- if tool_shed_repository.deleted:
- tool_shed_repository.deleted = False
- # Reset includes_datatypes in case metadata changed since last installed.
- tool_shed_repository.includes_datatypes = includes_datatypes
- flush_needed = True
- else:
- tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
- name=name,
- description=description,
- owner=owner,
- changeset_revision=changeset_revision,
- metadata=metadata_dict,
- includes_datatypes=includes_datatypes )
- flush_needed = True
- if flush_needed:
- sa_session.add( tool_shed_repository )
- sa_session.flush()
diff -r 9c46a216e24c529a496a50afec13ebcb78106b96 -r 7dd3a089101138a4796eb73a5f1391d2f436723e templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -23,39 +23,42 @@
<br/><div class="toolForm">
- <div class="toolFormTitle">Choose section to load tools into tool panel</div>
+ <div class="toolFormTitle">Choose tool panel section to contain installed tools (optional)</div><div class="toolFormBody">
- <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict )}" method="post" >
- %if shed_tool_conf_select_field:
+ <form name="select_tool_panel_section" id="select_tool_panel_section" action="${h.url_for( controller='admin_toolshed', action='install_repository', tool_shed_url=tool_shed_url, repo_info_dict=repo_info_dict )}" method="post" >
+ %if shed_tool_conf_select_field:
+ <div class="form-row">
+ <label>Shed tool configuration file:</label>
+ ${shed_tool_conf_select_field.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ Your Galaxy instance is configured with ${len( shed_tool_conf_select_field.options )} shed tool configuration files,
+ so choose one in which to configure the installed tools.
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ %else:
+ <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf}"/>
+ %endif
<div class="form-row">
- <label>Shed tool configuration file:</label>
- ${shed_tool_conf_select_field.get_html()}
+ <label>Add new tool panel section:</label>
+ <input name="new_tool_panel_section" type="textfield" value="${new_tool_panel_section}" size="40"/><div class="toolParamHelp" style="clear: both;">
- Your Galaxy instance is configured with ${len( shed_tool_conf_select_field.options )} shed tool configuration files,
- so choose one in which to configure the installed tools.
+ Add a new tool panel section to contain the installed tools (optional).
</div></div>
- <div style="clear: both"></div>
- %else:
- <input type="hidden" name="shed_tool_conf" value="${shed_tool_conf}"/>
- %endif
- <div class="form-row">
- <label>Add new tool panel section:</label>
- <input name="new_tool_panel_section" type="textfield" value="${new_tool_panel_section}" size="40"/>
- <div class="toolParamHelp" style="clear: both;">
- Add a new tool panel section or choose an existing section in your tool panel below to contain the installed tools.
+ <div class="form-row">
+ <label>Select existing tool panel section:</label>
+ ${tool_panel_section_select_field.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ Choose an existing section in your tool panel to contain the installed tools (optional).
+ </div></div>
- </div>
- <div class="form-row">
- <label>Select existing tool panel section:</label>
- ${tool_panel_section_select_field.get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Choose an existing section in your tool panel to contain the installed tools.
+ <div class="form-row">
+ <input type="submit" name="select_tool_panel_section_button" value="Install"/>
+ <div class="toolParamHelp" style="clear: both;">
+ Clicking <b>Install</b> without selecting a tool panel section will load the installed tools into the tool panel outside of any sections.
+ </div></div>
- </div>
- <div class="form-row">
- <input type="submit" name="select_tool_panel_section_button" value="Install"/>
- </div>
- </form>
+ </form></div></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: natefoo: Fix extra files display with object store and the distributed object store config.
by Bitbucket 13 Dec '11
by Bitbucket 13 Dec '11
13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9c46a216e24c/
changeset: 9c46a216e24c
user: natefoo
date: 2011-12-13 06:45:52
summary: Fix extra files display with object store and the distributed object store config.
affected #: 3 files
diff -r a638b139a24187786916bc9b5c5491f4fab744fc -r 9c46a216e24c529a496a50afec13ebcb78106b96 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -868,10 +868,10 @@
def __init__(self, config):
super(DistributedObjectStore, self).__init__()
- assert config is not None, "distributed object store ('object_store = distributed') " \
- "requires a config file, please set one in " \
- "'distributed_object_store_config_file')"
- self.distributed_config = config
+ self.distributed_config = config.distributed_object_store_config_file
+ assert self.distributed_config is not None, "distributed object store ('object_store = distributed') " \
+ "requires a config file, please set one in " \
+ "'distributed_object_store_config_file')"
self.backends = {}
self.weighted_backend_names = []
@@ -1006,7 +1006,7 @@
os.environ['AWS_SECRET_ACCESS_KEY'] = config.aws_secret_key
return S3ObjectStore(config=config)
elif store == 'distributed':
- return DistributedObjectStore(config.distributed_object_store_config_file)
+ return DistributedObjectStore(config=config)
elif store == 'hierarchical':
return HierarchicalObjectStore()
diff -r a638b139a24187786916bc9b5c5491f4fab744fc -r 9c46a216e24c529a496a50afec13ebcb78106b96 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -382,7 +382,7 @@
if filename and filename != "index":
# For files in extra_files_path
- file_path = trans.app.object_store.get_filename(data.id, extra_dir='dataset_%s_files' % data.id, alt_name=filename)
+ file_path = trans.app.object_store.get_filename(data.dataset.id, extra_dir='dataset_%s_files' % data.dataset.id, alt_name=filename)
if os.path.exists( file_path ):
if os.path.isdir( file_path ):
return trans.show_error_message( "Directory listing is not allowed." ) #TODO: Reconsider allowing listing of directories?
diff -r a638b139a24187786916bc9b5c5491f4fab744fc -r 9c46a216e24c529a496a50afec13ebcb78106b96 scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -59,7 +59,7 @@
tool_config_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', 'tool_conf.xml.sample' )
tool_data_table_config_path = 'tool_data_table_conf.xml'
tool_dependency_dir = os.environ.get( 'GALAXY_TOOL_DEPENDENCY_DIR', None )
- use_hierarchical_object_store = os.environ.get( 'GALAXY_USE_HIERARCHICAL_OBJECT_STORE', False )
+ use_distributed_object_store = os.environ.get( 'GALAXY_USE_DISTRIBUTED_OBJECT_STORE', False )
if os.path.exists( 'tool_data_table_conf.test.xml' ):
tool_data_table_config_path = 'tool_data_table_conf.test.xml'
if start_server:
@@ -154,9 +154,9 @@
if tool_dependency_dir is not None:
kwargs['tool_dependency_dir'] = tool_dependency_dir
- if use_hierarchical_object_store:
- kwargs['object_store'] = 'hierarchical'
- kwargs['hierarchical_object_store_config_file'] = 'hierarchical_object_store_conf.xml.sample'
+ if use_distributed_object_store:
+ kwargs['object_store'] = 'distributed'
+ kwargs['distributed_object_store_config_file'] = 'distributed_object_store_conf.xml.sample'
# Build the Universe Application
app = UniverseApplication( job_queue_workers = 5,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: natefoo: Better queries for the job queue, especially when checking the user job count.
by Bitbucket 13 Dec '11
by Bitbucket 13 Dec '11
13 Dec '11
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a638b139a241/
changeset: a638b139a241
user: natefoo
date: 2011-12-13 05:26:59
summary: Better queries for the job queue, especially when checking the user job count.
affected #: 1 file
diff -r 55bb95ec5c2a3ce2ef7eaf0a79ec60365d7d1aec -r a638b139a24187786916bc9b5c5491f4fab744fc lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -2,7 +2,6 @@
import galaxy
from galaxy import util, model
-from galaxy.model.orm import lazyload
from galaxy.datatypes.tabular import *
from galaxy.datatypes.interval import *
# tabular/interval imports appear to be unused. Clean up?
@@ -114,7 +113,7 @@
else:
log.debug( "no runner: %s is still in new state, adding to the jobs queue" %job.id )
self.queue.put( ( job.id, job.tool_id ) )
- for job in self.sa_session.query( model.Job ).options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ).filter( ( model.Job.state == model.Job.states.RUNNING ) | ( model.Job.state == model.Job.states.QUEUED ) ):
+ for job in self.sa_session.query( model.Job ).enable_eagerloads( False ).filter( ( model.Job.state == model.Job.states.RUNNING ) | ( model.Job.state == model.Job.states.QUEUED ) ):
if job.tool_id not in self.app.toolbox.tools_by_id:
log.warning( "Tool '%s' removed from tool config, unable to recover job: %s" % ( job.tool_id, job.id ) )
JobWrapper( job, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator, or' )
@@ -161,8 +160,7 @@
# Clear the session so we get fresh states for job and all datasets
self.sa_session.expunge_all()
# Fetch all new jobs
- jobs_to_check = self.sa_session.query( model.Job ) \
- .options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ) \
+ jobs_to_check = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
.filter( model.Job.state == model.Job.states.NEW ).all()
else:
# Get job objects and append to watch queue for any which were
@@ -264,21 +262,19 @@
if not self.app.config.user_job_limit:
return JOB_READY
if job.user:
- user_jobs = self.sa_session.query( model.Job ) \
- .options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ) \
- .filter( and_( model.Job.user_id == job.user.id,
- or_( model.Job.state == model.Job.states.RUNNING,
- model.Job.state == model.Job.states.QUEUED ) ) ).all()
+ count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( and_( model.Job.user_id == job.user.id,
+ or_( model.Job.state == model.Job.states.RUNNING,
+ model.Job.state == model.Job.states.QUEUED ) ) ).count()
elif job.galaxy_session:
- user_jobs = self.sa_session.query( model.Job ) \
- .options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ) \
- .filter( and_( model.Job.session_id == job.galaxy_session.id,
- or_( model.Job.state == model.Job.states.RUNNING,
- model.Job.state == model.Job.states.QUEUED ) ) ).all()
+ count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( and_( model.Job.session_id == job.galaxy_session.id,
+ or_( model.Job.state == model.Job.states.RUNNING,
+ model.Job.state == model.Job.states.QUEUED ) ) ).count()
else:
log.warning( 'Job %s is not associated with a user or session so job concurrency limit cannot be checked.' % job.id )
return JOB_READY
- if len( user_jobs ) >= self.app.config.user_job_limit:
+ if count >= self.app.config.user_job_limit:
return JOB_WAIT
return JOB_READY
@@ -1245,8 +1241,7 @@
# Clear the session so we get fresh states for job and all datasets
self.sa_session.expunge_all()
# Fetch all new jobs
- newly_deleted_jobs = self.sa_session.query( model.Job ) \
- .options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ) \
+ newly_deleted_jobs = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
.filter( model.Job.state == model.Job.states.DELETED_NEW ).all()
for job in newly_deleted_jobs:
jobs_to_check.append( ( job, None ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0