6 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9e7f9282deba/
Changeset: 9e7f9282deba
User: Jeremy Goecks
Date: 2014-03-26 18:42:54
Summary: Trackster: consolidate read painters.
Affected #: 2 files
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 9e7f9282deba9d383e708e210acea5dea8eb7673 static/scripts/viz/trackster/painters.js
--- a/static/scripts/viz/trackster/painters.js
+++ b/static/scripts/viz/trackster/painters.js
@@ -736,297 +736,6 @@
};
},
- // FIXME: extract common functionality from draw_read functions for ReadPainters.
-
- /**
- * Draw a single read.
- */
- draw_read: function(ctx, mode, w_scale, y_start, tile_low, tile_high, feature_start, cigar, strand, read_seq) {
- ctx.textAlign = "center";
- var tile_region = [tile_low, tile_high],
- base_offset = 0,
- seq_offset = 0,
- gap = Math.round(w_scale/2),
- char_width_px = ctx.canvas.manager.char_width_px,
- block_color = (strand === "+" ? this.prefs.block_color : this.prefs.reverse_strand_color),
- pack_mode = (mode === 'Pack'),
- paint_utils = new ReadPainterUtils(ctx, (pack_mode ? PACK_FEATURE_HEIGHT : SQUISH_FEATURE_HEIGHT), w_scale, mode);
-
- // Keep list of items that need to be drawn on top of initial drawing layer.
- var draw_last = [];
-
- // If no cigar string, then assume all matches.
- if (!cigar) {
- cigar = [ [0, read_seq.length] ];
- }
-
- // Draw read by processing cigar.
- for (var cig_id = 0, len = cigar.length; cig_id < len; cig_id++) {
- var cig = cigar[cig_id],
- cig_op = "MIDNSHP=X"[ cig[0] ],
- cig_len = cig[1];
-
- var seq_start = feature_start + base_offset,
- // -0.5 to offset sequence between bases.
- s_start = Math.floor( Math.max(-0.5 * w_scale, (seq_start - tile_low - 0.5) * w_scale) ),
- s_end = Math.floor( Math.max(0, (seq_start + cig_len - tile_low - 0.5) * w_scale) );
-
- if (!is_overlap([seq_start, seq_start + cig_len], tile_region)) {
- // Increment base offset only for certain operations.
- if ("MDN=X".indexOf(cig_op) !== -1) {
- base_offset += cig_len;
- }
- continue;
- }
-
- // Make sure that read is drawn even if it too small to be rendered officially; in this case,
- // read is drawn at 1px.
- // TODO: need to ensure that s_start, s_end are calcuated the same for both slotting
- // and drawing.
- if (s_start === s_end) {
- s_end += 1;
- }
-
- switch (cig_op) {
- case "H": // Hard clipping.
- // Sequence not present, so do not increment seq_offset.
- break;
- case "S": // Soft clipping.
- seq_offset += cig_len;
- break;
- case "M": // Loose match with reference; can be match or mismatch.
- case "=": // Strict match with reference.
- case "X": // Strict mismatch with reference.
- // Draw read base as rectangle.
- ctx.fillStyle = block_color;
- ctx.fillRect(s_start,
- y_start + (pack_mode ? 1 : 4 ),
- s_end - s_start,
- (pack_mode ? PACK_FEATURE_HEIGHT : SQUISH_FEATURE_HEIGHT));
-
- // Draw sequence and/or variants.
- var seq = read_seq.slice(seq_offset, seq_offset + cig_len),
- ref_char,
- read_char;
- for (var c = 0, str_len = seq.length; c < str_len; c++) {
- // Draw base if it's on tile:
- if (seq_start + c >= tile_low && seq_start + c <= tile_high) {
- // Get reference and read character.
- ref_char = (this.ref_seq ? this.ref_seq[seq_start - tile_low + c] : null);
- read_char = seq[c];
-
- // Draw base depending on (a) available reference data and (b) config options.
- if (
- // If there's reference data and (a) showing all (i.e. not showing
- // differences) or (b) if there is a variant.
- (ref_char &&
- (!this.prefs.show_differences ||
- (read_char.toLowerCase !== 'n' && (ref_char.toLowerCase() !== read_char.toLowerCase())))
- ) ||
- // If there's no reference data and showing all.
- (!ref_char && !this.prefs.show_differences)
- ) {
-
- // Draw base.
- var c_start = Math.floor( Math.max(0, (seq_start + c - tile_low) * w_scale) );
- ctx.fillStyle = this.base_color_fn(seq[c]);
- if (pack_mode && w_scale > char_width_px) {
- ctx.fillText(seq[c], c_start, y_start + 9);
- }
- // Require a minimum w_scale so that variants are only drawn when somewhat zoomed in.
- else if (w_scale > 0.05) {
- ctx.fillRect(c_start - gap,
- y_start + (pack_mode ? 1 : 4),
- Math.max( 1, Math.round(w_scale) ),
- (pack_mode ? PACK_FEATURE_HEIGHT : SQUISH_FEATURE_HEIGHT));
- }
- }
-
- }
- }
-
- seq_offset += cig_len;
- base_offset += cig_len;
- break;
- case "N": // Skipped bases.
- ctx.fillStyle = CONNECTOR_COLOR;
- ctx.fillRect(s_start, y_start + 5, s_end - s_start, 1);
- //ctx.dashedLine(s_start + this.left_offset, y_start + 5, this.left_offset + s_end, y_start + 5);
- base_offset += cig_len;
- break;
- case "D": // Deletion.
- paint_utils.draw_deletion(s_start, y_start, 1);
- base_offset += cig_len;
- break;
- case "P": // TODO: No good way to draw insertions/padding right now, so ignore
- // Sequences not present, so do not increment seq_offset.
- break;
- case "I": // Insertion.
- // Check to see if sequence should be drawn at all by looking at the overlap between
- // the sequence region and the tile region.
- var insert_x_coord = s_start - gap;
-
- if (is_overlap([seq_start, seq_start + cig_len], tile_region)) {
- var seq = read_seq.slice(seq_offset, seq_offset + cig_len);
- // Insertion point is between the sequence start and the previous base: (-gap) moves
- // back from sequence start to insertion point.
- if (this.prefs.show_insertions) {
- //
- // Show inserted sequence above, centered on insertion point.
- //
-
- // Draw sequence.
- // X center is offset + start - <half_sequence_length>
- var x_center = s_start - (s_end - s_start)/2;
- if ( (mode === "Pack" || this.mode === "Auto") && read_seq !== undefined && w_scale > char_width_px) {
- // Draw sequence container.
- ctx.fillStyle = "yellow";
- ctx.fillRect(x_center - gap, y_start - 9, s_end - s_start, 9);
- draw_last[draw_last.length] = {type: "triangle", data: [insert_x_coord, y_start + 4, 5]};
- ctx.fillStyle = CONNECTOR_COLOR;
- // Based on overlap b/t sequence and tile, get sequence to be drawn.
- switch( compute_overlap( [seq_start, seq_start + cig_len], tile_region ) ) {
- case(OVERLAP_START):
- seq = seq.slice(tile_low-seq_start);
- break;
- case(OVERLAP_END):
- seq = seq.slice(0, seq_start-tile_high);
- break;
- case(CONTAINED_BY):
- // All of sequence drawn.
- break;
- case(CONTAINS):
- seq = seq.slice(tile_low-seq_start, seq_start-tile_high);
- break;
- }
- // Draw sequence.
- for (var c = 0, str_len = seq.length; c < str_len; c++) {
- var c_start = Math.floor( Math.max(0, (seq_start + c - tile_low) * w_scale) );
- ctx.fillText(seq[c], c_start - (s_end - s_start)/2, y_start);
- }
- }
- else {
- // Draw block.
- ctx.fillStyle = "yellow";
- // TODO: This is a pretty hack-ish way to fill rectangle based on mode.
- ctx.fillRect(x_center, y_start + (this.mode !== "Dense" ? 2 : 5),
- s_end - s_start, (mode !== "Dense" ? SQUISH_FEATURE_HEIGHT : DENSE_FEATURE_HEIGHT));
- }
- }
- else {
- if ( (mode === "Pack" || this.mode === "Auto") && read_seq !== undefined && w_scale > char_width_px) {
- // Show insertions with a single number at the insertion point.
- draw_last.push( { type: "text", data: [seq.length, insert_x_coord, y_start + 9] } );
- }
- else {
- // TODO: probably can merge this case with code above.
- }
- }
- }
- seq_offset += cig_len;
- // No change to base offset because insertions are drawn above sequence/read.
- break;
- }
- }
-
- //
- // Draw last items.
- //
- ctx.fillStyle = "yellow";
- var item, type, data;
- for (var i = 0; i < draw_last.length; i++) {
- item = draw_last[i];
- type = item.type;
- data = item.data;
- if (type === "text") {
- ctx.save();
- ctx.font = "bold " + ctx.font;
- ctx.fillText(data[0], data[1], data[2]);
- ctx.restore();
- }
- else if (type === "triangle") {
- drawDownwardEquilateralTriangle(ctx, data[0], data[1], data[2]);
- }
- }
- },
-
- /**
- * Draw a complete read pair
- */
- draw_element: function(ctx, mode, feature, slot, tile_low, tile_high, w_scale, y_scale, width ) {
- // All features need a start, end, and vertical center.
- var feature_uid = feature[0],
- feature_start = feature[1],
- feature_end = feature[2],
- feature_name = feature[3],
- // -0.5 to put element between bases.
- f_start = Math.floor( Math.max(-0.5 * w_scale, (feature_start - tile_low - 0.5) * w_scale) ),
- f_end = Math.ceil( Math.min(width, Math.max(0, (feature_end - tile_low - 0.5) * w_scale)) ),
- y_start = (mode === "Dense" ? 0 : (0 + slot)) * y_scale,
- label_color = this.prefs.label_color;
-
- // Draw read.
- if (feature[5] instanceof Array) {
- // Read is paired.
- var connector = true;
-
- // Draw left/forward read.
- if (feature[4][1] >= tile_low && feature[4][0] <= tile_high && feature[4][2]) {
- this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature[4][0], feature[4][2], feature[4][3], feature[4][4]);
- }
- else {
- connector = false;
- }
-
- // Draw right/reverse read.
- if (feature[5][1] >= tile_low && feature[5][0] <= tile_high && feature[5][2]) {
- this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature[5][0], feature[5][2], feature[5][3], feature[5][4]);
- }
- else {
- connector = false;
- }
-
- // Draw connector if both reads were drawn.
- // TODO: currently, there is no way to connect reads drawn on different tiles; to connect reads on different tiles, data manager
- // code is needed to join mate pairs from different regions. Alternatively, requesting multiple regions of data at once would
- // make it possible to put together more easily.
- // -0.5 to position connector correctly between reads.
- var b1_end = Math.ceil( Math.min(width, Math.max(-0.5 * w_scale, (feature[4][1] - tile_low - 0.5) * w_scale)) ),
- b2_start = Math.floor( Math.max(-0.5 * w_scale, (feature[5][0] - tile_low - 0.5) * w_scale) );
- if (connector && b2_start > b1_end) {
- ctx.fillStyle = CONNECTOR_COLOR;
- dashedLine(ctx, b1_end, y_start + 5, b2_start, y_start + 5);
- }
- } else {
- // Read is single.
- this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature_start, feature[4], feature[5], feature[6]);
- }
- if (mode === "Pack" && feature_start >= tile_low && feature_name !== ".") {
- // Draw label.
- ctx.fillStyle = this.prefs.label_color;
- if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
- ctx.textAlign = "left";
- ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8);
- } else {
- ctx.textAlign = "right";
- ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8);
- }
- }
-
- // FIXME: provide actual coordinates for drawn read.
- return [0,0];
- }
-});
-
-/**
- * Painter for reads encoded using reference-based compression.
- */
-var RefBasedReadPainter = function(data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler, ref_seq, base_color_fn) {
- ReadPainter.call(this, data, view_start, view_end, prefs, mode, alpha_scaler, height_scaler, ref_seq, base_color_fn);
-};
-
-extend(RefBasedReadPainter.prototype, ReadPainter.prototype, FeaturePainter, {
-
/**
* Draw a single read from reference-based read sequence and cigar.
*/
@@ -1255,6 +964,73 @@
drawDownwardEquilateralTriangle(ctx, data[0], data[1], data[2]);
}
}
+ },
+
+ /**
+ * Draw a complete read pair
+ */
+ draw_element: function(ctx, mode, feature, slot, tile_low, tile_high, w_scale, y_scale, width ) {
+ // All features need a start, end, and vertical center.
+ var feature_uid = feature[0],
+ feature_start = feature[1],
+ feature_end = feature[2],
+ feature_name = feature[3],
+ // -0.5 to put element between bases.
+ f_start = Math.floor( Math.max(-0.5 * w_scale, (feature_start - tile_low - 0.5) * w_scale) ),
+ f_end = Math.ceil( Math.min(width, Math.max(0, (feature_end - tile_low - 0.5) * w_scale)) ),
+ y_start = (mode === "Dense" ? 0 : (0 + slot)) * y_scale,
+ label_color = this.prefs.label_color;
+
+ // Draw read.
+ if (feature[5] instanceof Array) {
+ // Read is paired.
+ var connector = true;
+
+ // Draw left/forward read.
+ if (feature[4][1] >= tile_low && feature[4][0] <= tile_high && feature[4][2]) {
+ this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature[4][0], feature[4][2], feature[4][3], feature[4][4]);
+ }
+ else {
+ connector = false;
+ }
+
+ // Draw right/reverse read.
+ if (feature[5][1] >= tile_low && feature[5][0] <= tile_high && feature[5][2]) {
+ this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature[5][0], feature[5][2], feature[5][3], feature[5][4]);
+ }
+ else {
+ connector = false;
+ }
+
+ // Draw connector if both reads were drawn.
+ // TODO: currently, there is no way to connect reads drawn on different tiles; to connect reads on different tiles, data manager
+ // code is needed to join mate pairs from different regions. Alternatively, requesting multiple regions of data at once would
+ // make it possible to put together more easily.
+ // -0.5 to position connector correctly between reads.
+ var b1_end = Math.ceil( Math.min(width, Math.max(-0.5 * w_scale, (feature[4][1] - tile_low - 0.5) * w_scale)) ),
+ b2_start = Math.floor( Math.max(-0.5 * w_scale, (feature[5][0] - tile_low - 0.5) * w_scale) );
+ if (connector && b2_start > b1_end) {
+ ctx.fillStyle = CONNECTOR_COLOR;
+ dashedLine(ctx, b1_end, y_start + 5, b2_start, y_start + 5);
+ }
+ } else {
+ // Read is single.
+ this.draw_read(ctx, mode, w_scale, y_start, tile_low, tile_high, feature_start, feature[4], feature[5], feature[6]);
+ }
+ if (mode === "Pack" && feature_start >= tile_low && feature_name !== ".") {
+ // Draw label.
+ ctx.fillStyle = this.prefs.label_color;
+ if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
+ ctx.textAlign = "left";
+ ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8);
+ } else {
+ ctx.textAlign = "right";
+ ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8);
+ }
+ }
+
+ // FIXME: provide actual coordinates for drawn read.
+ return [0,0];
}
});
@@ -1775,7 +1551,6 @@
LinePainter: LinePainter,
LinkedFeaturePainter: LinkedFeaturePainter,
ReadPainter: ReadPainter,
- RefBasedReadPainter: RefBasedReadPainter,
ArcLinkedFeaturePainter: ArcLinkedFeaturePainter,
DiagonalHeatmapPainter: DiagonalHeatmapPainter,
VariantPainter: VariantPainter
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 9e7f9282deba9d383e708e210acea5dea8eb7673 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -4163,8 +4163,7 @@
*/
var ReadTrack = function (view, container, obj_dict) {
FeatureTrack.call(this, view, container, obj_dict);
- // Choose painter based on whether there is reference data.
- this.painter = (view.reference_track ? painters.RefBasedReadPainter : painters.ReadPainter);
+ this.painter = painters.ReadPainter;
this.update_icons();
};
https://bitbucket.org/galaxy/galaxy-central/commits/09676399640d/
Changeset: 09676399640d
User: Jeremy Goecks
Date: 2014-03-26 19:46:29
Summary: Trackster: remove reference to left_offset in painters because it is not needed.
Affected #: 1 file
diff -r 9e7f9282deba9d383e708e210acea5dea8eb7673 -r 09676399640d304d45dfeb81a32ed30d7314d855 static/scripts/viz/trackster/painters.js
--- a/static/scripts/viz/trackster/painters.js
+++ b/static/scripts/viz/trackster/painters.js
@@ -127,9 +127,7 @@
Painter.prototype.default_prefs = {};
/**
- * Draw on the context using a rectangle of width x height. w_scale is
- * needed because it cannot be computed from width and view size alone
- * as a left_offset may be present.
+ * Draw on the context using a rectangle of width x height using scale w_scale.
*/
Painter.prototype.draw = function(ctx, width, height, w_scale) {};
@@ -869,7 +867,6 @@
case "N": // Skipped bases.
ctx.fillStyle = CONNECTOR_COLOR;
ctx.fillRect(s_start, y_start + 5, s_end - s_start, 1);
- //ctx.dashedLine(s_start + this.left_offset, y_start + 5, this.left_offset + s_end, y_start + 5);
// No change in seq_offset because sequence not used when skipping.
base_offset += cig_len;
break;
@@ -1357,9 +1354,7 @@
},
/**
- * Draw on the context using a rectangle of width x height. w_scale is
- * needed because it cannot be computed from width and view size alone
- * as a left_offset may be present.
+ * Draw on the context using a rectangle of width x height with scale w_scale.
*/
draw: function(ctx, width, height, w_scale) {
ctx.save();
https://bitbucket.org/galaxy/galaxy-central/commits/ff91d665fb8e/
Changeset: ff91d665fb8e
User: Jeremy Goecks
Date: 2014-03-26 20:12:39
Summary: Trackster: set maximum label length.
Affected #: 1 file
diff -r 09676399640d304d45dfeb81a32ed30d7314d855 -r ff91d665fb8e211c0bea4c02d9c046f32be7005c static/scripts/viz/trackster/painters.js
--- a/static/scripts/viz/trackster/painters.js
+++ b/static/scripts/viz/trackster/painters.js
@@ -341,6 +341,7 @@
Painter.call(this, data, view_start, view_end, prefs, mode);
this.alpha_scaler = (alpha_scaler ? alpha_scaler : new Scaler());
this.height_scaler = (height_scaler ? height_scaler : new Scaler());
+ this.max_label_length = 200;
};
FeaturePainter.prototype.default_prefs = { block_color: "#FFF", connector_color: "#FFF" };
@@ -399,6 +400,7 @@
feature_mapper.y_translation = this.get_top_padding(width);
return feature_mapper;
},
+
/**
* Abstract function for drawing an individual feature.
*/
@@ -645,11 +647,11 @@
// FIXME: assumption here that the entire view starts at 0
if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
ctx.textAlign = "left";
- ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8);
+ ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8, this.max_label_length);
draw_end += ctx.measureText(feature_name).width + LABEL_SPACING;
} else {
ctx.textAlign = "right";
- ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8);
+ ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8, this.max_label_length);
draw_start -= ctx.measureText(feature_name).width + LABEL_SPACING;
}
//ctx.fillStyle = block_color;
@@ -1019,10 +1021,10 @@
ctx.fillStyle = this.prefs.label_color;
if (tile_low === 0 && f_start - ctx.measureText(feature_name).width < 0) {
ctx.textAlign = "left";
- ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8);
+ ctx.fillText(feature_name, f_end + LABEL_SPACING, y_start + 8, this.max_label_length);
} else {
ctx.textAlign = "right";
- ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8);
+ ctx.fillText(feature_name, f_start - LABEL_SPACING, y_start + 8, this.max_label_length);
}
}
https://bitbucket.org/galaxy/galaxy-central/commits/cc7b0f0be551/
Changeset: cc7b0f0be551
User: Jeremy Goecks
Date: 2014-03-26 20:39:14
Summary: Fix initialization of boolean values in client-side config object.
Affected #: 1 file
diff -r ff91d665fb8e211c0bea4c02d9c046f32be7005c -r cc7b0f0be551dd1c0564c1fd7b95b6a2aaa7ee39 static/scripts/utils/config.js
--- a/static/scripts/utils/config.js
+++ b/static/scripts/utils/config.js
@@ -16,7 +16,7 @@
this.set(_.extend({}, defaults, options));
}
- if (!this.get('value')) {
+ if (this.get('value') === undefined) {
// Use default to set value.
this.set_value(this.get('default_value'));
https://bitbucket.org/galaxy/galaxy-central/commits/e298d0294721/
Changeset: e298d0294721
User: Jeremy Goecks
Date: 2014-03-26 22:37:03
Summary: Trackster: fix offset bug that was rendering bases incorrectly.
Affected #: 1 file
diff -r cc7b0f0be551dd1c0564c1fd7b95b6a2aaa7ee39 -r e298d029472150df08bfae9b6ec311c78cb63dfd static/scripts/viz/trackster/painters.js
--- a/static/scripts/viz/trackster/painters.js
+++ b/static/scripts/viz/trackster/painters.js
@@ -740,6 +740,20 @@
* Draw a single read from reference-based read sequence and cigar.
*/
draw_read: function(ctx, mode, w_scale, y_start, tile_low, tile_high, feature_start, cigar, strand, read_seq) {
+ // Helper function to update base and sequence offsets.
+ var update_base_offset = function(offset, cig_op, cig_len) {
+ if ('M=NXD'.indexOf(cig_op) !== -1) {
+ offset += cig_len;
+ }
+ return offset;
+ },
+ update_seq_offset = function(offset, cig_op, cig_len) {
+ if ('IX'.indexOf(cig_op) !== -1) {
+ offset += cig_len;
+ }
+ return offset;
+ };
+
ctx.textAlign = "center";
var tile_region = [tile_low, tile_high],
base_offset = 0,
@@ -770,7 +784,7 @@
// Make sure that block is drawn even if it too small to be rendered officially; in this case,
// read is drawn at 1px.
- // TODO: need to ensure that s_start, s_end are calcuated the same for both slotting
+ // TODO: need to ensure that s_start, s_end are calculated the same for both slotting
// and drawing.
if (s_start === s_end) {
s_end += 1;
@@ -793,21 +807,26 @@
var seq_start = feature_start + base_offset,
// -0.5 to offset sequence between bases.
- s_start = Math.floor( Math.max(0, -0.5 * w_scale, (seq_start - tile_low - 0.5) * w_scale) ),
+ s_start = Math.floor( Math.max(-0.5 * w_scale, (seq_start - tile_low - 0.5) * w_scale) ),
s_end = Math.floor( Math.max(0, (seq_start + cig_len - tile_low - 0.5) * w_scale) );
+ // Skip feature if it's not in tile.
if (!is_overlap([seq_start, seq_start + cig_len], tile_region)) {
+ // Update offsets.
+ base_offset = update_base_offset(base_offset, cig_op, cig_len);
+ seq_offset = update_seq_offset(seq_offset, cig_op, cig_len);
continue;
}
// Make sure that read is drawn even if it too small to be rendered officially; in this case,
// read is drawn at 1px.
- // TODO: need to ensure that s_start, s_end are calcuated the same for both slotting
+ // TODO: need to ensure that s_start, s_end are calculated the same for both slotting
// and drawing.
if (s_start === s_end) {
s_end += 1;
}
-
+
+ // Draw read feature.
switch (cig_op) {
case "H": // Hard clipping.
case "S": // Soft clipping.
https://bitbucket.org/galaxy/galaxy-central/commits/bc739e4b5bdf/
Changeset: bc739e4b5bdf
User: Jeremy Goecks
Date: 2014-03-26 22:38:22
Summary: Automated merge of local changesets with -default
Affected #: 24 files
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -56,6 +56,7 @@
python_openid = 2.2.5
python_daemon = 1.5.5
pytz = 2013.9
+requests = 2.2.1
Routes = 1.12.3
sqlalchemy_migrate = 0.7.2
ssh = 1.7.14
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -41,6 +41,7 @@
from galaxy.tools.parameters.basic import DataToolParameter
from galaxy.util.json import to_json_string
from galaxy.workflow.modules import ToolModule
+from galaxy.workflow.steps import attach_ordered_steps
log = logging.getLogger( __name__ )
@@ -1676,7 +1677,6 @@
"""
Creates a workflow from a dict. Created workflow is stored in the database and returned.
"""
- from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
# Put parameters in workflow mode
trans.workflow_building_mode = True
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -11,8 +11,10 @@
from galaxy import web
from galaxy.web import _future_expose_api as expose_api
from galaxy.web.base.controller import BaseAPIController, url_for, UsesStoredWorkflowMixin
+from galaxy.web.base.controller import UsesHistoryMixin
from galaxy.workflow.modules import module_factory
from galaxy.workflow.run import invoke
+from galaxy.workflow.extract import extract_workflow
log = logging.getLogger(__name__)
@@ -51,7 +53,7 @@
step.state.inputs.update(param_dict)
-class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin):
+class WorkflowsAPIController(BaseAPIController, UsesStoredWorkflowMixin, UsesHistoryMixin):
@web.expose_api
def index(self, trans, **kwd):
@@ -141,9 +143,11 @@
"""
POST /api/workflows
- We're not creating workflows from the api. Just execute for now.
+ Run or create workflows from the api.
- However, we will import them if installed_repository_file is specified.
+ If installed_repository_file or from_history_id is specified a new
+ workflow will be created for this user. Otherwise, workflow_id must be
+ specified and this API method will cause a workflow to execute.
:param installed_repository_file The path of a workflow to import. Either workflow_id or installed_repository_file must be specified
:type installed_repository_file str
@@ -165,6 +169,15 @@
:param replacement_params: A dictionary used when renaming datasets
:type replacement_params: dict
+
+ :param from_history_id: Id of history to extract a workflow from. Should not be used with workflow_id or installed_repository_file.
+ :type from_history_id: str
+
+ :param job_ids: If from_history_id is set - this should be a list of jobs to include when extracting workflow from history.
+ :type job_ids: str
+
+ :param dataset_ids: If from_history_id is set - this should be a list of HDA ids corresponding to workflow inputs when extracting workflow from history.
+ :type dataset_ids: str
"""
# Pull parameters out of payload.
@@ -183,6 +196,24 @@
cntrller='api',
**payload)
return result
+ if 'from_history_id' in payload:
+ from_history_id = payload.get( 'from_history_id' )
+ history = self.get_history( trans, from_history_id, check_ownership=False, check_accessible=True )
+ job_ids = map( trans.security.decode_id, payload.get( "job_ids", [] ) )
+ dataset_ids = map( trans.security.decode_id, payload.get( "dataset_ids", [] ) )
+ workflow_name = payload[ "workflow_name" ]
+ stored_workflow = extract_workflow(
+ trans=trans,
+ user=trans.get_user(),
+ history=history,
+ job_ids=job_ids,
+ dataset_ids=dataset_ids,
+ workflow_name=workflow_name,
+ )
+ item = stored_workflow.to_dict( value_mapper={ "id": trans.security.encode_id } )
+ item[ 'url' ] = url_for( 'workflow', id=item[ "id" ] )
+ return item
+
trans.response.status = 403
return "Either workflow_id or installed_repository_file must be specified"
if 'installed_repository_file' in payload:
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -201,9 +201,13 @@
repository_id = kwd.get( 'id', None )
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
+ params = '?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( web.url_for( '/', qualified=True ),
+ str( repository.name ),
+ str( repository.owner ),
+ str( repository.changeset_revision ) )
url = suc.url_join( tool_shed_url,
- 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( web.url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
+ 'repository/check_for_updates%s' % params )
return trans.response.send_redirect( url )
@web.expose
@@ -367,9 +371,10 @@
@web.require_admin
def get_tool_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
"""
- Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for the received repository name,
- owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We need
- it so that we can derive the tool shed from which it was installed.
+ Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for
+ the received repository name, owner and changeset revision. The received repository_id is the encoded id
+ of the installed tool shed repository in Galaxy. We need it so that we can derive the tool shed from which
+ it was installed.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
@@ -388,8 +393,8 @@
@web.require_admin
def get_updated_repository_information( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
"""
- Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall an updated revision of an
- uninstalled tool shed repository.
+ Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall
+ an updated revision of an uninstalled tool shed repository.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
@@ -436,13 +441,16 @@
@web.expose
@web.require_admin
- def initiate_tool_dependency_installation( self, trans, tool_dependencies ):
+ def initiate_tool_dependency_installation( self, trans, tool_dependencies, **kwd ):
"""Install specified dependencies for repository tools."""
# Get the tool_shed_repository from one of the tool_dependencies.
- message = ''
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ err_msg = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = suc.get_config_from_disk( suc.TOOL_DEPENDENCY_DEFINITION_FILENAME, tool_shed_repository.repo_path( trans.app ) )
+ tool_dependencies_config = suc.get_config_from_disk( suc.TOOL_DEPENDENCY_DEFINITION_FILENAME,
+ tool_shed_repository.repo_path( trans.app ) )
installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -452,13 +460,12 @@
if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR:
text = util.unicodify( installed_tool_dependency.error_message )
if text is not None:
- message += ' %s' % text
+ err_msg += ' %s' % text
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
- if message:
+ if err_msg:
+ message += err_msg
status = 'error'
- else:
- message = "Installed tool dependencies: %s" % ','.join( td.name for td in installed_tool_dependencies )
- status = 'done'
+ message += "Installed tool dependencies: %s" % ','.join( td.name for td in installed_tool_dependencies )
td_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.tool_dependencies ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_tool_dependencies',
@@ -506,9 +513,10 @@
status = 'error'
else:
# Install the latest downloadable revision of the repository.
+ params = '?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
+ ( name, owner, str( latest_downloadable_revision ), web.url_for( '/', qualified=True ) )
url = suc.url_join( tool_shed_url,
- 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
- ( name, owner, latest_downloadable_revision, web.url_for( '/', qualified=True ) ) )
+ 'repository/install_repositories_by_revision%s' % params )
return trans.response.send_redirect( url )
else:
message = 'Cannot locate installed tool shed repository with encoded id <b>%s</b>.' % str( repository_id )
@@ -521,43 +529,74 @@
message=message,
status=status ) )
-
@web.expose
@web.require_admin
- def install_tool_dependencies( self, trans, **kwd ):
+ def install_tool_dependencies_with_update( self, trans, **kwd ):
+ """
+ Updating an installed tool shed repository where new tool dependencies but no new repository
+ dependencies are included in the updated revision.
+ """
+ updating_repository_id = kwd.get( 'updating_repository_id', None )
+ repository = suc.get_installed_tool_shed_repository( trans, updating_repository_id )
+ # All received dependencies need to be installed - confirmed by the caller.
+ encoded_tool_dependencies_dict = kwd.get( 'encoded_tool_dependencies_dict', None )
+ if encoded_tool_dependencies_dict is not None:
+ tool_dependencies_dict = encoding_util.tool_shed_decode( encoded_tool_dependencies_dict )
+ else:
+ tool_dependencies_dict = {}
+ encoded_relative_install_dir = kwd.get( 'encoded_relative_install_dir', None )
+ if encoded_relative_install_dir is not None:
+ relative_install_dir = encoding_util.tool_shed_decode( encoded_relative_install_dir )
+ else:
+ relative_install_dir = ''
+ updating_to_changeset_revision = kwd.get( 'updating_to_changeset_revision', None )
+ updating_to_ctx_rev = kwd.get( 'updating_to_ctx_rev', None )
+ encoded_updated_metadata = kwd.get( 'encoded_updated_metadata', None )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
- tool_dependencies = []
- for tool_dependency_id in tool_dependency_ids:
- tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
- tool_dependencies.append( tool_dependency )
- if kwd.get( 'install_tool_dependencies_button', False ):
- # Filter tool dependencies to only those that are installed.
- tool_dependencies_for_installation = []
- for tool_dependency in tool_dependencies:
- if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.UNINSTALLED,
- trans.install_model.ToolDependency.installation_status.ERROR ]:
- tool_dependencies_for_installation.append( tool_dependency )
- if tool_dependencies_for_installation:
- # Redirect back to the ToolDependencyGrid before initiating installation.
- encoded_tool_dependency_for_installation_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies_for_installation ]
- new_kwd = dict( action='manage_tool_dependencies',
- operation='initiate_tool_dependency_installation',
- tool_dependency_ids=encoded_tool_dependency_for_installation_ids,
- message=message,
- status=status )
- return self.tool_dependency_grid( trans, **new_kwd )
+ install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
+ if 'install_tool_dependencies_with_update_button' in kwd:
+ # Now that the user has chosen whether to install tool dependencies or not, we can
+ # update the repository record with the changes in the updated revision.
+ if encoded_updated_metadata:
+ updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata )
else:
- message = 'All of the selected tool dependencies are already installed.'
- status = 'error'
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='manage_repository_tool_dependencies',
- tool_dependency_ids=tool_dependency_ids,
- status=status,
- message=message ) )
- return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
- tool_dependencies=tool_dependencies,
+ updated_metadata = None
+ repository = repository_util.update_repository_record( trans,
+ repository=repository,
+ updated_metadata_dict=updated_metadata,
+ updated_changeset_revision=updating_to_changeset_revision,
+ updated_ctx_rev=updating_to_ctx_rev )
+ if install_tool_dependencies:
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
+ repository,
+ relative_install_dir,
+ set_status=False )
+ message = "The installed repository named '%s' has been updated to change set revision '%s'. " % \
+ ( str( repository.name ), updating_to_changeset_revision )
+ self.initiate_tool_dependency_installation( trans, tool_dependencies, message=message, status=status )
+ # Handle tool dependencies check box.
+ if trans.app.config.tool_dependency_dir is None:
+ if includes_tool_dependencies:
+ message = "Tool dependencies defined in this repository can be automatically installed if you set "
+ message += "the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file "
+ message += "(universe_wsgi.ini) and restart your Galaxy server."
+ status = "warning"
+ install_tool_dependencies_check_box_checked = False
+ else:
+ install_tool_dependencies_check_box_checked = True
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies',
+ checked=install_tool_dependencies_check_box_checked )
+ return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies_with_update.mako',
+ repository=repository,
+ updating_repository_id=updating_repository_id,
+ updating_to_ctx_rev=updating_to_ctx_rev,
+ updating_to_changeset_revision=updating_to_changeset_revision,
+ encoded_updated_metadata=encoded_updated_metadata,
+ encoded_relative_install_dir=encoded_relative_install_dir,
+ encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
+ install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+ tool_dependencies_dict=tool_dependencies_dict,
message=message,
status=status )
@@ -611,7 +650,8 @@
# TODO: I believe this block should be removed, but make sure..
repositories_for_uninstallation = []
for repository_id in tsridslist:
- repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( repository_id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .get( trans.security.decode_id( repository_id ) )
if repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED,
trans.install_model.ToolShedRepository.installation_status.ERROR ]:
repositories_for_uninstallation.append( repository )
@@ -632,28 +672,34 @@
filtered_repo_info_dicts = []
filtered_tool_panel_section_keys = []
repositories_for_installation = []
- # Some repositories may have repository dependencies that are required to be installed before the dependent repository, so we'll
- # order the list of tsr_ids to ensure all repositories install in the required order.
+ # Some repositories may have repository dependencies that are required to be installed before the
+ # dependent repository, so we'll order the list of tsr_ids to ensure all repositories install in the
+ # required order.
ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \
repository_util.order_components_for_installation( trans,
tsr_ids,
repo_info_dicts,
tool_panel_section_keys=tool_panel_section_keys )
for tsr_id in ordered_tsr_ids:
- repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .get( trans.security.decode_id( tsr_id ) )
if repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
- repo_info_dict, tool_panel_section_key = repository_util.get_repository_components_for_installation( tsr_id,
- ordered_tsr_ids,
- ordered_repo_info_dicts,
- ordered_tool_panel_section_keys )
+ repo_info_dict, tool_panel_section_key = \
+ repository_util.get_repository_components_for_installation( tsr_id,
+ ordered_tsr_ids,
+ ordered_repo_info_dicts,
+ ordered_tool_panel_section_keys )
filtered_repo_info_dicts.append( repo_info_dict )
filtered_tool_panel_section_keys.append( tool_panel_section_key )
if repositories_for_installation:
decoded_kwd[ 'repo_info_dicts' ] = filtered_repo_info_dicts
decoded_kwd[ 'tool_panel_section_keys' ] = filtered_tool_panel_section_keys
- self.install_tool_shed_repositories( trans, repositories_for_installation, reinstalling=reinstalling, **decoded_kwd )
+ self.install_tool_shed_repositories( trans,
+ repositories_for_installation,
+ reinstalling=reinstalling,
+ **decoded_kwd )
else:
kwd[ 'message' ] = 'All selected tool shed repositories are already installed.'
kwd[ 'status' ] = 'error'
@@ -772,7 +818,10 @@
trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
- self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
+ self.initiate_tool_dependency_installation( trans,
+ tool_dependencies_for_installation,
+ message=message,
+ status=status )
else:
message = 'All selected tool dependencies are already installed.'
status = 'error'
@@ -780,14 +829,16 @@
message = 'Set the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file (universe_wsgi.ini) '
message += ' and restart your Galaxy server to install tool dependencies.'
status = 'error'
- installed_tool_dependencies_select_field = suc.build_tool_dependencies_select_field( trans,
- tool_shed_repository=tool_shed_repository,
- name='inst_td_ids',
- uninstalled_only=False )
- uninstalled_tool_dependencies_select_field = suc.build_tool_dependencies_select_field( trans,
- tool_shed_repository=tool_shed_repository,
- name='uninstalled_tool_dependency_ids',
- uninstalled_only=True )
+ installed_tool_dependencies_select_field = \
+ suc.build_tool_dependencies_select_field( trans,
+ tool_shed_repository=tool_shed_repository,
+ name='inst_td_ids',
+ uninstalled_only=False )
+ uninstalled_tool_dependencies_select_field = \
+ suc.build_tool_dependencies_select_field( trans,
+ tool_shed_repository=tool_shed_repository,
+ name='uninstalled_tool_dependency_ids',
+ uninstalled_only=True )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository_tool_dependencies.mako',
repository=tool_shed_repository,
installed_tool_dependencies_select_field=installed_tool_dependencies_select_field,
@@ -836,7 +887,10 @@
trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
- self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
+ self.initiate_tool_dependency_installation( trans,
+ tool_dependencies_for_installation,
+ message=message,
+ status=status )
else:
kwd[ 'message' ] = 'All selected tool dependencies are already installed.'
kwd[ 'status' ] = 'error'
@@ -876,18 +930,21 @@
@web.require_admin
def prepare_for_install( self, trans, **kwd ):
if not suc.have_shed_tool_conf_for_install( trans ):
- message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one shed tool configuration file name with a '
- message += '<b><toolbox></b> tag that includes a <b>tool_path</b> attribute value which is a directory relative to the Galaxy installation '
- message += 'directory in order to automatically install tools from a Galaxy tool shed (e.g., the file name <b>shed_tool_conf.xml</b> whose '
- message += '<b><toolbox></b> tag is <b><toolbox tool_path="../shed_tools"></b>).<p/>See the '
- message += '<a href="http://wiki.g2.bx.psu.edu/InstallingRepositoriesToGalaxy" target="_blank">Installation of Galaxy tool shed repository tools '
- message += 'into a local Galaxy instance</a> section of the Galaxy tool shed wiki for all of the details.'
+ message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one '
+ message += 'shed tool configuration file name with a <b><toolbox></b> tag that includes a <b>tool_path</b> '
+ message += 'attribute value which is a directory relative to the Galaxy installation directory in order '
+ message += 'to automatically install tools from a Galaxy Tool Shed (e.g., the file name <b>shed_tool_conf.xml</b> '
+ message += 'whose <b><toolbox></b> tag is <b><toolbox tool_path="../shed_tools"></b>).<p/>See the '
+ message += '<a href="http://wiki.g2.bx.psu.edu/InstallingRepositoriesToGalaxy" target="_blank">Installation '
+ message += 'of Galaxy Tool Shed repository tools into a local Galaxy instance</a> section of the Galaxy Tool '
+ message += 'Shed wiki for all of the details.'
return trans.show_error_message( message )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
shed_tool_conf = kwd.get( 'shed_tool_conf', None )
tool_shed_url = kwd.get( 'tool_shed_url', None )
- # Handle repository dependencies, which do not include those that are required only for compiling a dependent repository's tool dependencies.
+ # Handle repository dependencies, which do not include those that are required only for compiling a dependent
+ # repository's tool dependencies.
has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) )
install_repository_dependencies = kwd.get( 'install_repository_dependencies', '' )
# Every repository will be installed into the same tool panel section or all will be installed outside of any sections.
@@ -900,13 +957,36 @@
includes_tools_for_display_in_tool_panel = util.string_as_bool( kwd.get( 'includes_tools_for_display_in_tool_panel', False ) )
includes_tool_dependencies = util.string_as_bool( kwd.get( 'includes_tool_dependencies', False ) )
install_tool_dependencies = kwd.get( 'install_tool_dependencies', '' )
+ # In addition to installing new repositories, this method is called when updating an installed repository
+ # to a new changeset_revision where the update includes newly defined repository dependencies.
+ updating = util.asbool( kwd.get( 'updating', False ) )
+ updating_repository_id = kwd.get( 'updating_repository_id', None )
+ updating_to_changeset_revision = kwd.get( 'updating_to_changeset_revision', None )
+ updating_to_ctx_rev = kwd.get( 'updating_to_ctx_rev', None )
+ encoded_updated_metadata = kwd.get( 'encoded_updated_metadata', None )
encoded_repo_info_dicts = kwd.get( 'encoded_repo_info_dicts', '' )
if encoded_repo_info_dicts:
encoded_repo_info_dicts = encoded_repo_info_dicts.split( encoding_util.encoding_sep )
if not encoded_repo_info_dicts:
- # The request originated in the tool shed via a tool search.
+ # The request originated in the tool shed via a tool search or from this controller's
+ # update_to_changeset_revision() method.
repository_ids = kwd.get( 'repository_ids', None )
- changeset_revisions = kwd.get( 'changeset_revisions', None )
+ if updating:
+ # We have updated an installed repository where the updates included newly defined repository
+ # and possibly tool dependencies. We will have arrived here only if the updates include newly
+ # defined repository dependencies. We're preparing to allow the user to elect to install these
+ # dependencies. At this point, the repository has been updated to the latest changeset revision,
+ # but the received repository id is from the Galaxy side (the caller is this controller's
+ # update_to_changeset_revision() method. We need to get the id of the same repository from the
+ # Tool Shed side.
+ repository = suc.get_tool_shed_repository_by_id( trans, updating_repository_id )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_repository_id?name=%s&owner=%s' % \
+ ( str( repository.name ), str( repository.owner ) ) )
+ repository_ids = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ changeset_revisions = updating_to_changeset_revision
+ else:
+ changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
url = suc.url_join( tool_shed_url,
'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
@@ -918,7 +998,8 @@
if not includes_tools:
includes_tools = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools', False ) )
if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
+ includes_tools_for_display_in_tool_panel = \
+ util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
if not has_repository_dependencies:
has_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'has_repository_dependencies', False ) )
if not includes_tool_dependencies:
@@ -927,6 +1008,19 @@
repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
if ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or \
( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ):
+ if updating:
+ encoded_updated_metadata_dict = kwd.get( 'encoded_updated_metadata_dict', None )
+ updated_changeset_revision = kwd.get( 'updated_changeset_revision', None )
+ updated_ctx_rev = kwd.get( 'updated_ctx_rev', None )
+ repository = suc.get_tool_shed_repository_by_id( trans, updating_repository_id )
+ decoded_updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata )
+ # Now that the user has decided whether they will handle dependencies, we can update
+ # the repository to the latest revision.
+ repository = repository_util.update_repository_record( trans,
+ repository=repository,
+ updated_metadata_dict=decoded_updated_metadata,
+ updated_changeset_revision=updating_to_changeset_revision,
+ updated_ctx_rev=updating_to_ctx_rev )
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
if includes_tool_dependencies:
install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
@@ -959,7 +1053,8 @@
tool_panel_section_keys=tool_panel_section_keys,
tool_path=tool_path,
tool_shed_url=tool_shed_url )
- encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = repository_util.initiate_repository_installation( trans, installation_dict )
+ encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
+ repository_util.initiate_repository_installation( trans, installation_dict )
return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
encoded_kwd=encoded_kwd,
query=query,
@@ -976,12 +1071,14 @@
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans )
if len( repo_info_dicts ) == 1:
- # If we're installing a single repository, see if it contains a readme or dependencies that we can display.
+ # If we're installing or updating a single repository, see if it contains a readme or
+ # dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
tool_shed_url,
repo_info_dict,
- includes_tool_dependencies )
+ includes_tool_dependencies,
+ updating=updating )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
if not has_repository_dependencies:
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
@@ -990,7 +1087,8 @@
if not includes_tools:
includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tools_for_display_in_tool_panel = \
+ dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
@@ -998,19 +1096,25 @@
name = dependencies_for_repository_dict.get( 'name', None )
repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None )
readme_files_dict = readme_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
- # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
- # and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
- # so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
- # dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since
- # each displayed repository dependency will display a status, whether installed or missing.
- containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ # We're handling 1 of 3 scenarios here: (1) we're installing a tool shed repository for the first time, so we've
+ # retrieved the list of installed and missing repository dependencies from the database (2) we're handling the
+ # scenario where an error occurred during the installation process, so we have a tool_shed_repository record in
+ # the database with associated repository dependency records. Since we have the repository dependencies in both
+ # of the above 2 cases, we'll merge the list of missing repository dependencies into the list of installed
+ # repository dependencies since each displayed repository dependency will display a status, whether installed or
+ # missing. The 3rd scenario is where we're updating an installed repository and the updates include newly
+ # defined repository (and possibly tool) dependencies. In this case, merging will result in newly defined
+ # dependencies to be lost. We pass the updating parameter to make sure merging occurs only when appropriate.
+ containers_dict = \
+ repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=updating )
else:
# We're installing a list of repositories, each of which may have tool dependencies or repository dependencies.
containers_dicts = []
@@ -1018,7 +1122,8 @@
dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
tool_shed_url,
repo_info_dict,
- includes_tool_dependencies )
+ includes_tool_dependencies,
+ updating=updating )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
if not has_repository_dependencies:
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
@@ -1027,34 +1132,39 @@
if not includes_tools:
includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tools_for_display_in_tool_panel = \
+ dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None )
name = dependencies_for_repository_dict.get( 'name', None )
repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None )
- containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=None,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ containers_dict = \
+ repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=None,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=updating )
containers_dicts.append( containers_dict )
# Merge all containers into a single container.
containers_dict = repository_util.merge_containers_dicts_for_new_install( containers_dicts )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
if includes_tool_dependencies:
- message = "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
- message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository."
+ message = "Tool dependencies defined in this repository can be automatically installed if you set "
+ message += "the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file "
+ message += "(universe_wsgi.ini) and restart your Galaxy server before installing the repository."
status = "warning"
install_tool_dependencies_check_box_checked = False
else:
install_tool_dependencies_check_box_checked = True
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=install_tool_dependencies_check_box_checked )
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies',
+ checked=install_tool_dependencies_check_box_checked )
# Handle repository dependencies check box.
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
encoded_repo_info_dicts = encoding_util.encoding_sep.join( encoded_repo_info_dicts )
@@ -1062,6 +1172,11 @@
if includes_tools_for_display_in_tool_panel:
return trans.fill_template( '/admin/tool_shed_repository/select_tool_panel_section.mako',
encoded_repo_info_dicts=encoded_repo_info_dicts,
+ updating=updating,
+ updating_repository_id=updating_repository_id,
+ updating_to_ctx_rev=updating_to_ctx_rev,
+ updating_to_changeset_revision=updating_to_changeset_revision,
+ encoded_updated_metadata=encoded_updated_metadata,
includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
includes_tool_dependencies=includes_tool_dependencies,
@@ -1077,10 +1192,16 @@
message=message,
status=status )
else:
- # If installing repositories that includes no tools and has no repository dependencies, display a page allowing the Galaxy administrator to
- # select a shed-related tool panel configuration file whose tool_path setting will be the location the repositories will be installed.
+ # If installing repositories that includes no tools and has no repository dependencies, display a page
+ # allowing the Galaxy administrator to select a shed-related tool panel configuration file whose tool_path
+ # setting will be the location the repositories will be installed.
return trans.fill_template( '/admin/tool_shed_repository/select_shed_tool_panel_config.mako',
encoded_repo_info_dicts=encoded_repo_info_dicts,
+ updating=updating,
+ updating_repository_id=updating_repository_id,
+ updating_to_ctx_rev=updating_to_ctx_rev,
+ updating_to_changeset_revision=updating_to_changeset_revision,
+ encoded_updated_metadata=encoded_updated_metadata,
includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
includes_tool_dependencies=includes_tool_dependencies,
@@ -1400,7 +1521,8 @@
dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
tool_shed_url,
repo_info_dict,
- includes_tool_dependencies )
+ includes_tool_dependencies,
+ updating=False )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False )
@@ -1449,14 +1571,16 @@
original_section_name = ''
tool_panel_section_select_field = None
shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans )
- containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ containers_dict = \
+ repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=False )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
@@ -1574,8 +1698,8 @@
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
"""
- Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed repository and update the
- metadata for the repository's revision in the Galaxy database.
+ Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed
+ repository and update the metadata for the repository's revision in the Galaxy database.
"""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
@@ -1720,29 +1844,17 @@
resetting_all_metadata_on_repository=False,
updating_installed_repository=True,
persist=True )
- repository.metadata = metadata_dict
- # Update the repository.changeset_revision column in the database.
- repository.changeset_revision = latest_changeset_revision
- repository.ctx_rev = latest_ctx_rev
- # Update the repository.tool_shed_status column in the database.
- tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( trans.app, repository )
- if tool_shed_status_dict:
- repository.tool_shed_status = tool_shed_status_dict
- else:
- repository.tool_shed_status = None
- trans.install_model.context.add( repository )
- trans.install_model.context.flush()
if 'tools' in metadata_dict:
tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
if tool_panel_dict is None:
tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
tool_util.add_to_tool_panel( app=trans.app,
- repository_name=repository.name,
+ repository_name=str( repository.name ),
repository_clone_url=repository_clone_url,
- changeset_revision=repository.installed_changeset_revision,
+ changeset_revision=str( repository.installed_changeset_revision ),
repository_tools_tups=repository_tools_tups,
- owner=repository.owner,
+ owner=str( repository.owner ),
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=False )
@@ -1755,19 +1867,40 @@
os.path.join( relative_install_dir, name ),
repository,
repository_tools_tups )
- # Create tool_dependency records if necessary.
- if 'tool_dependencies' in metadata_dict:
- tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
- repository,
- relative_install_dir,
- set_status=False )
+ if 'repository_dependencies' in metadata_dict or 'tool_dependencies' in metadata_dict:
+ if 'repository_dependencies' in metadata_dict:
+ # Updates received include newly defined repository dependencies, so allow the user
+                    # the option of installing them. We cannot update the repository with the changes
+ # until that happens, so we have to send them along.
+ new_kwd = dict( tool_shed_url=tool_shed_url,
+ updating_repository_id=trans.security.encode_id( repository.id ),
+ updating_to_ctx_rev=latest_ctx_rev,
+ updating_to_changeset_revision=latest_changeset_revision,
+ encoded_updated_metadata=encoding_util.tool_shed_encode( metadata_dict ),
+ updating=True )
+ return self.prepare_for_install( trans, **new_kwd )
+ # Updates received did not include any newly defined repository dependencies but did include
+ # newly defined tool dependencies.
+ encoded_tool_dependencies_dict = encoding_util.tool_shed_encode( metadata_dict.get( 'tool_dependencies', {} ) )
+ encoded_relative_install_dir = encoding_util.tool_shed_encode( relative_install_dir )
+ new_kwd = dict( updating_repository_id=trans.security.encode_id( repository.id ),
+ updating_to_ctx_rev=latest_ctx_rev,
+ updating_to_changeset_revision=latest_changeset_revision,
+ encoded_updated_metadata=encoding_util.tool_shed_encode( metadata_dict ),
+ encoded_relative_install_dir=encoded_relative_install_dir,
+ encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
+ message=message,
+ status = status )
+ return self.install_tool_dependencies_with_update( trans, **new_kwd )
+ # Updates received did not include any newly defined repository dependencies or newly defined
+ # tool dependencies.
+ repository = repository_util.update_repository_record( trans,
+ repository=repository,
+ updated_metadata_dict=metadata_dict,
+ updated_changeset_revision=latest_changeset_revision,
+ updated_ctx_rev=latest_ctx_rev )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % \
( name, latest_changeset_revision )
- # See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = \
- suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- if repository.missing_tool_dependencies:
- message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
message = "The directory containing the installed repository named '%s' cannot be found. " % name
status = 'error'
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -4,7 +4,6 @@
import base64
import httplib
import json
-import math
import os
import sgmllib
import svgfig
@@ -20,12 +19,9 @@
from galaxy.datatypes.data import Data
from galaxy.model.item_attrs import UsesItemRatings
from galaxy.model.mapping import desc
+from galaxy.tools.parameters.basic import DataToolParameter
from galaxy.tools.parameters import visit_input_values
-from galaxy.tools.parameters.basic import DataToolParameter, DrillDownSelectToolParameter, SelectToolParameter, UnvalidatedValue
-from galaxy.tools.parameters.grouping import Conditional, Repeat
-from galaxy.util.odict import odict
from galaxy.util.sanitize_html import sanitize_html
-from galaxy.util.topsort import CycleError, topsort, topsort_levels
from galaxy.web import error, url_for
from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesStoredWorkflowMixin
from galaxy.web.framework import form
@@ -33,6 +29,14 @@
from galaxy.web.framework.helpers import to_unicode
from galaxy.workflow.modules import module_factory
from galaxy.workflow.run import invoke
+from galaxy.workflow.extract import summarize
+from galaxy.workflow.extract import extract_workflow
+from galaxy.workflow.steps import (
+ attach_ordered_steps,
+ order_workflow_steps,
+ edgelist_for_workflow_steps,
+ order_workflow_steps_with_levels,
+)
class StoredWorkflowListGrid( grids.Grid ):
@@ -1188,7 +1192,7 @@
if not user:
return trans.show_error_message( "Must be logged in to create workflows" )
if ( job_ids is None and dataset_ids is None ) or workflow_name is None:
- jobs, warnings = get_job_dict( trans )
+ jobs, warnings = summarize( trans )
# Render
return trans.fill_template(
"workflow/build_from_current_history.mako",
@@ -1197,82 +1201,13 @@
history=history
)
else:
- # Ensure job_ids and dataset_ids are lists (possibly empty)
- if job_ids is None:
- job_ids = []
- elif type( job_ids ) is not list:
- job_ids = [ job_ids ]
- if dataset_ids is None:
- dataset_ids = []
- elif type( dataset_ids ) is not list:
- dataset_ids = [ dataset_ids ]
- # Convert both sets of ids to integers
- job_ids = [ int( id ) for id in job_ids ]
- dataset_ids = [ int( id ) for id in dataset_ids ]
- # Find each job, for security we (implicately) check that they are
- # associated witha job in the current history.
- jobs, warnings = get_job_dict( trans )
- jobs_by_id = dict( ( job.id, job ) for job in jobs.keys() )
- steps = []
- steps_by_job_id = {}
- hid_to_output_pair = {}
- # Input dataset steps
- for hid in dataset_ids:
- step = model.WorkflowStep()
- step.type = 'data_input'
- step.tool_inputs = dict( name="Input Dataset" )
- hid_to_output_pair[ hid ] = ( step, 'output' )
- steps.append( step )
- # Tool steps
- for job_id in job_ids:
- assert job_id in jobs_by_id, "Attempt to create workflow with job not connected to current history"
- job = jobs_by_id[ job_id ]
- tool = trans.app.toolbox.get_tool( job.tool_id )
- param_values = job.get_param_values( trans.app, ignore_errors=True ) # If a tool was updated and e.g. had a text value changed to an integer, we don't want a traceback here
- associations = cleanup_param_values( tool.inputs, param_values )
- step = model.WorkflowStep()
- step.type = 'tool'
- step.tool_id = job.tool_id
- step.tool_inputs = tool.params_to_strings( param_values, trans.app )
- # NOTE: We shouldn't need to do two passes here since only
- # an earlier job can be used as an input to a later
- # job.
- for other_hid, input_name in associations:
- if other_hid in hid_to_output_pair:
- other_step, other_name = hid_to_output_pair[ other_hid ]
- conn = model.WorkflowStepConnection()
- conn.input_step = step
- conn.input_name = input_name
- # Should always be connected to an earlier step
- conn.output_step = other_step
- conn.output_name = other_name
- steps.append( step )
- steps_by_job_id[ job_id ] = step
- # Store created dataset hids
- for assoc in job.output_datasets:
- hid_to_output_pair[ assoc.dataset.hid ] = ( step, assoc.name )
- # Workflow to populate
- workflow = model.Workflow()
- workflow.name = workflow_name
- # Order the steps if possible
- attach_ordered_steps( workflow, steps )
- # And let's try to set up some reasonable locations on the canvas
- # (these are pretty arbitrary values)
- levorder = order_workflow_steps_with_levels( steps )
- base_pos = 10
- for i, steps_at_level in enumerate( levorder ):
- for j, index in enumerate( steps_at_level ):
- step = steps[ index ]
- step.position = dict( top=( base_pos + 120 * j ),
- left=( base_pos + 220 * i ) )
- # Store it
- stored = model.StoredWorkflow()
- stored.user = user
- stored.name = workflow_name
- workflow.stored_workflow = stored
- stored.latest_workflow = workflow
- trans.sa_session.add( stored )
- trans.sa_session.flush()
+ extract_workflow(
+ trans,
+ user=user,
+ job_ids=job_ids,
+ dataset_ids=dataset_ids,
+ workflow_name=workflow_name
+ )
# Index page with message
return trans.show_message( "Workflow '%s' created from current history." % workflow_name )
## return trans.show_ok_message( "<p>Workflow '%s' created.</p><p><a target='_top' href='%s'>Click to load in workflow editor</a></p>"
@@ -1692,148 +1627,6 @@
## ---- Utility methods -------------------------------------------------------
-def attach_ordered_steps( workflow, steps ):
- ordered_steps = order_workflow_steps( steps )
- if ordered_steps:
- workflow.has_cycles = False
- for i, step in enumerate( ordered_steps ):
- step.order_index = i
- workflow.steps.append( step )
- else:
- workflow.has_cycles = True
- workflow.steps = steps
-
-
-def edgelist_for_workflow_steps( steps ):
- """
- Create a list of tuples representing edges between ``WorkflowSteps`` based
- on associated ``WorkflowStepConnection``s
- """
- edges = []
- steps_to_index = dict( ( step, i ) for i, step in enumerate( steps ) )
- for step in steps:
- edges.append( ( steps_to_index[step], steps_to_index[step] ) )
- for conn in step.input_connections:
- edges.append( ( steps_to_index[conn.output_step], steps_to_index[conn.input_step] ) )
- return edges
-
-
-def order_workflow_steps( steps ):
- """
- Perform topological sort of the steps, return ordered or None
- """
- position_data_available = True
- for step in steps:
- if not step.position or not 'left' in step.position or not 'top' in step.position:
- position_data_available = False
- if position_data_available:
- steps.sort(cmp=lambda s1, s2: cmp( math.sqrt(s1.position['left'] ** 2 + s1.position['top'] ** 2), math.sqrt(s2.position['left'] ** 2 + s2.position['top'] ** 2)))
- try:
- edges = edgelist_for_workflow_steps( steps )
- node_order = topsort( edges )
- return [ steps[i] for i in node_order ]
- except CycleError:
- return None
-
-
-def order_workflow_steps_with_levels( steps ):
- try:
- return topsort_levels( edgelist_for_workflow_steps( steps ) )
- except CycleError:
- return None
-
-
-class FakeJob( object ):
- """
- Fake job object for datasets that have no creating_job_associations,
- they will be treated as "input" datasets.
- """
- def __init__( self, dataset ):
- self.is_fake = True
- self.id = "fake_%s" % dataset.id
-
-
-def get_job_dict( trans ):
- """
- Return a dictionary of Job -> [ Dataset ] mappings, for all finished
- active Datasets in the current history and the jobs that created them.
- """
- history = trans.get_history()
- # Get the jobs that created the datasets
- warnings = set()
- jobs = odict()
- for dataset in history.active_datasets:
- # FIXME: Create "Dataset.is_finished"
- if dataset.state in ( 'new', 'running', 'queued' ):
- warnings.add( "Some datasets still queued or running were ignored" )
- continue
-
- #if this hda was copied from another, we need to find the job that created the origial hda
- job_hda = dataset
- while job_hda.copied_from_history_dataset_association:
- job_hda = job_hda.copied_from_history_dataset_association
-
- if not job_hda.creating_job_associations:
- jobs[ FakeJob( dataset ) ] = [ ( None, dataset ) ]
-
- for assoc in job_hda.creating_job_associations:
- job = assoc.job
- if job in jobs:
- jobs[ job ].append( ( assoc.name, dataset ) )
- else:
- jobs[ job ] = [ ( assoc.name, dataset ) ]
- return jobs, warnings
-
-
-def cleanup_param_values( inputs, values ):
- """
- Remove 'Data' values from `param_values`, along with metadata cruft,
- but track the associations.
- """
- associations = []
- # dbkey is pushed in by the framework
- if 'dbkey' in values:
- del values['dbkey']
- root_values = values
-
- # Recursively clean data inputs and dynamic selects
- def cleanup( prefix, inputs, values ):
- for key, input in inputs.items():
- if isinstance( input, ( SelectToolParameter, DrillDownSelectToolParameter ) ):
- if input.is_dynamic and not isinstance( values[key], UnvalidatedValue ):
- values[key] = UnvalidatedValue( values[key] )
- if isinstance( input, DataToolParameter ):
- tmp = values[key]
- values[key] = None
- # HACK: Nested associations are not yet working, but we
- # still need to clean them up so we can serialize
- # if not( prefix ):
- if tmp: # this is false for a non-set optional dataset
- if not isinstance(tmp, list):
- associations.append( ( tmp.hid, prefix + key ) )
- else:
- associations.extend( [ (t.hid, prefix + key) for t in tmp] )
-
- # Cleanup the other deprecated crap associated with datasets
- # as well. Worse, for nested datasets all the metadata is
- # being pushed into the root. FIXME: MUST REMOVE SOON
- key = prefix + key + "_"
- for k in root_values.keys():
- if k.startswith( key ):
- del root_values[k]
- elif isinstance( input, Repeat ):
- group_values = values[key]
- for i, rep_values in enumerate( group_values ):
- rep_index = rep_values['__index__']
- cleanup( "%s%s_%d|" % (prefix, key, rep_index ), input.inputs, group_values[i] )
- elif isinstance( input, Conditional ):
- group_values = values[input.name]
- current_case = group_values['__current_case__']
- cleanup( "%s%s|" % ( prefix, key ), input.cases[current_case].inputs, group_values )
- cleanup( "", inputs, values )
- return associations
-
-
def _build_workflow_on_str(instance_ds_names):
# Returns suffix for new histories based on multi input iteration
num_multi_inputs = len(instance_ds_names)
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -866,11 +866,13 @@
no_update = 'false'
elif galaxy_url:
# Start building up the url to redirect back to the calling Galaxy instance.
+ params = '?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( web.url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision )
url = suc.url_join( galaxy_url,
- 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( web.url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
+ 'admin_toolshed/update_to_changeset_revision%s' % params )
else:
- message = 'Unable to check for updates due to an invalid Galaxy URL: <b>%s</b>. You may need to enable third-party cookies in your browser. ' % galaxy_url
+ message = 'Unable to check for updates due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url
+ message += 'You may need to enable third-party cookies in your browser. '
return trans.show_error_message( message )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
@@ -883,15 +885,17 @@
trans.security.encode_id( repository.id ),
changeset_revision )
if repository_metadata:
- # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
+ # If changeset_revision is in the repository_metadata table for this repository, there are no
+ # additional updates.
if from_update_manager:
return no_update
else:
# Return the same value for changeset_revision and latest_changeset_revision.
url += latest_changeset_revision
else:
- # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
- # repository was installed. We need to find the changeset_revision to which we need to update.
+ # The changeset_revision column in the repository_metadata table has been updated with a new
+ # changeset_revision value since the repository was installed. We need to find the changeset_revision
+ # to which we need to update.
update_to_changeset_hash = None
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
@@ -1680,6 +1684,16 @@
return encoding_util.tool_shed_encode( repository_dependencies )
return ''
+ @web.expose
+ def get_repository_id( self, trans, **kwd ):
+ """Given a repository name and owner, return the encoded repository id."""
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+ if repository:
+ return trans.security.encode_id( repository.id )
+ return ''
+
@web.json
def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
"""
@@ -1824,7 +1838,9 @@
if not repository_metadata:
# The received changeset_revision is no longer associated with metadata, so get the next changeset_revision in the repository
# changelog that is associated with metadata.
- changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision=changeset_revision )
+ changeset_revision = suc.get_next_downloadable_changeset_revision( repository,
+ repo,
+ after_changeset_revision=changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
@@ -2039,11 +2055,12 @@
galaxy_url = suc.handle_galaxy_url( trans, **kwd )
if galaxy_url:
# Redirect back to local Galaxy to perform install.
+ params = '?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+ ( web.url_for( '/', qualified=True ),
+ ','.join( util.listify( repository_ids ) ),
+ ','.join( util.listify( changeset_revisions ) ) )
url = suc.url_join( galaxy_url,
- 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
- ( web.url_for( '/', qualified=True ),
- ','.join( util.listify( repository_ids ) ),
- ','.join( util.listify( changeset_revisions ) ) ) )
+ 'admin_toolshed/prepare_for_install%s' % params )
return trans.response.send_redirect( url )
else:
message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url
@@ -2477,9 +2494,10 @@
@web.expose
def next_installable_changeset_revision( self, trans, **kwd ):
"""
- Handle a request from a Galaxy instance where the changeset_revision defined for a repository in a dependency definition file is older
- than the changeset_revision associated with the installed repository. This will occur with repository's of type tool_dependency_definition,
- and this scenario will occur while repository dependency hierarchies are bing installed.
+ Handle a request from a Galaxy instance where the changeset_revision defined for a repository
+ in a dependency definition file is older than the changeset_revision associated with the installed
+        repository. This will occur with repositories of type tool_dependency_definition, and this scenario
+        will occur while repository dependency hierarchies are being installed.
"""
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
@@ -2981,12 +2999,13 @@
@web.expose
def updated_changeset_revisions( self, trans, **kwd ):
"""
- Handle a request from a local Galaxy instance to retrieve the list of changeset revisions to which an installed repository can be updated. This
- method will return a string of comma-separated changeset revision hashes for all available updates to the received changeset revision. Among
- other things , this method handles the scenario where an installed tool shed repository's tool_dependency definition file defines a changeset
- revision for a complex repository dependency that is outdated. In other words, a defined changeset revision is older than the current changeset
- revision for the required repository, making it impossible to discover the repository without knowledge of revisions to which it could have been
- updated.
+ Handle a request from a local Galaxy instance to retrieve the list of changeset revisions to which an
+ installed repository can be updated. This method will return a string of comma-separated changeset revision
+        hashes for all available updates to the received changeset revision. Among other things, this method
+ handles the scenario where an installed tool shed repository's tool_dependency definition file defines a
+ changeset revision for a complex repository dependency that is outdated. In other words, a defined changeset
+ revision is older than the current changeset revision for the required repository, making it impossible to
+ discover the repository without knowledge of revisions to which it could have been updated.
"""
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/workflow/extract.py
--- /dev/null
+++ b/lib/galaxy/workflow/extract.py
@@ -0,0 +1,209 @@
+""" This module contains functionality to aid in extracting workflows from
+histories.
+"""
+from galaxy.util.odict import odict
+from galaxy import model
+from galaxy.tools.parameters.basic import (
+ DataToolParameter,
+ DrillDownSelectToolParameter,
+ SelectToolParameter,
+ UnvalidatedValue
+)
+from galaxy.tools.parameters.grouping import (
+ Conditional,
+ Repeat
+)
+from .steps import (
+ attach_ordered_steps,
+ order_workflow_steps_with_levels
+)
+
+WARNING_SOME_DATASETS_NOT_READY = "Some datasets still queued or running were ignored"
+
+
+def extract_workflow( trans, user, history=None, job_ids=None, dataset_ids=None, workflow_name=None ):
+ steps = extract_steps( trans, history=history, job_ids=job_ids, dataset_ids=dataset_ids )
+ # Workflow to populate
+ workflow = model.Workflow()
+ workflow.name = workflow_name
+ # Order the steps if possible
+ attach_ordered_steps( workflow, steps )
+ # And let's try to set up some reasonable locations on the canvas
+ # (these are pretty arbitrary values)
+ levorder = order_workflow_steps_with_levels( steps )
+ base_pos = 10
+ for i, steps_at_level in enumerate( levorder ):
+ for j, index in enumerate( steps_at_level ):
+ step = steps[ index ]
+ step.position = dict( top=( base_pos + 120 * j ),
+ left=( base_pos + 220 * i ) )
+ # Store it
+ stored = model.StoredWorkflow()
+ stored.user = user
+ stored.name = workflow_name
+ workflow.stored_workflow = stored
+ stored.latest_workflow = workflow
+ trans.sa_session.add( stored )
+ trans.sa_session.flush()
+ return stored
+
+
+def extract_steps( trans, history=None, job_ids=None, dataset_ids=None ):
+ # Ensure job_ids and dataset_ids are lists (possibly empty)
+ if job_ids is None:
+ job_ids = []
+ elif type( job_ids ) is not list:
+ job_ids = [ job_ids ]
+ if dataset_ids is None:
+ dataset_ids = []
+ elif type( dataset_ids ) is not list:
+ dataset_ids = [ dataset_ids ]
+ # Convert both sets of ids to integers
+ job_ids = [ int( id ) for id in job_ids ]
+ dataset_ids = [ int( id ) for id in dataset_ids ]
+    # Find each job, for security we (implicitly) check that they are
+    # associated with a job in the current history.
+ jobs, warnings = summarize( trans, history=history )
+ jobs_by_id = dict( ( job.id, job ) for job in jobs.keys() )
+ steps = []
+ steps_by_job_id = {}
+ hid_to_output_pair = {}
+ # Input dataset steps
+ for hid in dataset_ids:
+ step = model.WorkflowStep()
+ step.type = 'data_input'
+ step.tool_inputs = dict( name="Input Dataset" )
+ hid_to_output_pair[ hid ] = ( step, 'output' )
+ steps.append( step )
+ # Tool steps
+ for job_id in job_ids:
+ assert job_id in jobs_by_id, "Attempt to create workflow with job not connected to current history"
+ job = jobs_by_id[ job_id ]
+ tool_inputs, associations = step_inputs( trans, job )
+ step = model.WorkflowStep()
+ step.type = 'tool'
+ step.tool_id = job.tool_id
+ step.tool_inputs = tool_inputs
+ # NOTE: We shouldn't need to do two passes here since only
+ # an earlier job can be used as an input to a later
+ # job.
+ for other_hid, input_name in associations:
+ if other_hid in hid_to_output_pair:
+ other_step, other_name = hid_to_output_pair[ other_hid ]
+ conn = model.WorkflowStepConnection()
+ conn.input_step = step
+ conn.input_name = input_name
+ # Should always be connected to an earlier step
+ conn.output_step = other_step
+ conn.output_name = other_name
+ steps.append( step )
+ steps_by_job_id[ job_id ] = step
+ # Store created dataset hids
+ for assoc in job.output_datasets:
+ hid_to_output_pair[ assoc.dataset.hid ] = ( step, assoc.name )
+ return steps
+
+
+class FakeJob( object ):
+ """
+ Fake job object for datasets that have no creating_job_associations,
+ they will be treated as "input" datasets.
+ """
+ def __init__( self, dataset ):
+ self.is_fake = True
+ self.id = "fake_%s" % dataset.id
+
+
+def summarize( trans, history=None ):
+ """ Return mapping of job description to datasets for active items in
+ supplied history - needed for building workflow from a history.
+
+    Formerly called get_job_dict in the workflow web controller.
+ """
+ if not history:
+ history = trans.get_history()
+
+ # Get the jobs that created the datasets
+ warnings = set()
+ jobs = odict()
+ for dataset in history.active_datasets:
+ # FIXME: Create "Dataset.is_finished"
+ if dataset.state in ( 'new', 'running', 'queued' ):
+ warnings.add( WARNING_SOME_DATASETS_NOT_READY )
+ continue
+
+        # if this hda was copied from another, we need to find the job that created the original hda
+ job_hda = dataset
+ while job_hda.copied_from_history_dataset_association:
+ job_hda = job_hda.copied_from_history_dataset_association
+
+ if not job_hda.creating_job_associations:
+ jobs[ FakeJob( dataset ) ] = [ ( None, dataset ) ]
+
+ for assoc in job_hda.creating_job_associations:
+ job = assoc.job
+ if job in jobs:
+ jobs[ job ].append( ( assoc.name, dataset ) )
+ else:
+ jobs[ job ] = [ ( assoc.name, dataset ) ]
+
+ return jobs, warnings
+
+
+def step_inputs( trans, job ):
+ tool = trans.app.toolbox.get_tool( job.tool_id )
+ param_values = job.get_param_values( trans.app, ignore_errors=True ) # If a tool was updated and e.g. had a text value changed to an integer, we don't want a traceback here
+ associations = __cleanup_param_values( tool.inputs, param_values )
+ tool_inputs = tool.params_to_strings( param_values, trans.app )
+ return tool_inputs, associations
+
+
+def __cleanup_param_values( inputs, values ):
+ """
+ Remove 'Data' values from `param_values`, along with metadata cruft,
+ but track the associations.
+ """
+ associations = []
+ # dbkey is pushed in by the framework
+ if 'dbkey' in values:
+ del values['dbkey']
+ root_values = values
+
+ # Recursively clean data inputs and dynamic selects
+ def cleanup( prefix, inputs, values ):
+ for key, input in inputs.items():
+ if isinstance( input, ( SelectToolParameter, DrillDownSelectToolParameter ) ):
+ if input.is_dynamic and not isinstance( values[key], UnvalidatedValue ):
+ values[key] = UnvalidatedValue( values[key] )
+ if isinstance( input, DataToolParameter ):
+ tmp = values[key]
+ values[key] = None
+ # HACK: Nested associations are not yet working, but we
+ # still need to clean them up so we can serialize
+ # if not( prefix ):
+ if tmp: # this is false for a non-set optional dataset
+ if not isinstance(tmp, list):
+ associations.append( ( tmp.hid, prefix + key ) )
+ else:
+ associations.extend( [ (t.hid, prefix + key) for t in tmp] )
+
+ # Cleanup the other deprecated crap associated with datasets
+ # as well. Worse, for nested datasets all the metadata is
+ # being pushed into the root. FIXME: MUST REMOVE SOON
+ key = prefix + key + "_"
+ for k in root_values.keys():
+ if k.startswith( key ):
+ del root_values[k]
+ elif isinstance( input, Repeat ):
+ group_values = values[key]
+ for i, rep_values in enumerate( group_values ):
+ rep_index = rep_values['__index__']
+ cleanup( "%s%s_%d|" % (prefix, key, rep_index ), input.inputs, group_values[i] )
+ elif isinstance( input, Conditional ):
+ group_values = values[input.name]
+ current_case = group_values['__current_case__']
+ cleanup( "%s%s|" % ( prefix, key ), input.cases[current_case].inputs, group_values )
+ cleanup( "", inputs, values )
+ return associations
+
+__all__ = [ summarize, extract_workflow ]
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/galaxy/workflow/steps.py
--- /dev/null
+++ b/lib/galaxy/workflow/steps.py
@@ -0,0 +1,63 @@
+""" This module contains utility methods for reasoning about and ordering
+workflow steps.
+"""
+import math
+from galaxy.util.topsort import (
+ CycleError,
+ topsort,
+ topsort_levels
+)
+
+
+def attach_ordered_steps( workflow, steps ):
+ """ Attempt to topologically order steps and attach to workflow. If this
+    fails - the workflow contains cycles, so mark it as such.
+ """
+ ordered_steps = order_workflow_steps( steps )
+ if ordered_steps:
+ workflow.has_cycles = False
+ for i, step in enumerate( ordered_steps ):
+ step.order_index = i
+ workflow.steps.append( step )
+ else:
+ workflow.has_cycles = True
+ workflow.steps = steps
+
+
+def order_workflow_steps( steps ):
+ """
+ Perform topological sort of the steps, return ordered or None
+ """
+ position_data_available = True
+ for step in steps:
+ if not step.position or not 'left' in step.position or not 'top' in step.position:
+ position_data_available = False
+ if position_data_available:
+ steps.sort(cmp=lambda s1, s2: cmp( math.sqrt(s1.position['left'] ** 2 + s1.position['top'] ** 2), math.sqrt(s2.position['left'] ** 2 + s2.position['top'] ** 2)))
+ try:
+ edges = edgelist_for_workflow_steps( steps )
+ node_order = topsort( edges )
+ return [ steps[i] for i in node_order ]
+ except CycleError:
+ return None
+
+
+def edgelist_for_workflow_steps( steps ):
+ """
+ Create a list of tuples representing edges between ``WorkflowSteps`` based
+ on associated ``WorkflowStepConnection``s
+ """
+ edges = []
+ steps_to_index = dict( ( step, i ) for i, step in enumerate( steps ) )
+ for step in steps:
+ edges.append( ( steps_to_index[step], steps_to_index[step] ) )
+ for conn in step.input_connections:
+ edges.append( ( steps_to_index[conn.output_step], steps_to_index[conn.input_step] ) )
+ return edges
+
+
+def order_workflow_steps_with_levels( steps ):
+ try:
+ return topsort_levels( edgelist_for_workflow_steps( steps ) )
+ except CycleError:
+ return None
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -37,15 +37,19 @@
Galaxy instance. The dictionary will also contain the recursive list of repository dependencies defined
for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy under three scenarios:
+ This method is called from Galaxy under four scenarios:
1. During the tool shed repository installation process via the tool shed's get_repository_information()
- method. In this case both the received repository and repository_metadata will be objects., but
+ method. In this case both the received repository and repository_metadata will be objects, but
tool_dependencies and repository_dependencies will be None.
- 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
+        2. When getting updates for an installed repository where the updates include newly defined repository
+ dependency definitions. This scenario is similar to 1. above. The tool shed's get_repository_information()
+ method is the caller, and both the received repository and repository_metadata will be objects, but
+ tool_dependencies and repository_dependencies will be None.
+ 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
updates available. In this case, both repository and repository_metadata will be None, but tool_dependencies
and repository_dependencies will be objects previously retrieved from the tool shed if the repository includes
definitions for them.
- 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
+ 4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
available. In this case, this method is reached via the tool shed's get_updated_repository_information()
method, and both repository and repository_metadata will be objects but tool_dependencies and
repository_dependencies will be None.
@@ -184,16 +188,22 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ repository_id,
+ changeset_revision )
if not repository_metadata:
- # The received changeset_revision is no longer installable, so get the next changeset_revision in the repository's changelog.
- # This generally occurs only with repositories of type tool_dependency_definition.
- next_downloadable_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+ # The received changeset_revision is no longer installable, so get the next changeset_revision
+ # in the repository's changelog. This generally occurs only with repositories of type
+ # tool_dependency_definition.
+ next_downloadable_changeset_revision = \
+ suc.get_next_downloadable_changeset_revision( repository,repo, changeset_revision )
if next_downloadable_changeset_revision:
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, next_downloadable_changeset_revision )
+ repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans, repository_id, next_downloadable_changeset_revision )
if repository_metadata:
- # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption is not valid we'll have to
- # enhance the callers to handle repository_metadata values of None in the returned repo_info_dict.
+ # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption
+ # is not valid we'll have to enhance the callers to handle repository_metadata values of None in the
+ # returned repo_info_dict.
metadata = repository_metadata.metadata
if 'tools' in metadata:
includes_tools = True
@@ -743,9 +753,13 @@
ordered_tool_panel_section_keys.append( tool_panel_section_key )
return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys
-def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, missing_repository_dependencies,
- installed_tool_dependencies, missing_tool_dependencies ):
- """Return the populated containers for a repository being installed for the first time."""
+def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies,
+ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies,
+ updating=False ):
+ """
+ Return the populated containers for a repository being installed for the first time or for an installed repository
+ that is being updated and the updates include newly defined repository (and possibly tool) dependencies.
+ """
installed_tool_dependencies, missing_tool_dependencies = \
tool_dependency_util.populate_tool_dependencies_dicts( trans=trans,
tool_shed_url=tool_shed_url,
@@ -753,27 +767,32 @@
repository_installed_tool_dependencies=installed_tool_dependencies,
repository_missing_tool_dependencies=missing_tool_dependencies,
required_repo_info_dicts=None )
- # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
- containers_dict = container_util.build_repository_containers_for_galaxy( trans=trans,
- repository=None,
- datatypes=None,
- invalid_tools=None,
- missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=installed_repository_dependencies,
- tool_dependencies=installed_tool_dependencies,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None,
- invalid_data_managers=None,
- data_managers_errors=None,
- new_install=True,
- reinstalling=False )
- # Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container.
- containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
- # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
- containers_dict = tool_dependency_util.merge_missing_tool_dependencies_to_installed_container( containers_dict )
+ # Most of the repository contents are set to None since we don't yet know what they are.
+ containers_dict = \
+ container_util.build_repository_containers_for_galaxy( trans=trans,
+ repository=None,
+ datatypes=None,
+ invalid_tools=None,
+ missing_repository_dependencies=missing_repository_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None,
+ data_managers_errors=None,
+ new_install=True,
+ reinstalling=False )
+ if not updating:
+        # If we are installing a new repository and not updating an installed repository, we can merge
+ # the missing_repository_dependencies container contents to the installed_repository_dependencies
+ # container. When updating an installed repository, merging will result in losing newly defined
+ # dependencies included in the updates.
+ containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+ # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
+ containers_dict = tool_dependency_util.merge_missing_tool_dependencies_to_installed_container( containers_dict )
return containers_dict
def pull_repository( repo, repository_clone_url, ctx_rev ):
@@ -875,3 +894,23 @@
repository.uninstalled = uninstalled
trans.install_model.context.add( repository )
trans.install_model.context.flush()
+
+def update_repository_record( trans, repository, updated_metadata_dict, updated_changeset_revision, updated_ctx_rev ):
+ """
+ Update a tool_shed_repository database record with new information retrieved from the
+ Tool Shed. This happens when updating an installed repository to a new changeset revision.
+ """
+ repository.metadata = updated_metadata_dict
+ # Update the repository.changeset_revision column in the database.
+ repository.changeset_revision = updated_changeset_revision
+ repository.ctx_rev = updated_ctx_rev
+ # Update the repository.tool_shed_status column in the database.
+ tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( trans.app, repository )
+ if tool_shed_status_dict:
+ repository.tool_shed_status = tool_shed_status_dict
+ else:
+ repository.tool_shed_status = None
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
+ trans.install_model.context.refresh( repository )
+ return repository
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -359,13 +359,14 @@
can_install_tool_dependency = True
if can_install_tool_dependency:
package_install_version = package_elem.get( 'version', '1.0' )
- tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=app.install_model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ tool_dependency = \
+ tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.install_model.ToolDependency.installation_status.INSTALLING,
+ set_status=True )
# Get the information about the current platform in case the tool dependency definition includes tag sets
# for installing compiled binaries.
platform_info_dict = tool_dependency_util.get_platform_info_dict()
@@ -879,25 +880,28 @@
# Tool dependencies of type "set_environment" always have the version attribute set to None.
attr_tup = ( env_var_name, None, 'set_environment' )
if attr_tup in attr_tups_of_dependencies_for_install:
- install_dir = tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dependency_type='set_environment',
- tool_dependency_name=env_var_name,
- tool_dependency_version=None )
+ install_dir = \
+ tool_dependency_util.get_tool_dependency_install_dir( app=app,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dependency_type='set_environment',
+ tool_dependency_name=env_var_name,
+ tool_dependency_version=None )
tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository )
env_var_dict = td_common_util.create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir )
if env_var_dict:
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
- tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=env_var_name,
- version=None,
- type='set_environment',
- status=app.install_model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ status = app.install_model.ToolDependency.installation_status.INSTALLING
+ tool_dependency = \
+ tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=env_var_name,
+ version=None,
+ type='set_environment',
+ status=status,
+ set_status=True )
if env_var_version == '1.0':
# Create this tool dependency's env.sh file.
env_file_builder = fabric_util.EnvFileBuilder( install_dir )
@@ -906,28 +910,34 @@
error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \
( str( tool_dependency.name ), str( return_code ) )
log.debug( error_message )
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.ERROR,
- error_message=error_message,
- remove_from_disk=False )
+ status = app.install_model.ToolDependency.installation_status.ERROR
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=error_message,
+ remove_from_disk=False )
else:
if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.ERROR,
app.install_model.ToolDependency.installation_status.INSTALLED ]:
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.INSTALLED,
- error_message=None,
- remove_from_disk=False )
+ status = app.install_model.ToolDependency.installation_status.INSTALLED
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=None,
+ remove_from_disk=False )
log.debug( 'Environment variable %s set in %s for tool dependency %s.' % \
( str( env_var_name ), str( install_dir ), str( tool_dependency.name ) ) )
else:
error_message = 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).'
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.ERROR,
- error_message=error_message,
- remove_from_disk=False )
+ status = app.install_model.ToolDependency.installation_status.ERROR
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=error_message,
+ remove_from_disk=False )
return tool_dependency
def strip_path( fpath ):
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -70,7 +70,7 @@
if display_path is not None:
datatype_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
-def get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ):
+def get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ):
"""
Return dictionaries containing the sets of installed and missing tool dependencies and repository
dependencies associated with the repository defined by the received repo_info_dict.
@@ -90,7 +90,7 @@
# Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies.
# We don't add to installed_td and missing_td here because at this point they are empty.
installed_td, missing_td = \
- get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, tool_dependencies )
+ get_installed_and_missing_tool_dependencies_for_repository( trans, tool_shed_url, tool_dependencies )
# In cases where a repository dependency is required only for compiling a dependent repository's
# tool dependency, the value of repository_dependencies will be an empty dictionary here.
if repository_dependencies:
@@ -101,10 +101,11 @@
name,
repository_owner,
changeset_revision )
- if repository and repository.metadata:
+ if not updating and repository and repository.metadata:
installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository )
else:
- installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple )
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
all_repo_info_dict = get_required_repo_info_dicts( trans, tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
@@ -119,7 +120,8 @@
required_tool_dependencies = {}
for rid in required_repo_info_dicts:
for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
+ description, repository_clone_url, changeset_revision, ctx_rev, \
+ repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
suc.get_repo_info_tuple_contents( repo_info_tuple )
if rid_tool_dependencies:
for td_key, td_dict in rid_tool_dependencies.items():
@@ -128,7 +130,9 @@
if required_tool_dependencies:
# Discover and categorize all tool dependencies defined for this repository's repository dependencies.
required_installed_td, required_missing_td = \
- get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, required_tool_dependencies )
+ get_installed_and_missing_tool_dependencies_for_repository( trans,
+ tool_shed_url,
+ required_tool_dependencies )
if required_installed_td:
if not includes_tool_dependencies:
includes_tool_dependencies = True
@@ -168,11 +172,12 @@
def get_installed_and_missing_repository_dependencies( trans, repository ):
"""
- Return the installed and missing repository dependencies for a tool shed repository that has a record in the Galaxy database, but
- may or may not be installed. In this case, the repository dependencies are associated with the repository in the database. Do not
- include a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since these
- special kinds of repository dependencies are really a dependency of the dependent repository's contained tool dependency, and only if
- that tool dependency requires compilation.
+ Return the installed and missing repository dependencies for a tool shed repository that has a record
+ in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are
+ associated with the repository in the database. Do not include a repository dependency if it is required
+ only to compile a tool dependency defined for the dependent repository since these special kinds of repository
+ dependencies are really a dependency of the dependent repository's contained tool dependency, and only
+ if that tool dependency requires compilation.
"""
missing_repository_dependencies = {}
installed_repository_dependencies = {}
@@ -228,7 +233,7 @@
missing_repository_dependencies[ 'description' ] = description
return installed_repository_dependencies, missing_repository_dependencies
-def get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple ):
+def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple ):
"""
Parse the received repository_dependencies dictionary that is associated with a repository being
installed into Galaxy for the first time and attempt to determine repository dependencies that are
@@ -264,13 +269,13 @@
tmp_repo_info_tuple )
if repository:
new_rd_tup = [ tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- repository.id,
- repository.status ]
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ repository.id,
+ repository.status ]
if repository.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
if new_rd_tup not in installed_rd_tups:
installed_rd_tups.append( new_rd_tup )
@@ -285,12 +290,12 @@
missing_rd_tups.append( new_rd_tup )
else:
new_rd_tup = [ tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- None,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ None,
'Never installed' ]
if not util.asbool( only_if_compiling_contained_td ):
# A repository dependency that is not installed will not be considered missing if it's value for
@@ -307,11 +312,17 @@
missing_repository_dependencies[ 'description' ] = description
return installed_repository_dependencies, missing_repository_dependencies
-def get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, tool_dependencies_dict ):
+def get_installed_and_missing_tool_dependencies_for_repository( trans, tool_shed_url, tool_dependencies_dict ):
"""
Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories
being installed into Galaxy.
"""
+ # FIXME: This implementation breaks when updates to a repository contain dependencies that result in
+ # multiple entries for a specific tool dependency. A scenario where this can happen is where 2 repositories
+ # define the same dependency internally (not using the complex repository dependency definition to a separate
+ # package repository approach). If 2 repositories contain the same tool_dependencies.xml file, one dependency
+ # will be lost since the values in these returned dictionaries are not lists. All tool dependency dictionaries
+ # should have lists as values. These scenarios are probably extreme corner cases, but still should be handled.
installed_tool_dependencies = {}
missing_tool_dependencies = {}
if tool_dependencies_dict:
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -821,8 +821,8 @@
tool_dependencies = metadata[ 'tool_dependencies' ]
if trans.webapp.name == 'tool_shed':
if 'orphan_tool_dependencies' in metadata:
- # The use of the orphan_tool_dependencies category in metadata has been deprecated, but we still need to check in case
- # the metadata is out of date.
+ # The use of the orphan_tool_dependencies category in metadata has been deprecated,
+ # but we still need to check in case the metadata is out of date.
orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
tool_dependencies.update( orphan_tool_dependencies )
# Tool dependencies can be categorized as orphans only if the repository contains tools.
diff -r e298d029472150df08bfae9b6ec311c78cb63dfd -r bc739e4b5bdf2a49a7c1a365b1e779df26d0f813 lib/tool_shed/util/encoding_util.py
--- a/lib/tool_shed/util/encoding_util.py
+++ b/lib/tool_shed/util/encoding_util.py
@@ -21,7 +21,6 @@
try:
values = json.loads( value )
except Exception, e:
- #log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
pass
if values is not None:
try:
@@ -34,7 +33,7 @@
return values
def tool_shed_encode( val ):
- if isinstance( val, dict ):
+ if isinstance( val, dict ) or isinstance( val, list ):
value = json.dumps( val )
else:
value = val
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8af539a3006b/
Changeset: 8af539a3006b
User: jmchilton
Date: 2014-03-26 22:28:31
Summary: Fix broken import with workflow PR.
Affected #: 1 file
diff -r 46d4a48fcb7b6dd736f081f5d8b650b0a33a296c -r 8af539a3006b24e027215af4f270b5db0d07c5e6 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -41,7 +41,7 @@
from galaxy.tools.parameters.basic import DataToolParameter
from galaxy.util.json import to_json_string
from galaxy.workflow.modules import ToolModule
-from galaxy.workflow.build_util import attach_ordered_steps
+from galaxy.workflow.steps import attach_ordered_steps
log = logging.getLogger( __name__ )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/90b4baa5c2b1/
Changeset: 90b4baa5c2b1
User: greg
Date: 2014-03-26 19:58:08
Summary: Handle combinations of newly defined tool dependencies and repository dependencies that are included in updates being pulled to tool shed repositories installed into Galaxy.
Affected #: 15 files
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -201,9 +201,13 @@
repository_id = kwd.get( 'id', None )
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
+ params = '?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( web.url_for( '/', qualified=True ),
+ str( repository.name ),
+ str( repository.owner ),
+ str( repository.changeset_revision ) )
url = suc.url_join( tool_shed_url,
- 'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( web.url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision ) )
+ 'repository/check_for_updates%s' % params )
return trans.response.send_redirect( url )
@web.expose
@@ -367,9 +371,10 @@
@web.require_admin
def get_tool_dependencies( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
"""
- Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for the received repository name,
- owner and changeset revision. The received repository_id is the encoded id of the installed tool shed repository in Galaxy. We need
- it so that we can derive the tool shed from which it was installed.
+ Send a request to the appropriate tool shed to retrieve the dictionary of tool dependencies defined for
+ the received repository name, owner and changeset revision. The received repository_id is the encoded id
+ of the installed tool shed repository in Galaxy. We need it so that we can derive the tool shed from which
+ it was installed.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
@@ -388,8 +393,8 @@
@web.require_admin
def get_updated_repository_information( self, trans, repository_id, repository_name, repository_owner, changeset_revision ):
"""
- Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall an updated revision of an
- uninstalled tool shed repository.
+ Send a request to the appropriate tool shed to retrieve the dictionary of information required to reinstall
+ an updated revision of an uninstalled tool shed repository.
"""
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
@@ -436,13 +441,16 @@
@web.expose
@web.require_admin
- def initiate_tool_dependency_installation( self, trans, tool_dependencies ):
+ def initiate_tool_dependency_installation( self, trans, tool_dependencies, **kwd ):
"""Install specified dependencies for repository tools."""
# Get the tool_shed_repository from one of the tool_dependencies.
- message = ''
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ err_msg = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = suc.get_config_from_disk( suc.TOOL_DEPENDENCY_DEFINITION_FILENAME, tool_shed_repository.repo_path( trans.app ) )
+ tool_dependencies_config = suc.get_config_from_disk( suc.TOOL_DEPENDENCY_DEFINITION_FILENAME,
+ tool_shed_repository.repo_path( trans.app ) )
installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -452,13 +460,12 @@
if installed_tool_dependency.status == trans.app.install_model.ToolDependency.installation_status.ERROR:
text = util.unicodify( installed_tool_dependency.error_message )
if text is not None:
- message += ' %s' % text
+ err_msg += ' %s' % text
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
- if message:
+ if err_msg:
+ message += err_msg
status = 'error'
- else:
- message = "Installed tool dependencies: %s" % ','.join( td.name for td in installed_tool_dependencies )
- status = 'done'
+ message += "Installed tool dependencies: %s" % ','.join( td.name for td in installed_tool_dependencies )
td_ids = [ trans.security.encode_id( td.id ) for td in tool_shed_repository.tool_dependencies ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_tool_dependencies',
@@ -506,9 +513,10 @@
status = 'error'
else:
# Install the latest downloadable revision of the repository.
+ params = '?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
+ ( name, owner, str( latest_downloadable_revision ), web.url_for( '/', qualified=True ) )
url = suc.url_join( tool_shed_url,
- 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
- ( name, owner, latest_downloadable_revision, web.url_for( '/', qualified=True ) ) )
+ 'repository/install_repositories_by_revision%s' % params )
return trans.response.send_redirect( url )
else:
message = 'Cannot locate installed tool shed repository with encoded id <b>%s</b>.' % str( repository_id )
@@ -521,43 +529,74 @@
message=message,
status=status ) )
-
@web.expose
@web.require_admin
- def install_tool_dependencies( self, trans, **kwd ):
+ def install_tool_dependencies_with_update( self, trans, **kwd ):
+ """
+ Updating an installed tool shed repository where new tool dependencies but no new repository
+ dependencies are included in the updated revision.
+ """
+ updating_repository_id = kwd.get( 'updating_repository_id', None )
+ repository = suc.get_installed_tool_shed_repository( trans, updating_repository_id )
+ # All received dependencies need to be installed - confirmed by the caller.
+ encoded_tool_dependencies_dict = kwd.get( 'encoded_tool_dependencies_dict', None )
+ if encoded_tool_dependencies_dict is not None:
+ tool_dependencies_dict = encoding_util.tool_shed_decode( encoded_tool_dependencies_dict )
+ else:
+ tool_dependencies_dict = {}
+ encoded_relative_install_dir = kwd.get( 'encoded_relative_install_dir', None )
+ if encoded_relative_install_dir is not None:
+ relative_install_dir = encoding_util.tool_shed_decode( encoded_relative_install_dir )
+ else:
+ relative_install_dir = ''
+ updating_to_changeset_revision = kwd.get( 'updating_to_changeset_revision', None )
+ updating_to_ctx_rev = kwd.get( 'updating_to_ctx_rev', None )
+ encoded_updated_metadata = kwd.get( 'encoded_updated_metadata', None )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- tool_dependency_ids = tool_dependency_util.get_tool_dependency_ids( as_string=False, **kwd )
- tool_dependencies = []
- for tool_dependency_id in tool_dependency_ids:
- tool_dependency = tool_dependency_util.get_tool_dependency( trans, tool_dependency_id )
- tool_dependencies.append( tool_dependency )
- if kwd.get( 'install_tool_dependencies_button', False ):
- # Filter tool dependencies to only those that are installed.
- tool_dependencies_for_installation = []
- for tool_dependency in tool_dependencies:
- if tool_dependency.status in [ trans.install_model.ToolDependency.installation_status.UNINSTALLED,
- trans.install_model.ToolDependency.installation_status.ERROR ]:
- tool_dependencies_for_installation.append( tool_dependency )
- if tool_dependencies_for_installation:
- # Redirect back to the ToolDependencyGrid before initiating installation.
- encoded_tool_dependency_for_installation_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies_for_installation ]
- new_kwd = dict( action='manage_tool_dependencies',
- operation='initiate_tool_dependency_installation',
- tool_dependency_ids=encoded_tool_dependency_for_installation_ids,
- message=message,
- status=status )
- return self.tool_dependency_grid( trans, **new_kwd )
+ install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
+ if 'install_tool_dependencies_with_update_button' in kwd:
+ # Now that the user has chosen whether to install tool dependencies or not, we can
+ # update the repository record with the changes in the updated revision.
+ if encoded_updated_metadata:
+ updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata )
else:
- message = 'All of the selected tool dependencies are already installed.'
- status = 'error'
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='manage_repository_tool_dependencies',
- tool_dependency_ids=tool_dependency_ids,
- status=status,
- message=message ) )
- return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies.mako',
- tool_dependencies=tool_dependencies,
+ updated_metadata = None
+ repository = repository_util.update_repository_record( trans,
+ repository=repository,
+ updated_metadata_dict=updated_metadata,
+ updated_changeset_revision=updating_to_changeset_revision,
+ updated_ctx_rev=updating_to_ctx_rev )
+ if install_tool_dependencies:
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
+ repository,
+ relative_install_dir,
+ set_status=False )
+ message = "The installed repository named '%s' has been updated to change set revision '%s'. " % \
+ ( str( repository.name ), updating_to_changeset_revision )
+ self.initiate_tool_dependency_installation( trans, tool_dependencies, message=message, status=status )
+ # Handle tool dependencies check box.
+ if trans.app.config.tool_dependency_dir is None:
+ if includes_tool_dependencies:
+ message = "Tool dependencies defined in this repository can be automatically installed if you set "
+ message += "the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file "
+ message += "(universe_wsgi.ini) and restart your Galaxy server."
+ status = "warning"
+ install_tool_dependencies_check_box_checked = False
+ else:
+ install_tool_dependencies_check_box_checked = True
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies',
+ checked=install_tool_dependencies_check_box_checked )
+ return trans.fill_template( '/admin/tool_shed_repository/install_tool_dependencies_with_update.mako',
+ repository=repository,
+ updating_repository_id=updating_repository_id,
+ updating_to_ctx_rev=updating_to_ctx_rev,
+ updating_to_changeset_revision=updating_to_changeset_revision,
+ encoded_updated_metadata=encoded_updated_metadata,
+ encoded_relative_install_dir=encoded_relative_install_dir,
+ encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
+ install_tool_dependencies_check_box=install_tool_dependencies_check_box,
+ tool_dependencies_dict=tool_dependencies_dict,
message=message,
status=status )
@@ -611,7 +650,8 @@
# TODO: I believe this block should be removed, but make sure..
repositories_for_uninstallation = []
for repository_id in tsridslist:
- repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( repository_id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .get( trans.security.decode_id( repository_id ) )
if repository.status in [ trans.install_model.ToolShedRepository.installation_status.INSTALLED,
trans.install_model.ToolShedRepository.installation_status.ERROR ]:
repositories_for_uninstallation.append( repository )
@@ -632,28 +672,34 @@
filtered_repo_info_dicts = []
filtered_tool_panel_section_keys = []
repositories_for_installation = []
- # Some repositories may have repository dependencies that are required to be installed before the dependent repository, so we'll
- # order the list of tsr_ids to ensure all repositories install in the required order.
+ # Some repositories may have repository dependencies that are required to be installed before the
+ # dependent repository, so we'll order the list of tsr_ids to ensure all repositories install in the
+ # required order.
ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys = \
repository_util.order_components_for_installation( trans,
tsr_ids,
repo_info_dicts,
tool_panel_section_keys=tool_panel_section_keys )
for tsr_id in ordered_tsr_ids:
- repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ).get( trans.security.decode_id( tsr_id ) )
+ repository = trans.install_model.context.query( trans.install_model.ToolShedRepository ) \
+ .get( trans.security.decode_id( tsr_id ) )
if repository.status in [ trans.install_model.ToolShedRepository.installation_status.NEW,
trans.install_model.ToolShedRepository.installation_status.UNINSTALLED ]:
repositories_for_installation.append( repository )
- repo_info_dict, tool_panel_section_key = repository_util.get_repository_components_for_installation( tsr_id,
- ordered_tsr_ids,
- ordered_repo_info_dicts,
- ordered_tool_panel_section_keys )
+ repo_info_dict, tool_panel_section_key = \
+ repository_util.get_repository_components_for_installation( tsr_id,
+ ordered_tsr_ids,
+ ordered_repo_info_dicts,
+ ordered_tool_panel_section_keys )
filtered_repo_info_dicts.append( repo_info_dict )
filtered_tool_panel_section_keys.append( tool_panel_section_key )
if repositories_for_installation:
decoded_kwd[ 'repo_info_dicts' ] = filtered_repo_info_dicts
decoded_kwd[ 'tool_panel_section_keys' ] = filtered_tool_panel_section_keys
- self.install_tool_shed_repositories( trans, repositories_for_installation, reinstalling=reinstalling, **decoded_kwd )
+ self.install_tool_shed_repositories( trans,
+ repositories_for_installation,
+ reinstalling=reinstalling,
+ **decoded_kwd )
else:
kwd[ 'message' ] = 'All selected tool shed repositories are already installed.'
kwd[ 'status' ] = 'error'
@@ -772,7 +818,10 @@
trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
- self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
+ self.initiate_tool_dependency_installation( trans,
+ tool_dependencies_for_installation,
+ message=message,
+ status=status )
else:
message = 'All selected tool dependencies are already installed.'
status = 'error'
@@ -780,14 +829,16 @@
message = 'Set the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file (universe_wsgi.ini) '
message += ' and restart your Galaxy server to install tool dependencies.'
status = 'error'
- installed_tool_dependencies_select_field = suc.build_tool_dependencies_select_field( trans,
- tool_shed_repository=tool_shed_repository,
- name='inst_td_ids',
- uninstalled_only=False )
- uninstalled_tool_dependencies_select_field = suc.build_tool_dependencies_select_field( trans,
- tool_shed_repository=tool_shed_repository,
- name='uninstalled_tool_dependency_ids',
- uninstalled_only=True )
+ installed_tool_dependencies_select_field = \
+ suc.build_tool_dependencies_select_field( trans,
+ tool_shed_repository=tool_shed_repository,
+ name='inst_td_ids',
+ uninstalled_only=False )
+ uninstalled_tool_dependencies_select_field = \
+ suc.build_tool_dependencies_select_field( trans,
+ tool_shed_repository=tool_shed_repository,
+ name='uninstalled_tool_dependency_ids',
+ uninstalled_only=True )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository_tool_dependencies.mako',
repository=tool_shed_repository,
installed_tool_dependencies_select_field=installed_tool_dependencies_select_field,
@@ -836,7 +887,10 @@
trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
tool_dependencies_for_installation.append( tool_dependency )
if tool_dependencies_for_installation:
- self.initiate_tool_dependency_installation( trans, tool_dependencies_for_installation )
+ self.initiate_tool_dependency_installation( trans,
+ tool_dependencies_for_installation,
+ message=message,
+ status=status )
else:
kwd[ 'message' ] = 'All selected tool dependencies are already installed.'
kwd[ 'status' ] = 'error'
@@ -876,18 +930,21 @@
@web.require_admin
def prepare_for_install( self, trans, **kwd ):
if not suc.have_shed_tool_conf_for_install( trans ):
- message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one shed tool configuration file name with a '
- message += '<b><toolbox></b> tag that includes a <b>tool_path</b> attribute value which is a directory relative to the Galaxy installation '
- message += 'directory in order to automatically install tools from a Galaxy tool shed (e.g., the file name <b>shed_tool_conf.xml</b> whose '
- message += '<b><toolbox></b> tag is <b><toolbox tool_path="../shed_tools"></b>).<p/>See the '
- message += '<a href="http://wiki.g2.bx.psu.edu/InstallingRepositoriesToGalaxy" target="_blank">Installation of Galaxy tool shed repository tools '
- message += 'into a local Galaxy instance</a> section of the Galaxy tool shed wiki for all of the details.'
+ message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one '
+ message += 'shed tool configuration file name with a <b><toolbox></b> tag that includes a <b>tool_path</b> '
+ message += 'attribute value which is a directory relative to the Galaxy installation directory in order '
+ message += 'to automatically install tools from a Galaxy Tool Shed (e.g., the file name <b>shed_tool_conf.xml</b> '
+ message += 'whose <b><toolbox></b> tag is <b><toolbox tool_path="../shed_tools"></b>).<p/>See the '
+ message += '<a href="http://wiki.g2.bx.psu.edu/InstallingRepositoriesToGalaxy" target="_blank">Installation '
+ message += 'of Galaxy Tool Shed repository tools into a local Galaxy instance</a> section of the Galaxy Tool '
+ message += 'Shed wiki for all of the details.'
return trans.show_error_message( message )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
shed_tool_conf = kwd.get( 'shed_tool_conf', None )
tool_shed_url = kwd.get( 'tool_shed_url', None )
- # Handle repository dependencies, which do not include those that are required only for compiling a dependent repository's tool dependencies.
+ # Handle repository dependencies, which do not include those that are required only for compiling a dependent
+ # repository's tool dependencies.
has_repository_dependencies = util.string_as_bool( kwd.get( 'has_repository_dependencies', False ) )
install_repository_dependencies = kwd.get( 'install_repository_dependencies', '' )
# Every repository will be installed into the same tool panel section or all will be installed outside of any sections.
@@ -900,13 +957,36 @@
includes_tools_for_display_in_tool_panel = util.string_as_bool( kwd.get( 'includes_tools_for_display_in_tool_panel', False ) )
includes_tool_dependencies = util.string_as_bool( kwd.get( 'includes_tool_dependencies', False ) )
install_tool_dependencies = kwd.get( 'install_tool_dependencies', '' )
+ # In addition to installing new repositories, this method is called when updating an installed repository
+ # to a new changeset_revision where the update includes newly defined repository dependencies.
+ updating = util.asbool( kwd.get( 'updating', False ) )
+ updating_repository_id = kwd.get( 'updating_repository_id', None )
+ updating_to_changeset_revision = kwd.get( 'updating_to_changeset_revision', None )
+ updating_to_ctx_rev = kwd.get( 'updating_to_ctx_rev', None )
+ encoded_updated_metadata = kwd.get( 'encoded_updated_metadata', None )
encoded_repo_info_dicts = kwd.get( 'encoded_repo_info_dicts', '' )
if encoded_repo_info_dicts:
encoded_repo_info_dicts = encoded_repo_info_dicts.split( encoding_util.encoding_sep )
if not encoded_repo_info_dicts:
- # The request originated in the tool shed via a tool search.
+ # The request originated in the tool shed via a tool search or from this controller's
+ # update_to_changeset_revision() method.
repository_ids = kwd.get( 'repository_ids', None )
- changeset_revisions = kwd.get( 'changeset_revisions', None )
+ if updating:
+ # We have updated an installed repository where the updates included newly defined repository
+ # and possibly tool dependencies. We will have arrived here only if the updates include newly
+ # defined repository dependencies. We're preparing to allow the user to elect to install these
+ # dependencies. At this point, the repository has been updated to the latest changeset revision,
+ # but the received repository id is from the Galaxy side (the caller is this controller's
+ # update_to_changeset_revision() method. We need to get the id of the same repository from the
+ # Tool Shed side.
+ repository = suc.get_tool_shed_repository_by_id( trans, updating_repository_id )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_repository_id?name=%s&owner=%s' % \
+ ( str( repository.name ), str( repository.owner ) ) )
+ repository_ids = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ changeset_revisions = updating_to_changeset_revision
+ else:
+ changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
url = suc.url_join( tool_shed_url,
'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
@@ -918,7 +998,8 @@
if not includes_tools:
includes_tools = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools', False ) )
if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
+ includes_tools_for_display_in_tool_panel = \
+ util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
if not has_repository_dependencies:
has_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'has_repository_dependencies', False ) )
if not includes_tool_dependencies:
@@ -927,6 +1008,19 @@
repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
if ( not includes_tools_for_display_in_tool_panel and kwd.get( 'select_shed_tool_panel_config_button', False ) ) or \
( includes_tools_for_display_in_tool_panel and kwd.get( 'select_tool_panel_section_button', False ) ):
+ if updating:
+ encoded_updated_metadata_dict = kwd.get( 'encoded_updated_metadata_dict', None )
+ updated_changeset_revision = kwd.get( 'updated_changeset_revision', None )
+ updated_ctx_rev = kwd.get( 'updated_ctx_rev', None )
+ repository = suc.get_tool_shed_repository_by_id( trans, updating_repository_id )
+ decoded_updated_metadata = encoding_util.tool_shed_decode( encoded_updated_metadata )
+ # Now that the user has decided whether they will handle dependencies, we can update
+ # the repository to the latest revision.
+ repository = repository_util.update_repository_record( trans,
+ repository=repository,
+ updated_metadata_dict=decoded_updated_metadata,
+ updated_changeset_revision=updating_to_changeset_revision,
+ updated_ctx_rev=updating_to_ctx_rev )
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
if includes_tool_dependencies:
install_tool_dependencies = CheckboxField.is_checked( install_tool_dependencies )
@@ -959,7 +1053,8 @@
tool_panel_section_keys=tool_panel_section_keys,
tool_path=tool_path,
tool_shed_url=tool_shed_url )
- encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = repository_util.initiate_repository_installation( trans, installation_dict )
+ encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
+ repository_util.initiate_repository_installation( trans, installation_dict )
return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
encoded_kwd=encoded_kwd,
query=query,
@@ -976,12 +1071,14 @@
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans )
if len( repo_info_dicts ) == 1:
- # If we're installing a single repository, see if it contains a readme or dependencies that we can display.
+ # If we're installing or updating a single repository, see if it contains a readme or
+ # dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
tool_shed_url,
repo_info_dict,
- includes_tool_dependencies )
+ includes_tool_dependencies,
+ updating=updating )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
if not has_repository_dependencies:
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
@@ -990,7 +1087,8 @@
if not includes_tools:
includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tools_for_display_in_tool_panel = \
+ dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
@@ -998,19 +1096,25 @@
name = dependencies_for_repository_dict.get( 'name', None )
repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None )
readme_files_dict = readme_util.get_readme_files_dict_for_display( trans, tool_shed_url, repo_info_dict )
- # We're handling 1 of 2 scenarios here: (1) we're installing a tool shed repository for the first time, so we've retrieved the list of installed
- # and missing repository dependencies from the database (2) we're handling the scenario where an error occurred during the installation process,
- # so we have a tool_shed_repository record in the database with associated repository dependency records. Since we have the repository
- # dependencies in either case, we'll merge the list of missing repository dependencies into the list of installed repository dependencies since
- # each displayed repository dependency will display a status, whether installed or missing.
- containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ # We're handling 1 of 3 scenarios here: (1) we're installing a tool shed repository for the first time, so we've
+ # retrieved the list of installed and missing repository dependencies from the database (2) we're handling the
+ # scenario where an error occurred during the installation process, so we have a tool_shed_repository record in
+ # the database with associated repository dependency records. Since we have the repository dependencies in both
+ # of the above 2 cases, we'll merge the list of missing repository dependencies into the list of installed
+ # repository dependencies since each displayed repository dependency will display a status, whether installed or
+ # missing. The 3rd scenario is where we're updating an installed repository and the updates include newly
+ # defined repository (and possibly tool) dependencies. In this case, merging will result in newly defined
+ # dependencies to be lost. We pass the updating parameter to make sure merging occurs only when appropriate.
+ containers_dict = \
+ repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=updating )
else:
# We're installing a list of repositories, each of which may have tool dependencies or repository dependencies.
containers_dicts = []
@@ -1018,7 +1122,8 @@
dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
tool_shed_url,
repo_info_dict,
- includes_tool_dependencies )
+ includes_tool_dependencies,
+ updating=updating )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
if not has_repository_dependencies:
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
@@ -1027,34 +1132,39 @@
if not includes_tools:
includes_tools = dependencies_for_repository_dict.get( 'includes_tools', False )
if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tools_for_display_in_tool_panel = \
+ dependencies_for_repository_dict.get( 'includes_tools_for_display_in_tool_panel', False )
installed_repository_dependencies = dependencies_for_repository_dict.get( 'installed_repository_dependencies', None )
installed_tool_dependencies = dependencies_for_repository_dict.get( 'installed_tool_dependencies', None )
missing_repository_dependencies = dependencies_for_repository_dict.get( 'missing_repository_dependencies', None )
missing_tool_dependencies = dependencies_for_repository_dict.get( 'missing_tool_dependencies', None )
name = dependencies_for_repository_dict.get( 'name', None )
repository_owner = dependencies_for_repository_dict.get( 'repository_owner', None )
- containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=None,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ containers_dict = \
+ repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=None,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=updating )
containers_dicts.append( containers_dict )
# Merge all containers into a single container.
containers_dict = repository_util.merge_containers_dicts_for_new_install( containers_dicts )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
if includes_tool_dependencies:
- message = "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
- message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository."
+ message = "Tool dependencies defined in this repository can be automatically installed if you set "
+ message += "the value of your <b>tool_dependency_dir</b> setting in your Galaxy config file "
+ message += "(universe_wsgi.ini) and restart your Galaxy server before installing the repository."
status = "warning"
install_tool_dependencies_check_box_checked = False
else:
install_tool_dependencies_check_box_checked = True
- install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies', checked=install_tool_dependencies_check_box_checked )
+ install_tool_dependencies_check_box = CheckboxField( 'install_tool_dependencies',
+ checked=install_tool_dependencies_check_box_checked )
# Handle repository dependencies check box.
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
encoded_repo_info_dicts = encoding_util.encoding_sep.join( encoded_repo_info_dicts )
@@ -1062,6 +1172,11 @@
if includes_tools_for_display_in_tool_panel:
return trans.fill_template( '/admin/tool_shed_repository/select_tool_panel_section.mako',
encoded_repo_info_dicts=encoded_repo_info_dicts,
+ updating=updating,
+ updating_repository_id=updating_repository_id,
+ updating_to_ctx_rev=updating_to_ctx_rev,
+ updating_to_changeset_revision=updating_to_changeset_revision,
+ encoded_updated_metadata=encoded_updated_metadata,
includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
includes_tool_dependencies=includes_tool_dependencies,
@@ -1077,10 +1192,16 @@
message=message,
status=status )
else:
- # If installing repositories that includes no tools and has no repository dependencies, display a page allowing the Galaxy administrator to
- # select a shed-related tool panel configuration file whose tool_path setting will be the location the repositories will be installed.
+ # If installing repositories that includes no tools and has no repository dependencies, display a page
+ # allowing the Galaxy administrator to select a shed-related tool panel configuration file whose tool_path
+ # setting will be the location the repositories will be installed.
return trans.fill_template( '/admin/tool_shed_repository/select_shed_tool_panel_config.mako',
encoded_repo_info_dicts=encoded_repo_info_dicts,
+ updating=updating,
+ updating_repository_id=updating_repository_id,
+ updating_to_ctx_rev=updating_to_ctx_rev,
+ updating_to_changeset_revision=updating_to_changeset_revision,
+ encoded_updated_metadata=encoded_updated_metadata,
includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
includes_tool_dependencies=includes_tool_dependencies,
@@ -1400,7 +1521,8 @@
dependencies_for_repository_dict = common_install_util.get_dependencies_for_repository( trans,
tool_shed_url,
repo_info_dict,
- includes_tool_dependencies )
+ includes_tool_dependencies,
+ updating=False )
changeset_revision = dependencies_for_repository_dict.get( 'changeset_revision', None )
has_repository_dependencies = dependencies_for_repository_dict.get( 'has_repository_dependencies', False )
includes_tool_dependencies = dependencies_for_repository_dict.get( 'includes_tool_dependencies', False )
@@ -1449,14 +1571,16 @@
original_section_name = ''
tool_panel_section_select_field = None
shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans )
- containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- readme_files_dict=readme_files_dict,
- installed_repository_dependencies=installed_repository_dependencies,
- missing_repository_dependencies=missing_repository_dependencies,
- installed_tool_dependencies=installed_tool_dependencies,
- missing_tool_dependencies=missing_tool_dependencies )
+ containers_dict = \
+ repository_util.populate_containers_dict_for_new_install( trans=trans,
+ tool_shed_url=tool_shed_url,
+ tool_path=tool_path,
+ readme_files_dict=readme_files_dict,
+ installed_repository_dependencies=installed_repository_dependencies,
+ missing_repository_dependencies=missing_repository_dependencies,
+ installed_tool_dependencies=installed_tool_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ updating=False )
# Since we're reinstalling we'll merge the list of missing repository dependencies into the list of installed repository dependencies since each displayed
# repository dependency will display a status, whether installed or missing.
containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
@@ -1574,8 +1698,8 @@
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
"""
- Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed repository and update the
- metadata for the repository's revision in the Galaxy database.
+ Get the tool_versions from the tool shed for each tool in the installed revision of a selected tool shed
+ repository and update the metadata for the repository's revision in the Galaxy database.
"""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
@@ -1720,29 +1844,17 @@
resetting_all_metadata_on_repository=False,
updating_installed_repository=True,
persist=True )
- repository.metadata = metadata_dict
- # Update the repository.changeset_revision column in the database.
- repository.changeset_revision = latest_changeset_revision
- repository.ctx_rev = latest_ctx_rev
- # Update the repository.tool_shed_status column in the database.
- tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( trans.app, repository )
- if tool_shed_status_dict:
- repository.tool_shed_status = tool_shed_status_dict
- else:
- repository.tool_shed_status = None
- trans.install_model.context.add( repository )
- trans.install_model.context.flush()
if 'tools' in metadata_dict:
tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
if tool_panel_dict is None:
tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
tool_util.add_to_tool_panel( app=trans.app,
- repository_name=repository.name,
+ repository_name=str( repository.name ),
repository_clone_url=repository_clone_url,
- changeset_revision=repository.installed_changeset_revision,
+ changeset_revision=str( repository.installed_changeset_revision ),
repository_tools_tups=repository_tools_tups,
- owner=repository.owner,
+ owner=str( repository.owner ),
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=False )
@@ -1755,19 +1867,40 @@
os.path.join( relative_install_dir, name ),
repository,
repository_tools_tups )
- # Create tool_dependency records if necessary.
- if 'tool_dependencies' in metadata_dict:
- tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app,
- repository,
- relative_install_dir,
- set_status=False )
+ if 'repository_dependencies' in metadata_dict or 'tool_dependencies' in metadata_dict:
+ if 'repository_dependencies' in metadata_dict:
+ # Updates received include newly defined repository dependencies, so allow the user
+                    # the option of installing them. We cannot update the repository with the changes
+ # until that happens, so we have to send them along.
+ new_kwd = dict( tool_shed_url=tool_shed_url,
+ updating_repository_id=trans.security.encode_id( repository.id ),
+ updating_to_ctx_rev=latest_ctx_rev,
+ updating_to_changeset_revision=latest_changeset_revision,
+ encoded_updated_metadata=encoding_util.tool_shed_encode( metadata_dict ),
+ updating=True )
+ return self.prepare_for_install( trans, **new_kwd )
+ # Updates received did not include any newly defined repository dependencies but did include
+ # newly defined tool dependencies.
+ encoded_tool_dependencies_dict = encoding_util.tool_shed_encode( metadata_dict.get( 'tool_dependencies', {} ) )
+ encoded_relative_install_dir = encoding_util.tool_shed_encode( relative_install_dir )
+ new_kwd = dict( updating_repository_id=trans.security.encode_id( repository.id ),
+ updating_to_ctx_rev=latest_ctx_rev,
+ updating_to_changeset_revision=latest_changeset_revision,
+ encoded_updated_metadata=encoding_util.tool_shed_encode( metadata_dict ),
+ encoded_relative_install_dir=encoded_relative_install_dir,
+ encoded_tool_dependencies_dict=encoded_tool_dependencies_dict,
+ message=message,
+                            status=status )
+ return self.install_tool_dependencies_with_update( trans, **new_kwd )
+ # Updates received did not include any newly defined repository dependencies or newly defined
+ # tool dependencies.
+ repository = repository_util.update_repository_record( trans,
+ repository=repository,
+ updated_metadata_dict=metadata_dict,
+ updated_changeset_revision=latest_changeset_revision,
+ updated_ctx_rev=latest_ctx_rev )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % \
( name, latest_changeset_revision )
- # See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = \
- suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- if repository.missing_tool_dependencies:
- message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
message = "The directory containing the installed repository named '%s' cannot be found. " % name
status = 'error'
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -866,11 +866,13 @@
no_update = 'false'
elif galaxy_url:
# Start building up the url to redirect back to the calling Galaxy instance.
+ params = '?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
+ ( web.url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision )
url = suc.url_join( galaxy_url,
- 'admin_toolshed/update_to_changeset_revision?tool_shed_url=%s&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( web.url_for( '/', qualified=True ), repository.name, repository.user.username, changeset_revision ) )
+ 'admin_toolshed/update_to_changeset_revision%s' % params )
else:
- message = 'Unable to check for updates due to an invalid Galaxy URL: <b>%s</b>. You may need to enable third-party cookies in your browser. ' % galaxy_url
+ message = 'Unable to check for updates due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url
+ message += 'You may need to enable third-party cookies in your browser. '
return trans.show_error_message( message )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
@@ -883,15 +885,17 @@
trans.security.encode_id( repository.id ),
changeset_revision )
if repository_metadata:
- # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
+ # If changeset_revision is in the repository_metadata table for this repository, there are no
+ # additional updates.
if from_update_manager:
return no_update
else:
# Return the same value for changeset_revision and latest_changeset_revision.
url += latest_changeset_revision
else:
- # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
- # repository was installed. We need to find the changeset_revision to which we need to update.
+ # The changeset_revision column in the repository_metadata table has been updated with a new
+ # changeset_revision value since the repository was installed. We need to find the changeset_revision
+ # to which we need to update.
update_to_changeset_hash = None
for changeset in repo.changelog:
changeset_hash = str( repo.changectx( changeset ) )
@@ -1680,6 +1684,16 @@
return encoding_util.tool_shed_encode( repository_dependencies )
return ''
+ @web.expose
+ def get_repository_id( self, trans, **kwd ):
+ """Given a repository name and owner, return the encoded repository id."""
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+ if repository:
+ return trans.security.encode_id( repository.id )
+ return ''
+
@web.json
def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
"""
@@ -1824,7 +1838,9 @@
if not repository_metadata:
# The received changeset_revision is no longer associated with metadata, so get the next changeset_revision in the repository
# changelog that is associated with metadata.
- changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, after_changeset_revision=changeset_revision )
+ changeset_revision = suc.get_next_downloadable_changeset_revision( repository,
+ repo,
+ after_changeset_revision=changeset_revision )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
@@ -2039,11 +2055,12 @@
galaxy_url = suc.handle_galaxy_url( trans, **kwd )
if galaxy_url:
# Redirect back to local Galaxy to perform install.
+ params = '?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
+ ( web.url_for( '/', qualified=True ),
+ ','.join( util.listify( repository_ids ) ),
+ ','.join( util.listify( changeset_revisions ) ) )
url = suc.url_join( galaxy_url,
- 'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
- ( web.url_for( '/', qualified=True ),
- ','.join( util.listify( repository_ids ) ),
- ','.join( util.listify( changeset_revisions ) ) ) )
+ 'admin_toolshed/prepare_for_install%s' % params )
return trans.response.send_redirect( url )
else:
message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url
@@ -2477,9 +2494,10 @@
@web.expose
def next_installable_changeset_revision( self, trans, **kwd ):
"""
- Handle a request from a Galaxy instance where the changeset_revision defined for a repository in a dependency definition file is older
- than the changeset_revision associated with the installed repository. This will occur with repository's of type tool_dependency_definition,
- and this scenario will occur while repository dependency hierarchies are bing installed.
+ Handle a request from a Galaxy instance where the changeset_revision defined for a repository
+ in a dependency definition file is older than the changeset_revision associated with the installed
repository. This will occur with repositories of type tool_dependency_definition, and this scenario
will occur while repository dependency hierarchies are being installed.
"""
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
@@ -2981,12 +2999,13 @@
@web.expose
def updated_changeset_revisions( self, trans, **kwd ):
"""
- Handle a request from a local Galaxy instance to retrieve the list of changeset revisions to which an installed repository can be updated. This
- method will return a string of comma-separated changeset revision hashes for all available updates to the received changeset revision. Among
- other things , this method handles the scenario where an installed tool shed repository's tool_dependency definition file defines a changeset
- revision for a complex repository dependency that is outdated. In other words, a defined changeset revision is older than the current changeset
- revision for the required repository, making it impossible to discover the repository without knowledge of revisions to which it could have been
- updated.
+ Handle a request from a local Galaxy instance to retrieve the list of changeset revisions to which an
+ installed repository can be updated. This method will return a string of comma-separated changeset revision
+    hashes for all available updates to the received changeset revision. Among other things, this method
+ handles the scenario where an installed tool shed repository's tool_dependency definition file defines a
+ changeset revision for a complex repository dependency that is outdated. In other words, a defined changeset
+ revision is older than the current changeset revision for the required repository, making it impossible to
+ discover the repository without knowledge of revisions to which it could have been updated.
"""
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -37,15 +37,19 @@
Galaxy instance. The dictionary will also contain the recursive list of repository dependencies defined
for the repository, as well as the defined tool dependencies.
- This method is called from Galaxy under three scenarios:
+ This method is called from Galaxy under four scenarios:
1. During the tool shed repository installation process via the tool shed's get_repository_information()
- method. In this case both the received repository and repository_metadata will be objects., but
+ method. In this case both the received repository and repository_metadata will be objects, but
tool_dependencies and repository_dependencies will be None.
- 2. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
+       2. When getting updates for an installed repository where the updates include newly defined repository
+ dependency definitions. This scenario is similar to 1. above. The tool shed's get_repository_information()
+ method is the caller, and both the received repository and repository_metadata will be objects, but
+ tool_dependencies and repository_dependencies will be None.
+ 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with no
updates available. In this case, both repository and repository_metadata will be None, but tool_dependencies
and repository_dependencies will be objects previously retrieved from the tool shed if the repository includes
definitions for them.
- 3. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
+ 4. When a tool shed repository that was uninstalled from a Galaxy instance is being reinstalled with updates
available. In this case, this method is reached via the tool shed's get_updated_repository_information()
method, and both repository and repository_metadata will be objects but tool_dependencies and
repository_dependencies will be None.
@@ -184,16 +188,22 @@
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
+ repository_id,
+ changeset_revision )
if not repository_metadata:
- # The received changeset_revision is no longer installable, so get the next changeset_revision in the repository's changelog.
- # This generally occurs only with repositories of type tool_dependency_definition.
- next_downloadable_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+ # The received changeset_revision is no longer installable, so get the next changeset_revision
+ # in the repository's changelog. This generally occurs only with repositories of type
+ # tool_dependency_definition.
+ next_downloadable_changeset_revision = \
+            suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
if next_downloadable_changeset_revision:
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, next_downloadable_changeset_revision )
+ repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans, repository_id, next_downloadable_changeset_revision )
if repository_metadata:
- # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption is not valid we'll have to
- # enhance the callers to handle repository_metadata values of None in the returned repo_info_dict.
+ # For now, we'll always assume that we'll get repository_metadata, but if we discover our assumption
+ # is not valid we'll have to enhance the callers to handle repository_metadata values of None in the
+ # returned repo_info_dict.
metadata = repository_metadata.metadata
if 'tools' in metadata:
includes_tools = True
@@ -743,9 +753,13 @@
ordered_tool_panel_section_keys.append( tool_panel_section_key )
return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys
-def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies, missing_repository_dependencies,
- installed_tool_dependencies, missing_tool_dependencies ):
- """Return the populated containers for a repository being installed for the first time."""
+def populate_containers_dict_for_new_install( trans, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies,
+ missing_repository_dependencies, installed_tool_dependencies, missing_tool_dependencies,
+ updating=False ):
+ """
+ Return the populated containers for a repository being installed for the first time or for an installed repository
+ that is being updated and the updates include newly defined repository (and possibly tool) dependencies.
+ """
installed_tool_dependencies, missing_tool_dependencies = \
tool_dependency_util.populate_tool_dependencies_dicts( trans=trans,
tool_shed_url=tool_shed_url,
@@ -753,27 +767,32 @@
repository_installed_tool_dependencies=installed_tool_dependencies,
repository_missing_tool_dependencies=missing_tool_dependencies,
required_repo_info_dicts=None )
- # Since we are installing a new repository, most of the repository contents are set to None since we don't yet know what they are.
- containers_dict = container_util.build_repository_containers_for_galaxy( trans=trans,
- repository=None,
- datatypes=None,
- invalid_tools=None,
- missing_repository_dependencies=missing_repository_dependencies,
- missing_tool_dependencies=missing_tool_dependencies,
- readme_files_dict=readme_files_dict,
- repository_dependencies=installed_repository_dependencies,
- tool_dependencies=installed_tool_dependencies,
- valid_tools=None,
- workflows=None,
- valid_data_managers=None,
- invalid_data_managers=None,
- data_managers_errors=None,
- new_install=True,
- reinstalling=False )
- # Merge the missing_repository_dependencies container contents to the installed_repository_dependencies container.
- containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
- # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
- containers_dict = tool_dependency_util.merge_missing_tool_dependencies_to_installed_container( containers_dict )
+ # Most of the repository contents are set to None since we don't yet know what they are.
+ containers_dict = \
+ container_util.build_repository_containers_for_galaxy( trans=trans,
+ repository=None,
+ datatypes=None,
+ invalid_tools=None,
+ missing_repository_dependencies=missing_repository_dependencies,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=installed_repository_dependencies,
+ tool_dependencies=installed_tool_dependencies,
+ valid_tools=None,
+ workflows=None,
+ valid_data_managers=None,
+ invalid_data_managers=None,
+ data_managers_errors=None,
+ new_install=True,
+ reinstalling=False )
+ if not updating:
+        # If we are installing a new repository and not updating an installed repository, we can merge
+ # the missing_repository_dependencies container contents to the installed_repository_dependencies
+ # container. When updating an installed repository, merging will result in losing newly defined
+ # dependencies included in the updates.
+ containers_dict = repository_dependency_util.merge_missing_repository_dependencies_to_installed_container( containers_dict )
+ # Merge the missing_tool_dependencies container contents to the installed_tool_dependencies container.
+ containers_dict = tool_dependency_util.merge_missing_tool_dependencies_to_installed_container( containers_dict )
return containers_dict
def pull_repository( repo, repository_clone_url, ctx_rev ):
@@ -875,3 +894,23 @@
repository.uninstalled = uninstalled
trans.install_model.context.add( repository )
trans.install_model.context.flush()
+
+def update_repository_record( trans, repository, updated_metadata_dict, updated_changeset_revision, updated_ctx_rev ):
+ """
+ Update a tool_shed_repository database record with new information retrieved from the
+ Tool Shed. This happens when updating an installed repository to a new changeset revision.
+ """
+ repository.metadata = updated_metadata_dict
+ # Update the repository.changeset_revision column in the database.
+ repository.changeset_revision = updated_changeset_revision
+ repository.ctx_rev = updated_ctx_rev
+ # Update the repository.tool_shed_status column in the database.
+ tool_shed_status_dict = suc.get_tool_shed_status_for_installed_repository( trans.app, repository )
+ if tool_shed_status_dict:
+ repository.tool_shed_status = tool_shed_status_dict
+ else:
+ repository.tool_shed_status = None
+ trans.install_model.context.add( repository )
+ trans.install_model.context.flush()
+ trans.install_model.context.refresh( repository )
+ return repository
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -359,13 +359,14 @@
can_install_tool_dependency = True
if can_install_tool_dependency:
package_install_version = package_elem.get( 'version', '1.0' )
- tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=package_name,
- version=package_version,
- type='package',
- status=app.install_model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ tool_dependency = \
+ tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=package_name,
+ version=package_version,
+ type='package',
+ status=app.install_model.ToolDependency.installation_status.INSTALLING,
+ set_status=True )
# Get the information about the current platform in case the tool dependency definition includes tag sets
# for installing compiled binaries.
platform_info_dict = tool_dependency_util.get_platform_info_dict()
@@ -879,25 +880,28 @@
# Tool dependencies of type "set_environment" always have the version attribute set to None.
attr_tup = ( env_var_name, None, 'set_environment' )
if attr_tup in attr_tups_of_dependencies_for_install:
- install_dir = tool_dependency_util.get_tool_dependency_install_dir( app=app,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dependency_type='set_environment',
- tool_dependency_name=env_var_name,
- tool_dependency_version=None )
+ install_dir = \
+ tool_dependency_util.get_tool_dependency_install_dir( app=app,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dependency_type='set_environment',
+ tool_dependency_name=env_var_name,
+ tool_dependency_version=None )
tool_shed_repository_install_dir = get_tool_shed_repository_install_dir( app, tool_shed_repository )
env_var_dict = td_common_util.create_env_var_dict( env_var_elem, tool_shed_repository_install_dir=tool_shed_repository_install_dir )
if env_var_dict:
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
- tool_dependency = tool_dependency_util.create_or_update_tool_dependency( app=app,
- tool_shed_repository=tool_shed_repository,
- name=env_var_name,
- version=None,
- type='set_environment',
- status=app.install_model.ToolDependency.installation_status.INSTALLING,
- set_status=True )
+ status = app.install_model.ToolDependency.installation_status.INSTALLING
+ tool_dependency = \
+ tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=env_var_name,
+ version=None,
+ type='set_environment',
+ status=status,
+ set_status=True )
if env_var_version == '1.0':
# Create this tool dependency's env.sh file.
env_file_builder = fabric_util.EnvFileBuilder( install_dir )
@@ -906,28 +910,34 @@
error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \
( str( tool_dependency.name ), str( return_code ) )
log.debug( error_message )
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.ERROR,
- error_message=error_message,
- remove_from_disk=False )
+ status = app.install_model.ToolDependency.installation_status.ERROR
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=error_message,
+ remove_from_disk=False )
else:
if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.ERROR,
app.install_model.ToolDependency.installation_status.INSTALLED ]:
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.INSTALLED,
- error_message=None,
- remove_from_disk=False )
+ status = app.install_model.ToolDependency.installation_status.INSTALLED
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=None,
+ remove_from_disk=False )
log.debug( 'Environment variable %s set in %s for tool dependency %s.' % \
( str( env_var_name ), str( install_dir ), str( tool_dependency.name ) ) )
else:
error_message = 'Only set_environment version 1.0 is currently supported (i.e., change your tag to be <set_environment version="1.0">).'
- tool_dependency = tool_dependency_util.set_tool_dependency_attributes( app,
- tool_dependency=tool_dependency,
- status=app.install_model.ToolDependency.installation_status.ERROR,
- error_message=error_message,
- remove_from_disk=False )
+ status = app.install_model.ToolDependency.installation_status.ERROR
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=error_message,
+ remove_from_disk=False )
return tool_dependency
def strip_path( fpath ):
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -70,7 +70,7 @@
if display_path is not None:
datatype_util.load_installed_display_applications( trans.app, installed_repository_dict, deactivate=False )
-def get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies ):
+def get_dependencies_for_repository( trans, tool_shed_url, repo_info_dict, includes_tool_dependencies, updating=False ):
"""
Return dictionaries containing the sets of installed and missing tool dependencies and repository
dependencies associated with the repository defined by the received repo_info_dict.
@@ -90,7 +90,7 @@
# Inspect the tool_dependencies dictionary to separate the installed and missing tool dependencies.
# We don't add to installed_td and missing_td here because at this point they are empty.
installed_td, missing_td = \
- get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, tool_dependencies )
+ get_installed_and_missing_tool_dependencies_for_repository( trans, tool_shed_url, tool_dependencies )
# In cases where a repository dependency is required only for compiling a dependent repository's
# tool dependency, the value of repository_dependencies will be an empty dictionary here.
if repository_dependencies:
@@ -101,10 +101,11 @@
name,
repository_owner,
changeset_revision )
- if repository and repository.metadata:
+ if not updating and repository and repository.metadata:
installed_rd, missing_rd = get_installed_and_missing_repository_dependencies( trans, repository )
else:
- installed_rd, missing_rd = get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple )
+ installed_rd, missing_rd = \
+ get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
all_repo_info_dict = get_required_repo_info_dicts( trans, tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
@@ -119,7 +120,8 @@
required_tool_dependencies = {}
for rid in required_repo_info_dicts:
for name, repo_info_tuple in rid.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
+ description, repository_clone_url, changeset_revision, ctx_rev, \
+ repository_owner, rid_repository_dependencies, rid_tool_dependencies = \
suc.get_repo_info_tuple_contents( repo_info_tuple )
if rid_tool_dependencies:
for td_key, td_dict in rid_tool_dependencies.items():
@@ -128,7 +130,9 @@
if required_tool_dependencies:
# Discover and categorize all tool dependencies defined for this repository's repository dependencies.
required_installed_td, required_missing_td = \
- get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, required_tool_dependencies )
+ get_installed_and_missing_tool_dependencies_for_repository( trans,
+ tool_shed_url,
+ required_tool_dependencies )
if required_installed_td:
if not includes_tool_dependencies:
includes_tool_dependencies = True
@@ -168,11 +172,12 @@
def get_installed_and_missing_repository_dependencies( trans, repository ):
"""
- Return the installed and missing repository dependencies for a tool shed repository that has a record in the Galaxy database, but
- may or may not be installed. In this case, the repository dependencies are associated with the repository in the database. Do not
- include a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since these
- special kinds of repository dependencies are really a dependency of the dependent repository's contained tool dependency, and only if
- that tool dependency requires compilation.
+ Return the installed and missing repository dependencies for a tool shed repository that has a record
+ in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are
+ associated with the repository in the database. Do not include a repository dependency if it is required
+ only to compile a tool dependency defined for the dependent repository since these special kinds of repository
+ dependencies are really a dependency of the dependent repository's contained tool dependency, and only
+ if that tool dependency requires compilation.
"""
missing_repository_dependencies = {}
installed_repository_dependencies = {}
@@ -228,7 +233,7 @@
missing_repository_dependencies[ 'description' ] = description
return installed_repository_dependencies, missing_repository_dependencies
-def get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple ):
+def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( trans, repo_info_tuple ):
"""
Parse the received repository_dependencies dictionary that is associated with a repository being
installed into Galaxy for the first time and attempt to determine repository dependencies that are
@@ -264,13 +269,13 @@
tmp_repo_info_tuple )
if repository:
new_rd_tup = [ tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- repository.id,
- repository.status ]
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ repository.id,
+ repository.status ]
if repository.status == trans.install_model.ToolShedRepository.installation_status.INSTALLED:
if new_rd_tup not in installed_rd_tups:
installed_rd_tups.append( new_rd_tup )
@@ -285,12 +290,12 @@
missing_rd_tups.append( new_rd_tup )
else:
new_rd_tup = [ tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td,
- None,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td,
+ None,
'Never installed' ]
if not util.asbool( only_if_compiling_contained_td ):
                            # A repository dependency that is not installed will not be considered missing if its value for
@@ -307,11 +312,17 @@
missing_repository_dependencies[ 'description' ] = description
return installed_repository_dependencies, missing_repository_dependencies
-def get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, tool_dependencies_dict ):
+def get_installed_and_missing_tool_dependencies_for_repository( trans, tool_shed_url, tool_dependencies_dict ):
"""
Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories
being installed into Galaxy.
"""
+ # FIXME: This implementation breaks when updates to a repository contain dependencies that result in
+ # multiple entries for a specific tool dependency. A scenario where this can happen is where 2 repositories
+ # define the same dependency internally (not using the complex repository dependency definition to a separate
+ # package repository approach). If 2 repositories contain the same tool_dependencies.xml file, one dependency
+ # will be lost since the values in these returned dictionaries are not lists. All tool dependency dictionaries
+ # should have lists as values. These scenarios are probably extreme corner cases, but still should be handled.
installed_tool_dependencies = {}
missing_tool_dependencies = {}
if tool_dependencies_dict:
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/util/container_util.py
--- a/lib/tool_shed/util/container_util.py
+++ b/lib/tool_shed/util/container_util.py
@@ -821,8 +821,8 @@
tool_dependencies = metadata[ 'tool_dependencies' ]
if trans.webapp.name == 'tool_shed':
if 'orphan_tool_dependencies' in metadata:
- # The use of the orphan_tool_dependencies category in metadata has been deprecated, but we still need to check in case
- # the metadata is out of date.
+ # The use of the orphan_tool_dependencies category in metadata has been deprecated,
+ # but we still need to check in case the metadata is out of date.
orphan_tool_dependencies = metadata[ 'orphan_tool_dependencies' ]
tool_dependencies.update( orphan_tool_dependencies )
# Tool dependencies can be categorized as orphans only if the repository contains tools.
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/util/encoding_util.py
--- a/lib/tool_shed/util/encoding_util.py
+++ b/lib/tool_shed/util/encoding_util.py
@@ -21,7 +21,6 @@
try:
values = json.loads( value )
except Exception, e:
- #log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
pass
if values is not None:
try:
@@ -34,7 +33,7 @@
return values
def tool_shed_encode( val ):
- if isinstance( val, dict ):
+ if isinstance( val, dict ) or isinstance( val, list ):
value = json.dumps( val )
else:
value = val
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -662,9 +662,11 @@
# See if we have a repository dependencies defined.
if name == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
path_to_repository_dependencies_config = os.path.join( root, name )
- metadata_dict, error_message = generate_repository_dependency_metadata( app,
- path_to_repository_dependencies_config,
- metadata_dict )
+ metadata_dict, error_message = \
+ generate_repository_dependency_metadata( app,
+ path_to_repository_dependencies_config,
+ metadata_dict,
+ updating_installed_repository=updating_installed_repository )
if error_message:
invalid_file_tups.append( ( name, error_message ) )
# See if we have one or more READ_ME files.
@@ -795,7 +797,10 @@
# is True, the tool dependency definition will be set as invalid. This is currently the only case
# where a tool dependency definition is considered invalid.
repository_dependency_tup, repository_dependency_is_valid, error_message = \
- handle_repository_elem( app=app, repository_elem=sub_elem, only_if_compiling_contained_td=False )
+ handle_repository_elem( app=app,
+ repository_elem=sub_elem,
+ only_if_compiling_contained_td=False,
+ updating_installed_repository=False )
elif sub_elem.tag == 'install':
package_install_version = sub_elem.get( 'version', '1.0' )
if package_install_version == '1.0':
@@ -829,7 +834,8 @@
repository_dependency_tup, repository_dependency_is_valid, error_message = \
handle_repository_elem( app=app,
repository_elem=sub_action_elem,
- only_if_compiling_contained_td=True )
+ only_if_compiling_contained_td=True,
+ updating_installed_repository=False )
if requirements_dict:
dependency_key = '%s/%s' % ( package_name, package_version )
if repository_dependency_is_valid:
@@ -841,10 +847,10 @@
return valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, \
repository_dependency_is_valid, error_message
-def generate_repository_dependency_metadata( app, repository_dependencies_config, metadata_dict ):
+def generate_repository_dependency_metadata( app, repository_dependencies_config, metadata_dict, updating_installed_repository=False ):
"""
- Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method
- is called from the tool shed as well as from Galaxy.
+ Generate a repository dependencies dictionary based on valid information defined in the received
+ repository_dependencies_config. This method is called from the tool shed as well as from Galaxy.
"""
error_message = ''
# Make sure we're looking at a valid repository_dependencies.xml file.
@@ -861,7 +867,10 @@
valid_repository_dependency_tups = []
for repository_elem in root.findall( 'repository' ):
repository_dependency_tup, repository_dependency_is_valid, err_msg = \
- handle_repository_elem( app, repository_elem, only_if_compiling_contained_td=False )
+ handle_repository_elem( app,
+ repository_elem,
+ only_if_compiling_contained_td=False,
+ updating_installed_repository=updating_installed_repository )
if repository_dependency_is_valid:
valid_repository_dependency_tups.append( repository_dependency_tup )
else:
@@ -885,11 +894,12 @@
metadata_dict[ 'repository_dependencies' ] = valid_repository_dependencies_dict
return metadata_dict, error_message
-def generate_tool_dependency_metadata( app, repository, changeset_revision, repository_clone_url, tool_dependencies_config, metadata_dict,
- original_repository_metadata=None ):
+def generate_tool_dependency_metadata( app, repository, changeset_revision, repository_clone_url, tool_dependencies_config,
+ metadata_dict, original_repository_metadata=None ):
"""
- If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository,
- then update the received metadata_dict with information from the parsed tool_dependencies_config.
+ If the combination of name, version and type of each element is defined in the <requirement> tag for
+ at least one tool in the repository, then update the received metadata_dict with information from the
+ parsed tool_dependencies_config.
"""
error_message = ''
if original_repository_metadata:
@@ -1171,7 +1181,7 @@
deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] )
return updated_tool_dependency_names, deleted_tool_dependency_names
-def handle_repository_elem( app, repository_elem, only_if_compiling_contained_td=False ):
+def handle_repository_elem( app, repository_elem, only_if_compiling_contained_td=False, updating_installed_repository=False ):
"""
Process the received repository_elem which is a <repository> tag either from a repository_dependencies.xml
file or a tool_dependencies.xml file. If the former, we're generating repository dependencies metadata for
@@ -1187,14 +1197,17 @@
changeset_revision = repository_elem.get( 'changeset_revision', None )
prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
if app.name == 'galaxy':
- # We're installing a repository into Galaxy, so make sure its contained repository dependency definition
- # is valid.
- if toolshed is None or name is None or owner is None or changeset_revision is None:
- # Raise an exception here instead of returning an error_message to keep the installation from
- # proceeding. Reaching here implies a bug in the Tool Shed framework.
- error_message = 'Installation halted because the following repository dependency definition is invalid:\n'
- error_message += xml_util.xml_to_string( repository_elem, use_indent=True )
- raise Exception( error_message )
+ if updating_installed_repository:
+ pass
+ else:
+ # We're installing a repository into Galaxy, so make sure its contained repository dependency definition
+ # is valid.
+ if toolshed is None or name is None or owner is None or changeset_revision is None:
+ # Raise an exception here instead of returning an error_message to keep the installation from
+ # proceeding. Reaching here implies a bug in the Tool Shed framework.
+ error_message = 'Installation halted because the following repository dependency definition is invalid:\n'
+ error_message += xml_util.xml_to_string( repository_elem, use_indent=True )
+ raise Exception( error_message )
if not toolshed:
# Default to the current tool shed.
toolshed = str( url_for( '/', qualified=True ) ).rstrip( '/' )
@@ -1230,6 +1243,14 @@
updated_changeset_revision )
if repository:
return repository_dependency_tup, is_valid, error_message
+ if updating_installed_repository:
+ # The repository dependency was included in an update to the installed repository, so it will
+ # not yet be installed. Return the tuple for later installation.
+ return repository_dependency_tup, is_valid, error_message
+ if updating_installed_repository:
+ # The repository dependency was included in an update to the installed repository, so it will not yet
+ # be installed. Return the tuple for later installation.
+ return repository_dependency_tup, is_valid, error_message
# Don't generate an error message for missing repository dependencies that are required only if compiling the
# dependent repository's tool dependency.
if not only_if_compiling_contained_td:
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -90,7 +90,6 @@
if tree is None:
return tool_dependency_objects
root = tree.getroot()
- fabric_version_checked = False
for elem in root:
tool_dependency_type = elem.tag
if tool_dependency_type == 'package':
@@ -574,8 +573,8 @@
sa_session.flush()
return tool_dependency
-def sync_database_with_file_system( app, tool_shed_repository, tool_dependency_name, tool_dependency_version, tool_dependency_install_dir,
- tool_dependency_type='package' ):
+def sync_database_with_file_system( app, tool_shed_repository, tool_dependency_name, tool_dependency_version,
+ tool_dependency_install_dir, tool_dependency_type='package' ):
"""
The installation directory defined by the received tool_dependency_install_dir exists, so check for the presence
of fabric_util.INSTALLATION_LOG. If the files exists, we'll assume the tool dependency is installed, but not
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -90,6 +90,7 @@
return str( self.count )
repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
+ missing_repository_dependencies_root_folder = containers_dict.get( 'missing_repository_dependencies', None )
tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
env_settings_heaader_row_displayed = False
@@ -112,7 +113,7 @@
</p></div></div>
- %if repository_dependencies_root_folder:
+ %if repository_dependencies_root_folder or missing_repository_dependencies_root_folder:
%if repository_dependencies_check_box is not None:
<div class="form-row">
%if export:
@@ -131,14 +132,26 @@
</div><div style="clear: both"></div>
%endif
- <div class="form-row">
- <p/>
- <% row_counter = RowCounter() %>
- <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table">
- ${render_folder( repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
- </table>
- <div style="clear: both"></div>
- </div>
+ %if repository_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table">
+ ${render_folder( repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ %if missing_repository_dependencies_root_folder:
+ <div class="form-row">
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table">
+ ${render_folder( missing_repository_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
%endif
%if tool_dependencies_root_folder or missing_tool_dependencies_root_folder:
%if install_tool_dependencies_check_box is not None:
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 templates/admin/tool_shed_repository/install_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/install_tool_dependencies.mako
+++ /dev/null
@@ -1,81 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-
-<% import os %>
-
-%if message:
- ${render_msg( message, status )}
-%endif
-
-<div class="warningmessage">
- <p>
- The tool dependencies listed below can be automatically installed with the repository. Installing them provides significant
- benefits and Galaxy includes various features to manage them.
- </p>
- <p>
- Each of these dependencies may require their own build requirements (e.g., CMake, g++, etc). Galaxy will not attempt to install
- these build requirements, so if any are missing from your environment tool dependency installation may partially fail. The
- repository and all of it's contents will be installed in any case.
- </p>
- <p>
- If tool dependency installation fails in any way, you can install the missing build requirements and have Galaxy attempt to install
- the tool dependencies again using the <b>Install tool dependencies</b> pop-up menu option on the <b>Manage repository</b> page.
- </p>
-</div>
-
-<div class="toolForm">
- <div class="toolFormBody">
- <form name="install_tool_dependenceies" id="install_tool_dependenceies" action="${h.url_for( controller='admin_toolshed', action='install_tool_dependencies' )}" method="post" >
- <div class="form-row">
- <table class="grid">
- <tr><td colspan="4" bgcolor="#D8D8D8"><b>Tool dependencies</b></td></tr>
- <tr>
- <th>Name</th>
- <th>Version</th>
- <th>Type</th>
- <th>Install directory</th>
- </tr>
- <% tool_shed_repository = None %>
- %for tool_dependency in tool_dependencies:
- <input type="hidden" name="tool_dependency_ids" value="${trans.security.encode_id( tool_dependency.id )}"/>
- <%
- readme_text = None
- if tool_shed_repository is None:
- tool_shed_repository = tool_dependency.tool_shed_repository
- metadata = tool_shed_repository.metadata
- tool_dependencies_dict = metadata[ 'tool_dependencies' ]
- for key, requirements_dict in tool_dependencies_dict.items():
- key_items = key.split( '/' )
- key_name = key_items[ 0 ]
- key_version = key_items[ 1 ]
- if key_name == tool_dependency.name and key_version == tool_dependency.version:
- readme_text = requirements_dict.get( 'readme', None )
- install_dir = os.path.join( trans.app.config.tool_dependency_dir,
- tool_dependency.name,
- tool_dependency.version,
- tool_shed_repository.owner,
- tool_shed_repository.name,
- tool_shed_repository.installed_changeset_revision )
- %>
- %if not os.path.exists( install_dir ):
- <tr>
- <td>${tool_dependency.name}</td>
- <td>${tool_dependency.version}</td>
- <td>${tool_dependency.type}</td>
- <td>${install_dir}</td>
- </tr>
- %if readme_text:
- <tr><td colspan="4" bgcolor="#FFFFCC">${tool_dependency.name} ${tool_dependency.version} requirements and installation information</td></tr>
- <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
- %endif
- %endif
- %endfor
- </table>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <input type="submit" name="install_tool_dependencies_button" value="Install"/>
- </div>
- </form>
- </div>
-</div>
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 templates/admin/tool_shed_repository/install_tool_dependencies_with_update.mako
--- /dev/null
+++ b/templates/admin/tool_shed_repository/install_tool_dependencies_with_update.mako
@@ -0,0 +1,91 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/admin/tool_shed_repository/repository_actions_menu.mako" import="*" />
+<%namespace file="/message.mako" import="render_msg" />
+
+<%
+ import os
+ from tool_shed.util.common_util import parse_repository_dependency_tuple
+%>
+
+${render_galaxy_repository_actions( repository )}
+
+%if message:
+ ${render_msg( message, status )}
+%endif
+
+<div class="warningmessage">
+ <p>
+ The updates to the <b>${repository.name}</b> repository require the following packages. Click the <b>Install</b> button to install them.
+ Installing some packages may take a while, but you can continue to use Galaxy during installation.
+ </p>
+</div>
+
+<div class="toolForm">
+ <div class="toolFormBody">
+ <form name="install_tool_dependencies_with_update" id="install_tool_dependencies_with_update" action="${h.url_for( controller='admin_toolshed', action='install_tool_dependencies_with_update' )}" method="post" >
+ <input type="hidden" name="updating_repository_id" value="${updating_repository_id}"/>
+ <input type="hidden" name="updating_to_ctx_rev" value="${updating_to_ctx_rev}"/>
+ <input type="hidden" name="updating_to_changeset_revision" value="${updating_to_changeset_revision}"/>
+ <input type="hidden" name="encoded_updated_metadata" value="${encoded_updated_metadata}"/>
+ <input type="hidden" name="encoded_relative_install_dir" value="${encoded_relative_install_dir}"/>
+ <input type="hidden" name="encoded_tool_dependencies_dict" value="${encoded_tool_dependencies_dict}"/>
+ %if tool_dependencies_dict:
+ %if install_tool_dependencies_check_box is not None:
+ <div class="form-row">
+ <label>Handle tool dependencies?</label>
+ <% disabled = trans.app.config.tool_dependency_dir is None %>
+ ${install_tool_dependencies_check_box.get_html( disabled=disabled )}
+ <div class="toolParamHelp" style="clear: both;">
+ %if disabled:
+ Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies.
+ %else:
+ Un-check to skip automatic handling of these tool dependencies.
+ %endif
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ %endif
+ <div class="form-row">
+ <table class="grid">
+ <tr><td colspan="4" bgcolor="#D8D8D8"><b>New tool dependencies included in update</b></td></tr>
+ <tr>
+ <th>Name</th>
+ <th>Version</th>
+ <th>Install directory</th>
+ </tr>
+ %for key, requirements_dict in tool_dependencies_dict.items():
+ <%
+ readme_text = None
+ key_items = key.split( '/' )
+ key_name = key_items[ 0 ]
+ key_version = key_items[ 1 ]
+ readme_text = requirements_dict.get( 'readme', None )
+ install_dir = os.path.join( trans.app.config.tool_dependency_dir,
+ key_name,
+ key_version,
+ repository.owner,
+ repository.name,
+ repository.installed_changeset_revision )
+ %>
+ %if not os.path.exists( install_dir ):
+ <tr>
+ <td>${key_name}</td>
+ <td>${key_version}</td>
+ <td>${install_dir}</td>
+ </tr>
+ %if readme_text:
+ <tr><td colspan="4" bgcolor="#FFFFCC">${key_name} ${key_version} requirements and installation information</td></tr>
+ <tr><td colspan="4"><pre>${readme_text}</pre></td></tr>
+ %endif
+ %endif
+ %endfor
+ </table>
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <div class="form-row">
+ <input type="submit" name="install_tool_dependencies_with_update_button" value="Install"/>
+ </div>
+ </form>
+ </div>
+</div>
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako
--- a/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako
+++ b/templates/admin/tool_shed_repository/manage_repository_tool_dependencies.mako
@@ -30,8 +30,6 @@
<tr><th bgcolor="#D8D8D8">Name</th><th bgcolor="#D8D8D8">Version</th><th bgcolor="#D8D8D8">Type</th><th bgcolor="#D8D8D8">Status</th><th bgcolor="#D8D8D8">Error</th></tr>
%for tool_dependency in repository.tool_dependencies:
<%
- # Tool dependencies cannot be uninstalled if they have a status of 'Installed'. Only the containing repository
- # can be uninstalled (only if it has no dependent repositories) if a tool dependency has been successfully installed.
if tool_dependency.error_message:
error_message = tool_dependency.error_message
else:
@@ -41,8 +39,7 @@
trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
can_install = True
if not can_uninstall:
- if tool_dependency.status not in [ trans.install_model.ToolDependency.installation_status.INSTALLED,
- trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
+ if tool_dependency.status not in [ trans.install_model.ToolDependency.installation_status.NEVER_INSTALLED,
trans.install_model.ToolDependency.installation_status.UNINSTALLED ]:
can_uninstall = True
%>
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
--- a/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
+++ b/templates/admin/tool_shed_repository/select_shed_tool_panel_config.mako
@@ -46,8 +46,8 @@
<div class="warningmessage"><p>
- The core Galaxy development team does not maintain the contents of many Galaxy tool shed repositories. Some repository tools
- may include code that produces malicious behavior, so be aware of what you are installing.
+ The Galaxy development team does not maintain the contents of many Galaxy Tool Shed repositories. Some
+ repository tools may include code that produces malicious behavior, so be aware of what you are installing.
</p><p>
If you discover a repository that causes problems after installation, contact <a href="http://wiki.g2.bx.psu.edu/Support" target="_blank">Galaxy support</a>,
@@ -63,6 +63,11 @@
<form name="select_shed_tool_panel_config" id="select_shed_tool_panel_config" action="${h.url_for( controller='admin_toolshed', action='prepare_for_install' )}" method="post" ><div class="form-row"><input type="hidden" name="encoded_repo_info_dicts" value="${encoded_repo_info_dicts}" />
+ <input type="hidden" name="updating" value="${updating}" />
+ <input type="hidden" name="updating_repository_id" value="${updating_repository_id}" />
+ <input type="hidden" name="updating_to_ctx_rev" value="${updating_to_ctx_rev}" />
+ <input type="hidden" name="updating_to_changeset_revision" value="${updating_to_changeset_revision}" />
+ <input type="hidden" name="encoded_updated_metadata" value="${encoded_updated_metadata}" /><input type="hidden" name="includes_tools" value="${includes_tools}" /><input type="hidden" name="includes_tool_dependencies" value="${includes_tool_dependencies}" /><input type="hidden" name="includes_tools_for_display_in_tool_panel" value="${includes_tools_for_display_in_tool_panel}" />
diff -r 09985439d17f9bef026554938b05d0d6eedd06cb -r 90b4baa5c2b18d49bcea5a0807f0df8924abebb1 templates/admin/tool_shed_repository/select_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/select_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/select_tool_panel_section.mako
@@ -46,8 +46,8 @@
<div class="warningmessage"><p>
- The core Galaxy development team does not maintain the contents of many Galaxy tool shed repositories. Some repository tools
- may include code that produces malicious behavior, so be aware of what you are installing.
+ The Galaxy development team does not maintain the contents of many Galaxy Tool Shed repositories. Some
+ repository tools may include code that produces malicious behavior, so be aware of what you are installing.
</p><p>
If you discover a repository that causes problems after installation, contact <a href="http://wiki.g2.bx.psu.edu/Support" target="_blank">Galaxy support</a>,
@@ -67,6 +67,11 @@
<input type="hidden" name="includes_tools_for_display_in_tool_panel" value="${includes_tools_for_display_in_tool_panel}" /><input type="hidden" name="tool_shed_url" value="${tool_shed_url}" /><input type="hidden" name="encoded_repo_info_dicts" value="${encoded_repo_info_dicts}" />
+ <input type="hidden" name="updating" value="${updating}" />
+ <input type="hidden" name="updating_repository_id" value="${updating_repository_id}" />
+ <input type="hidden" name="updating_to_ctx_rev" value="${updating_to_ctx_rev}" />
+ <input type="hidden" name="updating_to_changeset_revision" value="${updating_to_changeset_revision}" />
+ <input type="hidden" name="encoded_updated_metadata" value="${encoded_updated_metadata}" /></div><div style="clear: both"></div><% readme_files_dict = containers_dict.get( 'readme_files', None ) %>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled
for this repository.