commit/galaxy-central: 3 new changesets
3 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/9ca6ac4cba2f/ Changeset: 9ca6ac4cba2f User: jgoecks Date: 2013-09-23 21:08:06 Summary: Trackster refactoring: (a) remove use of the variable 'resolution' as much as possible because it is a duplicate, less useful version of 'w_scale'; (b) fold the _get_tile_bounds() function into _draw because it's only needed there. Affected #: 1 file diff -r 66d484a18d198725ab5f388d4e538ac86d9f081b -r 9ca6ac4cba2ff876823aeb9967bc981eedafc237 static/scripts/viz/trackster/tracks.js --- a/static/scripts/viz/trackster/tracks.js +++ b/static/scripts/viz/trackster/tracks.js @@ -1508,11 +1508,8 @@ // -- Drawing code -- - // Calculate resolution in both pixels/base and bases/pixel. - // TODO: require minimum difference in new resolution to update? This - // would help alleviate issues when window is being resized. - this.resolution_b_px = (this.high - this.low) / this.viewport_container.width(); - this.resolution_px_b = 1 / this.resolution_b_px; + // Resolution is a pixel density. + this.resolution_px_b = this.viewport_container.width() / (this.high - this.low); // Overview var left_px = ( this.low / (this.max_high - this.max_low) * this.overview_viewport.width() ) || 0; @@ -2110,12 +2107,12 @@ /** * Tiles drawn by tracks. */ -var Tile = function(track, region, resolution, canvas, data) { +var Tile = function(track, region, w_scale, canvas, data) { this.track = track; this.region = region; this.low = region.get('start'); this.high = region.get('end'); - this.resolution = resolution; + this.w_scale = w_scale; // Wrap element in div for background and explicitly set height. Use canvas // height attribute because canvas may not have height if it is not in document yet. this.html_elt = $("<div class='track-tile'/>").append(canvas).height( $(canvas).attr("height") ); @@ -2128,14 +2125,14 @@ */ Tile.prototype.predisplay_actions = function() {}; -var LineTrackTile = function(track, region, resolution, canvas, data) { - Tile.call(this, track, region, resolution, canvas, data); +var LineTrackTile = function(track, region, w_scale, canvas, data) { + Tile.call(this, track, region, w_scale, canvas, data); }; LineTrackTile.prototype.predisplay_actions = function() {}; -var FeatureTrackTile = function(track, region, resolution, canvas, data, w_scale, mode, message, all_slotted, feature_mapper) { +var FeatureTrackTile = function(track, region, w_scale, canvas, data, w_scale, mode, message, all_slotted, feature_mapper) { // Attribute init. - Tile.call(this, track, region, resolution, canvas, data); + Tile.call(this, track, region, w_scale, canvas, data); this.mode = mode; this.all_slotted = all_slotted; this.feature_mapper = feature_mapper; @@ -2170,7 +2167,7 @@ more_down_icon.click(function() { // Mark tile as stale, request more data, and redraw track. tile.stale = true; - track.data_manager.get_more_data(tile_region, track.mode, tile.resolution, {}, track.data_manager.DEEP_DATA_REQ); + track.data_manager.get_more_data(tile_region, track.mode, 1 / tile.w_scale, {}, track.data_manager.DEEP_DATA_REQ); $(".tooltip").hide(); track.request_draw(); }).dblclick(function(e) { @@ -2181,7 +2178,7 @@ more_across_icon.click(function() { // Mark tile as stale, request more data, and redraw track.
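// (Same conversion as the deep-data request above: the data manager still expects a resolution in bases/pixel, so the old tile.resolution argument is recomputed on the fly as 1 / tile.w_scale; w_scale, in pixels/base, is its exact reciprocal.)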
tile.stale = true; - track.data_manager.get_more_data(tile_region, track.mode, tile.resolution, {}, track.data_manager.BROAD_DATA_REQ); + track.data_manager.get_more_data(tile_region, track.mode, 1 / tile.w_scale, {}, track.data_manager.BROAD_DATA_REQ); $(".tooltip").hide(); track.request_draw(); }).dblclick(function(e) { @@ -2834,7 +2831,6 @@ css_class: "exclamation", on_click_fn: function(track) { $(".tooltip").remove(); - // HACKish: is it always reasonble to use view to get w_scale/current resolution? track.slotters[ track.view.resolution_px_b ].max_rows *= 2; track.request_draw({ clear_tile_cache: true }); }, @@ -3012,14 +3008,14 @@ range = high - low, width = this.view.container.width(), w_scale = this.view.resolution_px_b, - resolution = this.view.resolution_b_px; + resolution = 1 / w_scale; // For overview, adjust high, low, resolution, and w_scale. if (this.is_overview) { low = this.view.max_low; high = this.view.max_high; - resolution = ( view.max_high - view.max_low ) / width; - w_scale = 1 / resolution; + w_scale = width / (view.max_high - view.max_low); + resolution = 1 / w_scale; } this.before_draw(); @@ -3039,14 +3035,22 @@ var // Index of first tile that overlaps visible region. tile_index = Math.floor( low / (resolution * TILE_SIZE) ), + tile_low, tile_region, tile_promise, tile_promises = [], tiles = []; // Draw tiles. while ( ( tile_index * TILE_SIZE * resolution ) < high ) { - tile_region = this._get_tile_bounds(tile_index, resolution); - tile_promise = this.draw_helper(tile_region, resolution, w_scale, options); + // Get tile region. + tile_low = Math.floor(tile_index * TILE_SIZE * resolution); + tile_region = new visualization.GenomeRegion({ + chrom: this.view.chrom, + start: tile_low, + // Tile high cannot be larger than view.max_high, which is the chromosome length. + end: Math.min( tile_low + Math.ceil( TILE_SIZE * resolution ), this.view.max_high ) + }); + tile_promise = this.draw_helper(tile_region, w_scale, options); tile_promises.push(tile_promise); $.when(tile_promise).then(function(tile) { tiles.push(tile); @@ -3130,7 +3134,7 @@ _.each(tiles, function(tile) { if (!(tile instanceof LineTrackTile)) { tile.html_elt.remove(); - track.draw_helper(tile.region, tile.resolution, w_scale, { force: true, mode: 'Coverage' }); + track.draw_helper(tile.region, w_scale, { force: true, mode: 'Coverage' }); } }); @@ -3187,7 +3191,7 @@ * -force: force a redraw rather than use cached tiles (default: false) * -data_fetch: fetch data if necessary (default: true) */ - draw_helper: function(region, resolution, w_scale, options) { + draw_helper: function(region, w_scale, options) { // Init options if necessary to avoid having to check if options defined. if (!options) { options = {}; } @@ -3195,6 +3199,7 @@ // Fetch data as long as data_fetch option is not set to false. data_fetch = !( options.data_fetch === false ), mode = options.mode || this.mode, + resolution = 1 / w_scale, // Useful vars. track = this, @@ -3289,7 +3294,7 @@ ctx.globalCompositeOperation = "source-over"; } _.each(drawables, function(d, i) { - tile = d.draw_tile(tracks_data[i], ctx, drawing_modes[i], resolution, region, w_scale, seq_data); + tile = d.draw_tile(tracks_data[i], ctx, drawing_modes[i], region, w_scale, seq_data); }); // Don't cache, show if no tile. @@ -3315,12 +3320,12 @@ /** * Draw line (bigwig) data onto tile.
*/ - _draw_line_track_tile: function(result, ctx, mode, resolution, region, w_scale) { + _draw_line_track_tile: function(result, ctx, mode, region, w_scale) { var canvas = ctx.canvas, painter = new painters.LinePainter(result.data, region.get('start'), region.get('end'), this.prefs, mode); painter.draw(ctx, canvas.width, canvas.height, w_scale); - return new LineTrackTile(this, region, resolution, canvas, result.data); + return new LineTrackTile(this, region, w_scale, canvas, result.data); }, /** @@ -3328,12 +3333,11 @@ * @param result result from server * @param ctx canvas context to draw on * @param mode mode to draw in - * @param resolution view resolution * @param region region to draw on tile * @param w_scale pixels per base * @param ref_seq reference sequence data */ - draw_tile: function(result, ctx, mode, resolution, region, w_scale, ref_seq) {}, + draw_tile: function(result, ctx, mode, region, w_scale, ref_seq) {}, /** * Show track tile and perform associated actions. Showing tile may actually move @@ -3384,21 +3388,6 @@ }, /** - * Returns a genome region that corresponds to a tile at a particular resolution - */ - _get_tile_bounds: function(tile_index, resolution) { - var tile_low = Math.floor( tile_index * TILE_SIZE * resolution ), - tile_length = Math.ceil( TILE_SIZE * resolution ), - // Tile high cannot be larger than view.max_high, which the chromosome length. - tile_high = (tile_low + tile_length <= this.view.max_high ? tile_low + tile_length : this.view.max_high); - return new visualization.GenomeRegion({ - chrom: this.view.chrom, - start: tile_low, - end: tile_high - }); - }, - - /** * Utility function that creates a label string describing the region and parameters of a track's tool. */ tool_region_and_parameters_str: function(region) { @@ -3715,7 +3704,7 @@ for (var i = 0; i < tiles.length; i++) { var tile = tiles[i]; if (tile.html_elt.find("canvas").height() !== max_height) { - this.draw_helper(tile.region, tile.resolution, w_scale, { force: true, height: max_height } ); + this.draw_helper(tile.region, w_scale, { force: true, height: max_height } ); tile.html_elt.remove(); } } @@ -3767,10 +3756,10 @@ /** * Retrieves data and draws tile if reference data can be displayed. */ - draw_helper: function(region, resolution, w_scale, options) { + draw_helper: function(region, w_scale, options) { if (w_scale > this.view.canvas_manager.char_width_px) { this.tiles_div.show(); - return TiledTrack.prototype.draw_helper.call(this, region, resolution, w_scale, options); + return TiledTrack.prototype.draw_helper.call(this, region, w_scale, options); } else { this.tiles_div.hide(); @@ -3783,7 +3772,7 @@ /** * Draw ReferenceTrack tile. */ - draw_tile: function(data, ctx, mode, resolution, region, w_scale) { + draw_tile: function(data, ctx, mode, region, w_scale) { // Try to subset data. var subset = this.data_manager.subset_entry(data, region), seq_data = subset.data; @@ -3796,7 +3785,7 @@ ctx.fillStyle = this.view.get_base_color(seq_data[c]); ctx.fillText(seq_data[c], Math.floor(c * w_scale), 10); } - return new Tile(this, region, resolution, canvas, subset); + return new Tile(this, region, w_scale, canvas, subset); } }); @@ -3841,12 +3830,12 @@ /** * Draw track tile. 
*/ - draw_tile: function(result, ctx, mode, resolution, region, w_scale) { - return this._draw_line_track_tile(result, ctx, mode, resolution, region, w_scale); + draw_tile: function(result, ctx, mode, region, w_scale) { + return this._draw_line_track_tile(result, ctx, mode, region, w_scale); }, /** - * Subset line tracks only if resolution is single-base pair. + * Subset data only if data is at single-base pair resolution. */ can_subset: function(entry) { return (entry.data[1][0] - entry.data[0][0] === 1); @@ -3894,13 +3883,13 @@ /** * Draw tile. */ - draw_tile: function(result, ctx, mode, resolution, region, w_scale) { + draw_tile: function(result, ctx, mode, region, w_scale) { // Paint onto canvas. var canvas = ctx.canvas, painter = new painters.DiagonalHeatmapPainter(result.data, region.get('start'), region.get('end'), this.prefs, mode); painter.draw(ctx, canvas.width, canvas.height, w_scale); - return new Tile(this, region, resolution, canvas, result.data); + return new Tile(this, region, w_scale, canvas, result.data); } }); @@ -3998,7 +3987,7 @@ var tile = tiles[i]; if (tile.max_val !== global_max) { tile.html_elt.remove(); - track.draw_helper(tile.index, tile.resolution, w_scale, { more_tile_data: { force: true, max: global_max } } ); + track.draw_helper(tile.index, w_scale, { more_tile_data: { force: true, max: global_max } } ); } } } @@ -4153,12 +4142,11 @@ * @param result result from server * @param cxt canvas context to draw on * @param mode mode to draw in - * @param resolution view resolution * @param region region to draw on tile * @param w_scale pixels per base * @param ref_seq reference sequence data */ - draw_tile: function(result, ctx, mode, resolution, region, w_scale, ref_seq) { + draw_tile: function(result, ctx, mode, region, w_scale, ref_seq) { var track = this, canvas = ctx.canvas, tile_low = region.get('start'), @@ -4167,7 +4155,7 @@ // If data is line track data, draw line track tile. if (result.dataset_type === 'bigwig') { - return this._draw_line_track_tile(result, ctx, mode, resolution, region, w_scale); + return this._draw_line_track_tile(result, ctx, mode, region, w_scale); } // Handle row-by-row tracks @@ -4222,7 +4210,7 @@ feature_mapper.translation = -left_offset; } - return new FeatureTrackTile(track, region, resolution, canvas, result.data, w_scale, mode, result.message, all_slotted, feature_mapper); + return new FeatureTrackTile(track, region, w_scale, canvas, result.data, w_scale, mode, result.message, all_slotted, feature_mapper); } }); @@ -4263,17 +4251,17 @@ /** * Draw tile. */ - draw_tile: function(result, ctx, mode, resolution, region, w_scale) { + draw_tile: function(result, ctx, mode, region, w_scale) { // Data could be coverage data or variant data. 
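// (bigwig results here are summary coverage and reuse the shared line-track painter via _draw_line_track_tile; 'variant' results fall through to this track's own painter, which colors bases with view.get_base_color.)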
if (result.dataset_type === 'bigwig') { - return this._draw_line_track_tile(result, ctx, "Histogram", resolution, region, w_scale); + return this._draw_line_track_tile(result, ctx, "Histogram", region, w_scale); } else { // result.dataset_type === 'variant' var view = this.view, painter = new (this.painter)(result.data, region.get('start'), region.get('end'), this.prefs, mode, function(b) { return view.get_base_color(b); }); painter.draw(ctx, ctx.canvas.width, ctx.canvas.height, w_scale); - return new Tile(this, region, resolution, ctx.canvas, result.data); + return new Tile(this, region, w_scale, ctx.canvas, result.data); } }, https://bitbucket.org/galaxy/galaxy-central/commits/5db5c07f9154/ Changeset: 5db5c07f9154 User: jgoecks Date: 2013-09-24 20:26:58 Summary: Trackster CSS: add spacing to less file and shrink border around tracks. Affected #: 2 files diff -r 9ca6ac4cba2ff876823aeb9967bc981eedafc237 -r 5db5c07f9154039f44447ebecb4f87832dbaf030 static/style/blue/trackster.css --- a/static/style/blue/trackster.css +++ b/static/style/blue/trackster.css @@ -27,7 +27,7 @@ .track{background:white} .track-header{text-align:left;padding:4px 0px;color:#666} .track-header .menubutton{margin-left:0px} -.tiles{text-align:center;border-top:1px solid #eee;border-bottom:2px solid #eee;background:#eee url('../images/tracks/diag_bg.gif');min-height:16px;position:relative;overflow:hidden} +.tiles{text-align:center;background:#eee url('../images/tracks/diag_bg.gif');min-height:16px;position:relative;overflow:hidden} .overlay{position:absolute;left:0;top:0} .track-name{float:left;margin-top:2px;height:16px} .label-track .track-content{background:white} diff -r 9ca6ac4cba2ff876823aeb9967bc981eedafc237 -r 5db5c07f9154039f44447ebecb4f87832dbaf030 static/style/src/less/trackster.less --- a/static/style/src/less/trackster.less +++ b/static/style/src/less/trackster.less @@ -3,12 +3,14 @@ overflow-y: auto; background: white; } + .trackster-nav-container { width: 100%; /* Used to push nav into title bar: */ height: 0; text-align: center; } + .trackster-nav { padding: 0 0; color:#333; @@ -21,16 +23,20 @@ background: transparent; border: none; } + .content { font: 10px verdana; } + .nav-controls { text-align: center; padding: 1px 0; } + .nav-controls input { margin: 0 5px; } + #zoom-in, #zoom-out { display:inline-block; height: 16px; @@ -38,23 +44,28 @@ margin-bottom: -3px; cursor: pointer; } + #zoom-out { background: transparent url(../images/fugue/magnifier-zoom-out.png) center center no-repeat; } + #zoom-in { margin-left: 10px; background: transparent url(../images/fugue/magnifier-zoom.png) center center no-repeat; } + .nav-input { font-size: 12px; width: 30em; z-index: 1000; } + .location { display: inline-block; width: 15em; margin: 0 10px; } + .draghandle { margin-top: 2px; cursor: move; @@ -63,6 +74,7 @@ width: 10px; height: 12px; } + .intro { z-index: 1000; /* margin-top: 200px;*/ @@ -72,11 +84,13 @@ text-align: center; font-size: 16px; } + .overview { width: 100%; margin: 0px; color: white; } + .overview-viewport { position: relative; height: 14px; @@ -85,6 +99,7 @@ background: white; margin: 0; } + .overview-close { font: 9px verdana; position: absolute; @@ -94,6 +109,7 @@ z-index: 500; background-color: white; } + .overview-highlight { top: 0px; position: absolute; @@ -102,6 +118,7 @@ border-color: #666; border-width: 0px 1px; } + .overview-boxback { width: 100%; bottom: 0px; @@ -111,6 +128,7 @@ background: #eee; border: solid #999 1px; } + .overview-box { cursor: pointer; bottom: 0px; @@ -121,6 +139,7 @@ 
background: #C1C9E5 url(../images/visualization/draggable_horizontal.png) center center no-repeat; border: solid #666 1px; } + .viewport-canvas { width: 100%; height: 100px; @@ -155,11 +174,13 @@ width: 12px; height: 12px; } + .group { min-height: 20px; border-top: 2px solid #888; border-bottom: 2px solid #888; -} +} + .track { /* border-top: solid #DDDDDD 1px; */ /* border-bottom: solid #DDDDDD 1px; */ @@ -178,8 +199,6 @@ .tiles { text-align: center; - border-top: 1px solid #eee; - border-bottom: 2px solid #eee; background: #eee url('../images/tracks/diag_bg.gif'); min-height: 16px; position: relative; @@ -239,6 +258,7 @@ .loading { min-height: 100px; } + .label-track { /* font-weight: bold; */ font-size: 10px; @@ -248,20 +268,24 @@ height: 1.5em; overflow: hidden; } + .label-track .label { border-left: solid #999 1px; padding: 1px; padding-bottom: 2px; display: inline-block; } + .label-track .track-content { border: none; } + .reference-track { border: none; margin: 0; padding: 0; } + .right-float { float: right; margin-left: 5px; @@ -281,158 +305,203 @@ input { font: 10px verdana; } + .dynamic-tool, .filters { margin-left: 0.25em; padding-bottom:0.5em; } + .dynamic-tool { width:410px; } + .filters { float: left; margin: 1em; width: 60%; position:relative; } + .display-controls { float: left; margin-left: 1em; } + .slider-row { margin-left: 1em; height: 16px; } + .slider-row.input { height: 22px; } + .elt-label { float: left; width: 30%; font-weight: bold; margin-right: 1em; } + .slider { float: left; width: 40%; position: relative; padding-top:2px; } + .tool-name { font-size: 110%; font-weight: bold; } + .param-row { margin-top: 0.2em; margin-left: 1em; } + .param-label{ float: left; font-weight: bold; padding-top: 0.2em; width: 50%; } + .menu-button { margin: 0px 4px 0px 4px; } + .exclamation{ background: transparent url(../images/fugue/exclamation.png) no-repeat; margin-left: 5em; } + .layer-transparent { background: transparent url(../images/fugue/layer-transparent-bw.png) no-repeat; } + .layer-transparent.active { background: transparent url(../images/fugue/layer-transparent.png) no-repeat; } + .arrow-resize-090 { background: transparent url(../images/fugue/arrow-resize-090-bw.png) no-repeat; } + .arrow-resize-090.active { background: transparent url(../images/fugue/arrow-resize-090.png) no-repeat; } + .layers-stack { background: transparent url(../images/fugue/layers-stack-bw.png) no-repeat; } + .layers-stack:hover { background:transparent url(../images/fugue/layers-stack.png) no-repeat; } + .settings-icon { background: transparent url(../images/fugue/gear-bw.png) no-repeat; } + .settings-icon:hover { background: transparent url(../images/fugue/gear.png) no-repeat; } + .overview-icon { background:transparent url(../images/fugue/application-dock-270-bw.png) no-repeat; } + .overview-icon:hover { background:transparent url(../images/fugue/application-dock-270.png) no-repeat; } + .hammer { background: transparent url(../images/fugue/hammer-bw.png) no-repeat; } + .hammer:hover { background: transparent url(../images/fugue/hammer.png) no-repeat; } + .toolbox { background: transparent url(../images/fugue/toolbox-bw.png) no-repeat; } + .toolbox:hover { background: transparent url(../images/fugue/toolbox.png) no-repeat; } + .filters-icon { background: transparent url(../images/fugue/ui-slider-050-bw.png) no-repeat; } + .filters-icon:hover { background: transparent url(../images/fugue/ui-slider-050.png) no-repeat; } + .globe { background: transparent url(../images/fugue/globe-bw.png) 
no-repeat; } + .globe:hover { background: transparent url(../images/fugue/globe.png) no-repeat; } + .remove-icon, .overview-close { background: transparent url(../images/fugue/cross-small-bw.png) no-repeat; } + .icon-button.remove-icon:hover, .overview-close:hover { background: transparent url(../images/fugue/cross-circle.png) no-repeat; } + .icon-button.toggle { background: transparent url(../images/fugue/toggle-bw.png) no-repeat;margin-right:0px; } + .icon-button.toggle:hover { background: transparent url(../images/fugue/toggle.png) no-repeat; } + .icon-button.toggle-expand { background: transparent url(../images/fugue/toggle-expand-bw.png) no-repeat;margin-right:0px; } + .icon-button.toggle-expand:hover { background: transparent url(../images/fugue/toggle-expand.png) no-repeat; } + .icon-button.block--plus { background: transparent url(../images/fugue/block--plus-bw.png) no-repeat; } + .icon-button.block--plus:hover { background: transparent url(../images/fugue/block--plus.png) no-repeat; } + .icon-button.bookmarks { background: transparent url(../images/fugue/bookmarks-bw.png) no-repeat; } + .icon-button.bookmarks:hover { background: transparent url(../images/fugue/bookmarks.png) no-repeat; } + .icon-button.toolbox { background: transparent url(../images/fugue/toolbox-bw.png) no-repeat; } + .icon-button.toolbox:hover { background: transparent url(../images/fugue/toolbox.png) no-repeat; } + .child-track-icon { background:url('../images/fugue/arrow-000-small-bw.png') no-repeat; width: 30px; cursor: move; } + .track-resize { background: white url('../images/visualization/draggable_vertical.png') no-repeat top center; position: absolute; @@ -443,6 +512,7 @@ border: solid #999 1px; z-index: 100; } + .bookmark { background:white; border:solid #999 1px; @@ -451,29 +521,36 @@ margin-right:0; padding:0.5em; } + .bookmark .position { font-weight:bold; } + .delete-icon-container { float:right; } + .icon { display:inline-block; width:16px; height:16px; } + .icon.more-down { background:url('../images/fugue/arrow-transition-270-bw.png') no-repeat 0px 0px; } + .icon.more-across { background: url('../images/fugue/arrow-transition-bw.png') no-repeat 0px 0px; } + .intro > .action-button { background-color: #CCC; margin-top: 10px; padding: 1em; text-decoration:underline; } + .feature-popup { position: absolute; z-index: 1000; @@ -484,6 +561,7 @@ background-image: url(../images/tipsy.gif); background-position: top center; } + .feature-popup-inner { padding: 5px 8px 4px 8px; background-color: black; https://bitbucket.org/galaxy/galaxy-central/commits/d0bbd2a2b4a2/ Changeset: d0bbd2a2b4a2 User: jgoecks Date: 2013-09-24 20:28:05 Summary: Automated merge. Affected #: 57 files diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 config/disposable_email_blacklist.conf.sample --- /dev/null +++ b/config/disposable_email_blacklist.conf.sample @@ -0,0 +1,9 @@ +If you want to disable registration for users that are using disposable email address +rename this file to disposable_email_blacklist.conf and fill it with the disposable domains +that you want to have blacklisted. Each on its own line without the '@' character as shown below. +Users using emails from these domains will get an error during the registration. 
+ +mailinator.com +sogetthis.com +spamgourmet.com +trashmail.net \ No newline at end of file diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 dist-eggs.ini --- a/dist-eggs.ini +++ b/dist-eggs.ini @@ -1,88 +1,54 @@ ; ; Config for building eggs for distribution (via a site such as -; eggs.g2.bx.psu.edu). Probably only useful to Galaxy developers at -; Penn State. This file is used by scripts/dist-scramble.py +; eggs.galaxyproject.org). Probably only useful to members of the Galaxy Team +; building eggs for distribution. This file is used by +; scripts/dist-scramble.py ; -; More information: http://wiki.g2.bx.psu.edu/Admin/Config/Eggs +; More information: http://wiki.galaxyproject.org/Admin/Config/Eggs ; [hosts] -py2.5-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.5 -py2.5-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.5 py2.6-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.6 py2.6-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.6 py2.7-linux-i686-ucs2 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs2/bin/python2.7 py2.7-linux-i686-ucs4 = stegmaier.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.7 -py2.5-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.5 -py2.5-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.5 py2.6-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.6 py2.6-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.6 py2.7-linux-x86_64-ucs2 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs2/bin/python2.7 py2.7-linux-x86_64-ucs4 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.7 -py2.5-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /Library/Frameworks/Python.framework/Versions/2.5/bin/python2.5 py2.6-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /Library/Frameworks/Python.framework/Versions/2.6/bin/python2.6 py2.7-macosx-10.3-fat-ucs2 = weyerbacher.bx.psu.edu /Library/Frameworks/Python.framework/Versions/2.7/bin/python2.7 py2.6-macosx-10.6-universal-ucs2 = lion.bx.psu.edu /usr/bin/python2.6 py2.7-macosx-10.6-intel-ucs2 = lion.bx.psu.edu /usr/local/bin/python2.7 -py2.5-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.5 -py2.6-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.6 -py2.7-solaris-2.10-i86pc_32-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_32-ucs2/bin/python2.7 -py2.5-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.5 -py2.6-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.6 -py2.7-solaris-2.10-i86pc_64-ucs2 = thumper.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-i86pc_64-ucs2/bin/python2.7 -py2.5-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.5 -py2.6-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu 
/afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.6 -py2.7-solaris-2.10-sun4u_32-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.8-sun4u_32-ucs2/bin/python2.7 -py2.5-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-sun4u_64-ucs2/bin/python2.5 -py2.6-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-sun4u_64-ucs2/bin/python2.6 -py2.7-solaris-2.10-sun4u_64-ucs2 = early.bx.psu.edu /afs/bx.psu.edu/project/pythons/solaris-2.10-sun4u_64-ucs2/bin/python2.7 - +; ; these hosts are used to build eggs with no C extensions -py2.5 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.5 py2.6 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.6 py2.7 = straub.bx.psu.edu /afs/bx.psu.edu/project/pythons/linux-x86_64-ucs4/bin/python2.7 [groups] -py2.5-linux-i686 = py2.5-linux-i686-ucs2 py2.5-linux-i686-ucs4 -py2.5-linux-x86_64 = py2.5-linux-x86_64-ucs2 py2.5-linux-x86_64-ucs4 py2.6-linux-i686 = py2.6-linux-i686-ucs2 py2.6-linux-i686-ucs4 py2.6-linux-x86_64 = py2.6-linux-x86_64-ucs2 py2.6-linux-x86_64-ucs4 py2.7-linux-i686 = py2.7-linux-i686-ucs2 py2.7-linux-i686-ucs4 py2.7-linux-x86_64 = py2.7-linux-x86_64-ucs2 py2.7-linux-x86_64-ucs4 -py2.5-linux = py2.5-linux-i686 py2.5-linux-x86_64 py2.6-linux = py2.6-linux-i686 py2.6-linux-x86_64 py2.7-linux = py2.7-linux-i686 py2.7-linux-x86_64 -linux-i686 = py2.5-linux-i686 py2.6-linux-i686 py2.7-linux-i686 -linux-x86_64 = py2.5-linux-x86_64 py2.6-linux-x86_64 py2.7-linux-x86_64 +linux-i686 = py2.6-linux-i686 py2.7-linux-i686 +linux-x86_64 = py2.6-linux-x86_64 py2.7-linux-x86_64 linux = linux-i686 linux-x86_64 -py2.5-macosx = py2.5-macosx-10.3-fat-ucs2 py2.6-macosx = py2.6-macosx-10.3-fat-ucs2 py2.6-macosx-10.6-universal-ucs2 py2.7-macosx = py2.7-macosx-10.3-fat-ucs2 py2.7-macosx-10.6-intel-ucs2 -macosx = py2.5-macosx py2.6-macosx py2.7-macosx -py2.5-solaris-i86pc = py2.5-solaris-2.10-i86pc_32-ucs2 py2.5-solaris-2.10-i86pc_64-ucs2 -py2.6-solaris-i86pc = py2.6-solaris-2.10-i86pc_32-ucs2 py2.6-solaris-2.10-i86pc_64-ucs2 -py2.7-solaris-i86pc = py2.7-solaris-2.10-i86pc_32-ucs2 py2.7-solaris-2.10-i86pc_64-ucs2 -py2.5-solaris-sun4u = py2.5-solaris-2.10-sun4u_32-ucs2 py2.5-solaris-2.10-sun4u_64-ucs2 -py2.6-solaris-sun4u = py2.6-solaris-2.10-sun4u_32-ucs2 py2.6-solaris-2.10-sun4u_64-ucs2 -py2.7-solaris-sun4u = py2.7-solaris-2.10-sun4u_32-ucs2 py2.7-solaris-2.10-sun4u_64-ucs2 -py2.5-solaris = py2.5-solaris-i86pc py2.5-solaris-sun4u -py2.6-solaris = py2.6-solaris-i86pc py2.6-solaris-sun4u -py2.7-solaris = py2.7-solaris-i86pc py2.7-solaris-sun4u -solaris-i86pc = py2.5-solaris-i86pc py2.6-solaris-i86pc py2.7-solaris-i86pc -solaris-sun4u = py2.5-solaris-sun4u py2.6-solaris-sun4u py2.7-solaris-sun4u -solaris = solaris-i86pc solaris-sun4u -py2.5-all = py2.5-linux py2.5-macosx py2.5-solaris -py2.6-all = py2.6-linux py2.6-macosx py2.6-solaris -py2.7-all = py2.7-linux py2.7-macosx py2.7-solaris +macosx = py2.6-macosx py2.7-macosx +py2.6-all = py2.6-linux py2.6-macosx +py2.7-all = py2.7-linux py2.7-macosx ; the 'all' key is used internally by the build system to specify which hosts ; to build on when no hosts are specified on the dist-eggs.py command line. -all = linux macosx solaris +all = linux macosx ; the 'noplatform' key, likewise, is for which build hosts should be used when ; building pure python (noplatform) eggs. 
-noplatform = py2.5 py2.6 py2.7 +noplatform = py2.6 py2.7 ; don't build these eggs on these platforms: [ignore] -ctypes = py2.5-linux-i686-ucs2 py2.5-linux-i686-ucs4 py2.6-linux-i686-ucs2 py2.6-linux-i686-ucs4 py2.7-linux-i686-ucs2 py2.7-linux-i686-ucs4 py2.5-linux-x86_64-ucs2 py2.5-linux-x86_64-ucs4 py2.6-linux-x86_64-ucs2 py2.6-linux-x86_64-ucs4 py2.7-linux-x86_64-ucs2 py2.7-linux-x86_64-ucs4 py2.5-macosx-10.3-fat-ucs2 py2.6-macosx-10.3-fat-ucs2 py2.6-macosx-10.6-universal-ucs2 py2.7-macosx-10.3-fat-ucs2 py2.5-solaris-2.10-i86pc_32-ucs2 py2.6-solaris-2.10-i86pc_32-ucs2 py2.7-solaris-2.10-i86pc_32-ucs2 py2.5-solaris-2.10-i86pc_64-ucs2 py2.6-solaris-2.10-i86pc_64-ucs2 py2.7-solaris-2.10-i86pc_64-ucs2 py2.5-solaris-2.10-sun4u_32-ucs2 py2.6-solaris-2.10-sun4u_32-ucs2 py2.7-solaris-2.10-sun4u_32-ucs2 py2.5-solaris-2.10-sun4u_64-ucs2 py2.6-solaris-2.10-sun4u_64-ucs2 py2.7-solaris-2.10-sun4u_64-ucs2 +;ctypes = diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 eggs.ini --- a/eggs.ini +++ b/eggs.ini @@ -21,7 +21,7 @@ PyRods = 3.2.4 numpy = 1.6.0 pbs_python = 4.3.5 -psycopg2 = 2.0.13 +psycopg2 = 2.5.1 pycrypto = 2.5 pysam = 0.4.2 pysqlite = 2.5.6 @@ -38,15 +38,16 @@ boto = 2.5.2 decorator = 3.1.2 docutils = 0.7 -drmaa = 0.4b3 +drmaa = 0.6 elementtree = 1.2.6_20050316 -Fabric = 1.4.2 +Fabric = 1.7.0 GeneTrack = 2.0.0_beta_1 lrucache = 0.2 Mako = 0.4.1 nose = 0.11.1 NoseHTML = 0.4.1 NoseTestDiff = 0.1 +paramiko = 1.11.1 Parsley = 1.1 Paste = 1.7.5.1 PasteDeploy = 1.5.0 @@ -71,7 +72,7 @@ ; extra version information [tags] -psycopg2 = _8.4.2_static +psycopg2 = _9.2.4_static pysqlite = _3.6.17_static MySQL_python = _5.1.41_static bx_python = _7b95ff194725 @@ -82,7 +83,7 @@ ; the wiki page above [source] MySQL_python = mysql-5.1.41 -psycopg2 = postgresql-8.4.2 +psycopg2 = postgresql-9.2.4 pysqlite = sqlite-amalgamation-3_6_17 [dependencies] diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/config.py --- a/lib/galaxy/config.py +++ b/lib/galaxy/config.py @@ -63,7 +63,7 @@ elif 'tool_config_files' in kwargs: tcf = kwargs[ 'tool_config_files' ] else: - tcf = 'tool_conf.xml' + tcf = 'tool_conf.xml,shed_tool_conf.xml' self.tool_filters = listify( kwargs.get( "tool_filters", [] ) ) self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ) ) self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ) ) @@ -132,6 +132,21 @@ self.admin_users = kwargs.get( "admin_users", "" ) self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu") self.error_email_to = kwargs.get( 'error_email_to', None ) + self.admin_email = kwargs.get( 'admin_email', None ) + self.user_activation_on = kwargs.get( 'user_activation_on', None ) + self.activation_grace_period = kwargs.get( 'activation_grace_period', None ) + self.inactivity_box_content = kwargs.get( 'inactivity_box_content', None ) + self.registration_warning_message = kwargs.get( 'registration_warning_message', None ) + # Get the disposable email domains blacklist file and its contents + self.blacklist_location = kwargs.get( 'blacklist_file', None ) + self.blacklist_content = None + if self.blacklist_location is not None: + self.blacklist_file = resolve_path( kwargs.get( 'blacklist_file', None ), self.root ) + try: + with open(self.blacklist_file) as blacklist: + self.blacklist_content = [ line.rstrip() for line in blacklist.readlines() ] + except IOError: + print ( "CONFIGURATION ERROR: Can't open supplied blacklist 
file from path: " + str( self.blacklist_file ) ) self.smtp_server = kwargs.get( 'smtp_server', None ) self.smtp_username = kwargs.get( 'smtp_username', None ) self.smtp_password = kwargs.get( 'smtp_password', None ) diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/jobs/handler.py --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -84,12 +84,25 @@ Checks all jobs that are in the 'new', 'queued' or 'running' state in the database and requeues or cleans up as necessary. Only run as the job handler starts. + In case the activation is enforced it will filter out the jobs of inactive users. """ - for job in self.sa_session.query( model.Job ).enable_eagerloads( False ) \ + jobs_at_startup = [] + if self.app.config.user_activation_on: + jobs_at_startup = self.sa_session.query( model.Job ).enable_eagerloads( False ) \ + .outerjoin( model.User ) \ .filter( ( ( model.Job.state == model.Job.states.NEW ) \ | ( model.Job.state == model.Job.states.RUNNING ) \ | ( model.Job.state == model.Job.states.QUEUED ) ) \ - & ( model.Job.handler == self.app.config.server_name ) ): + & ( model.Job.handler == self.app.config.server_name ) \ + & or_( ( model.Job.user_id == None ),( model.User.active == True ) ) ).all() + else: + jobs_at_startup = self.sa_session.query( model.Job ).enable_eagerloads( False ) \ + .filter( ( ( model.Job.state == model.Job.states.NEW ) \ + | ( model.Job.state == model.Job.states.RUNNING ) \ + | ( model.Job.state == model.Job.states.QUEUED ) ) \ + & ( model.Job.handler == self.app.config.server_name ) ).all() + + for job in jobs_at_startup: if job.tool_id not in self.app.toolbox.tools_by_id: log.warning( "(%s) Tool '%s' removed from tool config, unable to recover job" % ( job.id, job.tool_id ) ) JobWrapper( job, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator.' ) @@ -146,8 +159,9 @@ over all new and waiting jobs to check the state of the jobs each depends on. If the job has dependencies that have not finished, it it goes to the waiting queue. If the job has dependencies with errors, - it is marked as having errors and removed from the queue. Otherwise, - the job is dispatched. + it is marked as having errors and removed from the queue. If the job + belongs to an inactive user it is ignored. + Otherwise, the job is dispatched. 
""" # Pull all new jobs from the queue at once jobs_to_check = [] @@ -173,7 +187,17 @@ (model.LibraryDatasetDatasetAssociation.deleted == True), (model.Dataset.state != model.Dataset.states.OK), (model.Dataset.deleted == True)))).subquery() - jobs_to_check = self.sa_session.query(model.Job).enable_eagerloads(False) \ + if self.app.config.user_activation_on: + jobs_to_check = self.sa_session.query(model.Job).enable_eagerloads(False) \ + .outerjoin( model.User ) \ + .filter(and_((model.Job.state == model.Job.states.NEW), + or_((model.Job.user_id == None),(model.User.active == True)), + (model.Job.handler == self.app.config.server_name), + ~model.Job.table.c.id.in_(hda_not_ready), + ~model.Job.table.c.id.in_(ldda_not_ready))) \ + .order_by(model.Job.id).all() + else: + jobs_to_check = self.sa_session.query(model.Job).enable_eagerloads(False) \ .filter(and_((model.Job.state == model.Job.states.NEW), (model.Job.handler == self.app.config.server_name), ~model.Job.table.c.id.in_(hda_not_ready), diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -79,6 +79,8 @@ self.external = False self.deleted = False self.purged = False + self.active = False + self.activation_token = None self.username = None # Relationships self.histories = [] @@ -1681,7 +1683,7 @@ rval += child.get_disk_usage( user ) return rval - def to_dict( self, view='collection' ): + def to_dict( self, view='collection', expose_dataset_path=False ): """ Return attributes of this HDA that are exposed using the API. """ @@ -1714,6 +1716,9 @@ for name, spec in hda.metadata.spec.items(): val = hda.metadata.get( name ) if isinstance( val, MetadataFile ): + # only when explicitly set: fetching filepaths can be expensive + if not expose_dataset_path: + continue val = val.file_name # If no value for metadata, look in datatype for metadata. 
elif val == None and hasattr( hda.datatype, name ): @@ -2324,9 +2329,12 @@ self.id = None self.user = None + class StoredWorkflow( object, Dictifiable): + dict_collection_visible_keys = ( 'id', 'name', 'published' ) dict_element_visible_keys = ( 'id', 'name', 'published' ) + def __init__( self ): self.id = None self.user = None @@ -2354,7 +2362,11 @@ return rval -class Workflow( object ): +class Workflow( object, Dictifiable ): + + dict_collection_visible_keys = ( 'name', 'has_cycles', 'has_errors' ) + dict_element_visible_keys = ( 'name', 'has_cycles', 'has_errors' ) + def __init__( self ): self.user = None self.name = None @@ -2362,7 +2374,9 @@ self.has_errors = None self.steps = [] + class WorkflowStep( object ): + def __init__( self ): self.id = None self.type = None @@ -2373,36 +2387,48 @@ self.input_connections = [] self.config = None + class WorkflowStepConnection( object ): + def __init__( self ): self.output_step_id = None self.output_name = None self.input_step_id = None self.input_name = None + class WorkflowOutput(object): + def __init__( self, workflow_step, output_name): self.workflow_step = workflow_step self.output_name = output_name + class StoredWorkflowUserShareAssociation( object ): + def __init__( self ): self.stored_workflow = None self.user = None + class StoredWorkflowMenuEntry( object ): + def __init__( self ): self.stored_workflow = None self.user = None self.order_index = None + class WorkflowInvocation( object ): pass + class WorkflowInvocationStep( object ): pass + class MetadataFile( object ): + def __init__( self, dataset = None, name = None ): if isinstance( dataset, HistoryDatasetAssociation ): self.history_dataset = dataset diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/model/mapping.py --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -52,7 +52,9 @@ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ), Column( "deleted", Boolean, index=True, default=False ), Column( "purged", Boolean, index=True, default=False ), - Column( "disk_usage", Numeric( 15, 0 ), index=True ) ) + Column( "disk_usage", Numeric( 15, 0 ), index=True ) , + Column( "active", Boolean, index=True, default=True, nullable=False ), + Column( "activation_token", TrimmedString( 64 ), nullable=True, index=True ) ) model.UserAddress.table = Table( "user_address", metadata, Column( "id", Integer, primary_key=True), diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/model/migrate/versions/0117_add_user_activation.py --- /dev/null +++ b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py @@ -0,0 +1,57 @@ +''' +Created on Sep 10, 2013 + +@author: marten + +Adds 'active' and 'activation_token' columns to the galaxy_user table. 
+''' + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +from galaxy.model.custom_types import TrimmedString + +import logging +log = logging.getLogger( __name__ ) + +user_active_column = Column( "active", Boolean, default=True, nullable=True ) +user_activation_token_column = Column( "activation_token", TrimmedString( 64 ), nullable=True ) + + +def display_migration_details(): + print "" + print "This migration script adds active and activation_token columns to the user table" + +def upgrade(migrate_engine): + print __doc__ + metadata = MetaData() + metadata.bind = migrate_engine + metadata.reflect() + + # Add the active and activation_token columns to the user table in one try because they depend on each other. + try: + user_table = Table( "galaxy_user", metadata, autoload=True ) + user_active_column.create( table = user_table , populate_default = True) + user_activation_token_column.create( table = user_table ) + assert user_active_column is user_table.c.active + assert user_activation_token_column is user_table.c.activation_token + except Exception, e: + print str(e) + log.error( "Adding columns 'active' and 'activation_token' to galaxy_user table failed: %s" % str( e ) ) + return + +def downgrade(migrate_engine): + metadata = MetaData() + metadata.bind = migrate_engine + metadata.reflect() + + # Drop the user table's active and activation_token columns in one try because they depend on each other. + try: + user_table = Table( "galaxy_user", metadata, autoload=True ) + user_active = user_table.c.active + user_activation_token = user_table.c.activation_token + user_active.drop() + user_activation_token.drop() + except Exception, e: + log.debug( "Dropping 'active' and 'activation_token' columns from galaxy_user table failed: %s" % ( str( e ) ) ) \ No newline at end of file diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/security/validate_user_input.py --- a/lib/galaxy/security/validate_user_input.py +++ b/lib/galaxy/security/validate_user_input.py @@ -2,18 +2,27 @@ VALID_PUBLICNAME_RE = re.compile( "^[a-z0-9\-]+$" ) VALID_PUBLICNAME_SUB = re.compile( "[^a-z0-9\-]" ) +# Basic regular expression to check email validity. +VALID_EMAIL_RE = re.compile( "[^@]+@[^@]+\.[^@]+" ) FILL_CHAR = '-' def validate_email( trans, email, user=None, check_dup=True ): + """ + Validates the email format and checks whether the domain is blacklisted in the disposable domains configuration. + """ message = '' if user and user.email == email: return message - if len( email ) == 0 or "@" not in email or "." not in email: - message = "Enter a real email address" + if not( VALID_EMAIL_RE.match( email ) ): + message = "Please enter your real email address." elif len( email ) > 255: - message = "Email address exceeds maximum allowable length" + message = "Email address exceeds maximum allowable length." elif check_dup and trans.sa_session.query( trans.app.model.User ).filter_by( email=email ).first(): - message = "User with that email already exists" + message = "User with that email already exists." + # If the blacklist is not empty, filter out the disposable domains. + elif trans.app.config.blacklist_content is not None: + if email.split('@')[1] in trans.app.config.blacklist_content: + message = "Please enter your permanent email address."
return message def validate_publicname( trans, publicname, user=None ): diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/web/base/controller.py --- a/lib/galaxy/web/base/controller.py +++ b/lib/galaxy/web/base/controller.py @@ -583,7 +583,8 @@ """ #precondition: the user's access to this hda has already been checked #TODO:?? postcondition: all ids are encoded (is this really what we want at this level?) - hda_dict = hda.to_dict( view='element' ) + expose_dataset_path = trans.user_is_admin() or trans.app.config.expose_dataset_path + hda_dict = hda.to_dict( view='element', expose_dataset_path=expose_dataset_path ) hda_dict[ 'api_type' ] = "file" # Add additional attributes that depend on trans can hence must be added here rather than at the model level. @@ -599,7 +600,7 @@ #TODO: to_dict should really go AFTER this - only summary data return trans.security.encode_dict_ids( hda_dict ) - if trans.user_is_admin() or trans.app.config.expose_dataset_path: + if expose_dataset_path: hda_dict[ 'file_name' ] = hda.file_name hda_dict[ 'download_url' ] = url_for( 'history_contents_display', diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -10,6 +10,7 @@ from tool_shed.galaxy_install import repository_util from tool_shed.util import common_util from tool_shed.util import encoding_util +from tool_shed.util import workflow_util import tool_shed.util.shed_util_common as suc log = logging.getLogger( __name__ ) @@ -33,6 +34,102 @@ """RESTful controller for interactions with tool shed repositories.""" @web.expose_api + def exported_workflows( self, trans, id, **kwd ): + """ + GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}/exported_workflows + + Display a list of dictionaries containing information about this tool shed repository's exported workflows. + + :param id: the encoded id of the ToolShedRepository object + """ + # Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_w... + # Since exported workflows are dictionaries with very few attributes that differentiate them from each other, we'll build the + # list based on the following dictionary of those few attributes. + exported_workflows = [] + repository = suc.get_tool_shed_repository_by_id( trans, id ) + metadata = repository.metadata + if metadata: + exported_workflow_tups = metadata.get( 'workflows', [] ) + else: + exported_workflow_tups = [] + for index, exported_workflow_tup in enumerate( exported_workflow_tups ): + # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of relative_path is the location + # on disk (relative to the root of the installed repository) where the exported_workflow_dict file (.ga file) is located. + exported_workflow_dict = exported_workflow_tup[ 1 ] + annotation = exported_workflow_dict.get( 'annotation', '' ) + format_version = exported_workflow_dict.get( 'format-version', '' ) + workflow_name = exported_workflow_dict.get( 'name', '' ) + # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list. 
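# (A client first GETs /api/tool_shed_repositories/{encoded_tool_shed_repository_id}/exported_workflows, then POSTs the chosen entry's 'index' to import_workflow below.)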
+ display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name ) + exported_workflows.append( display_dict ) + return exported_workflows + + @web.expose_api + def import_workflow( self, trans, payload, **kwd ): + """ + POST /api/tool_shed_repositories/import_workflow + + Import the specified exported workflow contained in the specified installed tool shed repository into Galaxy. + + :param key: the API key of the Galaxy user with which the imported workflow will be associated. + :param id: the encoded id of the ToolShedRepository object + + The following parameters are included in the payload. + :param index: the index location of the workflow tuple in the list of exported workflows stored in the metadata for the specified repository + """ + api_key = kwd.get( 'key', None ) + if api_key is None: + raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow." ) + tool_shed_repository_id = kwd.get( 'id', '' ) + if not tool_shed_repository_id: + raise HTTPBadRequest( detail="Missing required parameter 'id'." ) + index = payload.get( 'index', None ) + if index is None: + raise HTTPBadRequest( detail="Missing required parameter 'index'." ) + repository = suc.get_tool_shed_repository_by_id( trans, tool_shed_repository_id ) + exported_workflows = json.from_json_string( self.exported_workflows( trans, tool_shed_repository_id ) ) + # Since we don't have an in-memory object with an id, we'll identify the exported workflow via its location (i.e., index) in the list. + exported_workflow = exported_workflows[ int( index ) ] + workflow_name = exported_workflow[ 'workflow_name' ] + workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + if status == 'error': + log.error( message, exc_info=True ) + trans.response.status = 500 + return message + else: + return workflow.to_dict( view='element' ) + + @web.expose_api + def import_workflows( self, trans, **kwd ): + """ + POST /api/tool_shed_repositories/import_workflows + + Import all of the exported workflows contained in the specified installed tool shed repository into Galaxy. + + :param key: the API key of the Galaxy user with which the imported workflows will be associated. + :param id: the encoded id of the ToolShedRepository object + """ + api_key = kwd.get( 'key', None ) + if api_key is None: + raise HTTPBadRequest( detail="Missing required parameter 'key' whose value is the API key for the Galaxy user importing the specified workflow." ) + tool_shed_repository_id = kwd.get( 'id', '' ) + if not tool_shed_repository_id: + raise HTTPBadRequest( detail="Missing required parameter 'id'."
) + repository = suc.get_tool_shed_repository_by_id( trans, tool_shed_repository_id ) + exported_workflows = json.from_json_string( self.exported_workflows( trans, tool_shed_repository_id ) ) + imported_workflow_dicts = [] + for exported_workflow_dict in exported_workflows: + workflow_name = exported_workflow_dict[ 'workflow_name' ] + workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + if status == 'error': + log.error( message, exc_info=True ) + trans.response.status = 500 + return message + else: + imported_workflow_dicts.append( workflow.to_dict( view='element' ) ) + return imported_workflow_dicts + + @web.expose_api def index( self, trans, **kwd ): """ GET /api/tool_shed_repositories @@ -58,28 +155,6 @@ return message @web.expose_api - def show( self, trans, id, **kwd ): - """ - GET /api/tool_shed_repositories/{encoded_tool_shed_repsository_id} - Display a dictionary containing information about a specified tool_shed_repository. - - :param id: the encoded id of the ToolShedRepository object - """ - # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e - try: - tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id ) - tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) - tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', - action='show', - id=trans.security.encode_id( tool_shed_repository.id ) ) - return tool_shed_repository_dict - except Exception, e: - message = "Error in tool_shed_repositories API in index: " + str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message - - @web.expose_api def install_repository_revision( self, trans, payload, **kwd ): """ POST /api/tool_shed_repositories/install_repository_revision @@ -412,3 +487,25 @@ tool_shed_repositories.append( repository_dict ) # Display the list of repaired repositories. return tool_shed_repositories + + @web.expose_api + def show( self, trans, id, **kwd ): + """ + GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id} + Display a dictionary containing information about a specified tool_shed_repository.
+ + :param id: the encoded id of the ToolShedRepository object + """ + # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e + try: + tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id ) + tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) + tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', + action='show', + id=trans.security.encode_id( tool_shed_repository.id ) ) + return tool_shed_repository_dict + except Exception, e: + message = "Error in tool_shed_repositories API in index: " + str( e ) + log.error( message, exc_info=True ) + trans.response.status = 500 + return message diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/webapps/galaxy/buildapp.py --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -51,6 +51,8 @@ webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app ) # Force /history to go to /root/history -- needed since the tests assume this webapp.add_route( '/history', controller='root', action='history' ) + # Force /activate to go to the controller + webapp.add_route( '/activate', controller='user', action='activate' ) # These two routes handle our simple needs at the moment webapp.add_route( '/async/:tool_id/:data_id/:data_secret', controller='async', action='index', tool_id=None, data_id=None, data_secret=None ) webapp.add_route( '/:controller/:action', action='index' ) @@ -176,7 +178,10 @@ # Galaxy API for tool shed features. webapp.mapper.resource( 'tool_shed_repository', 'tool_shed_repositories', - member={ 'repair_repository_revision' : 'POST' }, + member={ 'repair_repository_revision' : 'POST', + 'exported_workflows' : 'GET', + 'import_workflow' : 'POST', + 'import_workflows' : 'POST' }, controller='tool_shed_repositories', name_prefix='tool_shed_repository_', path_prefix='/api', diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -358,66 +358,26 @@ @web.expose @web.require_admin def import_workflow( self, trans, workflow_name, repository_id, **kwd ): - """Import a workflow contained in an installed tool shed repository into the Galaxy instance.""" + """Import a workflow contained in an installed tool shed repository into Galaxy.""" message = kwd.get( 'message', '' ) status = kwd.get( 'status', 'done' ) if workflow_name: workflow_name = encoding_util.tool_shed_decode( workflow_name ) - repository = suc.get_tool_shed_repository_by_id( trans, repository_id ) - changeset_revision = repository.changeset_revision - metadata = repository.metadata - workflows = metadata.get( 'workflows', [] ) - tools_metadata = metadata.get( 'tools', [] ) - workflow_dict = None - for workflow_data_tuple in workflows: - # The value of workflow_data_tuple is ( relative_path_to_workflow_file, exported_workflow_dict ). - relative_path_to_workflow_file, exported_workflow_dict = workflow_data_tuple - if exported_workflow_dict[ 'name' ] == workflow_name: - # If the exported workflow is available on disk, import it. 
- if os.path.exists( relative_path_to_workflow_file ): - workflow_file = open( relative_path_to_workflow_file, 'rb' ) - workflow_data = workflow_file.read() - workflow_file.close() - workflow_dict = json.from_json_string( workflow_data ) + repository = suc.get_tool_shed_repository_by_id( trans, repository_id ) + if repository: + workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + if workflow: + workflow_name = encoding_util.tool_shed_encode( str( workflow.name ) ) else: - # Use the current exported_workflow_dict. - workflow_dict = exported_workflow_dict - break - if workflow_dict: - # Create workflow if possible. - workflow, missing_tool_tups = workflow_util.get_workflow_from_dict( trans=trans, - workflow_dict=workflow_dict, - tools_metadata=tools_metadata, - repository_id=repository_id, - changeset_revision=changeset_revision ) - # Save the workflow in the Galaxy database. - # Pass workflow_dict along to create annotation at this point - stored_workflow = workflow_util.save_workflow( trans, workflow, workflow_dict ) - # Use the latest version of the saved workflow. - workflow = stored_workflow.latest_workflow - if workflow_name: - workflow.name = workflow_name - # Provide user feedback and show workflow list. - if workflow.has_errors: - message += "Imported, but some steps in this workflow have validation errors. " - status = "error" - if workflow.has_cycles: - message += "Imported, but this workflow contains cycles. " - status = "error" + message += 'Unable to locate a workflow named <b>%s</b> within the installed tool shed repository named <b>%s</b>' % \ + ( str( workflow_name ), str( repository.name ) ) + status = 'error' else: - message += "Workflow <b>%s</b> imported successfully. " % workflow.name - if missing_tool_tups: - # TODO: rework this since it is used in the tool shed, but shoudn't be used in Galaxy. - name_and_id_str = '' - for missing_tool_tup in missing_tool_tups: - tool_id, tool_name, other = missing_tool_tup - name_and_id_str += 'name: %s, id: %s' % ( str( tool_id ), str( tool_name ) ) - log.debug( "The following tools required by this workflow are missing from this Galaxy instance: %s" % name_and_id_str ) + message = 'Invalid repository id <b>%s</b> received.' % str( repository_id ) + status = 'error' else: - message += 'The workflow named %s is not included in the metadata for revision %s of repository %s' % \ - ( str( workflow_name ), str( changeset_revision ), str( repository.name ) ) + message = 'The value of workflow_name is required, but was not received.' 
status = 'error' - workflow_name = encoding_util.tool_shed_encode( workflow.name ), return trans.response.send_redirect( web.url_for( controller='admin_toolshed', action='view_workflow', workflow_name=workflow_name, diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/webapps/galaxy/controllers/user.py --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -8,6 +8,7 @@ import socket import string import random +import urllib from galaxy import web from galaxy import util, model from galaxy.model.orm import and_ @@ -17,6 +18,8 @@ from galaxy.web.base.controller import BaseUIController, UsesFormDefinitionsMixin from galaxy.web.form_builder import CheckboxField, build_select_field from galaxy.web.framework.helpers import time_ago, grids +from datetime import datetime, timedelta +from galaxy.util import hash_util log = logging.getLogger( __name__ ) @@ -147,7 +150,14 @@ if trans.user: if user_openid.user and user_openid.user.id != trans.user.id: message = "The OpenID <strong>%s</strong> is already associated with another Galaxy account, <strong>%s</strong>. Please disassociate it from that account before attempting to associate it with a new account." % ( display_identifier, user_openid.user.email ) - status = "error" + if not trans.user.active and trans.app.config.user_activation_on: # Account activation is ON and the user is INACTIVE. + if ( trans.app.config.activation_grace_period != 0 ): # grace period is ON + if self.is_outside_grace_period( trans, trans.user.create_time ): # User is outside the grace period. Login is disabled and he will have the activation email resent. + message = self.resend_verification_email( trans, trans.user.email ) + else: # User is within the grace period, let him log in. + pass + else: # Grace period is off. Login is disabled and user will have the activation email resent. + message = self.resend_verification_email( trans, trans.user.email ) elif not user_openid.user or user_openid.user == trans.user: if openid_provider_obj.id: user_openid.provider = openid_provider_obj.id @@ -282,7 +292,7 @@ subscribe_checked = CheckboxField.is_checked( subscribe ) error = '' if not trans.app.config.allow_user_creation and not trans.user_is_admin(): - error = 'User registration is disabled. Please contact your Galaxy administrator for an account.' + error = 'User registration is disabled. Please contact your local Galaxy administrator for an account.' else: # Check email and password validity error = self.__validate( trans, params, email, password, confirm, username ) @@ -465,9 +475,13 @@ openid_providers=trans.app.openid_providers, form_input_auto_focus=True, active_view="user" ) + def __validate_login( self, trans, **kwd ): + """ + Function validates numerous cases that might happen during the login time. + """ message = kwd.get( 'message', '' ) - status = kwd.get( 'status', 'done' ) + status = kwd.get( 'status', 'error' ) email = kwd.get( 'email', '' ) password = kwd.get( 'password', '' ) redirect = kwd.get( 'redirect', trans.request.referer ).strip() @@ -475,26 +489,68 @@ user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first() if not user: message = "No such user (please note that login is case sensitive)" - status = 'error' elif user.deleted: - message = "This account has been marked deleted, contact your Galaxy administrator to restore the account." 
- status = 'error' + message = "This account has been marked deleted, contact your local Galaxy administrator to restore the account." + if trans.app.config.admin_email is not None: + message += 'Contact: %s' % trans.app.config.admin_email elif user.external: message = "This account was created for use with an external authentication method, contact your local Galaxy administrator to activate it." - status = 'error' + if trans.app.config.admin_email is not None: + message += 'Contact: %s' % trans.app.config.admin_email elif not user.check_password( password ): message = "Invalid password" - status = 'error' + elif trans.app.config.user_activation_on and not user.active: # activation is ON and the user is INACTIVE + if ( trans.app.config.activation_grace_period != 0 ): # grace period is ON + if self.is_outside_grace_period( trans, user.create_time ): # User is outside the grace period. Login is disabled and he will have the activation email resent. + message = self.resend_verification_email( trans, email ) + else: # User is within the grace period, let him log in. + message, success, status = self.proceed_login( trans, user, redirect ) + else: # Grace period is off. Login is disabled and user will have the activation email resent. + message = self.resend_verification_email( trans, email ) + else: # activation is OFF + message, success, status = self.proceed_login( trans, user, redirect ) + return ( message, status, user, success ) + + def proceed_login ( self, trans, user, redirect ): + """ + Function processes user login. It is called in case all the login requirements are valid. + """ + trans.handle_user_login( user ) + if trans.webapp.name == 'galaxy': + trans.log_event( "User logged in" ) + message = 'You are now logged in as %s.<br>You can <a target="_top" href="%s">go back to the page you were visiting</a> or <a target="_top" href="%s">go to the home page</a>.' % \ + ( user.email, redirect, url_for( '/' ) ) + if trans.app.config.require_login: + message += ' <a target="_top" href="%s">Click here</a> to continue to the home page.' % web.url_for( controller="root", action="welcome" ) + success = True + status = 'done' + return message, success, status + + def resend_verification_email( self, trans, email ): + """ + Function resends the verification email in case user wants to log in with an inactive account. + """ + is_activation_sent = self.send_verification_email( trans, email ) + if is_activation_sent: + message = 'This account has not been activated yet. The activation link has been sent again. Please check your email address %s.<br>' % email else: - trans.handle_user_login( user ) - if trans.webapp.name == 'galaxy': - trans.log_event( "User logged in" ) - message = 'You are now logged in as %s.<br>You can <a target="_top" href="%s">go back to the page you were visiting</a> or <a target="_top" href="%s">go to the home page</a>.' % \ - ( user.email, redirect, url_for( '/' ) ) - if trans.app.config.require_login: - message += ' <a target="_top" href="%s">Click here</a> to continue to the home page.' % web.url_for( controller="root", action="welcome" ) - success = True - return ( message, status, user, success ) + message = 'This account has not been activated yet but we are unable to send the activation link. Please contact your local Galaxy administrator.' 
+ if trans.app.config.admin_email is not None: + message += 'Contact: %s' % trans.app.config.admin_email + return message + + def is_outside_grace_period ( self, trans, create_time ): + """ + Function checks whether the user is outside the config-defined grace period for inactive accounts. + """ + # Activation is forced and the user is not active yet. Check the grace period. + activation_grace_period = trans.app.config.activation_grace_period + # Default value is 3 hours. + if activation_grace_period is None: + activation_grace_period = 3 + delta = timedelta( hours = int( activation_grace_period ) ) + time_difference = datetime.utcnow() - create_time + return ( time_difference > delta or activation_grace_period == 0 ) @web.expose def logout( self, trans, logout_all=False ): @@ -533,7 +589,9 @@ redirect = kwd.get( 'redirect', trans.request.referer ).strip() is_admin = cntrller == 'admin' and trans.user_is_admin if not trans.app.config.allow_user_creation and not trans.user_is_admin(): - message = 'User registration is disabled. Please contact your Galaxy administrator for an account.' + message = 'User registration is disabled. Please contact your local Galaxy administrator for an account.' + if trans.app.config.admin_email is not None: + message += 'Contact: %s' % trans.app.config.admin_email status = 'error' else: if not refresh_frames: @@ -582,6 +640,8 @@ user_type_fd_id_select_field = None user_type_form_definition = None widgets = [] + # Warning message that is shown on the registration page. + registration_warning_message = trans.app.config.registration_warning_message return trans.fill_template( '/user/register.mako', cntrller=cntrller, email=email, @@ -594,6 +654,7 @@ redirect=redirect, redirect_url=redirect_url, refresh_frames=refresh_frames, + registration_warning_message=registration_warning_message, message=message, status=status ) @@ -606,6 +667,8 @@ user = trans.app.model.User( email=email ) user.set_password_cleartext( password ) user.username = username + if trans.app.config.user_activation_on: # Do not set the active flag in case activation is OFF. + user.active = False trans.sa_session.add( user ) trans.sa_session.flush() trans.app.security_agent.create_private_user_role( user ) @@ -633,7 +696,7 @@ if subscribe_checked: # subscribe user to email list if trans.app.config.smtp_server is None: - error = "Now logged in as " + user.email + ". However, subscribing to the mailing list has failed because mail is not configured for this Galaxy instance." + error = "Now logged in as " + user.email + ". However, subscribing to the mailing list has failed because mail is not configured for this Galaxy instance. <br>Please contact your local Galaxy administrator." else: body = 'Join Mailing list.\n' to = trans.app.config.mailing_join_addr @@ -659,9 +722,86 @@ status = 'error' success = False else: - message = 'Now logged in as %s.<br><a target="_top" href="%s">Return to the home page.</a>' % ( user.email, url_for( '/' ) ) - success = True + is_activation_sent = self.send_verification_email( trans, email ) + if is_activation_sent: + message = 'Now logged in as %s.<br>Verification email has been sent to your email address. Please verify it by clicking the activation link in the email.<br><a target="_top" href="%s">Return to the home page.</a>' % ( user.email, url_for( '/' ) ) + success = True + else: + message = 'Unable to send activation email, please contact your local Galaxy administrator.' 
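The grace-period test above is easy to exercise in isolation. A self-contained sketch, assuming (as the controller does) that activation_grace_period is expressed in hours, that an unset value falls back to 3, and that 0 disables the period:

from datetime import datetime, timedelta

def is_outside_grace_period( create_time, activation_grace_period=3 ):
    # Mirrors the controller: compare account age against the configured
    # number of hours; a value of 0 always counts as outside the period.
    delta = timedelta( hours=int( activation_grace_period ) )
    return datetime.utcnow() - create_time > delta or activation_grace_period == 0

print is_outside_grace_period( datetime.utcnow() - timedelta( hours=4 ) )     # True
print is_outside_grace_period( datetime.utcnow() - timedelta( minutes=30 ) )  # False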
+ if trans.app.config.admin_email is not None:
+ message += 'Contact: %s' % trans.app.config.admin_email
+ success = False
return ( message, status, user, success )
+
+ def send_verification_email( self, trans, email ):
+ """
+ Send the verification email containing the activation link to the user's email.
+ """
+ activation_link = self.prepare_activation_link( trans, email )
+
+ body = ("Hi %s,\n\n"
+ "Please click the activation link below in order to activate your account.\n\n"
+ "Activation link: %s \n\n"
+ "Your Galaxy Team" % ( email, activation_link ))
+ to = email
+ frm = trans.app.config.admin_email
+ subject = 'How to activate your Galaxy account'
+ try:
+ util.send_mail( frm, to, subject, body, trans.app.config )
+ return True
+ except:
+ return False
+
+ def prepare_activation_link( self, trans, email ):
+ """
+ Prepares the account activation link for the user.
+ """
+ activation_token = self.get_activation_token( trans, email )
+ host = trans.request.host.split( ':' )[ 0 ]
+ if host == 'localhost':
+ host = socket.getfqdn()
+ activation_link = str( trans.request.host ) + url_for( controller='user', action='activate' ) + "?activation_token=" + str( activation_token ) + "&email=" + urllib.quote( email )
+ return activation_link
+
+ def get_activation_token( self, trans, email ):
+ """
+ Checks for the activation token. Creates a new activation token and stores it in the database if none is found.
+ """
+ user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email == email ).first()
+ activation_token = user.activation_token
+ if activation_token is None:
+ activation_token = hash_util.new_secure_hash( str( random.getrandbits( 256 ) ) )
+ user.activation_token = activation_token
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ return activation_token
+
+ @web.expose
+ def activate( self, trans, **kwd ):
+ """
+ Check whether the token matches the user and, if so, activate the user's account.
+ """
+ params = util.Params( kwd, sanitize=False )
+ email = urllib.unquote( params.get( 'email', None ) )
+ activation_token = params.get( 'activation_token', None )
+
+ if email is None or activation_token is None:
+ # We don't have the email or activation_token, show error.
+ return trans.show_error_message( "You are using the wrong activation link. Try to log in and we will send you a new activation email.<br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+ else:
+ # Find the user.
+ user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first()
+ if user and user.activation_token == activation_token:
+ user.activation_token = None
+ user.active = True
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ return trans.show_ok_message( "Your account has been successfully activated!<br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+ else:
+ # Tokens don't match. Activation is denied.
+ return trans.show_error_message( "You are using the wrong activation link. Try to log in and we will send you a new activation email.<br><a href='%s'>Go to login page.</a>" % web.url_for( controller='root', action='index' ) )
+
def __get_user_type_form_definition( self, trans, user=None, **kwd ):
params = util.Params( kwd )
if user and user.values:
@@ -885,7 +1025,7 @@
@web.expose
def reset_password( self, trans, email=None, **kwd ):
if trans.app.config.smtp_server is None:
- return trans.show_error_message( "Mail is not configured for this Galaxy instance. Please contact an administrator." )
+ return trans.show_error_message( "Mail is not configured for this Galaxy instance. Please contact your local Galaxy administrator." )
message = util.restore_text( kwd.get( 'message', '' ) )
status = 'done'
if kwd.get( 'reset_password_button', False ):
@@ -1042,7 +1182,7 @@
phone = util.restore_text( params.get( 'phone', '' ) )
ok = True
if not trans.app.config.allow_user_creation and not is_admin:
- return trans.show_error_message( 'User registration is disabled. Please contact your Galaxy administrator for an account.' )
+ return trans.show_error_message( 'User registration is disabled. Please contact your local Galaxy administrator for an account.' )
if params.get( 'new_address_button', False ):
if not short_desc:
ok = False
diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -416,7 +416,8 @@
repository_id = kwd.get( 'id', None )
if repository_id:
repository = suc.get_repository_in_tool_shed( trans, repository_id )
- kwd[ 'user_id' ] = trans.security.encode_id( repository.user.id )
+ user_id = trans.security.encode_id( repository.user.id )
+ kwd[ 'user_id' ] = user_id
else:
# The user selected a repository revision which results in a refresh_on_change.
selected_changeset_revision, repository = suc.get_repository_from_refresh_on_change( trans, **kwd )
@@ -973,9 +974,9 @@
flush_needed = True
if flush_needed:
trans.sa_session.flush()
- message = "Repository '%s' has been created." % repository.name
+ message = "Repository <b>%s</b> has been created."
% str( repository.name ) trans.response.send_redirect( web.url_for( controller='repository', - action='view_repository', + action='manage_repository', message=message, id=trans.security.encode_id( repository.id ) ) ) repository_type_select_field = rt_util.build_repository_type_select_field( trans ) diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -160,8 +160,15 @@ if rd_key in [ 'root_key', 'description' ]: continue for rd_tup in rd_tups: - rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = \ - common_util.parse_repository_dependency_tuple( rd_tup ) + if len( rd_tup ) == 4: + rd_tool_shed, rd_name, rd_owner, rd_changeset_revision = \ + common_util.parse_repository_dependency_tuple( rd_tup ) + elif len( rd_tup ) == 5: + rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required = \ + common_util.parse_repository_dependency_tuple( rd_tup ) + elif len( rd_tup ) == 6: + rd_tool_shed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, only_if_compiling_contained_td = \ + common_util.parse_repository_dependency_tuple( rd_tup ) # TODO: Make sure the repository description is applied to the new repository record during installation. tool_shed_repository = self.create_or_update_tool_shed_repository_record( rd_name, rd_owner, rd_changeset_revision, description=None ) if tool_shed_repository: diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/galaxy_install/repository_util.py --- a/lib/tool_shed/galaxy_install/repository_util.py +++ b/lib/tool_shed/galaxy_install/repository_util.py @@ -100,19 +100,14 @@ continue # rd_key is something like: 'http://localhost:9009__ESEP__package_rdkit_2012_12__ESEP__test__ESEP__d635ff...' # rd_val is something like: [['http://localhost:9009', 'package_numpy_1_7', 'test', 'cddd64ecd985', 'True']] - try: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - container_util.get_components_from_key( rd_key ) - except: - tool_shed, name, owner, changeset_revision = container_util.get_components_from_key( rd_val ) + repository_components_tuple = container_util.get_components_from_key( rd_key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + tool_shed, name, owner, changeset_revision = components_list[ 0:4 ] installed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed, name, owner, changeset_revision ) if installed_repository not in installed_repositories: installed_repositories.append( installed_repository ) for rd_val in rd_vals: - try: - tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = rd_val - except: - tool_shed, name, owner, changeset_revision = rd_val + tool_shed, name, owner, changeset_revision = rd_val[ 0:4 ] installed_repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed, name, owner, changeset_revision ) if installed_repository not in installed_repositories: installed_repositories.append( installed_repository ) @@ -631,13 +626,10 @@ # Change the folder id so it won't confict with others being merged. 
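The rd_key strings handled in the hunk above pack all of the repository components into one string. A sketch of the unpacking, assuming the '__ESEP__' separator visible in the example key in the comments, with the 4-, 5-, and 6-item variants this changeset deals with (components_from_key is a hypothetical stand-in for container_util.get_components_from_key):

def components_from_key( key ):
    # 4 items for keys written before the 12/20/12 Galaxy release; the
    # prior_installation_required and only_if_compiling_contained_td flags
    # were appended later, so default them to 'False' when absent.
    items = key.split( '__ESEP__' )
    toolshed, name, owner, changeset_revision = items[ 0:4 ]
    prior_installation_required = items[ 4 ] if len( items ) > 4 else 'False'
    only_if_compiling_contained_td = items[ 5 ] if len( items ) > 5 else 'False'
    return toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td

print components_from_key( 'http://localhost:9009__ESEP__package_rdkit_2012_12__ESEP__test__ESEP__d635ffb9c665' )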
old_container_repository_dependencies_folder.id = folder_id folder_id += 1 + repository_components_tuple = container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + name = components_list[ 1 ] # Generate the label by retrieving the repository name. - try: - toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release. - toolshed, name, owner, changeset_revision = container_util.get_components_from_key( old_container_repository_dependencies_folder.key ) old_container_repository_dependencies_folder.label = str( name ) repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder ) # Merge tool_dependencies. diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/util/common_install_util.py --- a/lib/tool_shed/util/common_install_util.py +++ b/lib/tool_shed/util/common_install_util.py @@ -352,19 +352,20 @@ for key, val in repository_dependencies.items(): if key in [ 'root_key', 'description' ]: continue - try: - toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - container_util.get_components_from_key( key ) - components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ] - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required and only_if_compiling_contained_td - # to False in the caller. - toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key ) - components_list = [ toolshed, name, owner, changeset_revision ] - only_if_compiling_contained_td = 'False' + repository_components_tuple = container_util.get_components_from_key( key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) # Skip listing a repository dependency if it is required only to compile a tool dependency defined for the dependent repository since # in this case, the repository dependency is really a dependency of the dependent repository's contained tool dependency, and only if # that tool dependency requires compilation. + # For backward compatibility to the 12/20/12 Galaxy release. 
+ prior_installation_required = 'False' + only_if_compiling_contained_td = 'False' + if len( components_list ) == 4: + prior_installation_required = 'False' + only_if_compiling_contained_td = 'False' + elif len( components_list ) == 5: + prior_installation_required = components_list[ 4 ] + only_if_compiling_contained_td = 'False' if not util.asbool( only_if_compiling_contained_td ): if components_list not in required_repository_tups: required_repository_tups.append( components_list ) diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/util/container_util.py --- a/lib/tool_shed/util/container_util.py +++ b/lib/tool_shed/util/container_util.py @@ -855,7 +855,7 @@ folder_id, data_managers_root_folder = build_invalid_data_managers_folder( trans, folder_id, data_managers, error_messages, label="Invalid Data Managers" ) containers_dict[ 'invalid_data_managers' ] = data_managers_root_folder except Exception, e: - log.debug( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) ) + log.exception( "Exception in build_repository_containers_for_tool_shed: %s" % str( e ) ) finally: lock.release() return containers_dict @@ -1325,29 +1325,31 @@ repository_name = items[ 1 ] repository_owner = items[ 2 ] changeset_revision = items[ 3 ] - if len( items ) >= 5: - try: - prior_installation_required = items[ 4 ] - except: - prior_installation_required = 'False' - try: - only_if_compiling_contained_td = items[ 5 ] - except: - only_if_compiling_contained_td = 'False' + if len( items ) == 5: + prior_installation_required = items[ 4 ] + return toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required + elif len( items ) == 6: + prior_installation_required = items[ 4 ] + only_if_compiling_contained_td = items[ 5 ] return toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td else: # For backward compatibility to the 12/20/12 Galaxy release we have to return the following, and callers must handle exceptions. return toolshed_base_url, repository_name, repository_owner, changeset_revision def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ): - try: - toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - get_components_from_key( rd_key ) - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required and only_if_compiling_contained_td to 'False'. - toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key ) + repository_components_tuple = get_components_from_key( rd_key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + toolshed, repository_name, repository_owner, changeset_revision = components_list[ 0:4 ] + # For backward compatibility to the 12/20/12 Galaxy release. 
+ if len( components_list ) == 4: prior_installation_required = 'False' only_if_compiling_contained_td = 'False' + elif len( components_list ) == 5: + prior_installation_required = components_list[ 4 ] + only_if_compiling_contained_td = 'False' + elif len( components_list ) == 6: + prior_installation_required = components_list[ 4 ] + only_if_compiling_contained_td = components_list[ 5 ] folder = get_folder( repository_dependencies_folder, rd_key ) label = generate_repository_dependencies_folder_label_from_key( repository_name, repository_owner, @@ -1416,14 +1418,19 @@ return False def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, key ): - try: - toolshed_base_url, key_name, key_owner, key_changeset_revision, key_prior_installation_required, key_only_if_compiling_contained_td = \ - get_components_from_key( key ) - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release, default key_prior_installation_required to False. - toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key ) + repository_components_tuple = get_components_from_key( key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + toolshed, key_name, key_owner, key_changeset_revision = components_list[ 0:4 ] + # For backward compatibility to the 12/20/12 Galaxy release. + if len( components_list ) == 4: key_prior_installation_required = 'False' key_only_if_compiling_contained_td = 'False' + elif len( components_list ) == 5: + key_prior_installation_required = components_list[ 4 ] + key_only_if_compiling_contained_td = 'False' + elif len( components_list ) == 6: + key_prior_installation_required = components_list[ 4 ] + key_only_if_compiling_contained_td = components_list[ 5 ] if repository_name == key_name and \ repository_owner == key_owner and \ changeset_revision == key_changeset_revision and \ diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/util/metadata_util.py --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -1896,7 +1896,7 @@ changeset_revisions.append( changeset_revision ) add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions ) elif len( repo ) == 1 and not invalid_file_tups: - message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip( trans.app ) ) + message = "Revision <b>%s</b> includes no Galaxy utilities for which metadata can " % str( repository.tip( trans.app ) ) message += "be defined so this revision cannot be automatically installed into a local Galaxy instance." status = "error" if invalid_file_tups: diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/util/repository_dependency_util.py --- a/lib/tool_shed/util/repository_dependency_util.py +++ b/lib/tool_shed/util/repository_dependency_util.py @@ -36,12 +36,9 @@ if key in [ 'root_key', 'description' ]: continue d_repository = None - try: - d_toolshed, d_name, d_owner, d_changeset_revision, d_prior_installation_required, d_only_if_compiling_contained_td = \ - container_util.get_components_from_key( key ) - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release. 
- d_toolshed, d_name, d_owner, d_changeset_revision = container_util.get_components_from_key( key ) + repository_components_tuple = container_util.get_components_from_key( key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + d_toolshed, d_name, d_owner, d_changeset_revision = components_list[ 0:4 ] for tsr in tool_shed_repositories: # Get the the tool_shed_repository defined by name, owner and changeset_revision. This is the repository that will be # dependent upon each of the tool shed repositories contained in val. We'll need to check tool_shed_repository.tool_shed @@ -437,22 +434,22 @@ # We have the updated changset revision. updated_key_rd_dicts.append( new_key_rd_dict ) else: - try: - toolshed, repository_name, repository_owner, repository_changeset_revision, prior_installation_required, rd_only_if_compiling_contained_td = \ - container_util.get_components_from_key( key ) - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release. - toolshed, repository_name, repository_owner, repository_changeset_revision = container_util.get_components_from_key( key ) + repository_components_tuple = container_util.get_components_from_key( key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ] + # For backward compatibility to the 12/20/12 Galaxy release. + if len( components_list ) == 4: + prior_installation_required = 'False' + rd_only_if_compiling_contained_td = 'False' + elif len( components_list ) == 5: + rd_only_if_compiling_contained_td = 'False' message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." % \ ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) ) log.debug( message ) else: - try: - toolshed, repository_name, repository_owner, repository_changeset_revision, prior_installation_required, only_if_compiling_contained_td = \ - container_util.get_components_from_key( key ) - except ValueError: - # For backward compatibility to the 12/20/12 Galaxy release. - toolshed, repository_name, repository_owner, repository_changeset_revision = container_util.get_components_from_key( key ) + repository_components_tuple = container_util.get_components_from_key( key ) + components_list = suc.extract_components_from_tuple( repository_components_tuple ) + toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ] message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." 
% \ ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) ) log.debug( message ) diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -307,6 +307,21 @@ sa_session.flush() return tool_shed_repository +def extract_components_from_tuple( repository_components_tuple ): + '''Extract the repository components from the provided tuple in a backward-compatible manner.''' + toolshed = repository_components_tuple[ 0 ] + name = repository_components_tuple[ 1 ] + owner = repository_components_tuple[ 2 ] + changeset_revision = repository_components_tuple[ 3 ] + components_list = [ toolshed, name, owner, changeset_revision ] + if len( repository_components_tuple ) == 5: + toolshed, name, owner, changeset_revision, prior_installation_required = repository_components_tuple + components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required ] + elif len( repository_components_tuple ) == 6: + toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = repository_components_tuple + components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ] + return components_list + def generate_clone_url_for_installed_repository( app, repository ): """Generate the URL for cloning a repository that has been installed into a Galaxy instance.""" tool_shed_url = get_url_from_tool_shed( app, repository.tool_shed ) diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 lib/tool_shed/util/workflow_util.py --- a/lib/tool_shed/util/workflow_util.py +++ b/lib/tool_shed/util/workflow_util.py @@ -1,4 +1,5 @@ import logging +import os import galaxy.tools import galaxy.tools.parameters import galaxy.webapps.galaxy.controllers.workflow @@ -151,7 +152,7 @@ def new( self, trans, type, tools_metadata=None, tool_id=None ): """Return module for type and (optional) tool_id initialized with new / default state.""" assert type in self.module_types - return self.module_types[type].new( trans, tool_id ) + return self.module_types[ type ].new( trans, tool_id ) def from_dict( self, trans, repository_id, changeset_revision, step_dict, **kwd ): """Return module initialized from the data in dictionary `step_dict`.""" @@ -219,13 +220,11 @@ tool_errors = module.type == 'tool' and not module.tool module_data_inputs = get_workflow_data_inputs( step, module ) module_data_outputs = get_workflow_data_outputs( step, module, workflow.steps ) - step_dict = { - 'id' : step.order_index, - 'data_inputs' : module_data_inputs, - 'data_outputs' : module_data_outputs, - 'position' : step.position, - 'tool_errors' : tool_errors - } + step_dict = { 'id' : step.order_index, + 'data_inputs' : module_data_inputs, + 'data_outputs' : module_data_outputs, + 'position' : step.position, + 'tool_errors' : tool_errors } input_conn_dict = {} for conn in step.input_connections: input_conn_dict[ conn.input_name ] = dict( id=conn.output_step.order_index, output_name=conn.output_name ) @@ -401,8 +400,9 @@ post_job_actions = step_dict.get( 'post_job_actions', {} ) for name, pja_dict in post_job_actions.items(): trans.model.PostJobAction( pja_dict[ 'action_type' ], - step, pja_dict[ 'output_name' ], - pja_dict[ 'action_arguments' ] ) + step, + pja_dict[ 'output_name' ], + pja_dict[ 'action_arguments' ] ) 
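Since extract_components_from_tuple() (added to shed_util_common.py above) is now the single place that normalizes these tuples, a quick usage sketch with the three tuple shapes seen in this changeset; the values are taken from the example comments in the diff, and running it assumes Galaxy's lib directory is on the path:

from tool_shed.util import shed_util_common as suc

old_style = ( 'http://localhost:9009', 'package_numpy_1_7', 'test', 'cddd64ecd985' )
with_prior = old_style + ( 'True', )           # adds prior_installation_required
with_compile_flag = with_prior + ( 'False', )  # adds only_if_compiling_contained_td

for components_tuple in ( old_style, with_prior, with_compile_flag ):
    # Callers slice the first four items and treat the optional flags as 'False' when missing.
    print suc.extract_components_from_tuple( components_tuple )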
steps.append( step )
steps_by_external_id[ step_dict[ 'id' ] ] = step
# Second pass to deal with connections between steps.
@@ -433,6 +433,64 @@
break
return module_name
+def import_workflow( trans, repository, workflow_name ):
+ """Import a workflow contained in an installed tool shed repository into Galaxy (this method is called only from Galaxy)."""
+ status = 'done'
+ message = ''
+ changeset_revision = repository.changeset_revision
+ metadata = repository.metadata
+ workflows = metadata.get( 'workflows', [] )
+ tools_metadata = metadata.get( 'tools', [] )
+ workflow_dict = None
+ for workflow_data_tuple in workflows:
+ # The value of workflow_data_tuple is ( relative_path_to_workflow_file, exported_workflow_dict ).
+ relative_path_to_workflow_file, exported_workflow_dict = workflow_data_tuple
+ if exported_workflow_dict[ 'name' ] == workflow_name:
+ # If the exported workflow is available on disk, import it.
+ if os.path.exists( relative_path_to_workflow_file ):
+ workflow_file = open( relative_path_to_workflow_file, 'rb' )
+ workflow_data = workflow_file.read()
+ workflow_file.close()
+ workflow_dict = json.from_json_string( workflow_data )
+ else:
+ # Use the current exported_workflow_dict.
+ workflow_dict = exported_workflow_dict
+ break
+ if workflow_dict:
+ # Create workflow if possible.
+ workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
+ workflow_dict=workflow_dict,
+ tools_metadata=tools_metadata,
+ repository_id=repository.id,
+ changeset_revision=changeset_revision )
+ # Save the workflow in the Galaxy database. Pass workflow_dict along to create annotation at this point.
+ stored_workflow = save_workflow( trans, workflow, workflow_dict )
+ # Use the latest version of the saved workflow.
+ workflow = stored_workflow.latest_workflow
+ if workflow_name:
+ workflow.name = workflow_name
+ # Provide user feedback and show workflow list.
+ if workflow.has_errors:
+ message += "Imported, but some steps in this workflow have validation errors. "
+ status = "error"
+ if workflow.has_cycles:
+ message += "Imported, but this workflow contains cycles. "
+ status = "error"
+ else:
+ message += "Workflow <b>%s</b> imported successfully. " % workflow.name
+ if missing_tool_tups:
+ name_and_id_str = ''
+ for missing_tool_tup in missing_tool_tups:
+ tool_id, tool_name, other = missing_tool_tup
+ name_and_id_str += 'name: %s, id: %s' % ( str( tool_name ), str( tool_id ) )
+ message += "The following tools required by this workflow are missing from this Galaxy instance: %s. " % name_and_id_str
+ else:
+ workflow = None
+ message += 'The workflow named %s is not included in the metadata for revision %s of repository %s. ' % \
+ ( str( workflow_name ), str( changeset_revision ), str( repository.name ) )
+ status = 'error'
+ return workflow, status, message
+
def save_workflow( trans, workflow, workflow_dict = None):
"""Use the received in-memory Workflow object for saving to the Galaxy database."""
stored = trans.model.StoredWorkflow()
diff -r 5db5c07f9154039f44447ebecb4f87832dbaf030 -r d0bbd2a2b4a2c5eb7b0eb8684a1cd030fd01f269 scripts/api/import_workflows_from_installed_tool_shed_repository.py
--- /dev/null
+++ b/scripts/api/import_workflows_from_installed_tool_shed_repository.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+"""
+Import one or more exported workflows contained within a specified tool shed repository installed into Galaxy.
+
+Here is a working example of how to use this script to import the workflows contained in a repository installed into Galaxy.
+python ./import_workflows_from_installed_tool_shed_repository.py -a 22be3b -l http://localhost:8763/ -n workflow_with_tools -o test -r ef45bb64237e -u http://localhost:9009/
+"""
+
+import os
+import sys
+import argparse
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import display
+from common import submit
+
+def clean_url( url ):
+ if url.find( '//' ) > 0:
+ # We have a URL that includes a protocol, something like: http://localhost:9009
+ items = url.split( '//' )
+ return items[ 1 ].rstrip( '/' )
+ return url.rstrip( '/' )
+
+def main( options ):
+ api_key = options.api
+ base_galaxy_url = options.local_url.rstrip( '/' )
+ base_tool_shed_url = options.tool_shed_url.rstrip( '/' )
+ cleaned_tool_shed_url = clean_url( base_tool_shed_url )
+ installed_tool_shed_repositories_url = '%s/api/tool_shed_repositories' % base_galaxy_url
+ tool_shed_repository_id = None
+ installed_tool_shed_repositories = display( api_key, installed_tool_shed_repositories_url, return_formatted=False )
+ for installed_tool_shed_repository in installed_tool_shed_repositories:
+ tool_shed = str( installed_tool_shed_repository[ 'tool_shed' ] )
+ name = str( installed_tool_shed_repository[ 'name' ] )
+ owner = str( installed_tool_shed_repository[ 'owner' ] )
+ changeset_revision = str( installed_tool_shed_repository[ 'changeset_revision' ] )
+ if tool_shed == cleaned_tool_shed_url and name == options.name and owner == options.owner and changeset_revision == options.changeset_revision:
+ tool_shed_repository_id = installed_tool_shed_repository[ 'id' ]
+ break
+ if tool_shed_repository_id:
+ # Get the list of exported workflows contained in the installed repository.
+ url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/exported_workflows' % str( tool_shed_repository_id ) )
+ exported_workflows = display( api_key, url, return_formatted=False )
+ if exported_workflows:
+ # Import all of the workflows in the list of exported workflows.
+ data = {}
+ # NOTE: to import a single workflow, add an index to data (e.g.,
+ # data[ 'index' ] = 0
+ # and change the url to be ~/import_workflow (singular). For example,
+ # url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflow' % str( tool_shed_repository_id ) )
+ url = '%s%s' % ( base_galaxy_url, '/api/tool_shed_repositories/%s/import_workflows' % str( tool_shed_repository_id ) )
+ submit( options.api, url, data )
+ else:
+ print "Invalid tool_shed / name / owner / changeset_revision."
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser( description='Import workflows contained in an installed tool shed repository via the Galaxy API.' )
+ parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+ parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+ parser.add_argument( "-l", "--local", dest="local_url", required=True, help="URL of the Galaxy instance." )
+ parser.add_argument( "-n", "--name", required=True, help="Repository name." )
+ parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
+ parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository changeset revision." )
+ options = parser.parse_args()
+ main( options )

This diff is so big that we needed to truncate the remainder.

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.