galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
September 2014
- 2 participants
- 236 discussions
commit/galaxy-central: guerler: ToolForm: Implements error handling, applies fixes to styles and structure, UI: Fixes code style for frames
by commits-noreply@bitbucket.org 11 Sep '14
by commits-noreply@bitbucket.org 11 Sep '14
11 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/deb003947e0e/
Changeset: deb003947e0e
User: guerler
Date: 2014-09-11 18:02:21
Summary: ToolForm: Implements error handling, applies fixes to styles and structure, UI: Fixes code style for frames
Affected #: 11 files
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/mvc/tools/tools-form.js
--- a/static/scripts/mvc/tools/tools-form.js
+++ b/static/scripts/mvc/tools/tools-form.js
@@ -1,8 +1,11 @@
+/*
+ This is the main class of the tool form plugin. It is referenced as 'app' in all lower level modules.
+*/
define(['mvc/ui/ui-portlet', 'mvc/ui/ui-misc',
'mvc/citation/citation-model', 'mvc/citation/citation-view',
- 'mvc/tools', 'mvc/tools/tools-template', 'mvc/tools/tools-datasets', 'mvc/tools/tools-section', 'mvc/tools/tools-tree'],
+ 'mvc/tools', 'mvc/tools/tools-template', 'mvc/tools/tools-datasets', 'mvc/tools/tools-section', 'mvc/tools/tools-tree', 'mvc/tools/tools-jobs'],
function(Portlet, Ui, CitationModel, CitationView,
- Tools, ToolTemplate, ToolDatasets, ToolSection, ToolTree) {
+ Tools, ToolTemplate, ToolDatasets, ToolSection, ToolTree, ToolJobs) {
// create tool model
var Model = Backbone.Model.extend({
@@ -32,12 +35,18 @@
// creates a tree/json structure from the input form
this.tree = new ToolTree(this);
+ // creates the job handler
+ this.job_handler = new ToolJobs(this);
+
// reset field list
this.field_list = {};
// reset sequential input definition list
this.input_list = {};
+ // reset input element definition list
+ this.element_list = {};
+
// initialize datasets
this.datasets = new ToolDatasets({
history_id : this.options.history_id,
@@ -47,6 +56,13 @@
});
},
+ // reset form
+ reset: function() {
+ for (var i in this.element_list) {
+ this.element_list[i].reset();
+ }
+ },
+
// initialize tool form
_initializeToolForm: function() {
// link this
@@ -102,7 +118,7 @@
title : 'Execute',
floating : 'clear',
onclick : function() {
- self._submit();
+ self.job_handler.submit();
}
})
},
@@ -157,7 +173,7 @@
// trigger refresh
self.refresh();
- self._submit();
+ //self.job_handler.submit();
}
});
},
@@ -174,11 +190,6 @@
// log
console.debug('tools-form::refresh() - Recreated tree structure. Refresh.');
- },
-
- // submit
- _submit: function() {
- console.log(this.tree.finalize());
}
});
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/mvc/tools/tools-jobs.js
--- a/static/scripts/mvc/tools/tools-jobs.js
+++ b/static/scripts/mvc/tools/tools-jobs.js
@@ -1,9 +1,7 @@
-// dependencies
+/*
    This class handles job submissions and error handling.
+*/
define(['utils/utils'], function(Utils) {
-
-/**
- * This class handles job submissions.
- */
return Backbone.Model.extend({
// initialize
initialize: function(app, options) {
@@ -15,34 +13,49 @@
},
// create job
- submit: function(data, success, error) {
+ submit: function() {
// link this
var self = this;
+ // create job definition for submission to tools api
+ var job_def = {
+ tool_id : this.app.options.id,
+ inputs : this.app.tree.finalize()
+ }
+
+ // reset
+ this.app.reset();
+
// post job
- Utils.request('POST', config.root + 'api/tools', data,
+ Utils.request('POST', galaxy_config.root + 'api/tools', job_def,
// success handler
function(response) {
if (!response.outputs || response.outputs.length == 0) {
- // call error
- error && error();
- } else {
- // update galaxy history
console.log(response);
}
+ self._refreshHdas();
},
// error handler
function(response) {
- var message = '';
- if (response && response.message && response.message.data && response.message.data.input) {
- message = response.message.data.input + '.';
+ if (response && response.message && response.message.data) {
+ var error_messages = self.app.tree.match(response.message.data);
+ for (var id in error_messages) {
+ var error_text = error_messages[id];
+ if (!error_text) {
+ error_text = 'Please verify this parameter.';
+ }
+ self.app.element_list[id].error(error_text);
+ }
}
-
- // call error
- error && error();
-
}
);
+ },
+
+ // refresh history panel
+ _refreshHdas: function() {
+ if (parent.Galaxy && parent.Galaxy.currHistoryPanel) {
+ parent.Galaxy.currHistoryPanel.refreshContents();
+ }
}
});
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/mvc/tools/tools-section.js
--- a/static/scripts/mvc/tools/tools-section.js
+++ b/static/scripts/mvc/tools/tools-section.js
@@ -1,6 +1,60 @@
+/*
+ This class creates a tool form section and populates it with input elements. It also handles repeat blocks and conditionals by recursively creating new sub sections. New input elements can be plugged in by adding cases to the switch block defined in the _addRow() function.
+*/
define(['utils/utils', 'mvc/ui/ui-table', 'mvc/ui/ui-misc', 'mvc/ui/ui-tabs', 'mvc/tools/tools-select-dataset'],
function(Utils, Table, Ui, Tabs, SelectDataset) {
+ // input field element wrapper
+ var InputElement = Backbone.View.extend({
+ // initialize input wrapper
+ initialize: function(options) {
+ this.setElement(this._template(options));
+ },
+
+ // set error text
+ error: function(text) {
+ // set text
+ this.$el.find('.ui-table-form-error-text').html(text);
+ this.$el.find('.ui-table-form-error').fadeIn();
+ this.$el.addClass('ui-table-row-error');
+ },
+
+ // reset
+ reset: function() {
+ this.$el.find('.ui-table-form-error').hide();
+ this.$el.removeClass('ui-table-form-error');
+ },
+
+ // template
+ _template: function(options) {
+ var $input;
+ if (options.highlight) {
+ $input = $('<div class="ui-table-element ui-table-form-section"/>');
+ } else {
+ $input = $('<div class="ui-table-element"/>');
+ }
+
+ // add error
+ $input.append('<div class="ui-table-form-error"><span class="fa fa-arrow-down"/><span class="ui-table-form-error-text"></div>');
+
+ // add label
+ if (options.label) {
+ $input.append('<div class="ui-table-form-title-strong">' + options.label + '</div>');
+ }
+
+ // add input element
+ $input.append(options.$el);
+
+ // add help
+ if (options.help) {
+ $input.append('<div class="ui-table-form-info">' + options.help + '</div>');
+ }
+
+ // return input element
+ return $input;
+ }
+ });
+
// create form view
var View = Backbone.View.extend({
// initialize
@@ -76,7 +130,7 @@
input_def.value = input_def.test_param.value;
// build options field
- this._addRow('conditional', input_def);
+ var table_row = this._addRow('conditional', input_def);
// add fields
for (var i in input_def.cases) {
@@ -89,13 +143,16 @@
cls : 'ui-table-plain'
});
+ // create input field wrapper
+ var input_element = new InputElement({
+ label : '',
+ help : input_def.help,
+ $el : sub_section.$el,
+ highlight : true
+ });
+
// create table row
- this.table.add(this._create_field({
- label : '',
- help : input_def.help,
- $el : sub_section.$el,
- color : true
- }));
+ this.table.add(input_element.$el);
// append to table
this.table.append(sub_section_id);
@@ -175,13 +232,16 @@
// retitle tabs
tabs.retitle(input_def.title);
+ // create input field wrapper
+ var input_element = new InputElement({
+ label : input_def.title,
+ help : input_def.help,
+ $el : tabs.$el,
+ highlight : true
+ });
+
// create table row
- this.table.add(this._create_field({
- label : input_def.title,
- help : input_def.help,
- $el : tabs.$el,
- color : true
- }));
+ this.table.add(input_element.$el);
// append row to table
this.table.append(input_def.id);
@@ -265,15 +325,24 @@
// add to field list
this.app.field_list[id] = field;
- // create table row
- this.table.add(this._create_field({
+ // create input field wrapper
+ var input_element = new InputElement({
label : input_def.label,
help : input_def.help,
$el : field.$el
- }));
+ });
+
+ // add to element list
+ this.app.element_list[id] = input_element;
+
+ // create table row
+ this.table.add(input_element.$el);
// append to table
this.table.append(id);
+
+ // return table row
+ return this.table.get(id)
},
// conditional input field
@@ -349,7 +418,7 @@
}
// get referenced columns
- var column_list = self.app.tree.findReferences(id, 'data_column');
+ var column_list = self.app.tree.references(id, 'data_column');
// find selected dataset
var dataset = self.app.datasets.filter(value);
@@ -385,11 +454,20 @@
// get column type
var column_type = meta[key];
+ // column index
+ var column_index = (parseInt(key) + 1);
+
+ // column type label
+ var column_label = 'Text';
+ if (column_type == 'int' || column_type == 'float') {
+ column_label = 'Number';
+ }
+
// add to selection
if (column_type == 'int' || column_type == 'float' || !numerical) {
columns.push({
- 'label' : 'Column: ' + (parseInt(key) + 1) + ' [' + meta[key] + ']',
- 'value' : key
+ 'label' : 'Column: ' + column_index + ' [' + column_label + ']',
+ 'value' : column_index
});
}
}
@@ -485,27 +563,9 @@
_field_boolean : function(input_def) {
return new Ui.RadioButton.View({
id : 'field-' + input_def.id,
- data : [ { label : 'Yes', value : true },
- { label : 'No', value : false }]
+ data : [ { label : 'Yes', value : 'true' },
+ { label : 'No', value : 'false' }]
});
- },
-
- // create a field element with title and help information
- _create_field: function(options) {
- var $input;
- if (options.color) {
- $input = $('<div class="ui-table-form-section"/>');
- } else {
- $input = $('<div/>');
- }
- if (options.label) {
- $input.append('<div class="ui-table-form-title-strong">' + options.label + '</div>');
- }
- $input.append(options.$el);
- if (options.help) {
- $input.append('<div class="ui-table-form-info">' + options.help + '</div>');
- }
- return $input;
}
});
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/mvc/tools/tools-tree.js
--- a/static/scripts/mvc/tools/tools-tree.js
+++ b/static/scripts/mvc/tools/tools-tree.js
@@ -1,3 +1,6 @@
+/*
+    This class maps the tool form to javascript datastructures. Once refreshed it converts the tool form (including sub sections) into an XML (containing only ids) and a detailed dictionary representation. The XML object is a jquery element and can be searched/filtered e.g. in order to hierarchically identify referenced fields. Once the job is ready for submission, the finalize function will transform the generic dictionary representation into the specific flat dictionary format required by the tools api.
+*/
// dependencies
define([], function() {
@@ -9,7 +12,8 @@
this.app = app;
},
- // creates tree structure
+ /** Refresh the datastructures representing the ToolForm.
+ */
refresh: function() {
// create dictionary
this.dict = {};
@@ -26,13 +30,23 @@
this._iterate(this.app.section.$el, this.dict, this.xml);
},
- // convert to job dictionary
+ /** Convert dictionary representation into tool api specific flat dictionary format.
+ */
finalize: function() {
// link this
var self = this;
// dictionary formatted for job submission
- var job_def = {};
+ this.job_def = {};
+
+ // dictionary with api specific identifiers
+ this.job_ids = {};
+
+ // add identifier and value to job definition
+ function add(job_input_id, input_id, input_value) {
+ self.job_def[job_input_id] = input_value;
+ self.job_ids[job_input_id] = input_id;
+ };
// converter between raw dictionary and job dictionary
function convert(identifier, head) {
@@ -65,6 +79,9 @@
// get conditional value
var value = self.app.field_list[input.id].value();
+ // add conditional value
+ add (job_input_id + '|' + input.test_param.name, input.id, value);
+
// find selected case
for (var j in input.cases) {
if (input.cases[j].value == value) {
@@ -72,13 +89,28 @@
break;
}
}
-
- // break
+ break;
+ // handle data inputs
+ case 'data':
+ var value = {
+ id : self.app.field_list[input.id].value(),
+ src : 'hda'
+ }
+ add(job_input_id, input.id, value);
+ break;
+ // handle boolean input
+ case 'boolean':
+ var value = self.app.field_list[input.id].value();
+ if (value === 'true') {
+ value = input.truevalue;
+ } else {
+ value = input.falsevalue;
+ }
+ add (job_input_id, input.id, value);
break;
default:
// handle default value
- var value = self.app.field_list[input.id].value();
- job_def[job_input_id] = value;
+ add (job_input_id, input.id, self.app.field_list[input.id].value());
}
}
}
@@ -88,11 +120,46 @@
convert('', this.dict);
// return result
- return job_def;
+ return this.job_def;
},
- // find referenced elements
- findReferences: function(identifier, type) {
+ /** Matches identifier from api response to input element
+ */
+ match: function(response) {
+ // final result dictionary
+ var result = {};
+
+ // link this
+ var self = this;
+
+ // search throughout response
+ function search (id, head) {
+ if (typeof head === 'string') {
+ var input_id = self.app.tree.job_ids[id];
+ if (input_id) {
+ result[input_id] = head;
+ }
+ } else {
+ for (var i in head) {
+ var new_id = i;
+ if (id !== '') {
+ new_id = id + '|' + new_id;
+ }
+ search (new_id, head[i]);
+ }
+ }
+ }
+
+ // match all ids and return messages
+ search('', response);
+
+ // return matched results
+ return result;
+ },
+
+ /** Find referenced elements.
+ */
+ references: function(identifier, type) {
// referenced elements
var referenced = [];
@@ -164,7 +231,8 @@
return referenced;
},
- // iterate
+ /** Iterate through the tool form dom and map it to the dictionary and xml representation.
+ */
_iterate: function(parent, dict, xml) {
// get child nodes
var self = this;
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/mvc/ui/ui-frames.js
--- a/static/scripts/mvc/ui/ui-frames.js
+++ b/static/scripts/mvc/ui/ui-frames.js
@@ -83,10 +83,10 @@
this.setElement(this._template());
// load background
- $(this.el).append(this._template_background());
+ $(this.el).append(this._templateBackground());
// load menu buttons
- $(this.el).append(this._template_menu());
+ $(this.el).append(this._templateMenu());
// load to main frame
$(this.el_main).append($(this.el));
@@ -97,7 +97,7 @@
var id_shadow = '#frame-shadow';
// add shadow template
- $(this.el).append(this._template_shadow(id_shadow.substring(1)));
+ $(this.el).append(this._templateShadow(id_shadow.substring(1)));
// initialize frame
this.frame_shadow = {
@@ -109,13 +109,13 @@
};
// initialize size
- this._frame_resize(this.frame_shadow, {width: 0, height: 0});
+ this._frameResize(this.frame_shadow, {width: 0, height: 0});
// add shadow to frame list
this.frame_list[id_shadow] = this.frame_shadow;
// initialize panel
- this._panel_refresh();
+ this._panelRefresh();
// apply visibility
if (!this.visible) {
@@ -128,7 +128,7 @@
var self = this;
$(window).resize(function () {
if (self.visible)
- self._panel_refresh();
+ self._panelRefresh();
});
},
@@ -183,9 +183,9 @@
// append
var $frame_el = null;
if (options.type === 'url') {
- $frame_el = $(this._template_frame_url(frame_id.substring(1), options.title, options.content));
+ $frame_el = $(this._templateFrameUrl(frame_id.substring(1), options.title, options.content));
} else if (options.type === 'other') {
- $frame_el = $(this._template_frame(frame_id.substring(1), options.title));
+ $frame_el = $(this._templateFrame(frame_id.substring(1), options.title));
// Load content into frame.
var content_elt = $frame_el.find('.f-content');
@@ -208,8 +208,8 @@
};
// set dimensions
- options.width = this._to_pixel_coord('width', this.options.frame.cols);
- options.height = this._to_pixel_coord('height', this.options.frame.rows);
+ options.width = this._toPixelCoord('width', this.options.frame.cols);
+ options.height = this._toPixelCoord('height', this.options.frame.rows);
// default z-index
this.frame_z = parseInt($(frame.id).css('z-index'));
@@ -221,10 +221,10 @@
this.frame_counter++;
// resize
- this._frame_resize(frame, {width: options.width, height: options.height});
+ this._frameResize(frame, {width: options.width, height: options.height});
// place frame
- this._frame_insert(frame, {top: 0, left: 0}, true);
+ this._frameInsert(frame, {top: 0, left: 0}, true);
// show frames if hidden
if (!this.visible) {
@@ -247,10 +247,10 @@
this.$el.find(".frame-background").show();
// show panel
- this._panel_refresh();
+ this._panelRefresh();
// refresh
- this._menu_refresh();
+ this._menuRefresh();
},
// hide panel
@@ -272,7 +272,7 @@
this.$el.find(".frame-menu").hide();
// refresh
- this._menu_refresh();
+ this._menuRefresh();
},
// length
@@ -299,23 +299,23 @@
// events
events: {
// global frame events
- 'mousemove' : '_event_frame_mouse_move',
- 'mouseup' : '_event_frame_mouse_up',
- 'mouseleave' : '_event_frame_mouse_up',
- 'mousewheel' : '_event_panel_scroll',
- 'DOMMouseScroll' : '_event_panel_scroll',
+ 'mousemove' : '_eventFrameMouseMove',
+ 'mouseup' : '_eventFrameMouseUp',
+ 'mouseleave' : '_eventFrameMouseUp',
+ 'mousewheel' : '_eventPanelScroll',
+ 'DOMMouseScroll' : '_eventPanelScroll',
// events fixed to elements
- 'mousedown .frame' : '_event_frame_mouse_down',
- 'mousedown .frame-background' : '_event_hide',
- 'mousedown .frame-scroll-up' : '_event_panel_scroll_up',
- 'mousedown .frame-scroll-down' : '_event_panel_scroll_down',
- 'mousedown .f-close' : '_event_frame_close',
- 'mousedown .f-pin' : '_event_frame_lock'
+ 'mousedown .frame' : '_eventFrameMouseDown',
+ 'mousedown .frame-background' : '_eventHide',
+ 'mousedown .frame-scroll-up' : '_eventPanelScroll_up',
+ 'mousedown .frame-scroll-down' : '_eventPanelScroll_down',
+ 'mousedown .f-close' : '_eventFrameClose',
+ 'mousedown .f-pin' : '_eventFrameLock'
},
// drag start
- _event_frame_mouse_down: function (e) {
+ _eventFrameMouseDown: function (e) {
// skip if event is already active
if (this.event.type !== null) {
return;
@@ -340,7 +340,7 @@
e.preventDefault();
// identify frame
- this.event.target = this._frame_identify(e.target);
+ this.event.target = this._frameIdentify(e.target);
// check if frame is locked
if (this.event.target.grid_lock) {
@@ -355,11 +355,11 @@
};
// prepare drag/resize
- this._frame_drag_start(this.event.target);
+ this._frameDragStart(this.event.target);
},
// mouse move event
- _event_frame_mouse_move: function (e) {
+ _eventFrameMouseMove: function (e) {
// check
if (this.event.type != 'drag' && this.event.type != 'resize') {
return;
@@ -381,7 +381,7 @@
this.event.xy = event_xy_new;
// object position / size
- var p = this._frame_screen (this.event.target);
+ var p = this._frameScreen (this.event.target);
// resize event
if (this.event.type == 'resize') {
@@ -395,23 +395,23 @@
p.height = Math.max(p.height, min_dim);
// apply resize to frame
- this._frame_resize(this.event.target, p);
+ this._frameResize(this.event.target, p);
// break down to grid coordinates
- p.width = this._to_grid_coord('width', p.width) + 1;
- p.height = this._to_grid_coord('height', p.height) + 1;
+ p.width = this._toGridCoord('width', p.width) + 1;
+ p.height = this._toGridCoord('height', p.height) + 1;
// transfer back to pixels
- p.width = this._to_pixel_coord('width', p.width);
- p.height = this._to_pixel_coord('height', p.height);
+ p.width = this._toPixelCoord('width', p.width);
+ p.height = this._toPixelCoord('height', p.height);
// apply
- this._frame_resize(this.frame_shadow, p);
+ this._frameResize(this.frame_shadow, p);
// fix position
- this._frame_insert(this.frame_shadow, {
- top : this._to_grid_coord('top', p.top),
- left : this._to_grid_coord('left', p.left)
+ this._frameInsert(this.frame_shadow, {
+ top : this._toGridCoord('top', p.top),
+ left : this._toGridCoord('left', p.left)
});
}
@@ -422,12 +422,12 @@
p.top += event_xy_delta.y;
// apply
- this._frame_offset(this.event.target, p);
+ this._frameOffset(this.event.target, p);
// get location of shadow
var l = {
- top : this._to_grid_coord('top', p.top),
- left : this._to_grid_coord('left', p.left)
+ top : this._toGridCoord('top', p.top),
+ left : this._toGridCoord('left', p.left)
};
// increase priority of current frame
@@ -436,26 +436,26 @@
}
// fix position
- this._frame_insert(this.frame_shadow, l);
+ this._frameInsert(this.frame_shadow, l);
}
},
// mouse up
- _event_frame_mouse_up: function (e) {
+ _eventFrameMouseUp: function (e) {
// check
if (this.event.type != 'drag' && this.event.type != 'resize') {
return;
}
// stop
- this._frame_drag_stop(this.event.target);
+ this._frameDragStop(this.event.target);
// reset event
this.event.type = null;
},
// drag start
- _event_frame_close: function (e) {
+ _eventFrameClose: function (e) {
// check
if (this.event.type !== null) {
return;
@@ -465,7 +465,7 @@
e.preventDefault();
// get frame
- var frame = this._frame_identify(e.target);
+ var frame = this._frameIdentify(e.target);
var self = this;
// fade out
@@ -480,10 +480,10 @@
self.frame_counter--;
// reload
- self._panel_refresh(true);
+ self._panelRefresh(true);
// refresh scroll state once all animations completed
- self._panel_animation_complete();
+ self._panelAnimationComplete();
// hide if no frames left
if (self.visible && self.frame_counter == 0)
@@ -492,7 +492,7 @@
},
// drag start
- _event_frame_lock: function (e) {
+ _eventFrameLock: function (e) {
// check
if (this.event.type !== null) {
return;
@@ -502,7 +502,7 @@
e.preventDefault();
// get frame
- var frame = this._frame_identify(e.target);
+ var frame = this._frameIdentify(e.target);
// check
if (frame.grid_lock) {
@@ -529,7 +529,7 @@
},
// show/hide panel
- _event_hide: function (e) {
+ _eventHide: function (e) {
// check
if (this.event.type !== null) {
return;
@@ -542,7 +542,7 @@
/**
* Fired when scrolling occurs on panel.
*/
- _event_panel_scroll: function(e) {
+ _eventPanelScroll: function(e) {
// check
if (this.event.type !== null || !this.visible) {
return;
@@ -563,11 +563,11 @@
var delta = e.originalEvent.detail ? e.originalEvent.detail : e.originalEvent.wheelDelta / -3;
// refresh panel
- this._panel_scroll(delta);
+ this._panelScroll(delta);
},
// scroll up
- _event_panel_scroll_up: function(e) {
+ _eventPanelScroll_up: function(e) {
// check
if (this.event.type !== null)
return;
@@ -576,11 +576,11 @@
e.preventDefault();
// scroll up
- this._panel_scroll(-this.options.scroll);
+ this._panelScroll(-this.options.scroll);
},
// scroll down
- _event_panel_scroll_down: function(e) {
+ _eventPanelScroll_down: function(e) {
// check
if (this.event.type !== null)
return;
@@ -589,7 +589,7 @@
e.preventDefault();
// scroll down
- this._panel_scroll(this.options.scroll);
+ this._panelScroll(this.options.scroll);
},
/*
@@ -597,21 +597,21 @@
*/
// identify
- _frame_identify: function(target) {
+ _frameIdentify: function(target) {
return this.frame_list['#' + $(target).closest('.frame').attr('id')];
},
// drag start
- _frame_drag_start : function (frame) {
+ _frameDragStart : function (frame) {
// set focus
- this._frame_focus(frame, true);
+ this._frameFocus(frame, true);
// get current dimensions
- var p = this._frame_screen (frame);
+ var p = this._frameScreen (frame);
// initialize shadow
- this._frame_resize(this.frame_shadow, p);
- this._frame_grid(this.frame_shadow, frame.grid_location);
+ this._frameResize(this.frame_shadow, p);
+ this._frameGrid(this.frame_shadow, frame.grid_location);
// reset location
frame.grid_location = null;
@@ -624,16 +624,16 @@
},
// drag stop
- _frame_drag_stop : function (frame) {
+ _frameDragStop : function (frame) {
// remove focus
- this._frame_focus(frame, false);
+ this._frameFocus(frame, false);
// get new dimensions
- var p = this._frame_screen(this.frame_shadow);
+ var p = this._frameScreen(this.frame_shadow);
// update frame
- this._frame_resize(frame, p);
- this._frame_grid(frame, this.frame_shadow.grid_location, true);
+ this._frameResize(frame, p);
+ this._frameGrid(frame, this.frame_shadow.grid_location, true);
// reset location of shadow
this.frame_shadow.grid_location = null;
@@ -645,7 +645,7 @@
$('.f-cover').hide();
// refresh scroll state once all animations completed
- this._panel_animation_complete();
+ this._panelAnimationComplete();
},
/*
@@ -653,7 +653,7 @@
*/
// converts a pixel coordinate to grids
- _to_grid_coord: function (type, px) {
+ _toGridCoord: function (type, px) {
// determine sign
var sign = (type == 'width' || type == 'height') ? 1 : -1;
@@ -664,7 +664,7 @@
},
// converts a grid coordinate to pixels
- _to_pixel_coord: function (type, g) {
+ _toPixelCoord: function (type, g) {
// determine sign
var sign = (type == 'width' || type == 'height') ? 1 : -1;
@@ -677,23 +677,23 @@
},
// get grid coordinates
- _to_grid: function (px) {
+ _toGrid: function (px) {
// full set
return {
- top : this._to_grid_coord('top', px.top),
- left : this._to_grid_coord('left', px.left),
- width : this._to_grid_coord('width', px.width),
- height : this._to_grid_coord('height', px.height)
+ top : this._toGridCoord('top', px.top),
+ left : this._toGridCoord('left', px.left),
+ width : this._toGridCoord('width', px.width),
+ height : this._toGridCoord('height', px.height)
};
},
// get pixel coordinates
- _to_pixel: function(g) {
+ _toPixel: function(g) {
return {
- top : this._to_pixel_coord('top', g.top),
- left : this._to_pixel_coord('left', g.left),
- width : this._to_pixel_coord('width', g.width),
- height : this._to_pixel_coord('height', g.height)
+ top : this._toPixelCoord('top', g.top),
+ left : this._toPixelCoord('left', g.left),
+ width : this._toPixelCoord('width', g.width),
+ height : this._toPixelCoord('height', g.height)
};
},
@@ -702,7 +702,7 @@
*/
// check collision
- _is_collision: function(g) {
+ _isCollision: function(g) {
// is collision pair
function is_collision_pair (a, b) {
return !(a.left > b.left + b.width - 1 || a.left + a.width - 1 < b.left ||
@@ -728,7 +728,7 @@
},
// location/grid rank
- _location_rank: function(loc) {
+ _locationRank: function(loc) {
return (loc.top * this.cols) + loc.left;
},
@@ -737,7 +737,7 @@
*/
// update frame counter
- _menu_refresh: function() {
+ _menuRefresh: function() {
// scroll up possible?
if (this.visible) {
if (this.top == this.options.top_min)
@@ -763,22 +763,22 @@
*/
// panel on animation complete / frames not moving
- _panel_animation_complete: function() {
+ _panelAnimationComplete: function() {
var self = this;
- $(".frame").promise().done(function() {self._panel_scroll(0, true)});
+ $(".frame").promise().done(function() {self._panelScroll(0, true)});
},
// refresh panel
- _panel_refresh: function(animate) {
+ _panelRefresh: function(animate) {
// get current size
this.cols = parseInt($(window).width() / this.options.cell, 10) + 1;
// recalculate frame positions
- this._frame_insert(null, null, animate);
+ this._frameInsert(null, null, animate);
},
// update scroll
- _panel_scroll: function(delta, animate) {
+ _panelScroll: function(delta, animate) {
// new top value
var top_new = this.top - this.options.scroll * delta;
@@ -799,7 +799,7 @@
top : frame.screen_location.top - (this.top - top_new),
left : frame.screen_location.left
}
- this._frame_offset(frame, screen_location, animate);
+ this._frameOffset(frame, screen_location, animate);
}
}
@@ -808,7 +808,7 @@
}
// refresh
- this._menu_refresh();
+ this._menuRefresh();
},
/*
@@ -816,7 +816,7 @@
*/
// frame insert at given location
- _frame_insert: function(frame, new_loc, animate) {
+ _frameInsert: function(frame, new_loc, animate) {
// define
var place_list = [];
@@ -826,7 +826,7 @@
frame.grid_location = null;
// set first one to be placed
- place_list.push([frame, this._location_rank(new_loc)]);
+ place_list.push([frame, this._locationRank(new_loc)]);
}
// search
@@ -854,7 +854,7 @@
// place
for (i = 0; i < place_list.length; i++) {
- this._frame_place(place_list[i][0], animate);
+ this._framePlace(place_list[i][0], animate);
}
// identify maximum viewport size
@@ -875,16 +875,16 @@
this.top_max = Math.min(this.top_max, this.options.top_min);
// panel menu
- this._menu_refresh();
+ this._menuRefresh();
},
// naive frame place
- _frame_place: function(frame, animate) {
+ _framePlace: function(frame, animate) {
// reset grid location
frame.grid_location = null;
// grid coordinates of new frame
- var g = this._to_grid(this._frame_screen(frame));
+ var g = this._toGrid(this._frameScreen(frame));
// try grid coordinates
var done = false;
@@ -896,7 +896,7 @@
g.left = j;
// no collision
- if (!this._is_collision(g)) {
+ if (!this._isCollision(g)) {
done = true;
break;
}
@@ -910,14 +910,14 @@
// check if valid spot was found
if (done) {
- this._frame_grid(frame, g, animate);
+ this._frameGrid(frame, g, animate);
} else {
console.log("Grid dimensions exceeded.");
}
},
// focus
- _frame_focus: function(frame, has_focus) {
+ _frameFocus: function(frame, has_focus) {
// get new z-value
var z = this.frame_z + (has_focus ? 1 : 0);
@@ -926,7 +926,7 @@
},
// new left/top position frame
- _frame_offset: function(frame, p, animate) {
+ _frameOffset: function(frame, p, animate) {
// update screen location
frame.screen_location.left = p.left;
frame.screen_location.top = p.top;
@@ -934,7 +934,7 @@
// animate
if (animate) {
// set focus on animated
- this._frame_focus(frame, true);
+ this._frameFocus(frame, true);
// prepare for callback
var self = this;
@@ -943,7 +943,7 @@
$(frame.id).animate({top: p.top, left: p.left}, 'fast', function()
{
// remove focus
- self._frame_focus(frame, false);
+ self._frameFocus(frame, false);
});
} else
// update css
@@ -951,7 +951,7 @@
},
// resize frame
- _frame_resize: function(frame, p) {
+ _frameResize: function(frame, p) {
// update css
$(frame.id).css({width: p.width, height: p.height});
@@ -961,19 +961,19 @@
},
// new grid location
- _frame_grid: function (frame, l, animate) {
+ _frameGrid: function (frame, l, animate) {
// update grid location
frame.grid_location = l;
// place frame
- this._frame_offset(frame, this._to_pixel(l), animate);
+ this._frameOffset(frame, this._toPixel(l), animate);
// update grid rank
- frame.grid_rank = this._location_rank(l);
+ frame.grid_rank = this._locationRank(l);
},
// get frame dimensions
- _frame_screen: function(frame) {
+ _frameScreen: function(frame) {
var p = frame.screen_location;
return {top: p.top, left: p.left, width: p.width, height: p.height};
},
@@ -988,7 +988,7 @@
},
// fill regular frame template
- _template_frame: function(id, title) {
+ _templateFrame: function(id, title) {
// check title
if (!title)
title = '';
@@ -1008,7 +1008,7 @@
},
// fill regular frame template
- _template_frame_url: function(id, title, url) {
+ _templateFrameUrl: function(id, title, url) {
// url
if (url.indexOf('?') == -1)
url += '?';
@@ -1017,7 +1017,7 @@
url += 'widget=True';
// element
- var $el = $(this._template_frame(id, title));
+ var $el = $(this._templateFrame(id, title));
$el.find('.f-content').append('<iframe scrolling="auto" class="f-iframe" src="' + url + '"></iframe>');
// load template
@@ -1025,17 +1025,17 @@
},
// fill shadow template
- _template_shadow: function(id) {
+ _templateShadow: function(id) {
return '<div id="' + id + '" class="frame-shadow corner"></div>';
},
// fill background template in order to cover underlying iframes
- _template_background: function() {
+ _templateBackground: function() {
return '<div class="frame-background"></div>';
},
// fill menu button template
- _template_menu: function() {
+ _templateMenu: function() {
return '<div class="frame-scroll-up frame-menu fa fa-chevron-up fa-2x"></div>' +
'<div class="frame-scroll-down frame-menu fa fa-chevron-down fa-2x"></div>';
}
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/mvc/ui/ui-portlet.js
--- a/static/scripts/mvc/ui/ui-portlet.js
+++ b/static/scripts/mvc/ui/ui-portlet.js
@@ -1,9 +1,8 @@
// dependencies
define(['utils/utils'], function(Utils) {
-// return
-var View = Backbone.View.extend(
-{
+// portlet view class
+var View = Backbone.View.extend({
// visibility
visible: false,
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/packed/mvc/tools/tools-section.js
--- a/static/scripts/packed/mvc/tools/tools-section.js
+++ b/static/scripts/packed/mvc/tools/tools-section.js
@@ -1,1 +1,1 @@
-define(["utils/utils","mvc/ui/ui-table","mvc/ui/ui-misc","mvc/ui/ui-tabs","mvc/tools/tools-select-dataset"],function(d,b,f,a,c){var e=Backbone.View.extend({initialize:function(h,g){this.app=h;this.inputs=g.inputs;g.cls_tr="section-row";this.table=new b.View(g);this.setElement(this.table.$el);this.render()},render:function(){this.table.delAll();for(var g in this.inputs){this._add(this.inputs[g])}},_add:function(i){var h=this;var g=jQuery.extend(true,{},i);g.id=d.uuid();this.app.input_list[g.id]=g;var j=g.type;switch(j){case"conditional":this._addConditional(g);break;case"repeat":this._addRepeat(g);break;default:this._addRow(j,g)}},_addConditional:function(g){g.label=g.test_param.label;g.value=g.test_param.value;this._addRow("conditional",g);for(var j in g.cases){var h=g.id+"-section-"+j;var k=new e(this.app,{inputs:g.cases[j].inputs,cls:"ui-table-plain"});this.table.add(this._create_field({label:"",help:g.help,$el:k.$el,color:true}));this.table.append(h)}},_addRepeat:function(g){var h=this;var l=new a.View({title_new:"Add "+g.title,max:g.max,onnew:function(){var i=g.id+"-section-"+d.uuid();var n=new e(h.app,{inputs:g.inputs,cls:"ui-table-plain"});l.add({id:i,title:g.title,$el:n.$el,ondel:function(){l.del(i);l.retitle(g.title);h.app.refresh()}});l.retitle(g.title);l.show(i);h.app.refresh()}});for(var k=0;k<g.min;k++){var j=g.id+"-section-"+d.uuid();var m=new e(h.app,{inputs:g.inputs,cls:"ui-table-plain"});l.add({id:j,title:g.title,$el:m.$el})}l.retitle(g.title);this.table.add(this._create_field({label:g.title,help:g.help,$el:l.$el,color:true}));this.table.append(g.id)},_addRow:function(i,g){var j=g.id;var 
h=null;switch(i){case"text":h=this._field_text(g);break;case"select":h=this._field_select(g);break;case"data":h=this._field_data(g);break;case"data_column":h=this._field_data_colum(g);break;case"conditional":h=this._field_conditional(g);break;case"hidden":h=this._field_hidden(g);break;case"integer":h=this._field_slider(g);break;case"float":h=this._field_slider(g);break;case"boolean":h=this._field_boolean(g);break}if(!h){if(g.options){h=this._field_select(g)}else{h=this._field_text(g)}console.debug("tools-form::_addRow() : Auto matched field type ("+i+").")}if(g.value!==undefined){h.value(g.value)}this.app.field_list[j]=h;this.table.add(this._create_field({label:g.label,help:g.help,$el:h.$el}));this.table.append(j)},_field_conditional:function(g){var h=this;var j=[];for(var k in g.test_param.options){var l=g.test_param.options[k];j.push({label:l[0],value:l[1]})}return new f.Select.View({id:"field-"+g.id,data:j,onchange:function(t){for(var r in g.cases){var n=g.cases[r];var q=g.id+"-section-"+r;var m=h.table.get(q);var p=false;for(var o in n.inputs){var s=n.inputs[o].type;if(s&&s!=="hidden"){p=true;break}}if(n.value==t&&p){m.fadeIn("fast")}else{m.hide()}}}})},_field_data:function(g){var h=this;var i=g.id;return new c.View(this.app,{id:"field-"+i,extensions:g.extensions,multiple:g.multiple,onchange:function(r){if(r instanceof Array){r=r[0]}var p=h.app.tree.findReferences(i,"data_column");var k=h.app.datasets.filter(r);if(k&&p.length>0){console.debug("tool-form::field_data() - Selected dataset "+r+".");var t=k.get("metadata_column_types");if(!t){console.debug("tool-form::field_data() - FAILED: Could not find metadata for dataset "+r+".")}for(var m in p){var n=h.app.input_list[p[m]];var o=h.app.field_list[p[m]];if(!n||!o){console.debug("tool-form::field_data() - FAILED: Column not found.")}var l=n.numerical;var j=[];for(var s in t){var q=t[s];if(q=="int"||q=="float"||!l){j.push({label:"Column: "+(parseInt(s)+1)+" 
["+t[s]+"]",value:s})}}if(o){o.update(j);if(!o.exists(o.value())){o.value(o.first())}}}}else{console.debug("tool-form::field_data() - FAILED: Could not find dataset "+r+".")}}})},_field_select:function(g){var h=[];for(var j in g.options){var k=g.options[j];h.push({label:k[0],value:k[1]})}var l=f.Select;switch(g.display){case"checkboxes":l=f.Checkbox;break;case"radio":l=f.RadioButton;break}return new l.View({id:"field-"+g.id,data:h,multiple:g.multiple})},_field_data_colum:function(g){return new f.Select.View({id:"field-"+g.id,multiple:g.multiple})},_field_text:function(g){return new f.Input({id:"field-"+g.id,area:g.area})},_field_slider:function(g){var h=1;if(g.type=="float"){h=(g.max-g.min)/10000}return new f.Slider.View({id:"field-"+g.id,min:g.min||0,max:g.max||1000,step:h})},_field_hidden:function(g){return new f.Hidden({id:"field-"+g.id})},_field_boolean:function(g){return new f.RadioButton.View({id:"field-"+g.id,data:[{label:"Yes",value:true},{label:"No",value:false}]})},_create_field:function(g){var h;if(g.color){h=$('<div class="ui-table-form-section"/>')}else{h=$("<div/>")}if(g.label){h.append('<div class="ui-table-form-title-strong">'+g.label+"</div>")}h.append(g.$el);if(g.help){h.append('<div class="ui-table-form-info">'+g.help+"</div>")}return h}});return{View:e}});
\ No newline at end of file
+define(["utils/utils","mvc/ui/ui-table","mvc/ui/ui-misc","mvc/ui/ui-tabs","mvc/tools/tools-select-dataset"],function(d,b,g,a,c){var e=Backbone.View.extend({initialize:function(h){this.setElement(this._template(h))},error:function(h){this.$el.find(".ui-table-form-error-text").html(h);this.$el.find(".ui-table-form-error").fadeIn();this.$el.addClass("ui-table-row-error")},reset:function(){this.$el.find(".ui-table-form-error").hide();this.$el.removeClass("ui-table-form-error")},_template:function(h){var i;if(h.highlight){i=$('<div class="ui-table-element ui-table-form-section"/>')}else{i=$('<div class="ui-table-element"/>')}i.append('<div class="ui-table-form-error"><span class="fa fa-arrow-down"/><span class="ui-table-form-error-text"></div>');if(h.label){i.append('<div class="ui-table-form-title-strong">'+h.label+"</div>")}i.append(h.$el);if(h.help){i.append('<div class="ui-table-form-info">'+h.help+"</div>")}return i}});var f=Backbone.View.extend({initialize:function(i,h){this.app=i;this.inputs=h.inputs;h.cls_tr="section-row";this.table=new b.View(h);this.setElement(this.table.$el);this.render()},render:function(){this.table.delAll();for(var h in this.inputs){this._add(this.inputs[h])}},_add:function(j){var i=this;var h=jQuery.extend(true,{},j);h.id=d.uuid();this.app.input_list[h.id]=h;var k=h.type;switch(k){case"conditional":this._addConditional(h);break;case"repeat":this._addRepeat(h);break;default:this._addRow(k,h)}},_addConditional:function(h){h.label=h.test_param.label;h.value=h.test_param.value;var j=this._addRow("conditional",h);for(var l in h.cases){var k=h.id+"-section-"+l;var n=new f(this.app,{inputs:h.cases[l].inputs,cls:"ui-table-plain"});var m=new e({label:"",help:h.help,$el:n.$el,highlight:true});this.table.add(m.$el);this.table.append(k)}},_addRepeat:function(h){var j=this;var m=new a.View({title_new:"Add "+h.title,max:h.max,onnew:function(){var i=h.id+"-section-"+d.uuid();var p=new 
f(j.app,{inputs:h.inputs,cls:"ui-table-plain"});m.add({id:i,title:h.title,$el:p.$el,ondel:function(){m.del(i);m.retitle(h.title);j.app.refresh()}});m.retitle(h.title);m.show(i);j.app.refresh()}});for(var l=0;l<h.min;l++){var k=h.id+"-section-"+d.uuid();var o=new f(j.app,{inputs:h.inputs,cls:"ui-table-plain"});m.add({id:k,title:h.title,$el:o.$el})}m.retitle(h.title);var n=new e({label:h.title,help:h.help,$el:m.$el,highlight:true});this.table.add(n.$el);this.table.append(h.id)},_addRow:function(j,h){var l=h.id;var i=null;switch(j){case"text":i=this._field_text(h);break;case"select":i=this._field_select(h);break;case"data":i=this._field_data(h);break;case"data_column":i=this._field_data_colum(h);break;case"conditional":i=this._field_conditional(h);break;case"hidden":i=this._field_hidden(h);break;case"integer":i=this._field_slider(h);break;case"float":i=this._field_slider(h);break;case"boolean":i=this._field_boolean(h);break}if(!i){if(h.options){i=this._field_select(h)}else{i=this._field_text(h)}console.debug("tools-form::_addRow() : Auto matched field type ("+j+").")}if(h.value!==undefined){i.value(h.value)}this.app.field_list[l]=i;var k=new e({label:h.label,help:h.help,$el:i.$el});this.app.element_list[l]=k;this.table.add(k.$el);this.table.append(l);return this.table.get(l)},_field_conditional:function(h){var j=this;var k=[];for(var l in h.test_param.options){var m=h.test_param.options[l];k.push({label:m[0],value:m[1]})}return new g.Select.View({id:"field-"+h.id,data:k,onchange:function(u){for(var s in h.cases){var o=h.cases[s];var r=h.id+"-section-"+s;var n=j.table.get(r);var q=false;for(var p in o.inputs){var t=o.inputs[p].type;if(t&&t!=="hidden"){q=true;break}}if(o.value==u&&q){n.fadeIn("fast")}else{n.hide()}}}})},_field_data:function(h){var i=this;var j=h.id;return new c.View(this.app,{id:"field-"+j,extensions:h.extensions,multiple:h.multiple,onchange:function(u){if(u instanceof Array){u=u[0]}var s=i.app.tree.references(j,"data_column");var 
m=i.app.datasets.filter(u);if(m&&s.length>0){console.debug("tool-form::field_data() - Selected dataset "+u+".");var w=m.get("metadata_column_types");if(!w){console.debug("tool-form::field_data() - FAILED: Could not find metadata for dataset "+u+".")}for(var o in s){var q=i.app.input_list[s[o]];var r=i.app.field_list[s[o]];if(!q||!r){console.debug("tool-form::field_data() - FAILED: Column not found.")}var n=q.numerical;var l=[];for(var v in w){var t=w[v];var k=(parseInt(v)+1);var p="Text";if(t=="int"||t=="float"){p="Number"}if(t=="int"||t=="float"||!n){l.push({label:"Column: "+k+" ["+p+"]",value:k})}}if(r){r.update(l);if(!r.exists(r.value())){r.value(r.first())}}}}else{console.debug("tool-form::field_data() - FAILED: Could not find dataset "+u+".")}}})},_field_select:function(h){var j=[];for(var k in h.options){var l=h.options[k];j.push({label:l[0],value:l[1]})}var m=g.Select;switch(h.display){case"checkboxes":m=g.Checkbox;break;case"radio":m=g.RadioButton;break}return new m.View({id:"field-"+h.id,data:j,multiple:h.multiple})},_field_data_colum:function(h){return new g.Select.View({id:"field-"+h.id,multiple:h.multiple})},_field_text:function(h){return new g.Input({id:"field-"+h.id,area:h.area})},_field_slider:function(h){var i=1;if(h.type=="float"){i=(h.max-h.min)/10000}return new g.Slider.View({id:"field-"+h.id,min:h.min||0,max:h.max||1000,step:i})},_field_hidden:function(h){return new g.Hidden({id:"field-"+h.id})},_field_boolean:function(h){return new g.RadioButton.View({id:"field-"+h.id,data:[{label:"Yes",value:"true"},{label:"No",value:"false"}]})}});return{View:f}});
\ No newline at end of file
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/scripts/packed/mvc/ui/ui-frames.js
--- a/static/scripts/packed/mvc/ui/ui-frames.js
+++ b/static/scripts/packed/mvc/ui/ui-frames.js
@@ -1,1 +1,1 @@
-define([],function(){var a=Backbone.View.extend({options:{frame:{cols:6,rows:3},rows:1000,cell:130,margin:5,scroll:5,top_min:40,frame_max:9,visible:true,onchange:null},cols:0,top:0,top_max:0,frame_z:0,frame_counter:0,frame_counter_id:0,frame_list:[],frame_shadow:null,visible:null,initialize:function(c){var b=this;if(c){this.options=_.defaults(c,this.options)}this.visible=this.options.visible;this.top=this.top_max=this.options.top_min;this.setElement(this._template());$(this.el).append(this._template_background());$(this.el).append(this._template_menu());$(this.el_main).append($(this.el));var d="#frame-shadow";$(this.el).append(this._template_shadow(d.substring(1)));this.frame_shadow={id:d,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};this._frame_resize(this.frame_shadow,{width:0,height:0});this.frame_list[d]=this.frame_shadow;this._panel_refresh();if(!this.visible){this.hide()}else{this.show()}var b=this;$(window).resize(function(){if(b.visible){b._panel_refresh()}})},add:function(c){var f={title:"",content:null,target:"",type:null};if(c){c=_.defaults(c,f)}else{c=f}if(!c.content){return}if(this.frame_counter>=this.options.frame_max){alert("You have reached the maximum number of allowed frames ("+this.options.frame_max+").");return}var d="#frame-"+(this.frame_counter_id++);if($(d).length!==0){alert("This frame already exists. 
This page might contain multiple frame managers.");return}this.top=this.options.top_min;var e=null;if(c.type==="url"){e=$(this._template_frame_url(d.substring(1),c.title,c.content))}else{if(c.type==="other"){e=$(this._template_frame(d.substring(1),c.title));var b=e.find(".f-content");if(_.isFunction(c.content)){c.content(b)}else{b.append(c.content)}}}$(this.el).append(e);var g={id:d,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};c.width=this._to_pixel_coord("width",this.options.frame.cols);c.height=this._to_pixel_coord("height",this.options.frame.rows);this.frame_z=parseInt($(g.id).css("z-index"));this.frame_list[d]=g;this.frame_counter++;this._frame_resize(g,{width:c.width,height:c.height});this._frame_insert(g,{top:0,left:0},true);if(!this.visible){this.show()}},show:function(){this.visible=true;this.$el.find(".frame").fadeIn("fast");this.$el.find(this.frame_shadow.id).hide();this.$el.find(".frame-background").show();this._panel_refresh();this._menu_refresh()},hide:function(){if(this.event.type!==null){return}this.visible=false;this.$el.find(".frame").fadeOut("fast");this.$el.find(".frame-background").hide();this.$el.find(".frame-menu").hide();this._menu_refresh()},length:function(){return this.frame_counter},setOnChange:function(b){this.options.onchange=b},event:{type:null,target:null,xy:null},events:{mousemove:"_event_frame_mouse_move",mouseup:"_event_frame_mouse_up",mouseleave:"_event_frame_mouse_up",mousewheel:"_event_panel_scroll",DOMMouseScroll:"_event_panel_scroll","mousedown .frame":"_event_frame_mouse_down","mousedown .frame-background":"_event_hide","mousedown .frame-scroll-up":"_event_panel_scroll_up","mousedown .frame-scroll-down":"_event_panel_scroll_down","mousedown .f-close":"_event_frame_close","mousedown 
.f-pin":"_event_frame_lock"},_event_frame_mouse_down:function(b){if(this.event.type!==null){return}if($(b.target).hasClass("f-header")||$(b.target).hasClass("f-title")){this.event.type="drag"}if($(b.target).hasClass("f-resize")){this.event.type="resize"}if(this.event.type===null){return}b.preventDefault();this.event.target=this._frame_identify(b.target);if(this.event.target.grid_lock){this.event.type=null;return}this.event.xy={x:b.originalEvent.pageX,y:b.originalEvent.pageY};this._frame_drag_start(this.event.target)},_event_frame_mouse_move:function(h){if(this.event.type!="drag"&&this.event.type!="resize"){return}var f={x:h.originalEvent.pageX,y:h.originalEvent.pageY};var c={x:f.x-this.event.xy.x,y:f.y-this.event.xy.y};this.event.xy=f;var g=this._frame_screen(this.event.target);if(this.event.type=="resize"){g.width+=c.x;g.height+=c.y;var d=this.options.cell-this.options.margin-1;g.width=Math.max(g.width,d);g.height=Math.max(g.height,d);this._frame_resize(this.event.target,g);g.width=this._to_grid_coord("width",g.width)+1;g.height=this._to_grid_coord("height",g.height)+1;g.width=this._to_pixel_coord("width",g.width);g.height=this._to_pixel_coord("height",g.height);this._frame_resize(this.frame_shadow,g);this._frame_insert(this.frame_shadow,{top:this._to_grid_coord("top",g.top),left:this._to_grid_coord("left",g.left)})}if(this.event.type=="drag"){g.left+=c.x;g.top+=c.y;this._frame_offset(this.event.target,g);var b={top:this._to_grid_coord("top",g.top),left:this._to_grid_coord("left",g.left)};if(b.left!==0){b.left++}this._frame_insert(this.frame_shadow,b)}},_event_frame_mouse_up:function(b){if(this.event.type!="drag"&&this.event.type!="resize"){return}this._frame_drag_stop(this.event.target);this.event.type=null},_event_frame_close:function(c){if(this.event.type!==null){return}c.preventDefault();var d=this._frame_identify(c.target);var b=this;$(d.id).fadeOut("fast",function(){$(d.id).remove();delete 
b.frame_list[d.id];b.frame_counter--;b._panel_refresh(true);b._panel_animation_complete();if(b.visible&&b.frame_counter==0){b.hide()}})},_event_frame_lock:function(b){if(this.event.type!==null){return}b.preventDefault();var c=this._frame_identify(b.target);if(c.grid_lock){c.grid_lock=false;$(c.id).find(".f-pin").removeClass("toggle");$(c.id).find(".f-header").removeClass("f-not-allowed");$(c.id).find(".f-title").removeClass("f-not-allowed");$(c.id).find(".f-resize").show();$(c.id).find(".f-close").show()}else{c.grid_lock=true;$(c.id).find(".f-pin").addClass("toggle");$(c.id).find(".f-header").addClass("f-not-allowed");$(c.id).find(".f-title").addClass("f-not-allowed");$(c.id).find(".f-resize").hide();$(c.id).find(".f-close").hide()}},_event_hide:function(b){if(this.event.type!==null){return}this.hide()},_event_panel_scroll:function(b){if(this.event.type!==null||!this.visible){return}var c=$(b.srcElement).parents(".frame");if(c.length!==0){b.stopPropagation();return}b.preventDefault();var d=b.originalEvent.detail?b.originalEvent.detail:b.originalEvent.wheelDelta/-3;this._panel_scroll(d)},_event_panel_scroll_up:function(b){if(this.event.type!==null){return}b.preventDefault();this._panel_scroll(-this.options.scroll)},_event_panel_scroll_down:function(b){if(this.event.type!==null){return}b.preventDefault();this._panel_scroll(this.options.scroll)},_frame_identify:function(b){return this.frame_list["#"+$(b).closest(".frame").attr("id")]},_frame_drag_start:function(c){this._frame_focus(c,true);var b=this._frame_screen(c);this._frame_resize(this.frame_shadow,b);this._frame_grid(this.frame_shadow,c.grid_location);c.grid_location=null;$(this.frame_shadow.id).show();$(".f-cover").show()},_frame_drag_stop:function(c){this._frame_focus(c,false);var 
b=this._frame_screen(this.frame_shadow);this._frame_resize(c,b);this._frame_grid(c,this.frame_shadow.grid_location,true);this.frame_shadow.grid_location=null;$(this.frame_shadow.id).hide();$(".f-cover").hide();this._panel_animation_complete()},_to_grid_coord:function(d,c){var b=(d=="width"||d=="height")?1:-1;if(d=="top"){c-=this.top}return parseInt((c+b*this.options.margin)/this.options.cell,10)},_to_pixel_coord:function(d,e){var b=(d=="width"||d=="height")?1:-1;var c=(e*this.options.cell)-b*this.options.margin;if(d=="top"){c+=this.top}return c},_to_grid:function(b){return{top:this._to_grid_coord("top",b.top),left:this._to_grid_coord("left",b.left),width:this._to_grid_coord("width",b.width),height:this._to_grid_coord("height",b.height)}},_to_pixel:function(b){return{top:this._to_pixel_coord("top",b.top),left:this._to_pixel_coord("left",b.left),width:this._to_pixel_coord("width",b.width),height:this._to_pixel_coord("height",b.height)}},_is_collision:function(d){function b(g,f){return !(g.left>f.left+f.width-1||g.left+g.width-1<f.left||g.top>f.top+f.height-1||g.top+g.height-1<f.top)}for(var c in this.frame_list){var e=this.frame_list[c];if(e.grid_location===null){continue}if(b(d,e.grid_location)){return true}}return false},_location_rank:function(b){return(b.top*this.cols)+b.left},_menu_refresh:function(){if(this.visible){if(this.top==this.options.top_min){$(".frame-scroll-up").hide()}else{$(".frame-scroll-up").show()}if(this.top==this.top_max){$(".frame-scroll-down").hide()}else{$(".frame-scroll-down").show()}}if(this.options.onchange){this.options.onchange()}},_panel_animation_complete:function(){var b=this;$(".frame").promise().done(function(){b._panel_scroll(0,true)})},_panel_refresh:function(b){this.cols=parseInt($(window).width()/this.options.cell,10)+1;this._frame_insert(null,null,b)},_panel_scroll:function(g,b){var d=this.top-this.options.scroll*g;d=Math.max(d,this.top_max);d=Math.min(d,this.options.top_min);if(this.top!=d){for(var c in this.frame_list){var 
f=this.frame_list[c];if(f.grid_location!==null){var e={top:f.screen_location.top-(this.top-d),left:f.screen_location.left};this._frame_offset(f,e,b)}}this.top=d}this._menu_refresh()},_frame_insert:function(h,b,d){var c=[];if(h){h.grid_location=null;c.push([h,this._location_rank(b)])}var e=null;for(e in this.frame_list){var g=this.frame_list[e];if(g.grid_location!==null&&!g.grid_lock){g.grid_location=null;c.push([g,g.grid_rank])}}c.sort(function(k,f){var m=k[1];var l=f[1];return m<l?-1:(m>l?1:0)});for(e=0;e<c.length;e++){this._frame_place(c[e][0],d)}this.top_max=0;for(var e in this.frame_list){var h=this.frame_list[e];if(h.grid_location!==null){this.top_max=Math.max(this.top_max,h.grid_location.top+h.grid_location.height)}}this.top_max=$(window).height()-this.top_max*this.options.cell-2*this.options.margin;this.top_max=Math.min(this.top_max,this.options.top_min);this._menu_refresh()},_frame_place:function(h,c){h.grid_location=null;var f=this._to_grid(this._frame_screen(h));var b=false;for(var e=0;e<this.options.rows;e++){for(var d=0;d<Math.max(1,this.cols-f.width);d++){f.top=e;f.left=d;if(!this._is_collision(f)){b=true;break}}if(b){break}}if(b){this._frame_grid(h,f,c)}else{console.log("Grid dimensions exceeded.")}},_frame_focus:function(d,b){var c=this.frame_z+(b?1:0);$(d.id).css("z-index",c)},_frame_offset:function(e,d,c){e.screen_location.left=d.left;e.screen_location.top=d.top;if(c){this._frame_focus(e,true);var b=this;$(e.id).animate({top:d.top,left:d.left},"fast",function(){b._frame_focus(e,false)})}else{$(e.id).css({top:d.top,left:d.left})}},_frame_resize:function(c,b){$(c.id).css({width:b.width,height:b.height});c.screen_location.width=b.width;c.screen_location.height=b.height},_frame_grid:function(d,b,c){d.grid_location=b;this._frame_offset(d,this._to_pixel(b),c);d.grid_rank=this._location_rank(b)},_frame_screen:function(c){var b=c.screen_location;return{top:b.top,left:b.left,width:b.width,height:b.height}},_template:function(){return'<div 
class="galaxy-frame"></div>'},_template_frame:function(c,b){if(!b){b=""}return'<div id="'+c+'" class="frame corner"><div class="f-header corner"><span class="f-title">'+b+'</span><span class="f-icon f-close fa fa-trash-o"></span><span class="f-icon f-pin fa fa-thumb-tack"></span></div><div class="f-content"><div class="f-cover"></div></div><span class="f-resize f-icon corner fa fa-resize-full"></span></div>'},_template_frame_url:function(e,d,b){if(b.indexOf("?")==-1){b+="?"}else{b+="&"}b+="widget=True";var c=$(this._template_frame(e,d));c.find(".f-content").append('<iframe scrolling="auto" class="f-iframe" src="'+b+'"></iframe>');return c},_template_shadow:function(b){return'<div id="'+b+'" class="frame-shadow corner"></div>'},_template_background:function(){return'<div class="frame-background"></div>'},_template_menu:function(){return'<div class="frame-scroll-up frame-menu fa fa-chevron-up fa-2x"></div><div class="frame-scroll-down frame-menu fa fa-chevron-down fa-2x"></div>'}});return{View:a}});
\ No newline at end of file
+define([],function(){var a=Backbone.View.extend({options:{frame:{cols:6,rows:3},rows:1000,cell:130,margin:5,scroll:5,top_min:40,frame_max:9,visible:true,onchange:null},cols:0,top:0,top_max:0,frame_z:0,frame_counter:0,frame_counter_id:0,frame_list:[],frame_shadow:null,visible:null,initialize:function(c){var b=this;if(c){this.options=_.defaults(c,this.options)}this.visible=this.options.visible;this.top=this.top_max=this.options.top_min;this.setElement(this._template());$(this.el).append(this._templateBackground());$(this.el).append(this._templateMenu());$(this.el_main).append($(this.el));var d="#frame-shadow";$(this.el).append(this._templateShadow(d.substring(1)));this.frame_shadow={id:d,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};this._frameResize(this.frame_shadow,{width:0,height:0});this.frame_list[d]=this.frame_shadow;this._panelRefresh();if(!this.visible){this.hide()}else{this.show()}var b=this;$(window).resize(function(){if(b.visible){b._panelRefresh()}})},add:function(c){var f={title:"",content:null,target:"",type:null};if(c){c=_.defaults(c,f)}else{c=f}if(!c.content){return}if(this.frame_counter>=this.options.frame_max){alert("You have reached the maximum number of allowed frames ("+this.options.frame_max+").");return}var d="#frame-"+(this.frame_counter_id++);if($(d).length!==0){alert("This frame already exists. 
This page might contain multiple frame managers.");return}this.top=this.options.top_min;var e=null;if(c.type==="url"){e=$(this._templateFrameUrl(d.substring(1),c.title,c.content))}else{if(c.type==="other"){e=$(this._templateFrame(d.substring(1),c.title));var b=e.find(".f-content");if(_.isFunction(c.content)){c.content(b)}else{b.append(c.content)}}}$(this.el).append(e);var g={id:d,screen_location:{},grid_location:{},grid_rank:null,grid_lock:false};c.width=this._toPixelCoord("width",this.options.frame.cols);c.height=this._toPixelCoord("height",this.options.frame.rows);this.frame_z=parseInt($(g.id).css("z-index"));this.frame_list[d]=g;this.frame_counter++;this._frameResize(g,{width:c.width,height:c.height});this._frameInsert(g,{top:0,left:0},true);if(!this.visible){this.show()}},show:function(){this.visible=true;this.$el.find(".frame").fadeIn("fast");this.$el.find(this.frame_shadow.id).hide();this.$el.find(".frame-background").show();this._panelRefresh();this._menuRefresh()},hide:function(){if(this.event.type!==null){return}this.visible=false;this.$el.find(".frame").fadeOut("fast");this.$el.find(".frame-background").hide();this.$el.find(".frame-menu").hide();this._menuRefresh()},length:function(){return this.frame_counter},setOnChange:function(b){this.options.onchange=b},event:{type:null,target:null,xy:null},events:{mousemove:"_eventFrameMouseMove",mouseup:"_eventFrameMouseUp",mouseleave:"_eventFrameMouseUp",mousewheel:"_eventPanelScroll",DOMMouseScroll:"_eventPanelScroll","mousedown .frame":"_eventFrameMouseDown","mousedown .frame-background":"_eventHide","mousedown .frame-scroll-up":"_eventPanelScroll_up","mousedown .frame-scroll-down":"_eventPanelScroll_down","mousedown .f-close":"_eventFrameClose","mousedown 
.f-pin":"_eventFrameLock"},_eventFrameMouseDown:function(b){if(this.event.type!==null){return}if($(b.target).hasClass("f-header")||$(b.target).hasClass("f-title")){this.event.type="drag"}if($(b.target).hasClass("f-resize")){this.event.type="resize"}if(this.event.type===null){return}b.preventDefault();this.event.target=this._frameIdentify(b.target);if(this.event.target.grid_lock){this.event.type=null;return}this.event.xy={x:b.originalEvent.pageX,y:b.originalEvent.pageY};this._frameDragStart(this.event.target)},_eventFrameMouseMove:function(h){if(this.event.type!="drag"&&this.event.type!="resize"){return}var f={x:h.originalEvent.pageX,y:h.originalEvent.pageY};var c={x:f.x-this.event.xy.x,y:f.y-this.event.xy.y};this.event.xy=f;var g=this._frameScreen(this.event.target);if(this.event.type=="resize"){g.width+=c.x;g.height+=c.y;var d=this.options.cell-this.options.margin-1;g.width=Math.max(g.width,d);g.height=Math.max(g.height,d);this._frameResize(this.event.target,g);g.width=this._toGridCoord("width",g.width)+1;g.height=this._toGridCoord("height",g.height)+1;g.width=this._toPixelCoord("width",g.width);g.height=this._toPixelCoord("height",g.height);this._frameResize(this.frame_shadow,g);this._frameInsert(this.frame_shadow,{top:this._toGridCoord("top",g.top),left:this._toGridCoord("left",g.left)})}if(this.event.type=="drag"){g.left+=c.x;g.top+=c.y;this._frameOffset(this.event.target,g);var b={top:this._toGridCoord("top",g.top),left:this._toGridCoord("left",g.left)};if(b.left!==0){b.left++}this._frameInsert(this.frame_shadow,b)}},_eventFrameMouseUp:function(b){if(this.event.type!="drag"&&this.event.type!="resize"){return}this._frameDragStop(this.event.target);this.event.type=null},_eventFrameClose:function(c){if(this.event.type!==null){return}c.preventDefault();var d=this._frameIdentify(c.target);var b=this;$(d.id).fadeOut("fast",function(){$(d.id).remove();delete 
b.frame_list[d.id];b.frame_counter--;b._panelRefresh(true);b._panelAnimationComplete();if(b.visible&&b.frame_counter==0){b.hide()}})},_eventFrameLock:function(b){if(this.event.type!==null){return}b.preventDefault();var c=this._frameIdentify(b.target);if(c.grid_lock){c.grid_lock=false;$(c.id).find(".f-pin").removeClass("toggle");$(c.id).find(".f-header").removeClass("f-not-allowed");$(c.id).find(".f-title").removeClass("f-not-allowed");$(c.id).find(".f-resize").show();$(c.id).find(".f-close").show()}else{c.grid_lock=true;$(c.id).find(".f-pin").addClass("toggle");$(c.id).find(".f-header").addClass("f-not-allowed");$(c.id).find(".f-title").addClass("f-not-allowed");$(c.id).find(".f-resize").hide();$(c.id).find(".f-close").hide()}},_eventHide:function(b){if(this.event.type!==null){return}this.hide()},_eventPanelScroll:function(b){if(this.event.type!==null||!this.visible){return}var c=$(b.srcElement).parents(".frame");if(c.length!==0){b.stopPropagation();return}b.preventDefault();var d=b.originalEvent.detail?b.originalEvent.detail:b.originalEvent.wheelDelta/-3;this._panelScroll(d)},_eventPanelScroll_up:function(b){if(this.event.type!==null){return}b.preventDefault();this._panelScroll(-this.options.scroll)},_eventPanelScroll_down:function(b){if(this.event.type!==null){return}b.preventDefault();this._panelScroll(this.options.scroll)},_frameIdentify:function(b){return this.frame_list["#"+$(b).closest(".frame").attr("id")]},_frameDragStart:function(c){this._frameFocus(c,true);var b=this._frameScreen(c);this._frameResize(this.frame_shadow,b);this._frameGrid(this.frame_shadow,c.grid_location);c.grid_location=null;$(this.frame_shadow.id).show();$(".f-cover").show()},_frameDragStop:function(c){this._frameFocus(c,false);var 
b=this._frameScreen(this.frame_shadow);this._frameResize(c,b);this._frameGrid(c,this.frame_shadow.grid_location,true);this.frame_shadow.grid_location=null;$(this.frame_shadow.id).hide();$(".f-cover").hide();this._panelAnimationComplete()},_toGridCoord:function(d,c){var b=(d=="width"||d=="height")?1:-1;if(d=="top"){c-=this.top}return parseInt((c+b*this.options.margin)/this.options.cell,10)},_toPixelCoord:function(d,e){var b=(d=="width"||d=="height")?1:-1;var c=(e*this.options.cell)-b*this.options.margin;if(d=="top"){c+=this.top}return c},_toGrid:function(b){return{top:this._toGridCoord("top",b.top),left:this._toGridCoord("left",b.left),width:this._toGridCoord("width",b.width),height:this._toGridCoord("height",b.height)}},_toPixel:function(b){return{top:this._toPixelCoord("top",b.top),left:this._toPixelCoord("left",b.left),width:this._toPixelCoord("width",b.width),height:this._toPixelCoord("height",b.height)}},_isCollision:function(d){function b(g,f){return !(g.left>f.left+f.width-1||g.left+g.width-1<f.left||g.top>f.top+f.height-1||g.top+g.height-1<f.top)}for(var c in this.frame_list){var e=this.frame_list[c];if(e.grid_location===null){continue}if(b(d,e.grid_location)){return true}}return false},_locationRank:function(b){return(b.top*this.cols)+b.left},_menuRefresh:function(){if(this.visible){if(this.top==this.options.top_min){$(".frame-scroll-up").hide()}else{$(".frame-scroll-up").show()}if(this.top==this.top_max){$(".frame-scroll-down").hide()}else{$(".frame-scroll-down").show()}}if(this.options.onchange){this.options.onchange()}},_panelAnimationComplete:function(){var b=this;$(".frame").promise().done(function(){b._panelScroll(0,true)})},_panelRefresh:function(b){this.cols=parseInt($(window).width()/this.options.cell,10)+1;this._frameInsert(null,null,b)},_panelScroll:function(g,b){var d=this.top-this.options.scroll*g;d=Math.max(d,this.top_max);d=Math.min(d,this.options.top_min);if(this.top!=d){for(var c in this.frame_list){var 
f=this.frame_list[c];if(f.grid_location!==null){var e={top:f.screen_location.top-(this.top-d),left:f.screen_location.left};this._frameOffset(f,e,b)}}this.top=d}this._menuRefresh()},_frameInsert:function(h,b,d){var c=[];if(h){h.grid_location=null;c.push([h,this._locationRank(b)])}var e=null;for(e in this.frame_list){var g=this.frame_list[e];if(g.grid_location!==null&&!g.grid_lock){g.grid_location=null;c.push([g,g.grid_rank])}}c.sort(function(k,f){var m=k[1];var l=f[1];return m<l?-1:(m>l?1:0)});for(e=0;e<c.length;e++){this._framePlace(c[e][0],d)}this.top_max=0;for(var e in this.frame_list){var h=this.frame_list[e];if(h.grid_location!==null){this.top_max=Math.max(this.top_max,h.grid_location.top+h.grid_location.height)}}this.top_max=$(window).height()-this.top_max*this.options.cell-2*this.options.margin;this.top_max=Math.min(this.top_max,this.options.top_min);this._menuRefresh()},_framePlace:function(h,c){h.grid_location=null;var f=this._toGrid(this._frameScreen(h));var b=false;for(var e=0;e<this.options.rows;e++){for(var d=0;d<Math.max(1,this.cols-f.width);d++){f.top=e;f.left=d;if(!this._isCollision(f)){b=true;break}}if(b){break}}if(b){this._frameGrid(h,f,c)}else{console.log("Grid dimensions exceeded.")}},_frameFocus:function(d,b){var c=this.frame_z+(b?1:0);$(d.id).css("z-index",c)},_frameOffset:function(e,d,c){e.screen_location.left=d.left;e.screen_location.top=d.top;if(c){this._frameFocus(e,true);var b=this;$(e.id).animate({top:d.top,left:d.left},"fast",function(){b._frameFocus(e,false)})}else{$(e.id).css({top:d.top,left:d.left})}},_frameResize:function(c,b){$(c.id).css({width:b.width,height:b.height});c.screen_location.width=b.width;c.screen_location.height=b.height},_frameGrid:function(d,b,c){d.grid_location=b;this._frameOffset(d,this._toPixel(b),c);d.grid_rank=this._locationRank(b)},_frameScreen:function(c){var b=c.screen_location;return{top:b.top,left:b.left,width:b.width,height:b.height}},_template:function(){return'<div 
class="galaxy-frame"></div>'},_templateFrame:function(c,b){if(!b){b=""}return'<div id="'+c+'" class="frame corner"><div class="f-header corner"><span class="f-title">'+b+'</span><span class="f-icon f-close fa fa-trash-o"></span><span class="f-icon f-pin fa fa-thumb-tack"></span></div><div class="f-content"><div class="f-cover"></div></div><span class="f-resize f-icon corner fa fa-resize-full"></span></div>'},_templateFrameUrl:function(e,d,b){if(b.indexOf("?")==-1){b+="?"}else{b+="&"}b+="widget=True";var c=$(this._templateFrame(e,d));c.find(".f-content").append('<iframe scrolling="auto" class="f-iframe" src="'+b+'"></iframe>');return c},_templateShadow:function(b){return'<div id="'+b+'" class="frame-shadow corner"></div>'},_templateBackground:function(){return'<div class="frame-background"></div>'},_templateMenu:function(){return'<div class="frame-scroll-up frame-menu fa fa-chevron-up fa-2x"></div><div class="frame-scroll-down frame-menu fa fa-chevron-down fa-2x"></div>'}});return{View:a}});
\ No newline at end of file
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1278,6 +1278,8 @@
.upload-ftp .upload-ftp-help{margin-bottom:10px}
.upload-ftp .upload-ftp-warning{text-align:center;margin-top:20px}
.upload-settings .upload-settings-cover{position:absolute;width:100%;height:100%;top:0px;left:0px;background:#fff;opacity:0.4;cursor:no-drop}
+.ui-table-row-error{-moz-border-radius:3px;border-radius:3px;background:#f9c7c5;padding:5px}
+.ui-table-form-error{display:none}.ui-table-form-error .ui-table-form-error-text{padding-left:5px}
.ui-form-slider .ui-form-slider-text{width:100px;float:left}
.ui-form-slider .ui-form-slider-element{width:calc(100% - 110px);float:left;top:8px;left:10px}
.ui-radiobutton label{height:23px;line-height:1em}
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/style/src/less/frame.less
--- a/static/style/src/less/frame.less
+++ b/static/style/src/less/frame.less
@@ -1,18 +1,14 @@
-.galaxy-frame
-{
- .corner
- {
+.galaxy-frame{
+ .corner {
-moz-border-radius: @border-radius-large;
border-radius: @border-radius-large;
}
- .toggle
- {
+ .toggle {
color: gold;
}
- .frame-background
- {
+ .frame-background {
z-index : @zindex-navbar;
position : absolute;
display : none;
@@ -25,8 +21,7 @@
overflow : auto;
}
- .frame-shadow
- {
+ .frame-shadow {
z-index : @zindex-navbar + 1;
position : absolute;
display : none;
@@ -40,8 +35,7 @@
/*
panel menu button
*/
- .frame-menu
- {
+ .frame-menu {
z-index : @zindex-navbar + 5;
position : absolute;
cursor : pointer;
@@ -49,13 +43,11 @@
right : 10px;
}
- .frame-scroll-up
- {
+ .frame-scroll-up {
top : 50px;
}
- .frame-scroll-down
- {
+ .frame-scroll-down {
bottom : 20px;
}
@@ -63,8 +55,7 @@
frame components
*/
- .frame
- {
+ .frame {
z-index : @zindex-navbar + 2;
overflow : hidden;
position : absolute;
@@ -72,8 +63,7 @@
border : 1px solid @navbar-default-border;
-webkit-box-shadow: 0 0 5px rgba(0,0,0,0.3);
- .f-content
- {
+ .f-content{
position : absolute;
overflow : hidden;
background : @white;
@@ -84,8 +74,7 @@
right : 3px;
}
- .f-cover
- {
+ .f-cover{
position : absolute;
display : none;
top : 0px;
@@ -96,15 +85,13 @@
background : @white;
}
- .f-iframe
- {
+ .f-iframe{
border : none;
width : 100%;
height : 100%;
}
- .f-header
- {
+ .f-header{
height : 17px;
margin : 2px;
cursor : pointer;
@@ -113,8 +100,7 @@
color : @white;
}
- .f-title
- {
+ .f-title {
position : absolute;
top : 2px;
left : 16px;
@@ -128,32 +114,27 @@
frame icons
*/
- .f-icon
- {
+ .f-icon{
position : absolute;
cursor : pointer;
font-size : 14px;
}
- .f-not-allowed
- {
+ .f-not-allowed{
cursor : not-allowed;
}
- .f-close
- {
+ .f-close{
right : 5px;
top : 3px;
}
- .f-pin
- {
+ .f-pin{
left : 6px;
top : 3px;
}
- .f-resize
- {
+ .f-resize{
right : 0px;
bottom : 0px;
background : @white;
diff -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 -r deb003947e0e02de4613912b14832944954576a5 static/style/src/less/ui.less
--- a/static/style/src/less/ui.less
+++ b/static/style/src/less/ui.less
@@ -1,3 +1,17 @@
+.ui-table-row-error {
+ -moz-border-radius: @border-radius-base;
+ border-radius: @border-radius-base;
+ background: @state-danger-bg;
+ padding: 5px;
+}
+
+.ui-table-form-error {
+ display: none;
+ .ui-table-form-error-text {
+ padding-left: 5px;
+ }
+}
+
.ui-form-slider {
.ui-form-slider-text {
&:extend(.form-control);
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
10 Sep '14
Branch: refs/heads/master
Home: https://github.com/galaxyproject/usegalaxy-playbook
Commit: 9b4c92fc5f5de88e73d91a4850e0a02af5681ca9
https://github.com/galaxyproject/usegalaxy-playbook/commit/9b4c92fc5f5de88e…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
A files/galaxy/test.galaxyproject.org/var/integrated_tool_panel.xml
A files/galaxy/usegalaxy.org/var/integrated_tool_panel.xml
A files/galaxy/usegalaxy.org/var/shed_data_manager_conf.xml
Log Message:
-----------
Add more mutable configs.
Commit: f86d4395ec4e603aa6dfb6d4706c864ac5856219
https://github.com/galaxyproject/usegalaxy-playbook/commit/f86d4395ec4e603a…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M stage/group_vars/pulsarservers.yml
M templates/galaxy/test.galaxyproject.org/config/job_conf.xml.j2
Log Message:
-----------
A few Pulsar fixes.
Compare: https://github.com/galaxyproject/usegalaxy-playbook/compare/adb284076c7c...…
1
0
[galaxyproject/usegalaxy-playbook] 6e84ae: Distribute integrated_tool_panel.xml
by GitHub 10 Sep '14
by GitHub 10 Sep '14
10 Sep '14
Branch: refs/heads/master
Home: https://github.com/galaxyproject/usegalaxy-playbook
Commit: 6e84aeb5fece00bcfa7e5e3df70a82da105d13e7
https://github.com/galaxyproject/usegalaxy-playbook/commit/6e84aeb5fece00bc…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M roles/usegalaxy/tasks/main.yml
Log Message:
-----------
Distribute integrated_tool_panel.xml
Commit: 50b76df7b812a6ce240dc8ff1f8e1536ead0a73e
https://github.com/galaxyproject/usegalaxy-playbook/commit/50b76df7b812a6ce…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M README.md
Log Message:
-----------
Notes on pulsar dependencies.
Commit: adb284076c7c626dc192fce7b02069a8965e4809
https://github.com/galaxyproject/usegalaxy-playbook/commit/adb284076c7c626d…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M files/galaxy/test.galaxyproject.org/var/shed_tool_conf.xml
M files/galaxy/test.galaxyproject.org/var/shed_tool_data_table_conf.xml
Log Message:
-----------
Install bamtools on Test.
Compare: https://github.com/galaxyproject/usegalaxy-playbook/compare/a0a3a459d10c...…
1
0
[galaxyproject/usegalaxy-playbook] 902e01: I accidentally a word in the downtime banner (than...
by GitHub 10 Sep '14
by GitHub 10 Sep '14
10 Sep '14
Branch: refs/heads/master
Home: https://github.com/galaxyproject/usegalaxy-playbook
Commit: 902e019be3c29da5d60ddfda43d036f6944f62b5
https://github.com/galaxyproject/usegalaxy-playbook/commit/902e019be3c29da5…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M production/group_vars/galaxyservers.yml
M stage/group_vars/galaxyservers.yml
Log Message:
-----------
I accidentally a word in the downtime banner (thanks @martenson).
Commit: a0a3a459d10ce80ebd380c25a8ac09d9958b7700
https://github.com/galaxyproject/usegalaxy-playbook/commit/a0a3a459d10ce80e…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M production/group_vars/all.yml
Log Message:
-----------
Upgrade Main
Compare: https://github.com/galaxyproject/usegalaxy-playbook/compare/4c1d5fce940b...…
1
0
Branch: refs/heads/master
Home: https://github.com/galaxyproject/usegalaxy-playbook
Commit: 4c1d5fce940b9d23dfc23f88291f5ed20fffaa6c
https://github.com/galaxyproject/usegalaxy-playbook/commit/4c1d5fce940b9d23…
Author: Nate Coraor <nate(a)bx.psu.edu>
Date: 2014-09-10 (Wed, 10 Sep 2014)
Changed paths:
M stage/group_vars/all.yml
Log Message:
-----------
Update Test.
1
0
commit/galaxy-central: natefoo: Fix references to config.ucsc_display_sites that I missed.
by commits-noreply@bitbucket.org 10 Sep '14
by commits-noreply@bitbucket.org 10 Sep '14
10 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d1f6d05706d0/
Changeset: d1f6d05706d0
User: natefoo
Date: 2014-09-10 20:37:33
Summary: Fix references to config.ucsc_display_sites that I missed.
Affected #: 6 files
diff -r f68f8c8f147b863ec968475216cc9a72044a722b -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml
+++ b/display_applications/ucsc/bam.xml
@@ -6,7 +6,7 @@
<dynamic_param name="ucsc_link" value="1"/><dynamic_param name="builds" value="2" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter><filter>${dataset.dbkey in $builds}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name --><url>${ucsc_link}db=${qp($bam_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
diff -r f68f8c8f147b863ec968475216cc9a72044a722b -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 display_applications/ucsc/bigbed.xml
--- a/display_applications/ucsc/bigbed.xml
+++ b/display_applications/ucsc/bigbed.xml
@@ -6,7 +6,7 @@
<dynamic_param name="ucsc_link" value="1"/><dynamic_param name="builds" value="2" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter><filter>${dataset.dbkey in $builds}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name --><url>${ucsc_link}db=${qp($bigbed_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
diff -r f68f8c8f147b863ec968475216cc9a72044a722b -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 display_applications/ucsc/bigwig.xml
--- a/display_applications/ucsc/bigwig.xml
+++ b/display_applications/ucsc/bigwig.xml
@@ -6,7 +6,7 @@
<dynamic_param name="ucsc_link" value="1"/><dynamic_param name="builds" value="2" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter><filter>${dataset.dbkey in $builds}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name --><url>${ucsc_link}db=${qp($bigwig_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
diff -r f68f8c8f147b863ec968475216cc9a72044a722b -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml
+++ b/display_applications/ucsc/interval_as_bed.xml
@@ -6,7 +6,7 @@
<dynamic_param name="ucsc_link" value="1"/><dynamic_param name="builds" value="2" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter><filter>${dataset.dbkey in $builds}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name --><url>${ucsc_link}db=${qp($bed_file.dbkey)}&position=${position.qp}&hgt.customText=${bed_file.qp}</url>
diff -r f68f8c8f147b863ec968475216cc9a72044a722b -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 display_applications/ucsc/maf_customtrack.xml
--- a/display_applications/ucsc/maf_customtrack.xml
+++ b/display_applications/ucsc/maf_customtrack.xml
@@ -6,7 +6,7 @@
<dynamic_param name="ucsc_link" value="1"/><dynamic_param name="builds" value="2" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter><filter>${dataset.dbkey in $builds}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name --><url>${ucsc_link}db=${qp($maf_customtrack_file.dbkey)}&position=${maf_customtrack_file.metadata.vp_chromosome}:${maf_customtrack_file.metadata.vp_start}-${maf_customtrack_file.metadata.vp_end}&hgt.customText=${maf_customtrack_file.qp}</url>
diff -r f68f8c8f147b863ec968475216cc9a72044a722b -r d1f6d05706d0f03c7a31cd5f8b3890c55e565af0 display_applications/ucsc/vcf.xml
--- a/display_applications/ucsc/vcf.xml
+++ b/display_applications/ucsc/vcf.xml
@@ -6,7 +6,7 @@
<dynamic_param name="ucsc_link" value="1"/><dynamic_param name="builds" value="2" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.ucsc_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('ucsc')}</filter><filter>${dataset.dbkey in $builds}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name --><url>${ucsc_link}db=${qp($bgzip_file.dbkey)}&hgt.customText=${qp($track.url)}</url>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
7 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/fa92c5497232/
Changeset: fa92c5497232
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: Synchronize validation of workflows between web and API controllers.
Reduces code duplication and does more correct checking of workflow step replacement parameters. More parameter checking functional tests.
Affected #: 6 files
diff -r d85d039b8eeb2781619683f62dddf0f4c20af773 -r fa92c5497232444ade0e7044184fb8ccf419177c lib/galaxy/workflow/run.py
--- a/lib/galaxy/workflow/run.py
+++ b/lib/galaxy/workflow/run.py
@@ -231,10 +231,10 @@
# Build the state for each step
module_injector = modules.WorkflowModuleInjector( self.trans )
for step in self.workflow.steps:
- step_errors = module_injector.inject( step )
+ step_args = self.param_map.get( step.id, {} )
+ step_errors = module_injector.inject( step, step_args=step_args )
if step.type == 'tool' or step.type is None:
- _update_step_parameters( step, self.param_map )
- if step.tool_errors:
+ if step_errors:
message = "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
raise exceptions.MessageException( message )
if step.upgrade_messages:
@@ -242,10 +242,4 @@
raise exceptions.MessageException( message )
-def _update_step_parameters(step, normalized_param_map):
- param_dict = normalized_param_map.get(step.id, {})
- if param_dict:
- step.state.inputs.update(param_dict)
-
-
__all__ = [ invoke, WorkflowRunConfig ]
diff -r d85d039b8eeb2781619683f62dddf0f4c20af773 -r fa92c5497232444ade0e7044184fb8ccf419177c test/api/helpers.py
--- a/test/api/helpers.py
+++ b/test/api/helpers.py
@@ -131,8 +131,9 @@
elif "dataset" in kwds:
dataset_id = kwds[ "dataset" ][ "id" ]
else:
+ hid = kwds.get( "hid", -1 ) # If not hid, just grab last dataset
dataset_contents = self.galaxy_interactor.get( contents_url ).json()
- dataset_id = dataset_contents[ -1 ][ "id" ]
+ dataset_id = dataset_contents[ hid ][ "id" ]
display_response = self.galaxy_interactor.get( "%s/%s/display" % ( contents_url, dataset_id ) )
assert display_response.status_code == 200
diff -r d85d039b8eeb2781619683f62dddf0f4c20af773 -r fa92c5497232444ade0e7044184fb8ccf419177c test/api/test_tools.py
--- a/test/api/test_tools.py
+++ b/test/api/test_tools.py
@@ -109,6 +109,15 @@
output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
self.assertEqual( output1_content.strip(), "Cat1Test" )
+ @skip_without_tool( "validation_default" )
+ def test_validation( self ):
+ history_id = self.dataset_populator.new_history()
+ inputs = {
+ 'select_param': "\" ; echo \"moo",
+ }
+ response = self._run( "validation_default", history_id, inputs )
+ self._assert_status_code_is( response, 400 )
+
@skip_without_tool( "cat1" )
def test_run_cat1_with_two_inputs( self ):
# Run tool with an multiple data parameter and grouping (repeat)
diff -r d85d039b8eeb2781619683f62dddf0f4c20af773 -r fa92c5497232444ade0e7044184fb8ccf419177c test/api/test_workflow_validation_1.ga
--- a/test/api/test_workflow_validation_1.ga
+++ b/test/api/test_workflow_validation_1.ga
@@ -23,7 +23,7 @@
"post_job_actions": {},
"tool_errors": null,
"tool_id": "validation_default",
- "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"\\\"cow\\\"\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central/tool-data/shared/ucsc/chrom/?.len\\\"\"}",
+ "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"\\\"cow\\\"\", \"float_param\": \"8.0\", \"select_param\": \"\\\"opt1\\\"\", \"chromInfo\": \"\\\"/home/john/workspace/galaxy-central/tool-data/shared/ucsc/chrom/?.len\\\"\"}",
"tool_version": null,
"type": "tool",
"user_outputs": []
diff -r d85d039b8eeb2781619683f62dddf0f4c20af773 -r fa92c5497232444ade0e7044184fb8ccf419177c test/api/test_workflows.py
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -480,18 +480,30 @@
@skip_without_tool( "validation_default" )
def test_parameter_substitution_validation( self ):
+ substitions = dict( input1="\" ; echo \"moo" )
+ run_workflow_response, history_id = self._run_validation_workflow_with_substitions( substitions )
+
+ self.dataset_populator.wait_for_history( history_id, assert_ok=True )
+ self.assertEquals("__dq__ X echo __dq__moo\n", self.dataset_populator.get_history_dataset_content( history_id, hid=1 ) )
+
+ @skip_without_tool( "validation_default" )
+ def test_parameter_substitution_validation_value_errors_1( self ):
+ substitions = dict( select_param="\" ; echo \"moo" )
+ run_workflow_response, history_id = self._run_validation_workflow_with_substitions( substitions )
+
+ self._assert_status_code_is( run_workflow_response, 400 )
+
+ def _run_validation_workflow_with_substitions( self, substitions ):
workflow = self.workflow_populator.load_workflow_from_resource( "test_workflow_validation_1" )
uploaded_workflow_id = self.workflow_populator.create_workflow( workflow )
history_id = self.dataset_populator.new_history()
workflow_request = dict(
history="hist_id=%s" % history_id,
workflow_id=uploaded_workflow_id,
- parameters=dumps( dict( validation_default=dict( input1="\" ; echo \"moo" ) ) )
+ parameters=dumps( dict( validation_default=substitions ) )
)
run_workflow_response = self._post( "workflows", data=workflow_request )
- self._assert_status_code_is( run_workflow_response, 200 )
- self.dataset_populator.wait_for_history( history_id, assert_ok=True )
- self.assertEquals("__dq__ X echo __dq__moo\n", self.dataset_populator.get_history_dataset_content( history_id ) )
+ return run_workflow_response, history_id
@skip_without_tool( "random_lines1" )
def test_run_replace_params_by_steps( self ):
diff -r d85d039b8eeb2781619683f62dddf0f4c20af773 -r fa92c5497232444ade0e7044184fb8ccf419177c test/functional/tools/validation_default.xml
--- a/test/functional/tools/validation_default.xml
+++ b/test/functional/tools/validation_default.xml
@@ -1,16 +1,27 @@
<tool id="validation_default" name="Validation (default)"><command>
echo "$input1" > out1;
+ echo $float_param > out2;
+ echo $select_param > out3;
</command><inputs>
- <param name="input1" type="text" label="Concatenate Dataset" />
+ <param name="input1" type="text" label="text input" />
+ <param name="float_param" type="float" label="float input" value="8.0" />
+ <param name="select_param" type="select" label="select_param">
+ <option value="opt1">Option 1</option>
+ <option value="opt2">Option 2</option>
+ </param></inputs><outputs><data name="out_file1" from_work_dir="out1" />
+ <data name="out_file2" from_work_dir="out2" />
+ <data name="out_file3" from_work_dir="out3" /></outputs><tests><test><param name="input1" value="" ; echo "moo" />
+ <param name="float_param" value="5" />
+ <param name="select_param" value="opt1" /><output name="out_file1"><assert_contents><has_line line="__dq__ X echo __dq__moo" />
https://bitbucket.org/galaxy/galaxy-central/commits/144af99ca953/
Changeset: 144af99ca953
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: More workflow module commenting.
This time add small comment about config form method.
Affected #: 1 file
diff -r fa92c5497232444ade0e7044184fb8ccf419177c -r 144af99ca9536f5924fae66c8fb372f150599f73 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -96,6 +96,9 @@
pass
def get_config_form( self ):
+ """ Render form that is embedded in workflow editor for modifying the
+ step state of a node.
+ """
raise TypeError( "Abstract method" )
def check_and_update_state( self ):
https://bitbucket.org/galaxy/galaxy-central/commits/51799404c770/
Changeset: 51799404c770
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: Unify runtime state handling methods across tool and input modules.
The abstract interface WorkflowModule declared three methods that WorkflowToolModule didn't implement and logic for updating tool state wasn't restricted to the module itself. This introduces a new compute_state method that unifies all of that and includes some documentation.
In addition to simplifing the interface new modules must adhere to, this allows eliminating the switch on module type when injecting trasient state attribute into steps - moving that logic into the module itself. In my opinion this makes WorkflowModule closer to being an abstract interface - allowing components to uniformly reason about workflow modules as a black box (and not dispatch on type attributes).
These changes did require adding add_dummy_datasets to the base class - and while the input modules don't need to utilize the ability to override this - my pause module downstream did so I think it makes sense to place on the interface.
Affected #: 1 file
diff -r 144af99ca9536f5924fae66c8fb372f150599f73 -r 51799404c770249f71ffd540c352011509f01329 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -108,14 +108,15 @@
"""
pass
+ def add_dummy_datasets( self, connections=None):
+ # Replaced connected inputs with DummyDataset values.
+ pass
+
## ---- Run time ---------------------------------------------------------
def get_runtime_inputs( self ):
raise TypeError( "Abstract method" )
- def get_runtime_state( self ):
- raise TypeError( "Abstract method" )
-
def encode_runtime_state( self, trans, state ):
""" Encode the runtime state (loaded from the stored step and
populated via the WorkflowModuleInjector below) for use in a hidden
@@ -127,10 +128,16 @@
"""
raise TypeError( "Abstract method" )
- def decode_runtime_state( self, trans, string ):
- raise TypeError( "Abstract method" )
+ def compute_state( self, trans, step_updates=None ):
+ """ Recover the transient "state" attribute to populate corresponding
+ step with (currently this is always a DefaultToolState instance,
+ though I am not sure this is strictly nessecary).
- def update_runtime_state( self, trans, state, values ):
+ If `step_updates` is `None`, this is likely for rendering the run form
+ for instance and no runtime properties are available and state must be
+ solely determined by step. If `step_updates` are available they describe
+ the runtime properties supplied by the workflow runner.
+ """
raise TypeError( "Abstract method" )
def execute( self, trans, state ):
@@ -211,6 +218,17 @@
errors[ name ] = error
return errors
+ def compute_state( self, trans, step_updates=None ):
+ if step_updates:
+ # Fix this for multiple inputs
+ state = self.decode_runtime_state( trans, step_updates.pop( "tool_state" ) )
+ step_errors = self.update_runtime_state( trans, state, step_updates )
+ else:
+ state = self.get_runtime_state()
+ step_errors = {}
+
+ return state, step_errors
+
def execute( self, trans, state ):
return None, dict( output=state.inputs['input'])
@@ -509,6 +527,21 @@
def check_and_update_state( self ):
return self.tool.check_and_update_param_values( self.state.inputs, self.trans, allow_workflow_parameters=True )
+ def compute_state( self, trans, step_updates=None ):
+ # Warning: This method destructively modifies existing step state.
+ step_errors = None
+ state = self.state
+ if step_updates:
+ # Get the tool
+ tool = self.tool
+ # Get old errors
+ old_errors = state.inputs.pop( "__errors__", {} )
+ # Update the state
+ step_errors = tool.update_state( trans, tool.inputs, state.inputs, step_updates,
+ update_only=True, old_errors=old_errors )
+ else:
+ return state, step_errors
+
def add_dummy_datasets( self, connections=None):
if connections:
# Store onnections by input name
@@ -619,36 +652,19 @@
module = step.module = module_factory.from_workflow_step( trans, step )
# Calculating step errors and state depends on whether step is a tool step or not.
- if step.type == 'tool' or step.type is None:
- if not module:
- step.module = None
- step.state = None
- raise MissingToolException()
+ if not module:
+ step.module = None
+ step.state = None
+ raise MissingToolException()
- # Fix any missing parameters
- step.upgrade_messages = module.check_and_update_state()
+ # Fix any missing parameters
+ step.upgrade_messages = module.check_and_update_state()
- # Any connected input needs to have value DummyDataset (these
- # are not persisted so we need to do it every time)
- module.add_dummy_datasets( connections=step.input_connections )
+ # Any connected input needs to have value DummyDataset (these
+ # are not persisted so we need to do it every time)
+ module.add_dummy_datasets( connections=step.input_connections )
- state = module.state
- step.state = state
- if step_args is not None:
- # Get the tool
- tool = module.tool
- # Get old errors
- old_errors = state.inputs.pop( "__errors__", {} )
- # Update the state
- step_errors = tool.update_state( trans, tool.inputs, step.state.inputs, step_args,
- update_only=True, old_errors=old_errors )
-
- else:
- if step_args:
- # Fix this for multiple inputs
- state = step.state = module.decode_runtime_state( trans, step_args.pop( "tool_state" ) )
- step_errors = module.update_runtime_state( trans, state, step_args )
- else:
- step.state = step.module.get_runtime_state()
+ state, step_errors = module.compute_state( trans, step_args )
+ step.state = state
return step_errors
https://bitbucket.org/galaxy/galaxy-central/commits/952e1f153a2d/
Changeset: 952e1f153a2d
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: Remove some old commented out client-side stuff.
(Let me know if these were important.)
Affected #: 1 file
diff -r 51799404c770249f71ffd540c352011509f01329 -r 952e1f153a2d71673d71952f990f3e5b67495505 templates/webapps/galaxy/workflow/editor.mako
--- a/templates/webapps/galaxy/workflow/editor.mako
+++ b/templates/webapps/galaxy/workflow/editor.mako
@@ -7,16 +7,6 @@
%></%def>
-## <%def name="late_javascripts()">
-## <script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.panels.js')}"></script>
-## <script type="text/javascript">
-## ensure_dd_helper();
-## make_left_panel( $("#left"), $("#center"), $("#left-border" ) );
-## make_right_panel( $("#right"), $("#center"), $("#right-border" ) );
-## ## handle_minwidth_hint = rp.handle_minwidth_hint;
-## </script>
-## </%def>
-
<%def name="javascripts()">
${parent.javascripts()}
@@ -87,18 +77,6 @@
<style type="text/css">
body { margin: 0; padding: 0; overflow: hidden; }
- /* Wider right panel */
- ## #center { right: 309px; }
- ## #right-border { right: 300px; }
- ## #right { width: 300px; }
- ## /* Relative masthead size */
- ## #masthead { height: 2.5em; }
- ## #masthead div.title { font-size: 1.8em; }
- ## #left, #left-border, #center, #right-border, #right {
- ## top: 2.5em;
- ## margin-top: 7px;
- ## }
-
#left {
background: #C1C9E5 url(${h.url_for('/static/style/menu_bg.png')}) top repeat-x;
}
https://bitbucket.org/galaxy/galaxy-central/commits/3656bf3ec7c4/
Changeset: 3656bf3ec7c4
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: Refactor logic for tracking workflow progress out of WorkflowInvoker.
Creates a new class WorkflowProgress. This has class achives two objectives:
- I am trying to move specialized per-module-type logic out of WorkflowInvoker and into the modules themselves - making it easier to add new modules and because it is better OOP design (invoker shouldn't be dispatching on .type - the modules should provide a consistent interface for invoking themselves). WorkflowProgress will give something that WorkflowInvoker can pass along to the modules to allow them to find their connections and record their outputs.
- Downstream in workflow scheduling branch I am placing logic in here for rebuilding the in-memory state of the workflow progress when re-evaluating progress.
Affected #: 3 files
diff -r 952e1f153a2d71673d71952f990f3e5b67495505 -r 3656bf3ec7c487b8635ceb20de1ae5fe8f2e4f87 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -228,6 +228,7 @@
trans=trans,
workflow=workflow,
workflow_run_config=run_config,
+ populate_state=True,
)
trans.sa_session.flush()
diff -r 952e1f153a2d71673d71952f990f3e5b67495505 -r 3656bf3ec7c487b8635ceb20de1ae5fe8f2e4f87 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -11,6 +11,7 @@
from elementtree.ElementTree import Element
import galaxy.tools
+from galaxy import exceptions
from galaxy.web.framework import formbuilder
from galaxy.jobs.actions.post import ActionBox
from galaxy.model import PostJobAction
@@ -668,3 +669,20 @@
step.state = state
return step_errors
+
+
+def populate_module_and_state( trans, workflow, param_map ):
+ """ Used by API but not web controller, walks through a workflow's steps
+ and populates transient module and state attributes on each.
+ """
+ module_injector = WorkflowModuleInjector( trans )
+ for step in workflow.steps:
+ step_args = param_map.get( step.id, {} )
+ step_errors = module_injector.inject( step, step_args=step_args )
+ if step.type == 'tool' or step.type is None:
+ if step_errors:
+ message = "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
+ raise exceptions.MessageException( message )
+ if step.upgrade_messages:
+ message = "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
+ raise exceptions.MessageException( message )
diff -r 952e1f153a2d71673d71952f990f3e5b67495505 -r 3656bf3ec7c487b8635ceb20de1ae5fe8f2e4f87 lib/galaxy/workflow/run.py
--- a/lib/galaxy/workflow/run.py
+++ b/lib/galaxy/workflow/run.py
@@ -21,9 +21,12 @@
log = logging.getLogger( __name__ )
-def invoke( trans, workflow, workflow_run_config ):
+def invoke( trans, workflow, workflow_run_config, populate_state=False ):
""" Run the supplied workflow in the supplied target_history.
"""
+ if populate_state:
+ modules.populate_module_and_state( trans, workflow, workflow_run_config.param_map )
+
return WorkflowInvoker(
trans,
workflow,
@@ -36,29 +39,21 @@
def __init__( self, trans, workflow, workflow_run_config ):
self.trans = trans
self.workflow = workflow
+ workflow_invocation = model.WorkflowInvocation()
+ workflow_invocation.workflow = self.workflow
+ self.workflow_invocation = workflow_invocation
self.target_history = workflow_run_config.target_history
self.replacement_dict = workflow_run_config.replacement_dict
self.copy_inputs_to_history = workflow_run_config.copy_inputs_to_history
- self.inputs_by_step_id = workflow_run_config.inputs
- self.param_map = workflow_run_config.param_map
+ self.progress = WorkflowProgress( self.workflow_invocation, workflow_run_config.inputs )
- self.outputs = odict()
# TODO: Attach to actual model object and persist someday...
self.invocation_uuid = uuid.uuid1().hex
def invoke( self ):
- workflow_invocation = model.WorkflowInvocation()
- workflow_invocation.workflow = self.workflow
-
- # Web controller will populate state on each step before calling
- # invoke but not API controller. More work should be done to further
- # harmonize these methods going forward if possible - if possible
- # moving more web controller logic here.
- state_populated = not self.workflow.steps or hasattr( self.workflow.steps[ 0 ], "state" )
- if not state_populated:
- self._populate_state( )
-
- for step in self.workflow.steps:
+ workflow_invocation = self.workflow_invocation
+ remaining_steps = self.progress.remaining_steps()
+ for step in remaining_steps:
jobs = self._invoke_step( step )
for job in util.listify( jobs ):
# Record invocation
@@ -72,7 +67,7 @@
# Not flushing in here, because web controller may create multiple
# invocations.
- return self.outputs
+ return self.progress.outputs
def _invoke_step( self, step ):
if step.type == 'tool' or step.type is None:
@@ -84,7 +79,6 @@
def _execute_tool_step( self, step ):
trans = self.trans
- outputs = self.outputs
tool = trans.app.toolbox.get_tool( step.tool_id )
tool_state = step.state
@@ -119,7 +113,7 @@
# If collection - just use element model object.
replacement = iteration_elements[ prefixed_name ]
else:
- replacement = self._replacement_for_input( input, prefixed_name, step )
+ replacement = self.progress.replacement_for_tool_input( step, input, prefixed_name )
return replacement
try:
# Replace DummyDatasets with historydatasetassociations
@@ -139,10 +133,11 @@
workflow_invocation_uuid=self.invocation_uuid
)
if collection_info:
- outputs[ step.id ] = dict( execution_tracker.created_collections )
+ step_outputs = dict( execution_tracker.created_collections )
+
else:
- outputs[ step.id ] = dict( execution_tracker.output_datasets )
-
+ step_outputs = dict( execution_tracker.output_datasets )
+ self.progress.set_step_outputs( step, step_outputs )
jobs = execution_tracker.successful_jobs
for job in jobs:
self._handle_post_job_actions( step, job )
@@ -154,13 +149,13 @@
def callback( input, value, prefixed_name, prefixed_label ):
is_data_param = isinstance( input, DataToolParameter )
if is_data_param and not input.multiple:
- data = self._replacement_for_input( input, prefixed_name, step )
+ data = self.progress.replacement_for_tool_input( step, input, prefixed_name )
if isinstance( data, model.HistoryDatasetCollectionAssociation ):
collections_to_match.add( prefixed_name, data )
is_data_collection_param = isinstance( input, DataCollectionToolParameter )
if is_data_collection_param and not input.multiple:
- data = self._replacement_for_input( input, prefixed_name, step )
+ data = self.progress.replacement_for_tool_input( step, input, prefixed_name )
history_query = input._history_query( self.trans )
if history_query.can_map_over( data ):
collections_to_match.add( prefixed_name, data, subcollection_type=input.collection_type )
@@ -170,29 +165,25 @@
def _execute_input_step( self, step ):
trans = self.trans
- outputs = self.outputs
- job, out_data = step.module.execute( trans, step.state )
- outputs[ step.id ] = out_data
+ job, step_outputs = step.module.execute( trans, step.state )
# Web controller may set copy_inputs_to_history, API controller always sets
# inputs.
if self.copy_inputs_to_history:
- for input_dataset_hda in out_data.values():
+ for input_dataset_hda in step_outputs.values():
content_type = input_dataset_hda.history_content_type
if content_type == "dataset":
new_hda = input_dataset_hda.copy( copy_children=True )
self.target_history.add_dataset( new_hda )
- outputs[ step.id ][ 'input_ds_copy' ] = new_hda
+ step_outputs[ 'input_ds_copy' ] = new_hda
elif content_type == "dataset_collection":
new_hdca = input_dataset_hda.copy()
self.target_history.add_dataset_collection( new_hdca )
- outputs[ step.id ][ 'input_ds_copy' ] = new_hdca
+ step_outputs[ 'input_ds_copy' ] = new_hdca
else:
raise Exception("Unknown history content encountered")
- if self.inputs_by_step_id:
- outputs[ step.id ][ 'output' ] = self.inputs_by_step_id[ step.id ]
-
+ self.progress.set_outputs_for_input( step, step_outputs )
return job
def _handle_post_job_actions( self, step, job ):
@@ -205,7 +196,20 @@
else:
job.add_post_job_action( pja )
- def _replacement_for_input( self, input, prefixed_name, step ):
+
+class WorkflowProgress( object ):
+
+ def __init__( self, workflow_invocation, inputs_by_step_id ):
+ self.outputs = odict()
+ self.workflow_invocation = workflow_invocation
+ self.inputs_by_step_id = inputs_by_step_id
+
+ def remaining_steps(self):
+ steps = self.workflow_invocation.workflow.steps
+
+ return steps
+
+ def replacement_for_tool_input( self, step, input, prefixed_name ):
""" For given workflow 'step' that has had input_connections_by_name
populated fetch the actual runtime input for the given tool 'input'.
"""
@@ -213,7 +217,7 @@
if prefixed_name in step.input_connections_by_name:
connection = step.input_connections_by_name[ prefixed_name ]
if input.multiple:
- replacement = [ self._replacement_for_connection( c ) for c in connection ]
+ replacement = [ self.replacement_for_connection( c ) for c in connection ]
# If replacement is just one dataset collection, replace tool
# input with dataset collection - tool framework will extract
# datasets properly.
@@ -221,25 +225,21 @@
if isinstance( replacement[ 0 ], model.HistoryDatasetCollectionAssociation ):
replacement = replacement[ 0 ]
else:
- replacement = self._replacement_for_connection( connection[ 0 ] )
+ replacement = self.replacement_for_connection( connection[ 0 ] )
return replacement
- def _replacement_for_connection( self, connection ):
- return self.outputs[ connection.output_step.id ][ connection.output_name ]
+ def replacement_for_connection( self, connection ):
+ step_outputs = self.outputs[ connection.output_step.id ]
+ return step_outputs[ connection.output_name ]
- def _populate_state( self ):
- # Build the state for each step
- module_injector = modules.WorkflowModuleInjector( self.trans )
- for step in self.workflow.steps:
- step_args = self.param_map.get( step.id, {} )
- step_errors = module_injector.inject( step, step_args=step_args )
- if step.type == 'tool' or step.type is None:
- if step_errors:
- message = "Workflow cannot be run because of validation errors in some steps: %s" % step_errors
- raise exceptions.MessageException( message )
- if step.upgrade_messages:
- message = "Workflow cannot be run because of step upgrade messages: %s" % step.upgrade_messages
- raise exceptions.MessageException( message )
+ def set_outputs_for_input( self, step, outputs={} ):
+ if self.inputs_by_step_id:
+ outputs[ 'output' ] = self.inputs_by_step_id[ step.id ]
+
+ self.set_step_outputs( step, outputs )
+
+ def set_step_outputs(self, step, outputs):
+ self.outputs[ step.id ] = outputs
__all__ = [ invoke, WorkflowRunConfig ]
https://bitbucket.org/galaxy/galaxy-central/commits/bbe9be042b34/
Changeset: bbe9be042b34
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: Unify execute method across tool and input workflow modules.
Previously tool module class did not contain this method (though it was abstract in the base class) and extra logic for dealing with tools was located outside of the module itself. Moving everything into the module (made possible by previously committed WorkflowProgress class and attaching some properties to workflow invocations and sending them in) means that now the workflow invocation logic can uniformly deal with modules and doesn't need to dispatch on type. This should allow extending the capabilities of the workflow invoker and adding new modules independently.
Affected #: 2 files
diff -r 3656bf3ec7c487b8635ceb20de1ae5fe8f2e4f87 -r bbe9be042b34268c649b42c8c85d238149e910f0 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -12,11 +12,15 @@
import galaxy.tools
from galaxy import exceptions
+from galaxy import model
+from galaxy.dataset_collections import matching
from galaxy.web.framework import formbuilder
from galaxy.jobs.actions.post import ActionBox
from galaxy.model import PostJobAction
from galaxy.tools.parameters import check_param, DataToolParameter, DummyDataset, RuntimeValue, visit_input_values
from galaxy.tools.parameters import DataCollectionToolParameter
+from galaxy.tools.parameters.wrapped import make_dict_copy
+from galaxy.tools.execute import execute
from galaxy.util.bunch import Bunch
from galaxy.util import odict
from galaxy.util.json import loads
@@ -141,7 +145,11 @@
"""
raise TypeError( "Abstract method" )
- def execute( self, trans, state ):
+ def execute( self, trans, progress, invocation, step ):
+ """ Execute the given workflow step in the given workflow invocation.
+ Use the supplied workflow progress object to track outputs, find
+ inputs, etc...
+ """
raise TypeError( "Abstract method" )
@@ -230,8 +238,26 @@
return state, step_errors
- def execute( self, trans, state ):
- return None, dict( output=state.inputs['input'])
+ def execute( self, trans, progress, invocation, step ):
+ job, step_outputs = None, dict( output=step.state.inputs['input'])
+
+ # Web controller may set copy_inputs_to_history, API controller always sets
+ # inputs.
+ if invocation.copy_inputs_to_history:
+ for input_dataset_hda in step_outputs.values():
+ content_type = input_dataset_hda.history_content_type
+ if content_type == "dataset":
+ new_hda = input_dataset_hda.copy( copy_children=True )
+ invocation.history.add_dataset( new_hda )
+ step_outputs[ 'input_ds_copy' ] = new_hda
+ elif content_type == "dataset_collection":
+ new_hdca = input_dataset_hda.copy()
+ invocation.history.add_dataset_collection( new_hdca )
+ step_outputs[ 'input_ds_copy' ] = new_hdca
+ else:
+ raise Exception("Unknown history content encountered")
+ progress.set_outputs_for_input( step, step_outputs )
+ return job
class InputDataModule( InputModule ):
@@ -540,8 +566,100 @@
# Update the state
step_errors = tool.update_state( trans, tool.inputs, state.inputs, step_updates,
update_only=True, old_errors=old_errors )
+ return state, step_errors
+
+ def execute( self, trans, progress, invocation, step ):
+ tool = trans.app.toolbox.get_tool( step.tool_id )
+ tool_state = step.state
+
+ collections_to_match = self._find_collections_to_match( tool, progress, step )
+ # Have implicit collections...
+ if collections_to_match.has_collections():
+ collection_info = self.trans.app.dataset_collections_service.match_collections( collections_to_match )
else:
- return state, step_errors
+ collection_info = None
+
+ param_combinations = []
+ if collection_info:
+ iteration_elements_iter = collection_info.slice_collections()
+ else:
+ iteration_elements_iter = [ None ]
+
+ for iteration_elements in iteration_elements_iter:
+ execution_state = tool_state.copy()
+ # TODO: Move next step into copy()
+ execution_state.inputs = make_dict_copy( execution_state.inputs )
+
+ # Connect up
+ def callback( input, value, prefixed_name, prefixed_label ):
+ replacement = None
+ if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
+ if iteration_elements and prefixed_name in iteration_elements:
+ if isinstance( input, DataToolParameter ):
+ # Pull out dataset instance from element.
+ replacement = iteration_elements[ prefixed_name ].dataset_instance
+ else:
+ # If collection - just use element model object.
+ replacement = iteration_elements[ prefixed_name ]
+ else:
+ replacement = progress.replacement_for_tool_input( step, input, prefixed_name )
+ return replacement
+ try:
+ # Replace DummyDatasets with historydatasetassociations
+ visit_input_values( tool.inputs, execution_state.inputs, callback )
+ except KeyError, k:
+ message_template = "Error due to input mapping of '%s' in '%s'. A common cause of this is conditional outputs that cannot be determined until runtime, please review your workflow."
+ message = message_template % (tool.name, k.message)
+ raise exceptions.MessageException( message )
+ param_combinations.append( execution_state.inputs )
+
+ execution_tracker = execute(
+ trans=self.trans,
+ tool=tool,
+ param_combinations=param_combinations,
+ history=invocation.history,
+ collection_info=collection_info,
+ workflow_invocation_uuid=invocation.uuid
+ )
+ if collection_info:
+ step_outputs = dict( execution_tracker.created_collections )
+ else:
+ step_outputs = dict( execution_tracker.output_datasets )
+ progress.set_step_outputs( step, step_outputs )
+ jobs = execution_tracker.successful_jobs
+ for job in jobs:
+ self._handle_post_job_actions( step, job, invocation.replacement_dict )
+ return jobs
+
+ def _find_collections_to_match( self, tool, progress, step ):
+ collections_to_match = matching.CollectionsToMatch()
+
+ def callback( input, value, prefixed_name, prefixed_label ):
+ is_data_param = isinstance( input, DataToolParameter )
+ if is_data_param and not input.multiple:
+ data = progress.replacement_for_tool_input( step, input, prefixed_name )
+ if isinstance( data, model.HistoryDatasetCollectionAssociation ):
+ collections_to_match.add( prefixed_name, data )
+
+ is_data_collection_param = isinstance( input, DataCollectionToolParameter )
+ if is_data_collection_param and not input.multiple:
+ data = progress.replacement_for_tool_input( step, input, prefixed_name )
+ history_query = input._history_query( self.trans )
+ if history_query.can_map_over( data ):
+ collections_to_match.add( prefixed_name, data, subcollection_type=input.collection_type )
+
+ visit_input_values( tool.inputs, step.state.inputs, callback )
+ return collections_to_match
+
+ def _handle_post_job_actions( self, step, job, replacement_dict ):
+ # Create new PJA associations with the created job, to be run on completion.
+ # PJA Parameter Replacement (only applies to immediate actions-- rename specifically, for now)
+ # Pass along replacement dict with the execution of the PJA so we don't have to modify the object.
+ for pja in step.post_job_actions:
+ if pja.action_type in ActionBox.immediate_actions:
+ ActionBox.execute( self.trans.app, self.trans.sa_session, pja, job, replacement_dict )
+ else:
+ job.add_post_job_action( pja )
def add_dummy_datasets( self, connections=None):
if connections:
diff -r 3656bf3ec7c487b8635ceb20de1ae5fe8f2e4f87 -r bbe9be042b34268c649b42c8c85d238149e910f0 lib/galaxy/workflow/run.py
--- a/lib/galaxy/workflow/run.py
+++ b/lib/galaxy/workflow/run.py
@@ -1,18 +1,8 @@
import uuid
from galaxy import model
-from galaxy import exceptions
from galaxy import util
-from galaxy.dataset_collections import matching
-
-from galaxy.jobs.actions.post import ActionBox
-
-from galaxy.tools.parameters.basic import DataToolParameter
-from galaxy.tools.parameters.basic import DataCollectionToolParameter
-from galaxy.tools.parameters import visit_input_values
-from galaxy.tools.parameters.wrapped import make_dict_copy
-from galaxy.tools.execute import execute
from galaxy.util.odict import odict
from galaxy.workflow import modules
from galaxy.workflow.run_request import WorkflowRunConfig
@@ -42,13 +32,16 @@
workflow_invocation = model.WorkflowInvocation()
workflow_invocation.workflow = self.workflow
self.workflow_invocation = workflow_invocation
- self.target_history = workflow_run_config.target_history
- self.replacement_dict = workflow_run_config.replacement_dict
- self.copy_inputs_to_history = workflow_run_config.copy_inputs_to_history
self.progress = WorkflowProgress( self.workflow_invocation, workflow_run_config.inputs )
- # TODO: Attach to actual model object and persist someday...
- self.invocation_uuid = uuid.uuid1().hex
+ invocation_uuid = uuid.uuid1().hex
+
+ # In one way or another, following attributes will become persistent
+ # so they are available during delayed/revisited workflow scheduling.
+ self.workflow_invocation.uuid = invocation_uuid
+ self.workflow_invocation.history = workflow_run_config.target_history
+ self.workflow_invocation.copy_inputs_to_history = workflow_run_config.copy_inputs_to_history
+ self.workflow_invocation.replacement_dict = workflow_run_config.replacement_dict
def invoke( self ):
workflow_invocation = self.workflow_invocation
@@ -70,132 +63,9 @@
return self.progress.outputs
def _invoke_step( self, step ):
- if step.type == 'tool' or step.type is None:
- jobs = self._execute_tool_step( step )
- else:
- jobs = self._execute_input_step( step )
-
+ jobs = step.module.execute( self.trans, self.progress, self.workflow_invocation, step )
return jobs
- def _execute_tool_step( self, step ):
- trans = self.trans
-
- tool = trans.app.toolbox.get_tool( step.tool_id )
- tool_state = step.state
-
- collections_to_match = self._find_collections_to_match( tool, step )
- # Have implicit collections...
- if collections_to_match.has_collections():
- collection_info = self.trans.app.dataset_collections_service.match_collections( collections_to_match )
- else:
- collection_info = None
-
- param_combinations = []
- if collection_info:
- iteration_elements_iter = collection_info.slice_collections()
- else:
- iteration_elements_iter = [ None ]
-
- for iteration_elements in iteration_elements_iter:
- execution_state = tool_state.copy()
- # TODO: Move next step into copy()
- execution_state.inputs = make_dict_copy( execution_state.inputs )
-
- # Connect up
- def callback( input, value, prefixed_name, prefixed_label ):
- replacement = None
- if isinstance( input, DataToolParameter ) or isinstance( input, DataCollectionToolParameter ):
- if iteration_elements and prefixed_name in iteration_elements:
- if isinstance( input, DataToolParameter ):
- # Pull out dataset instance from element.
- replacement = iteration_elements[ prefixed_name ].dataset_instance
- else:
- # If collection - just use element model object.
- replacement = iteration_elements[ prefixed_name ]
- else:
- replacement = self.progress.replacement_for_tool_input( step, input, prefixed_name )
- return replacement
- try:
- # Replace DummyDatasets with historydatasetassociations
- visit_input_values( tool.inputs, execution_state.inputs, callback )
- except KeyError, k:
- message_template = "Error due to input mapping of '%s' in '%s'. A common cause of this is conditional outputs that cannot be determined until runtime, please review your workflow."
- message = message_template % (tool.name, k.message)
- raise exceptions.MessageException( message )
- param_combinations.append( execution_state.inputs )
-
- execution_tracker = execute(
- trans=self.trans,
- tool=tool,
- param_combinations=param_combinations,
- history=self.target_history,
- collection_info=collection_info,
- workflow_invocation_uuid=self.invocation_uuid
- )
- if collection_info:
- step_outputs = dict( execution_tracker.created_collections )
-
- else:
- step_outputs = dict( execution_tracker.output_datasets )
- self.progress.set_step_outputs( step, step_outputs )
- jobs = execution_tracker.successful_jobs
- for job in jobs:
- self._handle_post_job_actions( step, job )
- return jobs
-
- def _find_collections_to_match( self, tool, step ):
- collections_to_match = matching.CollectionsToMatch()
-
- def callback( input, value, prefixed_name, prefixed_label ):
- is_data_param = isinstance( input, DataToolParameter )
- if is_data_param and not input.multiple:
- data = self.progress.replacement_for_tool_input( step, input, prefixed_name )
- if isinstance( data, model.HistoryDatasetCollectionAssociation ):
- collections_to_match.add( prefixed_name, data )
-
- is_data_collection_param = isinstance( input, DataCollectionToolParameter )
- if is_data_collection_param and not input.multiple:
- data = self.progress.replacement_for_tool_input( step, input, prefixed_name )
- history_query = input._history_query( self.trans )
- if history_query.can_map_over( data ):
- collections_to_match.add( prefixed_name, data, subcollection_type=input.collection_type )
-
- visit_input_values( tool.inputs, step.state.inputs, callback )
- return collections_to_match
-
- def _execute_input_step( self, step ):
- trans = self.trans
-
- job, step_outputs = step.module.execute( trans, step.state )
-
- # Web controller may set copy_inputs_to_history, API controller always sets
- # inputs.
- if self.copy_inputs_to_history:
- for input_dataset_hda in step_outputs.values():
- content_type = input_dataset_hda.history_content_type
- if content_type == "dataset":
- new_hda = input_dataset_hda.copy( copy_children=True )
- self.target_history.add_dataset( new_hda )
- step_outputs[ 'input_ds_copy' ] = new_hda
- elif content_type == "dataset_collection":
- new_hdca = input_dataset_hda.copy()
- self.target_history.add_dataset_collection( new_hdca )
- step_outputs[ 'input_ds_copy' ] = new_hdca
- else:
- raise Exception("Unknown history content encountered")
- self.progress.set_outputs_for_input( step, step_outputs )
- return job
-
- def _handle_post_job_actions( self, step, job ):
- # Create new PJA associations with the created job, to be run on completion.
- # PJA Parameter Replacement (only applies to immediate actions-- rename specifically, for now)
- # Pass along replacement dict with the execution of the PJA so we don't have to modify the object.
- for pja in step.post_job_actions:
- if pja.action_type in ActionBox.immediate_actions:
- ActionBox.execute( self.trans.app, self.trans.sa_session, pja, job, self.replacement_dict )
- else:
- job.add_post_job_action( pja )
-
class WorkflowProgress( object ):
https://bitbucket.org/galaxy/galaxy-central/commits/f68f8c8f147b/
Changeset: f68f8c8f147b
User: jmchilton
Date: 2014-09-10 17:51:10
Summary: API functional test for workflow 'replacement_params' and PJA renaming.
Affected #: 2 files
diff -r bbe9be042b34268c649b42c8c85d238149e910f0 -r f68f8c8f147b863ec968475216cc9a72044a722b test/api/helpers.py
--- a/test/api/helpers.py
+++ b/test/api/helpers.py
@@ -120,24 +120,38 @@
return tool_response.json()
def get_history_dataset_content( self, history_id, wait=True, **kwds ):
+ dataset_id = self.__history_dataset_id( history_id, wait=wait, **kwds )
+ display_response = self.__get_contents_request( history_id, "/%s/display" % dataset_id )
+ assert display_response.status_code == 200, display_response.content
+ return display_response.content
+
+ def get_history_dataset_details( self, history_id, **kwds ):
+ dataset_id = self.__history_dataset_id( history_id, **kwds )
+ details_response = self.__get_contents_request( history_id, "/%s" % dataset_id )
+ assert details_response.status_code == 200
+ return details_response.json()
+
+ def __history_dataset_id( self, history_id, wait=True, **kwds ):
if wait:
assert_ok = kwds.get( "assert_ok", True )
self.wait_for_history( history_id, assert_ok=assert_ok )
# kwds should contain a 'dataset' object response, a 'dataset_id' or
# the last dataset in the history will be fetched.
- contents_url = "histories/%s/contents" % history_id
if "dataset_id" in kwds:
dataset_id = kwds[ "dataset_id" ]
elif "dataset" in kwds:
dataset_id = kwds[ "dataset" ][ "id" ]
else:
hid = kwds.get( "hid", -1 ) # If not hid, just grab last dataset
- dataset_contents = self.galaxy_interactor.get( contents_url ).json()
- dataset_id = dataset_contents[ hid ][ "id" ]
+ dataset_contents = self.__get_contents_request( history_id ).json()
+ dataset_id = dataset_contents[ hid - 1 ][ "id" ]
+ return dataset_id
- display_response = self.galaxy_interactor.get( "%s/%s/display" % ( contents_url, dataset_id ) )
- assert display_response.status_code == 200
- return display_response.content
+ def __get_contents_request( self, history_id, suffix=""):
+ url = "histories/%s/contents" % history_id
+ if suffix:
+ url = "%s%s" % ( url, suffix )
+ return self.galaxy_interactor.get( url )
class WorkflowPopulator( object ):
@@ -154,7 +168,7 @@
tool_step[ "post_job_actions" ][ "RenameDatasetActionout_file1" ] = dict(
action_type="RenameDatasetAction",
output_name="out_file1",
- action_arguments=dict( newname="the_new_name" ),
+ action_arguments=dict( newname="foo ${replaceme}" ),
)
return workflow
diff -r bbe9be042b34268c649b42c8c85d238149e910f0 -r f68f8c8f147b863ec968475216cc9a72044a722b test/api/test_workflows.py
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -467,6 +467,16 @@
assert n == expected_len, "Expected %d steps of type %s, found %d" % ( expected_len, type, n )
return sorted( steps, key=operator.itemgetter("id") )
+ @skip_without_tool( "cat1" )
+ def test_run_with_pja( self ):
+ workflow = self.workflow_populator.load_workflow( name="test_for_pja_run", add_pja=True )
+ workflow_request, history_id = self._setup_workflow_run( workflow, inputs_by='step_index' )
+ workflow_request[ "replacement_params" ] = dumps( dict( replaceme="was replaced" ) )
+ run_workflow_response = self._post( "workflows", data=workflow_request )
+ self._assert_status_code_is( run_workflow_response, 200 )
+ content = self.dataset_populator.get_history_dataset_details( history_id, wait=True, assert_ok=True )
+ assert content[ "name" ] == "foo was replaced"
+
@skip_without_tool( "random_lines1" )
def test_run_replace_params_by_tool( self ):
workflow_request, history_id = self._setup_random_x2_workflow( "test_for_replace_tool_params" )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: davebgx: Fix data table xml definition for tool tarball download.
by commits-noreply@bitbucket.org 10 Sep '14
by commits-noreply@bitbucket.org 10 Sep '14
10 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d85d039b8eeb/
Changeset: d85d039b8eeb
User: davebgx
Date: 2014-09-10 15:44:32
Summary: Fix data table xml definition for tool tarball download.
Affected #: 1 file
diff -r 308f95b0f2b01930fab8f43598b48ba9d680e568 -r d85d039b8eeb2781619683f62dddf0f4c20af773 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -756,7 +756,6 @@
tarball_files.append( ( filesystem_path, tarball_path ) )
image_found = True
tool_xml = tool_xml.replace( '${static_path}/%s' % tarball_path, tarball_path )
- log.debug( tarball_files )
# If one or more tool help images were found, add the modified tool XML to the tarball instead of the original.
if image_found:
fd, new_tool_config = tempfile.mkstemp( suffix='.xml' )
@@ -817,8 +816,7 @@
if len( data_table_definitions ) > 0:
# Put the data table definition XML in a temporary file.
table_definition = '<?xml version="1.0" encoding="utf-8"?>\n<tables>\n %s</tables>'
- table_xml = [ data_table.xml_string for data_table in data_table_definitions ]
- table_definition = table_definition % '\n'.join( table_xml )
+ table_definition = table_definition % '\n'.join( data_table_definitions )
fd, table_conf = tempfile.mkstemp()
os.close( fd )
file( table_conf, 'w' ).write( table_definition )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dannon: Revert accidental replacement of symlinks w/ real file from ce088c6. Thanks @jmchilton.
by commits-noreply@bitbucket.org 09 Sep '14
by commits-noreply@bitbucket.org 09 Sep '14
09 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/308f95b0f2b0/
Changeset: 308f95b0f2b0
User: dannon
Date: 2014-09-10 02:47:48
Summary: Revert accidental replacement of symlinks w/ real file from ce088c6. Thanks @jmchilton.
Affected #: 17 files
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py
@@ -1,51 +1,1 @@
-"""
-Migration script to add the tool_shed_repository table.
-"""
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-import sys, logging
-from galaxy.model.custom_types import *
-from sqlalchemy.exc import *
-import datetime
-now = datetime.datetime.utcnow
-
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-# New table to store information about cloned tool shed repositories.
-ToolShedRepository_table = Table( "tool_shed_repository", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed", TrimmedString( 255 ), index=True ),
- Column( "name", TrimmedString( 255 ), index=True ),
- Column( "description" , TEXT ),
- Column( "owner", TrimmedString( 255 ), index=True ),
- Column( "changeset_revision", TrimmedString( 255 ), index=True ),
- Column( "deleted", Boolean, index=True, default=False ) )
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- try:
- ToolShedRepository_table.create()
- except Exception, e:
- log.debug( "Creating tool_shed_repository table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- ToolShedRepository_table.drop()
- except Exception, e:
- log.debug( "Dropping tool_shed_repository table failed: %s" % str( e ) )
+../../../migrate/versions/0082_add_tool_shed_repository_table.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py
@@ -1,79 +1,1 @@
-"""
-Migration script to add the metadata, update_available and includes_datatypes columns to the tool_shed_repository table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def get_default_false(migrate_engine):
- if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
- return "0"
- elif migrate_engine.name in ['postgresql', 'postgres']:
- return "false"
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- c = Column( "metadata", JSONType(), nullable=True )
- try:
- c.create( ToolShedRepository_table )
- assert c is ToolShedRepository_table.c.metadata
- except Exception, e:
- print "Adding metadata column to the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Adding metadata column to the tool_shed_repository table failed: %s" % str( e ) )
- c = Column( "includes_datatypes", Boolean, index=True, default=False )
- try:
- c.create( ToolShedRepository_table, index_name="ix_tool_shed_repository_includes_datatypes")
- assert c is ToolShedRepository_table.c.includes_datatypes
- migrate_engine.execute( "UPDATE tool_shed_repository SET includes_datatypes=%s" % get_default_false(migrate_engine))
- except Exception, e:
- print "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ) )
- c = Column( "update_available", Boolean, default=False )
- try:
- c.create( ToolShedRepository_table )
- assert c is ToolShedRepository_table.c.update_available
- migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % get_default_false(migrate_engine))
- except Exception, e:
- print "Adding update_available column to the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Adding update_available column to the tool_shed_repository table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- try:
- ToolShedRepository_table.c.metadata.drop()
- except Exception, e:
- print "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ) )
- try:
- ToolShedRepository_table.c.includes_datatypes.drop()
- except Exception, e:
- print "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ) )
- try:
- ToolShedRepository_table.c.update_available.drop()
- except Exception, e:
- print "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) )
+../../../migrate/versions/0086_add_tool_shed_repository_table_columns.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py
@@ -1,52 +1,1 @@
-"""
-Migration script to create the tool_id_guid_map table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_id", String( 255 ) ),
- Column( "tool_version", TEXT ),
- Column( "tool_shed", TrimmedString( 255 ) ),
- Column( "repository_owner", TrimmedString( 255 ) ),
- Column( "repository_name", TrimmedString( 255 ) ),
- Column( "guid", TEXT, index=True, unique=True ) )
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- try:
- ToolIdGuidMap_table.create()
- except Exception, e:
- log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- ToolIdGuidMap_table.drop()
- except Exception, e:
- log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
+../../../migrate/versions/0087_tool_id_guid_map_table.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py
@@ -1,64 +1,1 @@
-"""
-Migration script to add the installed_changeset_revision column to the tool_shed_repository table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- col = Column( "installed_changeset_revision", TrimmedString( 255 ) )
- try:
- col.create( ToolShedRepository_table )
- assert col is ToolShedRepository_table.c.installed_changeset_revision
- except Exception, e:
- print "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) )
- # Update each row by setting the value of installed_changeset_revison to be the value of changeset_revision.
- # This will be problematic if the value of changeset_revision was updated to something other than the value
- # that it was when the repository was installed (because the install path determined in real time will attempt to
- # find the repository using the updated changeset_revison instead of the required installed_changeset_revision),
- # but at the time this script was written, this scenario is extremely unlikely.
- cmd = "SELECT id AS id, " \
- + "installed_changeset_revision AS installed_changeset_revision, " \
- + "changeset_revision AS changeset_revision " \
- + "FROM tool_shed_repository;"
- tool_shed_repositories = migrate_engine.execute( cmd ).fetchall()
- update_count = 0
- for row in tool_shed_repositories:
- cmd = "UPDATE tool_shed_repository " \
- + "SET installed_changeset_revision = '%s' " % row.changeset_revision \
- + "WHERE changeset_revision = '%s';" % row.changeset_revision
- migrate_engine.execute( cmd )
- update_count += 1
- print "Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table. "
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- try:
- ToolShedRepository_table.c.installed_changeset_revision.drop()
- except Exception, e:
- print "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e )
- log.debug( "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) )
+../../../migrate/versions/0088_add_installed_changeset_revison_column.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py
@@ -1,63 +1,1 @@
-"""
-Migration script to add the uninstalled and dist_to_shed columns to the tool_shed_repository table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def default_false(migrate_engine):
- if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
- return "0"
- elif migrate_engine.name in ['postgresql', 'postgres']:
- return "false"
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- c = Column( "uninstalled", Boolean, default=False )
- try:
- c.create( ToolShedRepository_table )
- assert c is ToolShedRepository_table.c.uninstalled
- migrate_engine.execute( "UPDATE tool_shed_repository SET uninstalled=%s" % default_false(migrate_engine) )
- except Exception, e:
- print "Adding uninstalled column to the tool_shed_repository table failed: %s" % str( e )
- c = Column( "dist_to_shed", Boolean, default=False )
- try:
- c.create( ToolShedRepository_table )
- assert c is ToolShedRepository_table.c.dist_to_shed
- migrate_engine.execute( "UPDATE tool_shed_repository SET dist_to_shed=%s" % default_false(migrate_engine) )
- except Exception, e:
- print "Adding dist_to_shed column to the tool_shed_repository table failed: %s" % str( e )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- try:
- ToolShedRepository_table.c.uninstalled.drop()
- except Exception, e:
- print "Dropping column uninstalled from the tool_shed_repository table failed: %s" % str( e )
- try:
- ToolShedRepository_table.c.dist_to_shed.drop()
- except Exception, e:
- print "Dropping column dist_to_shed from the tool_shed_repository table failed: %s" % str( e )
+../../../migrate/versions/0090_add_tool_shed_repository_table_columns.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py
@@ -1,122 +1,1 @@
-"""
-Migration script to create the tool_version and tool_version_association tables and drop the tool_id_guid_map table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-from galaxy.model.custom_types import _sniffnfix_pg9_hex
-from galaxy.util.json import loads, dumps
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-#migrate_engine = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
-
-def nextval( table, col='id' ):
- if migrate_engine.name == 'postgres':
- return "nextval('%s_%s_seq')" % ( table, col )
- elif migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
- return "null"
- else:
- raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
-
-def localtimestamp():
- if migrate_engine.name == 'postgres' or migrate_engine.name == 'mysql':
- return "LOCALTIMESTAMP"
- elif migrate_engine.name == 'sqlite':
- return "current_date || ' ' || current_time"
- else:
- raise Exception( 'Unable to convert data for unknown database type: %s' % db )
-
-ToolVersion_table = Table( "tool_version", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_id", String( 255 ) ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
-
-ToolVersionAssociation_table = Table( "tool_version_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
- Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
-
- ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True )
-
- metadata.reflect()
- # Create the tables.
- try:
- ToolVersion_table.create()
- except Exception, e:
- log.debug( "Creating tool_version table failed: %s" % str( e ) )
- try:
- ToolVersionAssociation_table.create()
- except Exception, e:
- log.debug( "Creating tool_version_association table failed: %s" % str( e ) )
- # Populate the tool table with tools included in installed tool shed repositories.
- cmd = "SELECT id, metadata FROM tool_shed_repository"
- result = migrate_engine.execute( cmd )
- count = 0
- for row in result:
- if row[1]:
- tool_shed_repository_id = row[0]
- repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) )
- # Create a new row in the tool table for each tool included in repository. We will NOT
- # handle tool_version_associaions because we do not have the information we need to do so.
- tools = repository_metadata.get( 'tools', [] )
- for tool_dict in tools:
- cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
- ( nextval( 'tool_version' ), localtimestamp(), localtimestamp(), tool_dict[ 'guid' ], tool_shed_repository_id )
- migrate_engine.execute( cmd )
- count += 1
- print "Added %d rows to the new tool_version table." % count
- # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
- try:
- ToolIdGuidMap_table.drop()
- except Exception, e:
- log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
-
- ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_id", String( 255 ) ),
- Column( "tool_version", TEXT ),
- Column( "tool_shed", TrimmedString( 255 ) ),
- Column( "repository_owner", TrimmedString( 255 ) ),
- Column( "repository_name", TrimmedString( 255 ) ),
- Column( "guid", TEXT, index=True, unique=True ) )
-
- metadata.reflect()
- try:
- ToolVersionAssociation_table.drop()
- except Exception, e:
- log.debug( "Dropping tool_version_association table failed: %s" % str( e ) )
- try:
- ToolVersion_table.drop()
- except Exception, e:
- log.debug( "Dropping tool_version table failed: %s" % str( e ) )
- try:
- ToolIdGuidMap_table.create()
- except Exception, e:
- log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) )
+../../../migrate/versions/0091_add_tool_version_tables.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py
@@ -1,50 +1,1 @@
-"""
-Migration script to create the migrate_tools table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-MigrateTools_table = Table( "migrate_tools", metadata,
- Column( "repository_id", TrimmedString( 255 ) ),
- Column( "repository_path", TEXT ),
- Column( "version", Integer ) )
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
-
- metadata.reflect()
- # Create the table.
- try:
- MigrateTools_table.create()
- cmd = "INSERT INTO migrate_tools VALUES ('GalaxyTools', 'lib/galaxy/tool_shed/migrate', %d)" % 1
- migrate_engine.execute( cmd )
- except Exception, e:
- log.debug( "Creating migrate_tools table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- MigrateTools_table.drop()
- except Exception, e:
- log.debug( "Dropping migrate_tools table failed: %s" % str( e ) )
+../../../migrate/versions/0092_add_migrate_tools_table.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py
@@ -1,44 +1,1 @@
-"""
-Migration script to add the ctx_rev column to the tool_shed_repository table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- col = Column( "ctx_rev", TrimmedString( 10 ) )
- try:
- col.create( ToolShedRepository_table )
- assert col is ToolShedRepository_table.c.ctx_rev
- except Exception, e:
- print "Adding ctx_rev column to the tool_shed_repository table failed: %s" % str( e )
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- try:
- ToolShedRepository_table.c.ctx_rev.drop()
- except Exception, e:
- print "Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e )
+../../../migrate/versions/0097_add_ctx_rev_column.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py
@@ -1,51 +1,1 @@
-"""
-Migration script to add the tool_dependency table.
-"""
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-import sys, logging
-from galaxy.model.custom_types import *
-from sqlalchemy.exc import *
-import datetime
-now = datetime.datetime.utcnow
-
-log = logging.getLogger( __name__ )
-log.setLevel( logging.DEBUG )
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-# New table to store information about cloned tool shed repositories.
-ToolDependency_table = Table( "tool_dependency", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ),
- Column( "installed_changeset_revision", TrimmedString( 255 ) ),
- Column( "name", TrimmedString( 255 ) ),
- Column( "version", TrimmedString( 40 ) ),
- Column( "type", TrimmedString( 40 ) ),
- Column( "uninstalled", Boolean, default=False ) )
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- try:
- ToolDependency_table.create()
- except Exception, e:
- log.debug( "Creating tool_dependency table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- ToolDependency_table.drop()
- except Exception, e:
- log.debug( "Dropping tool_dependency table failed: %s" % str( e ) )
+../../../migrate/versions/0099_add_tool_dependency_table.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py
@@ -1,54 +1,1 @@
-"""
-Migration script to alter the type of the tool_dependency.version column from TrimmedString(40) to Text.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
- # Change the tool_dependency table's version column from TrimmedString to Text.
- if migrate_engine.name in ['postgresql', 'postgres']:
- cmd = "ALTER TABLE tool_dependency ALTER COLUMN version TYPE Text;"
- elif migrate_engine.name == 'mysql':
- cmd = "ALTER TABLE tool_dependency MODIFY COLUMN version Text;"
- else:
- # We don't have to do anything for sqlite tables. From the sqlite documentation at http://sqlite.org/datatype3.html:
- # 1.0 Storage Classes and Datatypes
- # Each value stored in an SQLite database (or manipulated by the database engine) has one of the following storage classes:
- # NULL. The value is a NULL value.
- # INTEGER. The value is a signed integer, stored in 1, 2, 3, 4, 6, or 8 bytes depending on the magnitude of the value.
- # REAL. The value is a floating point value, stored as an 8-byte IEEE floating point number.
- # TEXT. The value is a text string, stored using the database encoding (UTF-8, UTF-16BE or UTF-16LE).
- # BLOB. The value is a blob of data, stored exactly as it was input.
- cmd = None
- if cmd:
- try:
- migrate_engine.execute( cmd )
- except Exception, e:
- log.debug( "Altering tool_dependency.version column from TrimmedString(40) to Text failed: %s" % str( e ) )
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- # Not necessary to change column type Text to TrimmedString(40).
- pass
+../../../migrate/versions/0100_alter_tool_dependency_table_version_column.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py
@@ -1,41 +1,1 @@
-"""
-Migration script to drop the installed_changeset_revision column from the tool_dependency table.
-"""
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-import sys, logging
-from galaxy.model.custom_types import *
-from sqlalchemy.exc import *
-import datetime
-now = datetime.datetime.utcnow
-
-log = logging.getLogger( __name__ )
-log.setLevel( logging.DEBUG )
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- try:
- ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
- except NoSuchTableError:
- ToolDependency_table = None
- log.debug( "Failed loading table tool_dependency" )
- if ToolDependency_table is not None:
- try:
- col = ToolDependency_table.c.installed_changeset_revision
- col.drop()
- except Exception, e:
- log.debug( "Dropping column 'installed_changeset_revision' from tool_dependency table failed: %s" % ( str( e ) ) )
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- pass
+../../../migrate/versions/0101_drop_installed_changeset_revision_column.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py
@@ -1,71 +1,1 @@
-"""
-Migration script to add status and error_message columns to the tool_dependency table and drop the uninstalled column from the tool_dependency table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
- if migrate_engine.name == 'sqlite':
- col = Column( "status", TrimmedString( 255 ))
- else:
- col = Column( "status", TrimmedString( 255 ), nullable=False)
- try:
- col.create( ToolDependency_table )
- assert col is ToolDependency_table.c.status
- except Exception, e:
- print "Adding status column to the tool_dependency table failed: %s" % str( e )
- col = Column( "error_message", TEXT )
- try:
- col.create( ToolDependency_table )
- assert col is ToolDependency_table.c.error_message
- except Exception, e:
- print "Adding error_message column to the tool_dependency table failed: %s" % str( e )
-
- if migrate_engine.name != 'sqlite':
- #This breaks in sqlite due to failure to drop check constraint.
- # TODO move to alembic.
- try:
- ToolDependency_table.c.uninstalled.drop()
- except Exception, e:
- print "Dropping uninstalled column from the tool_dependency table failed: %s" % str( e )
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- ToolDependency_table = Table( "tool_dependency", metadata, autoload=True )
- try:
- ToolDependency_table.c.status.drop()
- except Exception, e:
- print "Dropping column status from the tool_dependency table failed: %s" % str( e )
- try:
- ToolDependency_table.c.error_message.drop()
- except Exception, e:
- print "Dropping column error_message from the tool_dependency table failed: %s" % str( e )
- col = Column( "uninstalled", Boolean, default=False )
- try:
- col.create( ToolDependency_table )
- assert col is ToolDependency_table.c.uninstalled
- except Exception, e:
- print "Adding uninstalled column to the tool_dependency table failed: %s" % str( e )
+../../../migrate/versions/0102_add_tool_dependency_status_columns.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py
@@ -1,69 +1,1 @@
-"""Migration script to add status and error_message columns to the tool_shed_repository table."""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-metadata = MetaData()
-
-def upgrade(migrate_engine):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- # Add the status column to the tool_shed_repository table.
- col = Column( "status", TrimmedString( 255 ) )
- try:
- col.create( ToolShedRepository_table )
- assert col is ToolShedRepository_table.c.status
- except Exception, e:
- print "Adding status column to the tool_shed_repository table failed: %s" % str( e )
- # Add the error_message column to the tool_shed_repository table.
- col = Column( "error_message", TEXT )
- try:
- col.create( ToolShedRepository_table )
- assert col is ToolShedRepository_table.c.error_message
- except Exception, e:
- print "Adding error_message column to the tool_shed_repository table failed: %s" % str( e )
- # Update the status column value for tool_shed_repositories to the default value 'Installed'.
- cmd = "UPDATE tool_shed_repository SET status = 'Installed';"
- try:
- migrate_engine.execute( cmd )
- except Exception, e:
- print "Exception executing sql command: "
- print cmd
- print str( e )
- # Update the status column for tool_shed_repositories that have been uninstalled.
- cmd = "UPDATE tool_shed_repository SET status = 'Uninstalled' WHERE uninstalled;"
- try:
- migrate_engine.execute( cmd )
- except Exception, e:
- print "Exception executing sql command: "
- print cmd
- print str( e )
- # Update the status column for tool_shed_repositories that have been deactivated.
- cmd = "UPDATE tool_shed_repository SET status = 'Deactivated' where deleted and not uninstalled;"
- try:
- migrate_engine.execute( cmd )
- except Exception, e:
- print "Exception executing sql command: "
- print cmd
- print str( e )
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- try:
- ToolShedRepository_table.c.status.drop()
- except Exception, e:
- print "Dropping column status from the tool_shed_repository table failed: %s" % str( e )
- try:
- ToolShedRepository_table.c.error_message.drop()
- except Exception, e:
- print "Dropping column error_message from the tool_shed_repository table failed: %s" % str( e )
+../../../migrate/versions/0103_add_tool_shed_repository_status_columns.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py
@@ -1,60 +1,1 @@
-"""
-Migration script to add the repository_dependency and repository_repository_dependency_association tables.
-"""
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-import sys, logging
-from galaxy.model.custom_types import *
-from sqlalchemy.exc import *
-import datetime
-now = datetime.datetime.utcnow
-
-log = logging.getLogger( __name__ )
-log.setLevel( logging.DEBUG )
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-RepositoryDependency_table = Table( "repository_dependency", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
-
-RepositoryRepositoryDependencyAssociation_table = Table( "repository_repository_dependency_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
- Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
-
-def upgrade(migrate_engine):
- print __doc__
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- RepositoryDependency_table.create()
- except Exception, e:
- log.debug( "Creating repository_dependency table failed: %s" % str( e ) )
- try:
- RepositoryRepositoryDependencyAssociation_table.create()
- except Exception, e:
- log.debug( "Creating repository_repository_dependency_association table failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- RepositoryRepositoryDependencyAssociation_table.drop()
- except Exception, e:
- log.debug( "Dropping repository_repository_dependency_association table failed: %s" % str( e ) )
- try:
- RepositoryDependency_table.drop()
- except Exception, e:
- log.debug( "Dropping repository_dependency table failed: %s" % str( e ) )
+../../../migrate/versions/0109_add_repository_dependency_tables.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py
@@ -1,38 +1,1 @@
-"""
-Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-def upgrade(migrate_engine):
- print __doc__
- # Create the table.
- try:
- cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
- migrate_engine.execute( cmd )
- except Exception, e:
- log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- try:
- cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
- migrate_engine.execute( cmd )
- except Exception, e:
- log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) )
+../../../migrate/versions/0113_update_migrate_tools_table.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py
@@ -1,40 +1,1 @@
-"""
-Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-
-import datetime
-now = datetime.datetime.utcnow
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
-import sys, logging
-log = logging.getLogger( __name__ )
-log.setLevel(logging.DEBUG)
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-
-def upgrade(migrate_engine):
- print __doc__
- # Create the table.
- try:
- cmd = "UPDATE migrate_tools set repository_path='lib/tool_shed/galaxy_install/migrate';"
- migrate_engine.execute( cmd )
- except Exception, e:
- log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) )
-
-def downgrade(migrate_engine):
- try:
- cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';"
- migrate_engine.execute( cmd )
- except Exception, e:
- log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) )
-
+../../../migrate/versions/0114_update_migrate_tools_table_again.py
\ No newline at end of file
diff -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 -r 308f95b0f2b01930fab8f43598b48ba9d680e568 lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
--- a/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
+++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py
@@ -1,77 +1,1 @@
-"""
-Migration script to drop the update_available Boolean column and replace it with the tool_shed_status JSONType column in the tool_shed_repository table.
-"""
-
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from migrate import *
-from migrate.changeset import *
-import sys, logging
-from galaxy.model.custom_types import *
-from sqlalchemy.exc import *
-import datetime
-now = datetime.datetime.utcnow
-
-log = logging.getLogger( __name__ )
-log.setLevel( logging.DEBUG )
-handler = logging.StreamHandler( sys.stdout )
-format = "%(name)s %(levelname)s %(asctime)s %(message)s"
-formatter = logging.Formatter( format )
-handler.setFormatter( formatter )
-log.addHandler( handler )
-
-metadata = MetaData()
-
-def default_false( migrate_engine ):
- if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
- return "0"
- elif migrate_engine.name in [ 'postgresql', 'postgres' ]:
- return "false"
-
-def upgrade( migrate_engine ):
- metadata.bind = migrate_engine
- print __doc__
- metadata.reflect()
- try:
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- except NoSuchTableError:
- ToolShedRepository_table = None
- log.debug( "Failed loading table tool_shed_repository" )
- if ToolShedRepository_table is not None:
- # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database.
- if migrate_engine.name != 'sqlite':
- try:
- col = ToolShedRepository_table.c.update_available
- col.drop()
- except Exception, e:
- print "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e )
- c = Column( "tool_shed_status", JSONType, nullable=True )
- try:
- c.create( ToolShedRepository_table )
- assert c is ToolShedRepository_table.c.tool_shed_status
- except Exception, e:
- print "Adding tool_shed_status column to the tool_shed_repository table failed: %s" % str( e )
-
-def downgrade( migrate_engine ):
- metadata.bind = migrate_engine
- metadata.reflect()
- try:
- ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True )
- except NoSuchTableError:
- ToolShedRepository_table = None
- log.debug( "Failed loading table tool_shed_repository" )
- if ToolShedRepository_table is not None:
- # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database.
- if migrate_engine.name != 'sqlite':
- try:
- col = ToolShedRepository_table.c.tool_shed_status
- col.drop()
- except Exception, e:
- print "Dropping column tool_shed_status from the tool_shed_repository table failed: %s" % str( e )
- c = Column( "update_available", Boolean, default=False )
- try:
- c.create( ToolShedRepository_table )
- assert c is ToolShedRepository_table.c.update_available
- migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % default_false( migrate_engine ) )
- except Exception, e:
- print "Adding column update_available to the tool_shed_repository table failed: %s" % str( e )
+../../../migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bd450366fc9d/
Changeset: bd450366fc9d
User: jmchilton
Date: 2014-09-09 22:41:34
Summary: Pull separate missing tools template out of run.mako.
If you look at the conflicting sets of parameters these templates require, I think it makes more sense to separate them this way - I think the result is simpler. Likewise, there is no need to, for instance, show collapse/expand-all buttons on a page with no actual rows for steps. The most important thing here, though, is that now one can assume that step.module and step.state are available for all steps throughout run.mako (this will lead to further simplifications).
Affected #: 3 files
diff -r 74b2495de918c95730f1f2fd2ac1bf502018a99f -r bd450366fc9da736c46358e6a827496c5c58d174 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1351,10 +1351,8 @@
if missing_tools:
stored.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, stored )
return trans.fill_template(
- "workflow/run.mako",
- steps=[],
+ "workflow/missing_tools.mako",
workflow=stored,
- hide_fixed_params=hide_fixed_params,
missing_tools=missing_tools
)
# Render the form
diff -r 74b2495de918c95730f1f2fd2ac1bf502018a99f -r bd450366fc9da736c46358e6a827496c5c58d174 templates/webapps/galaxy/workflow/missing_tools.mako
--- /dev/null
+++ b/templates/webapps/galaxy/workflow/missing_tools.mako
@@ -0,0 +1,17 @@
+<%inherit file="/base.mako"/>
+
+<h2>Cannot run workflow "${h.to_unicode( workflow.name )}"</h2>
+
+%if workflow.annotation:
+ <div class="workflow-annotation">${workflow.annotation}</div>
+ <hr/>
+%endif
+
+<div class='errormessage'>
+ <strong>This workflow utilizes tools which are unavailable, and cannot be run. Enable the tools listed below, or <a href="${h.url_for(controller='workflow', action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">edit the workflow</a> to correct these errors.</strong><br/>
+ <ul>
+ %for i, tool in enumerate( missing_tools ):
+ <li>${tool}</li>
+ %endfor
+ </ul>
+</div>
\ No newline at end of file
diff -r 74b2495de918c95730f1f2fd2ac1bf502018a99f -r bd450366fc9da736c46358e6a827496c5c58d174 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -605,21 +605,11 @@
</div>
%endif
%endfor
-%if missing_tools:
- <div class='errormessage'>
- <strong>This workflow utilizes tools which are unavailable, and cannot be run. Enable the tools listed below, or <a href="${h.url_for(controller='workflow', action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">edit the workflow</a> to correct these errors.</strong><br/>
- <ul>
- %for i, tool in enumerate( missing_tools ):
- <li>${tool}</li>
- %endfor
- </ul>
-%else:
- %if history_id is None:
-<p id='new_history_p'>
- <input type="checkbox" name='new_history' value="true" id='new_history_cbx'/><label for='new_history_cbx'>Send results to a new history </label>
- <span id="new_history_input">named: <input type='text' name='new_history_name' value='${ h.to_unicode( workflow.name ) | h }'/></span>
-</p>
- %endif
+%if history_id is None:
+ <p id='new_history_p'>
+ <input type="checkbox" name='new_history' value="true" id='new_history_cbx'/><label for='new_history_cbx'>Send results to a new history </label>
+ <span id="new_history_input">named: <input type='text' name='new_history_name' value='${ h.to_unicode( workflow.name ) | h }'/></span>
+ </p>
+%endif
<input type="submit" class="btn btn-primary" name="run_workflow" value="Run workflow" /></form>
-%endif
https://bitbucket.org/galaxy/galaxy-central/commits/89ab3ca6171d/
Changeset: 89ab3ca6171d
User: jmchilton
Date: 2014-09-09 22:41:34
Summary: Unify API for encoding runtime state across tool and input modules.
Add some documentation.
Affected #: 2 files
diff -r bd450366fc9da736c46358e6a827496c5c58d174 -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -114,6 +114,14 @@
raise TypeError( "Abstract method" )
def encode_runtime_state( self, trans, state ):
+ """ Encode the runtime state (loaded from the stored step and
+ populated via the WorkflowModuleInjector below) for use in a hidden
+ parameter on the webpage.
+
+ This will combined with runtime parameters supplied by user running
+ the workflow to create the final state to pass along to execute during
+ workflow invocation.
+ """
raise TypeError( "Abstract method" )
def decode_runtime_state( self, trans, string ):
@@ -460,6 +468,9 @@
return self.trans.fill_template( "workflow/editor_tool_form.mako",
tool=self.tool, values=self.state.inputs, errors=( self.errors or {} ) )
+ def encode_runtime_state( self, trans, state ):
+ return state.encode( self.tool, self.trans.app )
+
def update_state( self, incoming ):
# Build a callback that handles setting an input to be required at
# runtime. We still process all other parameters the user might have
diff -r bd450366fc9da736c46358e6a827496c5c58d174 -r 89ab3ca6171d10d37d26fb7c50242bd6dcde45d9 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -542,11 +542,15 @@
</script>
%endif
%for i, step in enumerate( steps ):
+ <!-- Only way module would be missing is if tool is missing, but
+ that would cause missing_tools.mako to render instead of this
+ template. -->
+ <% module = step.module %>
+ <input type="hidden" name="${step.id}|tool_state" value="${module.encode_runtime_state( t, step.state )}">
%if step.type == 'tool' or step.type is None:
<%
tool = trans.app.toolbox.get_tool( step.tool_id )
%>
- <input type="hidden" name="${step.id}|tool_state" value="${step.state.encode( tool, app )}"><div class="toolForm"><div class="toolFormTitle"><span class='title_ul_text'>Step ${int(step.order_index)+1}: ${tool.name}</span>
@@ -580,8 +584,6 @@
</div></div>
%else:
- <% module = step.module %>
- <input type="hidden" name="${step.id}|tool_state" value="${module.encode_runtime_state( t, step.state )}"><div class="toolForm"><div class="toolFormTitle"><span class='title_ul_text'>Step ${int(step.order_index)+1}: ${module.name}</span>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0