galaxy-commits
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ac3195ac1025/
changeset: ac3195ac1025
branch: next-stable
user: inithello
date: 2013-02-04 20:00:31
summary: Updated BWA's tool_dependencies.xml to no longer rely on a network connection to correctly complete functional tests.
affected #: 4 files
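The tests below implement this by copying a template tool_dependencies.xml into a per-test temp directory and substituting its __PATH__ placeholder, so the download_by_url action resolves to a local file:// URL instead of the network. A minimal standalone sketch of that pattern, assuming a plain function rather than the ShedTwillTestCase helpers (the name localize_tool_dependencies and its arguments are illustrative, not part of this commit):

import os

def localize_tool_dependencies(template_path, output_dir, local_data_dir):
    # Create the output directory if missing, mirroring the
    # generate_temp_path() fix in this changeset.
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    # Replace the __PATH__ placeholder so e.g.
    # <action type="download_by_url">file://__PATH__/bwa_base.tar</action>
    # points at local test data instead of sourceforge.net.
    with open(template_path) as handle:
        xml = handle.read().replace('__PATH__', local_data_dir)
    output_path = os.path.join(output_dir, 'tool_dependencies.xml')
    with open(output_path, 'w') as handle:
        handle.write(xml)
    return output_path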
diff -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac -r ac3195ac1025f73bcabeeafa66e93b964fbac7b2 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -483,7 +483,10 @@
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
def generate_temp_path( self, test_script_path, additional_paths=[] ):
- return os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ temp_path = os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ if not os.path.exists( temp_path ):
+ os.makedirs( temp_path )
+ return temp_path
def get_datatypes_count( self ):
url = '/admin/view_datatypes_registry'
self.visit_galaxy_url( url )
diff -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac -r ac3195ac1025f73bcabeeafa66e93b964fbac7b2 test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
--- a/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
+++ b/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
@@ -1,6 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
-
+import logging
+log = logging.getLogger(__name__)
bwa_base_repository_name = 'bwa_base_repository_0100'
bwa_base_repository_description = "BWA Base"
bwa_base_repository_long_description = "BWA tool that depends on bwa 0.5.9, with a complex repository dependency pointing at bwa_tool_repository_0100"
@@ -41,8 +42,15 @@
strings_displayed=[] )
if self.repository_is_new( repository ):
running_standalone = True
+ old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'tool_dependencies.xml' ) )
+ new_tool_dependency_path = self.generate_temp_path( 'test_1100', additional_paths=[ 'tool_dependency' ] )
+ xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+ log.debug( xml_filename )
+ file( xml_filename, 'w' ).write( file( old_tool_dependency, 'r' )
+ .read().replace( '__PATH__', self.get_filename( 'bwa/complex' ) ) )
self.upload_file( repository,
- 'bwa/complex/tool_dependencies.xml',
+ xml_filename,
+ filepath=new_tool_dependency_path,
strings_displayed=[],
commit_message='Uploaded tool_dependencies.xml.' )
self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
@@ -171,8 +179,15 @@
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
previous_changeset = self.get_repository_tip( tool_repository )
+ old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'readme', 'tool_dependencies.xml' ) )
+ new_tool_dependency_path = self.generate_temp_path( 'test_1100', additional_paths=[ 'tool_dependency' ] )
+ xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+ log.debug( xml_filename )
+ file( xml_filename, 'w' ).write( file( old_tool_dependency, 'r' )
+ .read().replace( '__PATH__', self.get_filename( 'bwa/complex' ) ) )
self.upload_file( tool_repository,
- 'bwa/complex/readme/tool_dependencies.xml',
+ xml_filename,
+ filepath=new_tool_dependency_path,
strings_displayed=[],
commit_message='Uploaded new tool_dependencies.xml.' )
# Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file.
diff -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac -r ac3195ac1025f73bcabeeafa66e93b964fbac7b2 test/tool_shed/test_data/bwa/complex/readme/tool_dependencies.xml
--- a/test/tool_shed/test_data/bwa/complex/readme/tool_dependencies.xml
+++ b/test/tool_shed/test_data/bwa/complex/readme/tool_dependencies.xml
@@ -3,12 +3,7 @@
<package name="bwa" version="0.5.9"><install version="1.0"><actions>
- <action type="download_by_url">http://downloads.sourceforge.net/project/bio-bwa/bwa-0.5.9.tar.bz2</action>
- <action type="shell_command">make</action>
- <action type="move_file">
- <source>bwa</source>
- <destination>$INSTALL_DIR/bin</destination>
- </action>
+ <action type="download_by_url">file://__PATH__/bwa_base.tar</action><action type="set_environment"><environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable></action>
diff -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac -r ac3195ac1025f73bcabeeafa66e93b964fbac7b2 test/tool_shed/test_data/bwa/complex/tool_dependencies.xml
--- a/test/tool_shed/test_data/bwa/complex/tool_dependencies.xml
+++ b/test/tool_shed/test_data/bwa/complex/tool_dependencies.xml
@@ -3,12 +3,7 @@
<package name="bwa" version="0.5.9"><install version="1.0"><actions>
- <action type="download_by_url">http://downloads.sourceforge.net/project/bio-bwa/bwa-0.5.9.tar.bz2</action>
- <action type="shell_command">make</action>
- <action type="move_file">
- <source>bwa</source>
- <destination>$INSTALL_DIR/bin</destination>
- </action>
+ <action type="download_by_url">file://__PATH__/bwa_base.tar</action><action type="set_environment"><environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable></action>
https://bitbucket.org/galaxy/galaxy-central/commits/b6be9c3a2d2d/
changeset: b6be9c3a2d2d
user: inithello
date: 2013-02-04 20:01:50
summary: Merged in changes from next-stable.
affected #: 4 files
diff -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 -r b6be9c3a2d2df94f4d5236184a4392ec00961743 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -483,7 +483,10 @@
# Save the generated xml to the specified location.
file( xml_filename, 'w' ).write( repository_dependency_xml )
def generate_temp_path( self, test_script_path, additional_paths=[] ):
- return os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ temp_path = os.path.join( self.tool_shed_test_tmp_dir, test_script_path, os.sep.join( additional_paths ) )
+ if not os.path.exists( temp_path ):
+ os.makedirs( temp_path )
+ return temp_path
def get_datatypes_count( self ):
url = '/admin/view_datatypes_registry'
self.visit_galaxy_url( url )
diff -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 -r b6be9c3a2d2df94f4d5236184a4392ec00961743 test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
--- a/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
+++ b/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
@@ -1,6 +1,7 @@
from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
-
+import logging
+log = logging.getLogger(__name__)
bwa_base_repository_name = 'bwa_base_repository_0100'
bwa_base_repository_description = "BWA Base"
bwa_base_repository_long_description = "BWA tool that depends on bwa 0.5.9, with a complex repository dependency pointing at bwa_tool_repository_0100"
@@ -41,8 +42,15 @@
strings_displayed=[] )
if self.repository_is_new( repository ):
running_standalone = True
+ old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'tool_dependencies.xml' ) )
+ new_tool_dependency_path = self.generate_temp_path( 'test_1100', additional_paths=[ 'tool_dependency' ] )
+ xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+ log.debug( xml_filename )
+ file( xml_filename, 'w' ).write( file( old_tool_dependency, 'r' )
+ .read().replace( '__PATH__', self.get_filename( 'bwa/complex' ) ) )
self.upload_file( repository,
- 'bwa/complex/tool_dependencies.xml',
+ xml_filename,
+ filepath=new_tool_dependency_path,
strings_displayed=[],
commit_message='Uploaded tool_dependencies.xml.' )
self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
@@ -171,8 +179,15 @@
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
previous_changeset = self.get_repository_tip( tool_repository )
+ old_tool_dependency = self.get_filename( os.path.join( 'bwa', 'complex', 'readme', 'tool_dependencies.xml' ) )
+ new_tool_dependency_path = self.generate_temp_path( 'test_1100', additional_paths=[ 'tool_dependency' ] )
+ xml_filename = os.path.abspath( os.path.join( new_tool_dependency_path, 'tool_dependencies.xml' ) )
+ log.debug( xml_filename )
+ file( xml_filename, 'w' ).write( file( old_tool_dependency, 'r' )
+ .read().replace( '__PATH__', self.get_filename( 'bwa/complex' ) ) )
self.upload_file( tool_repository,
- 'bwa/complex/readme/tool_dependencies.xml',
+ xml_filename,
+ filepath=new_tool_dependency_path,
strings_displayed=[],
commit_message='Uploaded new tool_dependencies.xml.' )
# Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file.
diff -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 -r b6be9c3a2d2df94f4d5236184a4392ec00961743 test/tool_shed/test_data/bwa/complex/readme/tool_dependencies.xml
--- a/test/tool_shed/test_data/bwa/complex/readme/tool_dependencies.xml
+++ b/test/tool_shed/test_data/bwa/complex/readme/tool_dependencies.xml
@@ -3,12 +3,7 @@
<package name="bwa" version="0.5.9"><install version="1.0"><actions>
- <action type="download_by_url">http://downloads.sourceforge.net/project/bio-bwa/bwa-0.5.9.tar.bz2</action>
- <action type="shell_command">make</action>
- <action type="move_file">
- <source>bwa</source>
- <destination>$INSTALL_DIR/bin</destination>
- </action>
+ <action type="download_by_url">file://__PATH__/bwa_base.tar</action><action type="set_environment"><environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable></action>
diff -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 -r b6be9c3a2d2df94f4d5236184a4392ec00961743 test/tool_shed/test_data/bwa/complex/tool_dependencies.xml
--- a/test/tool_shed/test_data/bwa/complex/tool_dependencies.xml
+++ b/test/tool_shed/test_data/bwa/complex/tool_dependencies.xml
@@ -3,12 +3,7 @@
<package name="bwa" version="0.5.9"><install version="1.0"><actions>
- <action type="download_by_url">http://downloads.sourceforge.net/project/bio-bwa/bwa-0.5.9.tar.bz2</action>
- <action type="shell_command">make</action>
- <action type="move_file">
- <source>bwa</source>
- <destination>$INSTALL_DIR/bin</destination>
- </action>
+ <action type="download_by_url">file://__PATH__/bwa_base.tar</action><action type="set_environment"><environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable></action>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Refactor tabular chunked display into Backbone and use for displaying tabular data.
by Bitbucket 04 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2faf4a9871bb/
changeset: 2faf4a9871bb
user: jgoecks
date: 2013-02-04 18:37:32
summary: Refactor tabular chunked display into Backbone and use for displaying tabular data.
affected #: 3 files
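The new TabularDataset model below pages through a dataset by requesting increasing chunk indexes from its chunk_url and treating an empty ck_data payload as end-of-file. The same protocol restated as a hypothetical Python 2 client, for clarity (the chunk parameter and ck_data field come from the JavaScript in this diff; the iter_chunks helper itself is not part of the commit):

import json
import urllib
import urllib2

def iter_chunks(chunk_url):
    # Request chunk 0, 1, 2, ... until the server returns an empty
    # ck_data payload, which the Backbone model interprets as EOF.
    index = 0
    while True:
        query = urllib.urlencode({'chunk': index})
        chunk = json.load(urllib2.urlopen('%s?%s' % (chunk_url, query)))
        if chunk['ck_data'] == '':
            return
        yield chunk['ck_data']
        index += 1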
diff -r 909868dda8812fcba2215a6eb77bdbaf71c6dfd0 -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 static/scripts/mvc/data.js
--- a/static/scripts/mvc/data.js
+++ b/static/scripts/mvc/data.js
@@ -1,3 +1,4 @@
+// Additional dependencies: jQuery, underscore.
define(["libs/backbone/backbone-relational"], function() {
/**
@@ -46,13 +47,175 @@
urlRoot: galaxy_paths.get('datasets_url')
});
+/**
+ * A tabular dataset. This object extends dataset to provide incremental chunked data.
+ */
+var TabularDataset = Dataset.extend({
+ defaults: _.extend({}, Dataset.prototype.defaults, {
+ chunk_url: null,
+ first_data_chunk: null,
+ chunk_index: -1,
+ at_eof: false
+ }),
+
+ initialize: function(options) {
+ Dataset.prototype.initialize.call(this);
+
+ // If first data chunk is available, next
+ // chunk is 1.
+ chunk_index = (this.attributes.first_data_chunk ? 1 : 0);
+ },
+
+ /**
+ * Set first data chunk; useful when initializing on the server side.
+ */
+ set_first_chunk: function(chunk) {
+ this.attributes.first_data_chunk = chunk;
+ this.attributes.chunk_index = 1;
+ },
+
+ /**
+ * Returns a jQuery Deferred object that resolves to the next data chunk or null if at EOF.
+ */
+ get_next_chunk: function() {
+ // If already at end of file, do nothing.
+ if (this.attributes.at_eof) {
+ return null;
+ }
+
+ // Get next chunk.
+ var self = this,
+ next_chunk = $.Deferred();
+ $.getJSON(this.attributes.chunk_url, {
+ chunk: self.attributes.chunk_index++
+ }).success(function(chunk) {
+ var rval;
+ if (chunk.ck_data !== '') {
+ // Found chunk.
+ rval = chunk;
+ }
+ else {
+ // At EOF.
+ self.attributes.at_eof = true;
+ rval = null;
+ }
+ next_chunk.resolve(rval);
+ });
+
+ return next_chunk;
+ }
+});
+
var DatasetCollection = Backbone.Collection.extend({
model: Dataset
});
+/**
+ * Provides table-based, dynamic view of a tabular dataset.
+ */
+var TabularDatasetChunkedView = Backbone.View.extend({
+
+ initialize: function(options) {},
+
+ render: function() {
+ // Add loading indicator div.
+ this.$el.append( $('<div/>').attr('id', 'loading_indicator') );
+
+ // Add data table and header.
+ var data_table = $('<table/>').attr({
+ id: 'content_table',
+ cellpadding: 0
+ });
+ this.$el.append(data_table);
+ var column_names = this.model.get_metadata('column_names');
+ if (column_names) {
+ data_table.append('<tr><th>' + column_names.join('</th><th>') + '</th></tr>');
+ }
+
+ // Add first chunk.
+ var first_chunk = this.model.get('first_data_chunk');
+ if (first_chunk) {
+ this._renderChunk(first_chunk);
+ }
+
+ // Show new chunks during scrolling.
+ var self = this;
+ $(window).scroll(function() {
+ if ($(window).scrollTop() === $(document).height() - $(window).height()) {
+ $.when(self.model.get_next_chunk()).then(function(result) {
+ if (result) {
+ self._renderChunk(result);
+ }
+ });
+ }
+ });
+ $('#loading_indicator').ajaxStart(function(){
+ $(this).show();
+ }).ajaxStop(function(){
+ $(this).hide();
+ });
+ },
+
+ // -- Helper functions. --
+
+ _renderCell: function(cell_contents, index, colspan) {
+ var column_types = this.model.get_metadata('column_types');
+ if (colspan !== undefined) {
+ return $('<td>').attr('colspan', colspan).addClass('stringalign').text(cell_contents);
+ }
+ else if (column_types[index] === 'str' || column_types === 'list') {
+ /* Left align all str columns, right align the rest */
+ return $('<td>').addClass('stringalign').text(cell_contents);
+ }
+ else {
+ return $('<td>').text(cell_contents);
+ }
+ },
+
+ _renderRow: function(line) {
+ // Check length of cells to ensure this is a complete row.
+ var cells = line.split('\t'),
+ row = $('<tr>'),
+ num_columns = this.model.get_metadata('columns');
+ if (cells.length === num_columns) {
+ _.each(cells, function(cell_contents, index) {
+ row.append(this._renderCell(cell_contents, index));
+ }, this);
+ }
+ else if (cells.length > num_columns) {
+ // SAM file or like format with optional metadata included.
+ _.each(cells.slice(0, num_columns - 1), function(cell_contents, index) {
+ row.append(this._renderCell(cell_contents, index));
+ }, this);
+ row.append(this._renderCell(cells.slice(num_columns - 1).join('\t'), num_columns - 1));
+ }
+ else if (num_columns > 5 && cells.length === num_columns - 1 ) {
+ // SAM file or like format with optional metadata missing.
+ _.each(cells, function(cell_contents, index) {
+ row.append(this._renderCell(cell_contents, index));
+ }, this);
+ row.append($('<td>'));
+ }
+ else {
+ // Comment line, just return the one cell.
+ row.append(this._renderCell(line, 0, num_columns));
+ }
+ return row;
+ },
+
+ _renderChunk: function(chunk) {
+ var data_table = this.$el.find('table');
+ _.each(chunk.ck_data.split('\n'), function(line, index) {
+ data_table.append(this._renderRow(line));
+ }, this);
+ }
+});
+
return {
Dataset: Dataset,
- DatasetCollection: DatasetCollection
+ TabularDataset: TabularDataset,
+ DatasetCollection: DatasetCollection,
+ TabularDatasetChunkedView: TabularDatasetChunkedView
};
});
diff -r 909868dda8812fcba2215a6eb77bdbaf71c6dfd0 -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 static/scripts/packed/mvc/data.js
--- a/static/scripts/packed/mvc/data.js
+++ b/static/scripts/packed/mvc/data.js
@@ -1,1 +1,1 @@
-define(["libs/backbone/backbone-relational"],function(){var a=Backbone.RelationalModel.extend({});var b=Backbone.RelationalModel.extend({defaults:{id:"",type:"",name:"",hda_ldda:"hda",metadata:null},initialize:function(){var d=new a();_.each(_.keys(this.attributes),function(e){if(e.indexOf("metadata_")===0){var f=e.split("metadata_")[1];d.set(f,this.attributes[e]);delete this.attributes[e]}},this);this.set("metadata",d)},get_metadata:function(d){return this.attributes.metadata.get(d)},urlRoot:galaxy_paths.get("datasets_url")});var c=Backbone.Collection.extend({model:b});return{Dataset:b,DatasetCollection:c}});
\ No newline at end of file
+define(["libs/backbone/backbone-relational"],function(){var b=Backbone.RelationalModel.extend({});var c=Backbone.RelationalModel.extend({defaults:{id:"",type:"",name:"",hda_ldda:"hda",metadata:null},initialize:function(){var f=new b();_.each(_.keys(this.attributes),function(g){if(g.indexOf("metadata_")===0){var h=g.split("metadata_")[1];f.set(h,this.attributes[g]);delete this.attributes[g]}},this);this.set("metadata",f)},get_metadata:function(f){return this.attributes.metadata.get(f)},urlRoot:galaxy_paths.get("datasets_url")});var a=c.extend({defaults:_.extend({},c.prototype.defaults,{chunk_url:null,first_data_chunk:null,chunk_index:-1,at_eof:false}),initialize:function(f){c.prototype.initialize.call(this);chunk_index=(this.attributes.first_data_chunk?1:0)},set_first_chunk:function(f){this.attributes.first_data_chunk=f;this.attributes.chunk_index=1},get_next_chunk:function(){if(this.attributes.at_eof){return null}var f=this,g=$.Deferred();$.getJSON(this.attributes.chunk_url,{chunk:f.attributes.chunk_index++}).success(function(h){var i;if(h.ck_data!==""){i=h}else{f.attributes.at_eof=true;i=null}g.resolve(i)});return g}});var e=Backbone.Collection.extend({model:c});var d=Backbone.View.extend({initialize:function(f){},render:function(){this.$el.append($("<div/>").attr("id","loading_indicator"));var i=$("<table/>").attr({id:"content_table",cellpadding:0});this.$el.append(i);var f=this.model.get_metadata("column_names");if(f){i.append("<tr><th>"+f.join("</th><th>")+"</th></tr>")}var h=this.model.get("first_data_chunk");if(h){this._renderChunk(h)}var g=this;$(window).scroll(function(){if($(window).scrollTop()===$(document).height()-$(window).height()){$.when(g.model.get_next_chunk()).then(function(j){if(j){g._renderChunk(j)}})}});$("#loading_indicator").ajaxStart(function(){$(this).show()}).ajaxStop(function(){$(this).hide()})},_renderCell:function(h,f,i){var g=this.model.get_metadata("column_types");if(i!==undefined){return $("<td>").attr("colspan",i).addClass("stringalign").text(h)}else{if(g[f]==="str"||g==="list"){return $("<td>").addClass("stringalign").text(h)}else{return $("<td>").text(h)}}},_renderRow:function(f){var g=f.split("\t"),i=$("<tr>"),h=this.model.get_metadata("columns");if(g.length===h){_.each(g,function(k,j){i.append(this._renderCell(k,j))},this)}else{if(g.length>h){_.each(g.slice(0,h-1),function(k,j){i.append(this._renderCell(k,j))},this);i.append(this._renderCell(g.slice(h-1).join("\t"),h-1))}else{if(h>5&&g.length===h-1){_.each(g,function(k,j){i.append(this._renderCell(k,j))},this);i.append($("<td>"))}else{i.append(this._renderCell(f,0,h))}}}return i},_renderChunk:function(f){var g=this.$el.find("table");_.each(f.ck_data.split("\n"),function(h,i){g.append(this._renderRow(h))},this)}});return{Dataset:c,TabularDataset:a,DatasetCollection:e,TabularDatasetChunkedView:d}});
\ No newline at end of file
diff -r 909868dda8812fcba2215a6eb77bdbaf71c6dfd0 -r 2faf4a9871bb58da2b16d1a8929a53c68f951cc0 templates/webapps/galaxy/dataset/tabular_chunked.mako
--- a/templates/webapps/galaxy/dataset/tabular_chunked.mako
+++ b/templates/webapps/galaxy/dataset/tabular_chunked.mako
@@ -5,91 +5,31 @@
<%def name="javascripts()">
${parent.javascripts()}
+ ${h.js( "libs/require" )}
<script type="text/javascript">
- var DATASET_URL = "${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ))}";
- var COLUMN_NUMBER = ${column_number};
- var COLUMN_TYPES = ${column_types};
- var COLUMN_NAMES = ${column_names};
+ require.config({
+ baseUrl: "${h.url_for('/static/scripts')}",
+ shim: {
+ "libs/underscore": { exports: "_" },
+ "libs/backbone/backbone": { exports: "Backbone" },
+ "libs/backbone/backbone-relational": ["libs/backbone/backbone"]
+ }
+ });
- var chunk = ${chunk};
- var current_chunk = 0;
+ require([ 'mvc/data' ], function(data) {
- function renderCell(cell_contents, index, colspan){
- if (colspan !== undefined){
- return $('<td>').attr('colspan', colspan).addClass('stringalign').text(cell_contents);
- }
- else if (COLUMN_TYPES[index] == 'str' || COLUMN_TYPES[index] == 'list'){
- /* Left align all str columns, right align the rest */
- return $('<td>').addClass('stringalign').text(cell_contents);;
- }
- else{
- return $('<td>').text(cell_contents);
- }
- }
-
- function renderRow(line){
- /* Check length of cells to ensure this is a complete row. */
- var cells = line.split('\t');
- var row = $('<tr>');
- if (cells.length == COLUMN_NUMBER){
- $.each(cells, function(index, cell_contents){
- row.append(renderCell(cell_contents, index));
- });
- }
- else if(cells.length > COLUMN_NUMBER){
- /* SAM file or like format with optional metadata included */
- $.each(cells.slice(0, COLUMN_NUMBER -1), function(index, cell_contents){
- row.append(renderCell(cell_contents, index));
- });
- row.append(renderCell(cells.slice(COLUMN_NUMBER -1).join('\t'), COLUMN_NUMBER-1));
- }
- else if(COLUMN_NUMBER > 5 && cells.length == COLUMN_NUMBER - 1 ){
- /* SAM file or like format with optional metadata missing */
- $.each(cells, function(index, cell_contents){
- row.append(renderCell(cell_contents, index));
- });
- row.append($('<td>'));
- }
- else{
- /* Comment line, just return the one cell*/
- row.append(renderCell(line, 0, COLUMN_NUMBER));
- }
- return row;
- }
-
- function renderChunk(chunk){
- var table = $('#content_table');
- if (chunk.ck_data == ""){
- current_chunk = -1;
- }
- else if(chunk.ck_index === current_chunk + 1){
- if (current_chunk === 0 && COLUMN_NAMES){
- table.append('<tr><th>' + COLUMN_NAMES.join('</th><th>') + '</th></tr>');
- }
- var lines = chunk.ck_data.split('\n');
- $.each(lines, function(index, line){
- table.append(renderRow(line));
- });
- current_chunk = chunk.ck_index;
- }
- }
-
- $(document).ready(function(){
- renderChunk(chunk);
- $(window).scroll(function(){
- if ($(window).scrollTop() == $(document).height() - $(window).height()){
- if (current_chunk !== -1){
- $.getJSON(DATASET_URL,
- {chunk: current_chunk},
- function(result){renderChunk(result)});
- }
- }
+ // Set up dataset and attributes.
+ var dataset = new data.TabularDataset( ${h.to_json_string( dataset.get_api_value() )} );
+ dataset.set('chunk_url',
+ "${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ))}");
+ dataset.set_first_chunk(${chunk})
+
+ // Set up, render, and add view.
+ var dataset_view = new data.TabularDatasetChunkedView({
+ model: dataset
});
- $('#loading_indicator').ajaxStart(function(){
- $(this).show();
- }).ajaxStop(function(){
- $(this).hide();
- });
+ dataset_view.render();
+ $('body').append(dataset_view.$el);
});
</script></%def>
@@ -97,7 +37,3 @@
<%def name="stylesheets()">
${parent.stylesheets()}
</%def>
-
-<div id="loading_indicator" ></div>
-<table id="content_table" cellpadding="0">
-</table>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: history panel: ensure deleted or purged datasets are considered ready to the updater; pack scripts
by Bitbucket 04 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/909868dda881/
changeset: 909868dda881
user: carlfeberhard
date: 2013-02-04 18:14:23
summary: history panel: ensure deleted or purged datasets are considered ready to the updater; pack scripts
affected #: 3 files
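The one-line model change below makes the history panel's updater treat deleted or purged datasets as ready, so it stops polling them just as it does for datasets in a terminal state. Restated as a hypothetical Python predicate (the state names are taken from HistoryDatasetAssociation.STATES in hda-model.js; the dict-based dataset argument is illustrative):

# Terminal states, per HistoryDatasetAssociation.STATES.
TERMINAL_STATES = frozenset(
    ['ok', 'empty', 'failed_metadata', 'noPermission', 'discarded', 'error'])

def in_ready_state(dataset):
    # A dataset needs no further polling once it is deleted/purged
    # or has reached a terminal state.
    return (dataset.get('deleted') or dataset.get('purged')
            or dataset.get('state') in TERMINAL_STATES)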
diff -r 596b4c29f84256767975caf9770d8c316d97386e -r 909868dda8812fcba2215a6eb77bdbaf71c6dfd0 static/scripts/mvc/dataset/hda-model.js
--- a/static/scripts/mvc/dataset/hda-model.js
+++ b/static/scripts/mvc/dataset/hda-model.js
@@ -130,7 +130,8 @@
//TODO: to list inclusion test
//TODO: class level readyStates list
return (
- ( state === HistoryDatasetAssociation.STATES.OK )
+ this.isDeletedOrPurged()
+ || ( state === HistoryDatasetAssociation.STATES.OK )
|| ( state === HistoryDatasetAssociation.STATES.EMPTY )
|| ( state === HistoryDatasetAssociation.STATES.FAILED_METADATA )
|| ( state === HistoryDatasetAssociation.STATES.NOT_VIEWABLE )
diff -r 596b4c29f84256767975caf9770d8c316d97386e -r 909868dda8812fcba2215a6eb77bdbaf71c6dfd0 static/scripts/packed/galaxy.base.js
--- a/static/scripts/packed/galaxy.base.js
+++ b/static/scripts/packed/galaxy.base.js
@@ -1,1 +1,1 @@
-(function(){var b=0;var c=["ms","moz","webkit","o"];for(var a=0;a<c.length&&!window.requestAnimationFrame;++a){window.requestAnimationFrame=window[c[a]+"RequestAnimationFrame"];window.cancelRequestAnimationFrame=window[c[a]+"CancelRequestAnimationFrame"]}if(!window.requestAnimationFrame){window.requestAnimationFrame=function(h,e){var d=new Date().getTime();var f=Math.max(0,16-(d-b));var g=window.setTimeout(function(){h(d+f)},f);b=d+f;return g}}if(!window.cancelAnimationFrame){window.cancelAnimationFrame=function(d){clearTimeout(d)}}}());if(!Array.indexOf){Array.prototype.indexOf=function(c){for(var b=0,a=this.length;b<a;b++){if(this[b]==c){return b}}return -1}}function obj_length(c){if(c.length!==undefined){return c.length}var b=0;for(var a in c){b++}return b}$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function make_popupmenu(b,c){var a=(b.data("menu_options"));b.data("menu_options",c);if(a){return}b.bind("click.show_popup",function(d){$(".popmenu-wrapper").remove();setTimeout(function(){var g=$("<ul class='dropdown-menu' id='"+b.attr("id")+"-menu'></ul>");var f=b.data("menu_options");if(obj_length(f)<=0){$("<li>No Options.</li>").appendTo(g)}$.each(f,function(j,i){if(i){g.append($("<li></li>").append($("<a href='#'></a>").html(j).click(i)))}else{g.append($("<li></li>").addClass("head").append($("<a href='#'></a>").html(j)))}});var h=$("<div class='popmenu-wrapper' style='position: absolute;left: 0; top: -1000;'></div>").append(g).appendTo("body");var e=d.pageX-h.width()/2;e=Math.min(e,$(document).scrollLeft()+$(window).width()-$(h).width()-5);e=Math.max(e,$(document).scrollLeft()+5);h.css({top:d.pageY,left:e})},10);setTimeout(function(){var f=function(h){$(h).bind("click.close_popup",function(){$(".popmenu-wrapper").remove();h.unbind("click.close_popup")})};f($(window.document));f($(window.top.document));for(var e=window.top.frames.length;e--;){var g=$(window.top.frames[e].document);f(g)}},50);return false})}function make_popup_menus(a){a=a||document;$(a).find("div[popupmenu]").each(function(){var b={};var d=$(this);d.find("a").each(function(){var g=$(this),i=g.get(0),e=i.getAttribute("confirm"),f=i.getAttribute("href"),h=i.getAttribute("target");if(!f){b[g.text()]=null}else{b[g.text()]=function(){if(!e||confirm(e)){var j;if(h=="_parent"){window.parent.location=f}else{if(h=="_top"){window.top.location=f}else{if(h=="demo"){if(j===undefined||j.closed){j=window.open(f,h);j.creator=self}}else{window.location=f}}}}}}});var c=$(a).find("#"+d.attr("popupmenu"));c.find("a").bind("click",function(f){f.stopPropagation();return true});make_popupmenu(c,b);c.addClass("popup");d.remove()})}function naturalSort(j,h){var p=/(-?[0-9\.]+)/g,k=j.toString().toLowerCase()||"",g=h.toString().toLowerCase()||"",l=String.fromCharCode(0),n=k.replace(p,l+"$1"+l).split(l),e=g.replace(p,l+"$1"+l).split(l),d=(new Date(k)).getTime(),o=d?(new Date(g)).getTime():null;if(o){if(d<o){return -1}else{if(d>o){return 1}}}var m,f;for(var i=0,c=Math.max(n.length,e.length);i<c;i++){m=parseFloat(n[i])||n[i];f=parseFloat(e[i])||e[i];if(m<f){return -1}else{if(m>f){return 1}}}return 0}function replace_big_select_inputs(a,c,b){if(!jQuery().autocomplete){return}if(a===undefined){a=20}if(c===undefined){c=3000}var b=b||$("select");b.each(function(){var e=$(this);var h=e.find("option").length;if((h<a)||(h>c)){return}if(e.attr("multiple")==="multiple"){return}if(e.hasClass("no-autocomplete")){return}var n=e.attr("value");var d=$("<input type='text' class='text-and-autocomplete-select'></input>");d.attr("size",40);d.attr("name",e.attr("name"));d.attr("id",e.attr("id"));d.click(function(){var o=$(this).val();$(this).val("Loading...");$(this).showAllInCache();$(this).val(o);$(this).select()});var f=[];var j={};e.children("option").each(function(){var p=$(this).text();var o=$(this).attr("value");f.push(p);j[p]=o;j[o]=o;if(o==n){d.attr("value",p)}});if(n===""||n==="?"){d.attr("value","Click to Search or Select")}if(e.attr("name")=="dbkey"){f=f.sort(naturalSort)}var g={selectFirst:false,autoFill:false,mustMatch:false,matchContains:true,max:c,minChars:0,hideForLessThanMinChars:false};d.autocomplete(f,g);e.replaceWith(d);var l=function(){var p=d.attr("value");var o=j[p];if(o!==null&&o!==undefined){d.attr("value",o)}else{if(n!==""){d.attr("value",n)}else{d.attr("value","?")}}};d.parents("form").submit(function(){l()});$(document).bind("convert_to_values",function(){l()});if(e.attr("refresh_on_change")=="true"){var i=e.attr("refresh_on_change_values"),m=e.attr("last_selected_value");if(i!==undefined){i=i.split(",")}var k=function(){var o=j[d.attr("value")];if(m!==o&&o!==null&&o!==undefined){if(i!==undefined&&$.inArray(o,i)===-1&&$.inArray(m,i)===-1){return}d.attr("value",o);$(window).trigger("refresh_on_change");d.parents("form").submit()}};d.bind("result",k);d.keyup(function(o){if(o.keyCode===13){k()}});d.keydown(function(o){if(o.keyCode===13){return false}})}})}$.fn.make_text_editable=function(g){var d=("num_cols" in g?g.num_cols:30),c=("num_rows" in g?g.num_rows:4),e=("use_textarea" in g?g.use_textarea:false),b=("on_finish" in g?g.on_finish:null),f=("help_text" in g?g.help_text:null);var a=$(this);a.addClass("editable-text").click(function(l){if($(this).children(":input").length>0){return}a.removeClass("editable-text");var i=function(m){a.find(":input").remove();if(m!==""){a.text(m)}else{a.html("<br>")}a.addClass("editable-text");if(b){b(m)}};var h=a.text(),k,j;if(e){k=$("<textarea/>").attr({rows:c,cols:d}).text($.trim(h)).keyup(function(m){if(m.keyCode===27){i(h)}});j=$("<button/>").text("Done").click(function(){i(k.val());return false})}else{k=$("<input type='text'/>").attr({value:$.trim(h),size:d}).blur(function(){i(h)}).keyup(function(m){if(m.keyCode===27){$(this).trigger("blur")}else{if(m.keyCode===13){i($(this).val())}}})}a.text("");a.append(k);if(j){a.append(j)}k.focus();k.select();l.stopPropagation()});if(f){a.attr("title",f).tooltip()}return a};function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text($.trim(k))}else{j=$("<input type='text'></input>").attr({value:$.trim(k),size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCode===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){if(o!==""){l.text(o)}else{l.html("<em>None</em>")}if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStorage.get("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}catch(f){$.jStorage.deleteKey("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id,h=$(this).children("div.historyItemBody"),i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){var k;if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){k=$.jStorage.get("history_expand_state");if(k){delete k[j];$.jStorage.set("history_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){k=$.jStorage.get("history_expand_state");if(!k){k={}}k[j]=true;$.jStorage.set("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStorage.get("history_expand_state");if(!h){h={}}$("div.historyItemBody:visible").each(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStorage.set("history_expand_state",h)}).show()};b()}function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}function reset_tool_search(a){var c=$("#galaxy_tools").contents();if(c.length===0){c=$(document)}$(this).removeClass("search_active");c.find(".toolTitle").removeClass("search_match");c.find(".toolSectionBody").hide();c.find(".toolTitle").show();c.find(".toolPanelLabel").show();c.find(".toolSectionWrapper").each(function(){if($(this).attr("id")!="recently_used_wrapper"){$(this).show()}else{if($(this).hasClass("user_pref_visible")){$(this).show()}}});c.find("#search-no-results").hide();c.find("#search-spinner").hide();if(a){var b=c.find("#tool-search-query");b.val("search tools")}}var GalaxyAsync=function(a){this.url_dict={};this.log_action=(a===undefined?false:a)};GalaxyAsync.prototype.set_func_url=function(a,b){this.url_dict[a]=b};GalaxyAsync.prototype.set_user_pref=function(a,b){var c=this.url_dict[arguments.callee];if(c===undefined){return false}$.ajax({url:c,data:{pref_name:a,pref_value:b},error:function(){return false},success:function(){return true}})};GalaxyAsync.prototype.log_user_action=function(c,b,d){if(!this.log_action){return}var a=this.url_dict[arguments.callee];if(a===undefined){return false}$.ajax({url:a,data:{action:c,context:b,params:d},error:function(){return false},success:function(){return true}})};$(document).ready(function(){$("select[refresh_on_change='true']").change(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");$(document).trigger("convert_to_values");a.get(0).form.submit()});$(":checkbox[refresh_on_change='true']").click(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");a.get(0).form.submit()});$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tooltip){$(".tooltip").tooltip({placement:"top"})}make_popup_menus();replace_big_select_inputs(20,1500);$("a").click(function(){var b=$(this);var c=(parent.frames&&parent.frames.galaxy_main);if((b.attr("target")=="galaxy_main")&&(!c)){var a=b.attr("href");if(a.indexOf("?")==-1){a+="?"}else{a+="&"}a+="use_panels=True";b.attr("href",a);b.attr("target","_self")}return b})});
\ No newline at end of file
+(function(){var b=0;var c=["ms","moz","webkit","o"];for(var a=0;a<c.length&&!window.requestAnimationFrame;++a){window.requestAnimationFrame=window[c[a]+"RequestAnimationFrame"];window.cancelRequestAnimationFrame=window[c[a]+"CancelRequestAnimationFrame"]}if(!window.requestAnimationFrame){window.requestAnimationFrame=function(h,e){var d=new Date().getTime();var f=Math.max(0,16-(d-b));var g=window.setTimeout(function(){h(d+f)},f);b=d+f;return g}}if(!window.cancelAnimationFrame){window.cancelAnimationFrame=function(d){clearTimeout(d)}}}());if(!Array.indexOf){Array.prototype.indexOf=function(c){for(var b=0,a=this.length;b<a;b++){if(this[b]==c){return b}}return -1}}function obj_length(c){if(c.length!==undefined){return c.length}var b=0;for(var a in c){b++}return b}$.fn.makeAbsolute=function(a){return this.each(function(){var b=$(this);var c=b.position();b.css({position:"absolute",marginLeft:0,marginTop:0,top:c.top,left:c.left,right:$(window).width()-(c.left+b.width())});if(a){b.remove().appendTo("body")}})};function make_popupmenu(b,c){var a=(b.data("menu_options"));b.data("menu_options",c);if(a){return}b.bind("click.show_popup",function(d){$(".popmenu-wrapper").remove();setTimeout(function(){var g=$("<ul class='dropdown-menu' id='"+b.attr("id")+"-menu'></ul>");var f=b.data("menu_options");if(obj_length(f)<=0){$("<li>No Options.</li>").appendTo(g)}$.each(f,function(j,i){if(i){g.append($("<li></li>").append($("<a href='#'></a>").html(j).click(i)))}else{g.append($("<li></li>").addClass("head").append($("<a href='#'></a>").html(j)))}});var h=$("<div class='popmenu-wrapper' style='position: absolute;left: 0; top: -1000;'></div>").append(g).appendTo("body");var e=d.pageX-h.width()/2;e=Math.min(e,$(document).scrollLeft()+$(window).width()-$(h).width()-5);e=Math.max(e,$(document).scrollLeft()+5);h.css({top:d.pageY,left:e})},10);setTimeout(function(){var f=function(h){$(h).bind("click.close_popup",function(){$(".popmenu-wrapper").remove();h.unbind("click.close_popup")})};f($(window.document));f($(window.top.document));for(var e=window.top.frames.length;e--;){var g=$(window.top.frames[e].document);f(g)}},50);return false})}function make_popup_menus(a){a=a||document;$(a).find("div[popupmenu]").each(function(){var b={};var d=$(this);d.find("a").each(function(){var g=$(this),i=g.get(0),e=i.getAttribute("confirm"),f=i.getAttribute("href"),h=i.getAttribute("target");if(!f){b[g.text()]=null}else{b[g.text()]=function(){if(!e||confirm(e)){var j;if(h=="_parent"){window.parent.location=f}else{if(h=="_top"){window.top.location=f}else{if(h=="demo"){if(j===undefined||j.closed){j=window.open(f,h);j.creator=self}}else{window.location=f}}}}}}});var c=$(a).find("#"+d.attr("popupmenu"));c.find("a").bind("click",function(f){f.stopPropagation();return true});make_popupmenu(c,b);c.addClass("popup");d.remove()})}function naturalSort(j,h){var p=/(-?[0-9\.]+)/g,k=j.toString().toLowerCase()||"",g=h.toString().toLowerCase()||"",l=String.fromCharCode(0),n=k.replace(p,l+"$1"+l).split(l),e=g.replace(p,l+"$1"+l).split(l),d=(new Date(k)).getTime(),o=d?(new Date(g)).getTime():null;if(o){if(d<o){return -1}else{if(d>o){return 1}}}var m,f;for(var i=0,c=Math.max(n.length,e.length);i<c;i++){m=parseFloat(n[i])||n[i];f=parseFloat(e[i])||e[i];if(m<f){return -1}else{if(m>f){return 1}}}return 0}function replace_big_select_inputs(a,c,b){if(!jQuery.fn.select2){return}if(a===undefined){a=20}if(c===undefined){c=3000}var b=b||$("select");b.each(function(){var e=$(this);var d=e.find("option").length;if((d<a)||(d>c)){return}if(e.hasClass("no-autocomplete")){return}e.select2({width:"resolve"})})}$.fn.make_text_editable=function(g){var d=("num_cols" in g?g.num_cols:30),c=("num_rows" in g?g.num_rows:4),e=("use_textarea" in g?g.use_textarea:false),b=("on_finish" in g?g.on_finish:null),f=("help_text" in g?g.help_text:null);var a=$(this);a.addClass("editable-text").click(function(l){if($(this).children(":input").length>0){return}a.removeClass("editable-text");var i=function(m){a.find(":input").remove();if(m!==""){a.text(m)}else{a.html("<br>")}a.addClass("editable-text");if(b){b(m)}};var h=a.text(),k,j;if(e){k=$("<textarea/>").attr({rows:c,cols:d}).text($.trim(h)).keyup(function(m){if(m.keyCode===27){i(h)}});j=$("<button/>").text("Done").click(function(){i(k.val());return false})}else{k=$("<input type='text'/>").attr({value:$.trim(h),size:d}).blur(function(){i(h)}).keyup(function(m){if(m.keyCode===27){$(this).trigger("blur")}else{if(m.keyCode===13){i($(this).val())}}})}a.text("");a.append(k);if(j){a.append(j)}k.focus();k.select();l.stopPropagation()});if(f){a.attr("title",f).tooltip()}return a};function async_save_text(d,f,e,a,c,h,i,g,b){if(c===undefined){c=30}if(i===undefined){i=4}$("#"+d).live("click",function(){if($("#renaming-active").length>0){return}var l=$("#"+f),k=l.text(),j;if(h){j=$("<textarea></textarea>").attr({rows:i,cols:c}).text($.trim(k))}else{j=$("<input type='text'></input>").attr({value:$.trim(k),size:c})}j.attr("id","renaming-active");j.blur(function(){$(this).remove();l.show();if(b){b(j)}});j.keyup(function(n){if(n.keyCode===27){$(this).trigger("blur")}else{if(n.keyCode===13){var m={};m[a]=$(this).val();$(this).trigger("blur");$.ajax({url:e,data:m,error:function(){alert("Text editing for elt "+f+" failed")},success:function(o){if(o!==""){l.text(o)}else{l.html("<em>None</em>")}if(b){b(j)}}})}}});if(g){g(j)}l.hide();j.insertAfter(l);j.focus();j.select();return})}function init_history_items(d,a,c){var b=function(){try{var e=$.jStorage.get("history_expand_state");if(e){for(var g in e){$("#"+g+" div.historyItemBody").show()}}}catch(f){$.jStorage.deleteKey("history_expand_state")}if($.browser.mozilla){$("div.historyItemBody").each(function(){if(!$(this).is(":visible")){$(this).find("pre.peek").css("overflow","hidden")}})}d.each(function(){var j=this.id,h=$(this).children("div.historyItemBody"),i=h.find("pre.peek");$(this).find(".historyItemTitleBar > .historyItemTitle").wrap("<a href='javascript:void(0);'></a>").click(function(){var k;if(h.is(":visible")){if($.browser.mozilla){i.css("overflow","hidden")}h.slideUp("fast");if(!c){k=$.jStorage.get("history_expand_state");if(k){delete k[j];$.jStorage.set("history_expand_state",k)}}}else{h.slideDown("fast",function(){if($.browser.mozilla){i.css("overflow","auto")}});if(!c){k=$.jStorage.get("history_expand_state");if(!k){k={}}k[j]=true;$.jStorage.set("history_expand_state",k)}}return false})});$("#top-links > a.toggle").click(function(){var h=$.jStorage.get("history_expand_state");if(!h){h={}}$("div.historyItemBody:visible").each(function(){if($.browser.mozilla){$(this).find("pre.peek").css("overflow","hidden")}$(this).slideUp("fast");if(h){delete h[$(this).parent().attr("id")]}});$.jStorage.set("history_expand_state",h)}).show()};b()}function commatize(b){b+="";var a=/(\d+)(\d{3})/;while(a.test(b)){b=b.replace(a,"$1,$2")}return b}function reset_tool_search(a){var c=$("#galaxy_tools").contents();if(c.length===0){c=$(document)}$(this).removeClass("search_active");c.find(".toolTitle").removeClass("search_match");c.find(".toolSectionBody").hide();c.find(".toolTitle").show();c.find(".toolPanelLabel").show();c.find(".toolSectionWrapper").each(function(){if($(this).attr("id")!="recently_used_wrapper"){$(this).show()}else{if($(this).hasClass("user_pref_visible")){$(this).show()}}});c.find("#search-no-results").hide();c.find("#search-spinner").hide();if(a){var b=c.find("#tool-search-query");b.val("search tools")}}var GalaxyAsync=function(a){this.url_dict={};this.log_action=(a===undefined?false:a)};GalaxyAsync.prototype.set_func_url=function(a,b){this.url_dict[a]=b};GalaxyAsync.prototype.set_user_pref=function(a,b){var c=this.url_dict[arguments.callee];if(c===undefined){return false}$.ajax({url:c,data:{pref_name:a,pref_value:b},error:function(){return false},success:function(){return true}})};GalaxyAsync.prototype.log_user_action=function(c,b,d){if(!this.log_action){return}var a=this.url_dict[arguments.callee];if(a===undefined){return false}$.ajax({url:a,data:{action:c,context:b,params:d},error:function(){return false},success:function(){return true}})};$(document).ready(function(){$("select[refresh_on_change='true']").change(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");$(document).trigger("convert_to_values");a.get(0).form.submit()});$(":checkbox[refresh_on_change='true']").click(function(){var a=$(this),e=a.val(),d=false,c=a.attr("refresh_on_change_values");if(c){c=c.split(",");var b=a.attr("last_selected_value");if($.inArray(e,c)===-1&&$.inArray(b,c)===-1){return}}$(window).trigger("refresh_on_change");a.get(0).form.submit()});$("a[confirm]").click(function(){return confirm($(this).attr("confirm"))});if($.fn.tooltip){$(".tooltip").tooltip({placement:"top"})}make_popup_menus();replace_big_select_inputs(20,1500);$("a").click(function(){var b=$(this);var c=(parent.frames&&parent.frames.galaxy_main);if((b.attr("target")=="galaxy_main")&&(!c)){var a=b.attr("href");if(a.indexOf("?")==-1){a+="?"}else{a+="&"}a+="use_panels=True";b.attr("href",a);b.attr("target","_self")}return b})});
\ No newline at end of file
diff -r 596b4c29f84256767975caf9770d8c316d97386e -r 909868dda8812fcba2215a6eb77bdbaf71c6dfd0 static/scripts/packed/mvc/dataset/hda-model.js
--- a/static/scripts/packed/mvc/dataset/hda-model.js
+++ b/static/scripts/packed/mvc/dataset/hda-model.js
@@ -1,1 +1,1 @@
-var HistoryDatasetAssociation=BaseModel.extend(LoggableMixin).extend({defaults:{history_id:null,model_class:"HistoryDatasetAssociation",hid:0,id:null,name:"(unnamed dataset)",state:"ok",data_type:null,file_size:0,file_ext:"",meta_files:[],misc_blurb:"",misc_info:"",deleted:false,purged:false,visible:false,accessible:true},urlRoot:"api/histories/",url:function(){return"api/histories/"+this.get("history_id")+"/contents/"+this.get("id")},initialize:function(){this.log(this+".initialize",this.attributes);this.log("\tparent history_id: "+this.get("history_id"));if(!this.get("accessible")){this.set("state",HistoryDatasetAssociation.STATES.NOT_VIEWABLE)}this.on("change:state",function(b,a){this.log(this+" has changed state:",b,a);if(this.inReadyState()){this.trigger("state:ready",b,a,this.previous("state"))}})},isDeletedOrPurged:function(){return(this.get("deleted")||this.get("purged"))},isVisible:function(b,c){var a=true;if((!b)&&(this.get("deleted")||this.get("purged"))){a=false}if((!c)&&(!this.get("visible"))){a=false}return a},inReadyState:function(){var a=this.get("state");return((a===HistoryDatasetAssociation.STATES.OK)||(a===HistoryDatasetAssociation.STATES.EMPTY)||(a===HistoryDatasetAssociation.STATES.FAILED_METADATA)||(a===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(a===HistoryDatasetAssociation.STATES.DISCARDED)||(a===HistoryDatasetAssociation.STATES.ERROR))},hasData:function(){return(this.get("file_size")>0)},toString:function(){var a=this.get("id")||"";if(this.get("name")){a+=':"'+this.get("name")+'"'}return"HistoryDatasetAssociation("+a+")"}});HistoryDatasetAssociation.STATES={UPLOAD:"upload",QUEUED:"queued",PAUSED:"paused",RUNNING:"running",SETTING_METADATA:"setting_metadata",NEW:"new",EMPTY:"empty",OK:"ok",FAILED_METADATA:"failed_metadata",NOT_VIEWABLE:"noPermission",DISCARDED:"discarded",ERROR:"error"};var HDACollection=Backbone.Collection.extend(LoggableMixin).extend({model:HistoryDatasetAssociation,initialize:function(){},ids:function(){return this.map(function(a){return a.id})},getByHid:function(a){return _.first(this.filter(function(b){return b.get("hid")===a}))},hidToCollectionIndex:function(a){if(!a){return this.models.length}var d=this.models.length-1;for(var b=d;b>=0;b--){var c=this.at(b).get("hid");if(c==a){return b}if(c<a){return b+1}}return null},getVisible:function(a,b){return this.filter(function(c){return c.isVisible(a,b)})},getStateLists:function(){var a={};_.each(_.values(HistoryDatasetAssociation.STATES),function(b){a[b]=[]});this.each(function(b){a[b.get("state")].push(b.get("id"))});return a},running:function(){var a=[];this.each(function(b){if(!b.inReadyState()){a.push(b.get("id"))}});return a},update:function(a){this.log(this+"update:",a);if(!(a&&a.length)){return[]}var c=this,b=null;_.each(a,function(f,d){var e=c.get(f);if(e){e.fetch();b.push(e)}});return b},toString:function(){return("HDACollection()")}});
\ No newline at end of file
+var HistoryDatasetAssociation=BaseModel.extend(LoggableMixin).extend({defaults:{history_id:null,model_class:"HistoryDatasetAssociation",hid:0,id:null,name:"(unnamed dataset)",state:"ok",data_type:null,file_size:0,file_ext:"",meta_files:[],misc_blurb:"",misc_info:"",deleted:false,purged:false,visible:false,accessible:true},urlRoot:"api/histories/",url:function(){return"api/histories/"+this.get("history_id")+"/contents/"+this.get("id")},initialize:function(){this.log(this+".initialize",this.attributes);this.log("\tparent history_id: "+this.get("history_id"));if(!this.get("accessible")){this.set("state",HistoryDatasetAssociation.STATES.NOT_VIEWABLE)}this.on("change:state",function(b,a){this.log(this+" has changed state:",b,a);if(this.inReadyState()){this.trigger("state:ready",b,a,this.previous("state"))}})},isDeletedOrPurged:function(){return(this.get("deleted")||this.get("purged"))},isVisible:function(b,c){var a=true;if((!b)&&(this.get("deleted")||this.get("purged"))){a=false}if((!c)&&(!this.get("visible"))){a=false}return a},inReadyState:function(){var a=this.get("state");return(this.isDeletedOrPurged()||(a===HistoryDatasetAssociation.STATES.OK)||(a===HistoryDatasetAssociation.STATES.EMPTY)||(a===HistoryDatasetAssociation.STATES.FAILED_METADATA)||(a===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(a===HistoryDatasetAssociation.STATES.DISCARDED)||(a===HistoryDatasetAssociation.STATES.ERROR))},hasData:function(){return(this.get("file_size")>0)},toString:function(){var a=this.get("id")||"";if(this.get("name")){a+=':"'+this.get("name")+'"'}return"HistoryDatasetAssociation("+a+")"}});HistoryDatasetAssociation.STATES={UPLOAD:"upload",QUEUED:"queued",PAUSED:"paused",RUNNING:"running",SETTING_METADATA:"setting_metadata",NEW:"new",EMPTY:"empty",OK:"ok",FAILED_METADATA:"failed_metadata",NOT_VIEWABLE:"noPermission",DISCARDED:"discarded",ERROR:"error"};var HDACollection=Backbone.Collection.extend(LoggableMixin).extend({model:HistoryDatasetAssociation,initialize:function(){},ids:function(){return this.map(function(a){return a.id})},getByHid:function(a){return _.first(this.filter(function(b){return b.get("hid")===a}))},hidToCollectionIndex:function(a){if(!a){return this.models.length}var d=this.models.length-1;for(var b=d;b>=0;b--){var c=this.at(b).get("hid");if(c==a){return b}if(c<a){return b+1}}return null},getVisible:function(a,b){return this.filter(function(c){return c.isVisible(a,b)})},getStateLists:function(){var a={};_.each(_.values(HistoryDatasetAssociation.STATES),function(b){a[b]=[]});this.each(function(b){a[b.get("state")].push(b.get("id"))});return a},running:function(){var a=[];this.each(function(b){if(!b.inReadyState()){a.push(b.get("id"))}});return a},update:function(a){this.log(this+"update:",a);if(!(a&&a.length)){return[]}var c=this,b=null;_.each(a,function(f,d){var e=c.get(f);if(e){e.fetch();b.push(e)}});return b},toString:function(){return("HDACollection()")}});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1ab19b93abbd/
changeset: 1ab19b93abbd
branch: next-stable
user: inithello
date: 2013-02-04 17:51:42
summary: Tool shed functional tests for complex repository dependencies, detection and setting of metadata for invalid repository dependencies. Fixes for detecting and displaying invalid repository dependencies.
affected #: 10 files
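Two defensive changes recur in the diff below: metadata generation now logs and records invalid repository dependencies instead of aborting on the first error message, and the user lookups return None for an unknown owner rather than letting SQLAlchemy's .one() raise. A minimal sketch of that lookup pattern, assuming direct access to a SQLAlchemy session and model (the actual code catches a bare Exception in Python 2 syntax):

from sqlalchemy.orm.exc import NoResultFound

def get_user_by_username(sa_session, model, username):
    # Return the User row for username, or None if no such user exists,
    # so the caller can flag the repository dependency as invalid and
    # continue generating metadata.
    try:
        return sa_session.query(model.User) \
                         .filter(model.User.table.c.username == username) \
                         .one()
    except NoResultFound:
        return None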
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1292,8 +1292,8 @@
for repository_elem in root.findall( 'repository' ):
current_rd_tups, error_message = handle_repository_elem( app, repository_elem, repository_dependencies_tups )
if error_message:
+ # Log the problem, but generate metadata for the invalid repository dependencies.
log.debug( error_message )
- return metadata_dict, error_message
for crdt in current_rd_tups:
repository_dependencies_tups.append( crdt )
if repository_dependencies_tups:
@@ -1868,10 +1868,12 @@
.first()
# We're in the tool shed.
user = get_user_by_username( app, owner )
- return sa_session.query( app.model.Repository ) \
- .filter( and_( app.model.Repository.table.c.name == name,
- app.model.Repository.table.c.user_id == user.id ) ) \
- .first()
+ if user:
+ return sa_session.query( app.model.Repository ) \
+ .filter( and_( app.model.Repository.table.c.name == name,
+ app.model.Repository.table.c.user_id == user.id ) ) \
+ .first()
+ return None
def get_repository_dependencies_for_changeset_revision( trans, repository, repository_metadata, toolshed_base_url,
key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
handled_key_rd_dicts=None, circular_repository_dependencies=None ):
@@ -2284,9 +2286,13 @@
def get_user_by_username( app, username ):
"""Get a user from the database by username."""
sa_session = app.model.context.current
- return sa_session.query( app.model.User ) \
- .filter( app.model.User.table.c.username == username ) \
- .one()
+ try:
+ user = sa_session.query( app.model.User ) \
+ .filter( app.model.User.table.c.username == username ) \
+ .one()
+ return user
+ except Exception, e:
+ return None
def handle_circular_repository_dependency( repository_key, repository_dependency, circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies ):
all_repository_dependencies_root_key = all_repository_dependencies[ 'root_key' ]
repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
@@ -2510,6 +2516,11 @@
else:
# We're in the tool shed.
if tool_shed_is_this_tool_shed( toolshed ):
+ # Append the repository dependency definition regardless of whether it's valid or not, as Galaxy needs this to
+ # properly display an error when the repository dependency is invalid at the time of installation.
+ repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
+ if repository_dependencies_tup not in new_rd_tups:
+ new_rd_tups.append( repository_dependencies_tup )
try:
user = sa_session.query( app.model.User ) \
.filter( app.model.User.table.c.username == owner ) \
@@ -2539,9 +2550,6 @@
error_message = "Invalid changeset revision <b>%s</b> defined. Repository dependencies will be ignored." % str( changeset_revision )
log.debug( error_message )
return new_rd_tups, error_message
- repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
- if repository_dependencies_tup not in new_rd_tups:
- new_rd_tups.append( repository_dependencies_tup )
else:
# Repository dependencies are currently supported within a single tool shed.
error_message = "Invalid tool shed <b>%s</b> defined for repository <b>%s</b>. " % ( toolshed, name )
@@ -3179,7 +3187,7 @@
updating_installed_repository=False,
persist=False )
# We'll only display error messages for the repository tip (it may be better to display error messages for each installable changeset revision).
- if current_metadata_dict == repository.tip( trans.app ):
+ if current_changeset_revision == repository.tip( trans.app ):
invalid_file_tups.extend( invalid_tups )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac run_functional_tests.sh
--- a/run_functional_tests.sh
+++ b/run_functional_tests.sh
@@ -11,8 +11,8 @@
echo "'run_functional_tests.sh -id bbb' for testing one tool with id 'bbb' ('bbb' is the tool id)"
echo "'run_functional_tests.sh -sid ccc' for testing one section with sid 'ccc' ('ccc' is the string after 'section::')"
echo "'run_functional_tests.sh -list' for listing all the tool ids"
- echo "'run_functional_tests.sh -toolshed' for running all the test scripts in the ./test/tool_shed/functional directory"
- echo "'run_functional_tests.sh -toolshed testscriptname' for running one test script named testscriptname in the .test/tool_shed/functional directory"
+ echo "'run_functional_tests.sh -toolshed' for running all the test scripts in the ./test/tool_shed/functional directory"
+ echo "'run_functional_tests.sh -toolshed testscriptname' for running one test script named testscriptname in the .test/tool_shed/functional directory"
elif [ $1 = '-id' ]; then
python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$2 --with-nosehtml --html-report-file run_functional_tests.html
elif [ $1 = '-sid' ]; then
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -32,7 +32,7 @@
self.tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
self.shed_tool_conf = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF' )
# TODO: Figure out a way to alter these attributes during tests.
- self.galaxy_tool_dependency_dir = None # os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
+ self.galaxy_tool_dependency_dir = os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
self.home()
def add_repository_review_component( self, **kwd ):
@@ -450,6 +450,10 @@
dependency_template = string.Template( common.complex_repository_dependency_template )
repository_dependency_xml = dependency_template.safe_substitute( package=package, version=version, dependency_lines='\n'.join( dependency_entries ) )
else:
+ if not description:
+ description = ' description=""'
+ else:
+ description = ' description="%s"' % description
template_parser = string.Template( common.new_repository_dependencies_xml )
repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
# Save the generated xml to the specified location.
@@ -625,7 +629,8 @@
( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
self.visit_galaxy_url( url )
return util.listify( repository_ids )
- def install_repositories_from_search_results( self, repositories, strings_displayed=[], strings_not_displayed=[], **kwd ):
+ def install_repositories_from_search_results( self, repositories, install_tool_dependencies=False,
+ strings_displayed=[], strings_not_displayed=[], **kwd ):
'''
Normally, it would be possible to check the appropriate boxes in the search results, and click the install button. This works
in a browser, but Twill manages to lose the 'toolshedgalaxyurl' cookie between one page and the next, so it's necessary to work
@@ -640,15 +645,12 @@
form = tc.browser.get_form( 'select_tool_panel_section' )
checkbox = form.find_control( id="install_tool_dependencies" )
checkbox.disabled = False
- if 'install_tool_dependencies' in kwd:
- install_tool_dependencies = kwd[ 'install_tool_dependencies' ]
- del kwd[ 'install_tool_dependencies' ]
- else:
- install_tool_dependencies = False
if install_tool_dependencies:
checkbox.selected = True
+ kwd[ 'install_tool_dependencies' ] = 'True'
else:
checkbox.selected = False
+ kwd[ 'install_tool_dependencies' ] = 'False'
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
repository_ids = self.initiate_installation_process()
self.wait_for_repository_installation( repository_ids )
@@ -676,8 +678,10 @@
checkbox.disabled = False
if install_tool_dependencies:
checkbox.selected = True
+ kwd[ 'install_tool_dependencies' ] = 'True'
else:
checkbox.selected = False
+ kwd[ 'install_tool_dependencies' ] = 'False'
if 'install_repository_dependencies' in self.last_page():
kwd[ 'install_repository_dependencies' ] = str( install_repository_dependencies ).lower()
if 'shed_tool_conf' not in kwd:
@@ -688,7 +692,7 @@
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
else:
- self.check_for_strings(strings_displayed=[ 'Choose the configuration file whose tool_path setting will be used for installing repositories' ] )
+ self.check_for_strings( strings_displayed=[ 'Choose the configuration file whose tool_path setting will be used for installing repositories' ] )
args = dict( shed_tool_conf=self.shed_tool_conf )
self.submit_form( 1, 'select_shed_tool_panel_config_button', **args )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
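The description-handling change above makes the substitution self-contained: the attribute name and quotes are baked into the substituted value, so an empty description still yields well-formed XML. A sketch of the same approach with string.Template (the template literal here is an illustrative stand-in for common.new_repository_dependencies_xml):

    import string

    # Illustrative stand-in for common.new_repository_dependencies_xml.
    new_repository_dependencies_xml = string.Template(
        '<?xml version="1.0"?>\n'
        '<repositories${description}>\n'
        '${dependency_lines}\n'
        '</repositories>' )

    def render_repository_dependencies( dependency_entries, description=None ):
        # Substitute the whole attribute, not just its value, so an empty
        # description produces description="" rather than a dangling token.
        if not description:
            description = ' description=""'
        else:
            description = ' description="%s"' % description
        return new_repository_dependencies_xml.safe_substitute(
            description=description,
            dependency_lines='\n'.join( dependency_entries ) )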
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional/test_0100_complex_repository_dependencies.py
--- a/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
@@ -60,15 +60,16 @@
commit_message='Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.' )
def test_0015_generate_complex_repository_dependency_invalid_shed_url( self ):
'''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = 'http://http://this is not an url!'
- name = repository.name
- owner = repository.user.username
- changeset_revision = self.get_repository_tip( repository )
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid tool shed %s defined for repository %s' % ( url, repository.name ) ]
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined' % url ]
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -77,16 +78,16 @@
strings_displayed=strings_displayed )
def test_0020_generate_complex_repository_dependency_invalid_repository_name( self ):
'''Generate and upload a complex repository definition that specifies an invalid repository name.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
name = 'invalid_repository!?'
- owner = repository.user.username
- changeset_revision = self.get_repository_tip( repository )
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = 'Ignoring repository dependency definition for tool shed %s, name %s, owner %s' % ( url, name, owner )
- strings_displayed += ', changeset revision %s because the name is invalid.' % changeset_revision
+ strings_displayed = 'Invalid repository name <b>%s</b> defined.' % name
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -95,15 +96,16 @@
strings_displayed=[ strings_displayed ] )
def test_0025_generate_complex_repository_dependency_invalid_owner_name( self ):
'''Generate and upload a complex repository definition that specifies an invalid owner.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
- name = repository.name
+ name = tool_repository.name
owner = 'invalid_owner!?'
- changeset_revision = self.get_repository_tip( repository )
+ changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid owner %s defined for repository %s. Repository dependencies will be ignored.' % ( owner, name ) ]
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined' % owner ]
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -112,16 +114,16 @@
strings_displayed=strings_displayed )
def test_0030_generate_complex_repository_dependency_invalid_changeset_revision( self ):
'''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
- name = repository.name
- owner = repository.user.username
+ name = tool_repository.name
+ owner = tool_repository.user.username
changeset_revision = '1234abcd'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = 'Ignoring repository dependency definition for tool shed %s, name %s, owner %s' % ( url, name, owner )
- strings_displayed += ', changeset revision %s because the changeset revision is invalid.' % changeset_revision
+ strings_displayed = 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -129,13 +131,21 @@
commit_message='Uploaded dependency on bwa_tool_0100 with invalid changeset revision.',
strings_displayed=[ strings_displayed ] )
def test_0035_generate_complex_repository_dependency( self ):
- '''Generate and upload a tool_dependencies.xml file that specifies a repository rather than a tool.'''
+ '''Generate and upload a valid tool_dependencies.xml file that specifies bwa_tool_repository_0100.'''
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
- self.create_repository_complex_dependency( base_repository,
- self.get_filename( 'tool_dependencies.xml', filepath=dependency_path ),
- depends_on=dict( package='bwa', version='0.5.9', repositories=[ tool_repository ] ) )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ url = self.url
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_repository_dependency_xml( [ tool_repository ], xml_filename, complex=True, package='bwa', version='0.5.9' )
+ self.upload_file( base_repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=True,
+ filepath=dependency_path,
+ commit_message='Uploaded valid complex dependency on bwa_tool_0100.' )
self.check_repository_dependency( base_repository, tool_repository )
self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package' ] )
def test_0040_update_base_repository( self ):
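The four invalid-input tests above (shed URL, repository name, owner, changeset revision) all follow one recipe: build the dependency parameters from the valid tool repository, corrupt a single field, generate the XML, and upload with valid_tools_only=False while asserting the HTML-formatted error. A table-driven consolidation might look like the following; the helper itself is hypothetical, though the generate/upload calls mirror the test API shown above:

    # Each case: ( field to corrupt, invalid value, expected error template ).
    INVALID_DEPENDENCY_CASES = [
        ( 'url', 'http://http://this is not an url!', 'Invalid tool shed <b>%s</b> defined' ),
        ( 'name', 'invalid_repository!?', 'Invalid repository name <b>%s</b> defined.' ),
        ( 'owner', 'invalid_owner!?', 'Invalid owner <b>%s</b> defined' ),
        ( 'changeset_revision', '1234abcd', 'Invalid changeset revision <b>%s</b> defined.' ),
    ]

    def check_invalid_complex_dependency( test, base_repository, tool_repository, xml_filename,
                                          dependency_path, field, bad_value, error_template ):
        # Start from a fully valid definition, then corrupt exactly one field.
        params = dict( url=test.url,
                       name=tool_repository.name,
                       owner=tool_repository.user.username,
                       changeset_revision=test.get_repository_tip( tool_repository ) )
        params[ field ] = bad_value
        test.generate_invalid_dependency_xml( xml_filename, params[ 'url' ], params[ 'name' ],
                                              params[ 'owner' ], params[ 'changeset_revision' ],
                                              complex=True, package='bwa', version='0.5.9' )
        test.upload_file( base_repository,
                          'tool_dependencies.xml',
                          valid_tools_only=False,
                          filepath=dependency_path,
                          commit_message='Uploaded dependency with invalid %s.' % field,
                          strings_displayed=[ error_template % bad_value ] )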
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
@@ -0,0 +1,129 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0110'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0110'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+category_name = 'Test 0110 Invalid Repository Dependencies'
+category_desc = 'Test 0110 Invalid Repository Dependencies'
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+ '''Testing emboss 5 with repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( name=category_name, description=category_desc )
+ def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
+ '''Create and populate the emboss_datatypes repository.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0015_verify_datatypes_in_datatypes_repository( self ):
+ '''Verify that the emboss_datatypes repository contains datatype entries.'''
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+ def test_0020_create_emboss_5_repository_and_upload_files( self ):
+ '''Create and populate the emboss_5_0110 repository.'''
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
+ def test_0025_generate_repository_dependency_with_invalid_url( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid URL.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = 'http://http://this is not an url!'
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined for repository <b>%s</b>' % ( url, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid url.',
+ strings_displayed=strings_displayed )
+ def test_0030_generate_repository_dependency_with_invalid_name( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid name.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = '!?invalid?!'
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid name.',
+ strings_displayed=strings_displayed )
+ def test_0035_generate_repository_dependency_with_invalid_owner( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid owner.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = '!?invalid?!'
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined for repository <b>%s</b>' % ( owner, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid owner.',
+ strings_displayed=strings_displayed )
+ def test_0040_generate_repository_dependency_with_invalid_changeset_revision( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid changeset revision.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple', 'invalid' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = '!?invalid?!'
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid changeset revision.',
+ strings_displayed=strings_displayed )
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -72,8 +72,8 @@
self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=strings_displayed )
def test_0015_install_freebayes_repository( self ):
'''Install the freebayes repository without installing tool dependencies.'''
- strings_displayed=[ 'Never installed', 'can be automatically installed', 'Set the tool_dependency_dir' ]
- strings_displayed.extend( [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ] )
+ strings_displayed=[ 'Never installed', 'dependencies can be automatically handled', 'Handle', 'tool dependencies' ]
+ strings_displayed.extend( [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ] )
self.install_repository( repository_name,
common.test_user_1_name,
category_name,
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
--- a/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
@@ -1,4 +1,4 @@
-from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
column_repository_name = 'column_maker_1087'
@@ -12,8 +12,6 @@
category_name = 'Test 1087 Advanced Circular Dependencies'
category_description = 'Test circular dependency features'
-log = logging.getLogger( __name__ )
-
class TestRepositoryDependencies( ShedTwillTestCase ):
'''Test installing a repository, then updating it to include repository dependencies.'''
def test_0000_create_or_login_admin_user( self ):
@@ -91,7 +89,6 @@
convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
self.check_repository_dependency( column_repository, convert_repository )
- log.debug( [ repository.id for repository in test_db_util.get_all_installed_repositories() ] )
def test_0030_reinstall_column_repository( self ):
'''Reinstall column_maker and verify that it now shows repository dependencies.'''
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
@@ -0,0 +1,228 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+bwa_base_repository_name = 'bwa_base_repository_0100'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "BWA tool that depends on bwa 0.5.9, with a complex repository dependency pointing at bwa_tool_repository_0100"
+
+bwa_tool_repository_name = 'bwa_tool_repository_0100'
+bwa_tool_repository_description = "BWA Tool"
+bwa_tool_repository_long_description = "BWA repository with a package tool dependency defined for BWA 0.5.9."
+
+category_name = 'Test 0100 Complex Repository Dependencies'
+category_description = 'Test 0100 Complex Repository Dependencies'
+running_standalone = False
+
+class TestInstallingComplexRepositoryDependencies( ShedTwillTestCase ):
+ '''Test features related to installing repositories with complex repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_bwa_tool_repository( self ):
+ '''Create and populate bwa_tool_0100.'''
+ global running_standalone
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=bwa_tool_repository_name,
+ description=bwa_tool_repository_description,
+ long_description=bwa_tool_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'bwa/complex/tool_dependencies.xml',
+ strings_displayed=[],
+ commit_message='Uploaded tool_dependencies.xml.' )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
+ def test_0010_create_bwa_base_repository( self ):
+ '''Create and populate bwa_base_0100.'''
+ global running_standalone
+ if running_standalone:
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=bwa_base_repository_name,
+ description=bwa_base_repository_description,
+ long_description=bwa_base_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'bwa/complex/bwa_base.tar',
+ strings_displayed=[],
+ commit_message='Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.' )
+ def test_0015_generate_complex_repository_dependency_invalid_shed_url( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = 'http://http://this is not an url!'
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined' % url ]
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid url.',
+ strings_displayed=strings_displayed )
+ def test_0020_generate_complex_repository_dependency_invalid_repository_name( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid repository name.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = self.url
+ name = 'invalid_repository!?'
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = 'Invalid repository name <b>%s</b> defined.' % name
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid repository name.',
+ strings_displayed=[ strings_displayed ] )
+ def test_0025_generate_complex_repository_dependency_invalid_owner_name( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid owner.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = self.url
+ name = tool_repository.name
+ owner = 'invalid_owner!?'
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined' % owner ]
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid owner.',
+ strings_displayed=strings_displayed )
+ def test_0030_generate_complex_repository_dependency_invalid_changeset_revision( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = self.url
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = '1234abcd'
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid changeset revision.',
+ strings_displayed=[ strings_displayed ] )
+ def test_0035_generate_valid_complex_repository_dependency( self ):
+ '''Generate and upload a valid tool_dependencies.xml file that specifies bwa_tool_repository_0100.'''
+ global running_standalone
+ if running_standalone:
+ base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ url = self.url
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_repository_dependency_xml( [ tool_repository ], xml_filename, complex=True, package='bwa', version='0.5.9' )
+ self.upload_file( base_repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=True,
+ filepath=dependency_path,
+ commit_message='Uploaded valid complex dependency on bwa_tool_0100.' )
+ self.check_repository_dependency( base_repository, tool_repository )
+ self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package' ] )
+ def test_0040_update_tool_repository( self ):
+ '''Upload a new tool_dependencies.xml to the tool repository, and verify that the base repository displays the new changeset.'''
+ global running_standalone
+ if running_standalone:
+ base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ previous_changeset = self.get_repository_tip( tool_repository )
+ self.upload_file( tool_repository,
+ 'bwa/complex/readme/tool_dependencies.xml',
+ strings_displayed=[],
+ commit_message='Uploaded new tool_dependencies.xml.' )
+ # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file.
+ self.display_manage_repository_page( base_repository,
+ strings_displayed=[ self.get_repository_tip( tool_repository ), 'bwa', '0.5.9', 'package' ],
+ strings_not_displayed=[ previous_changeset ] )
+ def test_0045_install_base_repository( self ):
+ '''Verify installation of the repository with complex repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ preview_strings_displayed = [ tool_repository.name, self.get_repository_tip( tool_repository ) ]
+ self.install_repository( bwa_base_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=True,
+ preview_strings_displayed=preview_strings_displayed,
+ post_submit_strings_displayed=[ base_repository.name, tool_repository.name, 'new' ],
+ includes_tools=True )
+ def test_0050_verify_installed_repositories( self ):
+ '''Verify that the installed repositories are displayed properly.'''
+ base_repository = test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_installed_repository_by_name_owner( bwa_tool_repository_name, common.test_user_1_name )
+ strings_displayed = [ base_repository.name, base_repository.owner, base_repository.installed_changeset_revision ]
+ strings_displayed.extend( [ tool_repository.name, tool_repository.owner, tool_repository.installed_changeset_revision ] )
+ strings_displayed.append( self.url.replace( 'http://', '' ) )
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=[] )
+ checks = [ ( tool_repository,
+ [ tool_repository.name, tool_repository.owner, tool_repository.installed_changeset_revision ],
+ [ 'Missing tool dependencies' ] ),
+ ( base_repository,
+ [ base_repository.name, base_repository.owner, base_repository.installed_changeset_revision, tool_repository.name,
+ tool_repository.owner, tool_repository.installed_changeset_revision ],
+ [ 'Missing tool dependencies' ] ) ]
+ for repository, strings_displayed, strings_not_displayed in checks:
+ self.display_installed_repository_manage_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ def test_0055_verify_complex_tool_dependency( self ):
+ '''Verify that the generated env.sh contains the right data.'''
+ base_repository = test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_installed_repository_by_name_owner( bwa_tool_repository_name, common.test_user_1_name )
+ env_sh_path = os.path.join( self.galaxy_tool_dependency_dir,
+ 'bwa',
+ '0.5.9',
+ base_repository.owner,
+ base_repository.name,
+ base_repository.installed_changeset_revision,
+ 'env.sh' )
+ assert os.path.exists( env_sh_path ), 'env.sh was not generated in %s for this dependency.' % env_sh_path
+ contents = file( env_sh_path, 'r' ).read()
+ if tool_repository.installed_changeset_revision not in contents or tool_repository.name not in contents:
+ raise AssertionError( 'The env.sh file was not correctly generated. Contents: %s' % contents )
+
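test_0055 above derives the env.sh location from the dependency's identity. A small helper capturing that check (the directory scheme is taken from the test itself; treat it as an assumption about this Galaxy version, not a stable contract):

    import os

    def verify_env_sh( tool_dependency_dir, package, version, repository, required_strings ):
        # Layout used by the test above:
        # <tool_dependency_dir>/<package>/<version>/<owner>/<name>/<installed_changeset_revision>/env.sh
        env_sh_path = os.path.join( tool_dependency_dir, package, version,
                                    repository.owner, repository.name,
                                    repository.installed_changeset_revision, 'env.sh' )
        assert os.path.exists( env_sh_path ), 'env.sh was not generated in %s.' % env_sh_path
        contents = open( env_sh_path, 'r' ).read()
        for required in required_strings:
            assert required in contents, \
                'env.sh is missing %s. Contents: %s' % ( required, contents )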
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
@@ -0,0 +1,159 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0110'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0110'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+category_name = 'Test 0110 Invalid Repository Dependencies'
+category_desc = 'Test 0110 Invalid Repository Dependencies'
+running_standalone = False
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+ '''Testing emboss 5 with repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( name=category_name, description=category_desc )
+ def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
+ '''Create and populate the emboss_datatypes repository.'''
+ global running_standalone
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0015_verify_datatypes_in_datatypes_repository( self ):
+ '''Verify that the emboss_datatypes repository contains datatype entries.'''
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+ def test_0020_create_emboss_5_repository_and_upload_files( self ):
+ '''Create and populate the emboss_5_0110 repository.'''
+ global running_standalone
+ if running_standalone:
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
+ def test_0025_generate_repository_dependency_with_invalid_url( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid URL.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = 'http://http://this is not an url!'
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined for repository <b>%s</b>' % ( url, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid url.',
+ strings_displayed=strings_displayed )
+ def test_0030_generate_repository_dependency_with_invalid_name( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid name.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = '!?invalid?!'
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid name.',
+ strings_displayed=strings_displayed )
+ def test_0035_generate_repository_dependency_with_invalid_owner( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid owner.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = '!?invalid?!'
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined for repository <b>%s</b>' % ( owner, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid owner.',
+ strings_displayed=strings_displayed )
+ def test_0040_generate_repository_dependency_with_invalid_changeset_revision( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid changeset revision.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple', 'invalid' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = '!?invalid?!'
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid changeset revision.',
+ strings_displayed=strings_displayed )
+ def test_0045_install_repository_with_invalid_repository_dependency( self ):
+ '''Install the repository and verify that galaxy detects invalid repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ preview_strings_displayed = [ repository.name, self.get_repository_tip( repository ), 'will be ignored' ]
+ self.install_repository( emboss_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ preview_strings_displayed=preview_strings_displayed,
+ post_submit_strings_displayed=[ repository.name, repository.name, 'new' ],
+ includes_tools=True )
+ repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.display_installed_repository_manage_page( repository, strings_not_displayed=[ 'Repository dependencies' ] )
diff -r 73bb36c4ee3b34c3ca32ed830b4f91df79b971c5 -r 1ab19b93abbdf10d831dd2b9804a0081d7867fac test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -288,6 +288,7 @@
shed_tool_path = galaxy_shed_tool_path,
template_path = "templates",
tool_data_path = tool_data_path,
+ tool_dependency_dir = galaxy_tool_dependency_dir,
tool_path = tool_path,
tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
tool_sheds_config_file = galaxy_tool_sheds_conf_file,
@@ -391,18 +392,18 @@
galaxyapp.shutdown()
galaxyapp = None
log.info( "Embedded galaxy application stopped" )
- if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
- try:
- for dir in [ tool_shed_test_tmp_dir ]:
- if os.path.exists( dir ):
- log.info( "Cleaning up temporary files in %s" % dir )
- shutil.rmtree( dir )
- except:
- pass
- if success:
- return 0
- else:
- return 1
+# if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
+# try:
+# for dir in [ tool_shed_test_tmp_dir ]:
+# if os.path.exists( dir ):
+# log.info( "Cleaning up temporary files in %s" % dir )
+# shutil.rmtree( dir )
+# except:
+# pass
+# if success:
+# return 0
+# else:
+# return 1
if __name__ == "__main__":
sys.exit( main() )
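The cleanup block above was commented out rather than deleted, which also drops the script's explicit exit status. A sketch of the same logic kept behind its environment flag, assuming the surrounding variables from functional_tests.py:

    import os
    import shutil

    def cleanup_and_exit_code( tool_shed_test_tmp_dir, success, log ):
        # Honor TOOL_SHED_TEST_NO_CLEANUP so failed runs can be inspected;
        # otherwise remove the temporary test directory.
        if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
            try:
                if os.path.exists( tool_shed_test_tmp_dir ):
                    log.info( "Cleaning up temporary files in %s" % tool_shed_test_tmp_dir )
                    shutil.rmtree( tool_shed_test_tmp_dir )
            except Exception:
                pass
        return 0 if success else 1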
https://bitbucket.org/galaxy/galaxy-central/commits/596b4c29f842/
changeset: 596b4c29f842
user: inithello
date: 2013-02-04 17:52:33
summary: Merged in changes from next-stable.
affected #: 10 files
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1292,8 +1292,8 @@
for repository_elem in root.findall( 'repository' ):
current_rd_tups, error_message = handle_repository_elem( app, repository_elem, repository_dependencies_tups )
if error_message:
+ # Log the problem, but generate metadata for the invalid repository dependencies.
log.debug( error_message )
- return metadata_dict, error_message
for crdt in current_rd_tups:
repository_dependencies_tups.append( crdt )
if repository_dependencies_tups:
@@ -1868,10 +1868,12 @@
.first()
# We're in the tool shed.
user = get_user_by_username( app, owner )
- return sa_session.query( app.model.Repository ) \
- .filter( and_( app.model.Repository.table.c.name == name,
- app.model.Repository.table.c.user_id == user.id ) ) \
- .first()
+ if user:
+ return sa_session.query( app.model.Repository ) \
+ .filter( and_( app.model.Repository.table.c.name == name,
+ app.model.Repository.table.c.user_id == user.id ) ) \
+ .first()
+ return None
def get_repository_dependencies_for_changeset_revision( trans, repository, repository_metadata, toolshed_base_url,
key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
handled_key_rd_dicts=None, circular_repository_dependencies=None ):
@@ -2284,9 +2286,13 @@
def get_user_by_username( app, username ):
"""Get a user from the database by username."""
sa_session = app.model.context.current
- return sa_session.query( app.model.User ) \
- .filter( app.model.User.table.c.username == username ) \
- .one()
+ try:
+ user = sa_session.query( app.model.User ) \
+ .filter( app.model.User.table.c.username == username ) \
+ .one()
+ return user
+ except Exception, e:
+ return None
def handle_circular_repository_dependency( repository_key, repository_dependency, circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies ):
all_repository_dependencies_root_key = all_repository_dependencies[ 'root_key' ]
repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
@@ -2510,6 +2516,11 @@
else:
# We're in the tool shed.
if tool_shed_is_this_tool_shed( toolshed ):
+ # Append the repository dependency definition regardless of whether it's valid or not, as Galaxy needs this to
+ # properly display an error when the repository dependency is invalid at the time of installation.
+ repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
+ if repository_dependencies_tup not in new_rd_tups:
+ new_rd_tups.append( repository_dependencies_tup )
try:
user = sa_session.query( app.model.User ) \
.filter( app.model.User.table.c.username == owner ) \
@@ -2539,9 +2550,6 @@
error_message = "Invalid changeset revision <b>%s</b> defined. Repository dependencies will be ignored." % str( changeset_revision )
log.debug( error_message )
return new_rd_tups, error_message
- repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
- if repository_dependencies_tup not in new_rd_tups:
- new_rd_tups.append( repository_dependencies_tup )
else:
# Repository dependencies are currently supported within a single tool shed.
error_message = "Invalid tool shed <b>%s</b> defined for repository <b>%s</b>. " % ( toolshed, name )
@@ -3179,7 +3187,7 @@
updating_installed_repository=False,
persist=False )
# We'll only display error messages for the repository tip (it may be better to display error messages for each installable changeset revision).
- if current_metadata_dict == repository.tip( trans.app ):
+ if current_changeset_revision == repository.tip( trans.app ):
invalid_file_tups.extend( invalid_tups )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e run_functional_tests.sh
--- a/run_functional_tests.sh
+++ b/run_functional_tests.sh
@@ -11,8 +11,8 @@
echo "'run_functional_tests.sh -id bbb' for testing one tool with id 'bbb' ('bbb' is the tool id)"
echo "'run_functional_tests.sh -sid ccc' for testing one section with sid 'ccc' ('ccc' is the string after 'section::')"
echo "'run_functional_tests.sh -list' for listing all the tool ids"
- echo "'run_functional_tests.sh -toolshed' for running all the test scripts in the ./test/tool_shed/functional directory"
- echo "'run_functional_tests.sh -toolshed testscriptname' for running one test script named testscriptname in the .test/tool_shed/functional directory"
+ echo "'run_functional_tests.sh -toolshed' for running all the test scripts in the ./test/tool_shed/functional directory"
+ echo "'run_functional_tests.sh -toolshed testscriptname' for running one test script named testscriptname in the .test/tool_shed/functional directory"
elif [ $1 = '-id' ]; then
python ./scripts/functional_tests.py -v functional.test_toolbox:TestForTool_$2 --with-nosehtml --html-report-file run_functional_tests.html
elif [ $1 = '-sid' ]; then
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -32,7 +32,7 @@
self.tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
self.shed_tool_conf = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF' )
# TODO: Figure out a way to alter these attributes during tests.
- self.galaxy_tool_dependency_dir = None # os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
+ self.galaxy_tool_dependency_dir = os.environ.get( 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' )
self.shed_tools_dict = {}
self.home()
def add_repository_review_component( self, **kwd ):
@@ -450,6 +450,10 @@
dependency_template = string.Template( common.complex_repository_dependency_template )
repository_dependency_xml = dependency_template.safe_substitute( package=package, version=version, dependency_lines='\n'.join( dependency_entries ) )
else:
+ if not description:
+ description = ' description=""'
+ else:
+ description = ' description="%s"' % description
template_parser = string.Template( common.new_repository_dependencies_xml )
repository_dependency_xml = template_parser.safe_substitute( description=description, dependency_lines='\n'.join( dependency_entries ) )
# Save the generated xml to the specified location.
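The description handling added above pre-formats the value as a complete description="..." attribute (or an empty-valued one) before substitution, since the template expects a ready-made attribute string rather than a bare value. A small sketch with a hypothetical template standing in for common.new_repository_dependencies_xml:

    import string

    # Hypothetical stand-in for common.new_repository_dependencies_xml.
    new_repository_dependencies_xml = '<?xml version="1.0"?>\n' \
        '<repositories${description}>\n${dependency_lines}\n</repositories>'

    def render( description, dependency_entries ):
        # Mirror the test-case logic: always emit a description attribute.
        if not description:
            description = ' description=""'
        else:
            description = ' description="%s"' % description
        template_parser = string.Template( new_repository_dependencies_xml )
        return template_parser.safe_substitute( description=description,
                                                dependency_lines='\n'.join( dependency_entries ) )

    print( render( 'This is invalid.', [ '  <repository toolshed="..." name="..." />' ] ) )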
@@ -625,7 +629,8 @@
( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
self.visit_galaxy_url( url )
return util.listify( repository_ids )
- def install_repositories_from_search_results( self, repositories, strings_displayed=[], strings_not_displayed=[], **kwd ):
+ def install_repositories_from_search_results( self, repositories, install_tool_dependencies=False,
+ strings_displayed=[], strings_not_displayed=[], **kwd ):
'''
Normally, it would be possible to check the appropriate boxes in the search results, and click the install button. This works
in a browser, but Twill manages to lose the 'toolshedgalaxyurl' cookie between one page and the next, so it's necessary to work
@@ -640,15 +645,12 @@
form = tc.browser.get_form( 'select_tool_panel_section' )
checkbox = form.find_control( id="install_tool_dependencies" )
checkbox.disabled = False
- if 'install_tool_dependencies' in kwd:
- install_tool_dependencies = kwd[ 'install_tool_dependencies' ]
- del kwd[ 'install_tool_dependencies' ]
- else:
- install_tool_dependencies = False
if install_tool_dependencies:
checkbox.selected = True
+ kwd[ 'install_tool_dependencies' ] = 'True'
else:
checkbox.selected = False
+ kwd[ 'install_tool_dependencies' ] = 'False'
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
repository_ids = self.initiate_installation_process()
self.wait_for_repository_installation( repository_ids )
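The refactor above replaces fishing install_tool_dependencies out of **kwd with an explicit keyword argument, then mirrors the boolean back into kwd because the submitted form expects the strings 'True'/'False'. Stripped of the twill plumbing, the pattern is just:

    def install_repositories_from_search_results( repositories,
                                                  install_tool_dependencies=False,
                                                  **kwd ):
        # Mirror the boolean back into kwd as the string the form expects.
        if install_tool_dependencies:
            kwd[ 'install_tool_dependencies' ] = 'True'
        else:
            kwd[ 'install_tool_dependencies' ] = 'False'
        return kwd

    print( install_repositories_from_search_results( [ 'repo_a' ] ) )
    print( install_repositories_from_search_results( [ 'repo_a' ], install_tool_dependencies=True ) )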
@@ -676,8 +678,10 @@
checkbox.disabled = False
if install_tool_dependencies:
checkbox.selected = True
+ kwd[ 'install_tool_dependencies' ] = 'True'
else:
checkbox.selected = False
+ kwd[ 'install_tool_dependencies' ] = 'False'
if 'install_repository_dependencies' in self.last_page():
kwd[ 'install_repository_dependencies' ] = str( install_repository_dependencies ).lower()
if 'shed_tool_conf' not in kwd:
@@ -688,7 +692,7 @@
self.submit_form( 1, 'select_tool_panel_section_button', **kwd )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
else:
- self.check_for_strings(strings_displayed=[ 'Choose the configuration file whose tool_path setting will be used for installing repositories' ] )
+ self.check_for_strings( strings_displayed=[ 'Choose the configuration file whose tool_path setting will be used for installing repositories' ] )
args = dict( shed_tool_conf=self.shed_tool_conf )
self.submit_form( 1, 'select_shed_tool_panel_config_button', **args )
self.check_for_strings( post_submit_strings_displayed, strings_not_displayed )
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional/test_0100_complex_repository_dependencies.py
--- a/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0100_complex_repository_dependencies.py
@@ -60,15 +60,16 @@
commit_message='Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.' )
def test_0015_generate_complex_repository_dependency_invalid_shed_url( self ):
'''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = 'http://http://this is not an url!'
- name = repository.name
- owner = repository.user.username
- changeset_revision = self.get_repository_tip( repository )
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid tool shed %s defined for repository %s' % ( url, repository.name ) ]
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined' % url ]
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -77,16 +78,16 @@
strings_displayed=strings_displayed )
def test_0020_generate_complex_repository_dependency_invalid_repository_name( self ):
'''Generate and upload a complex repository definition that specifies an invalid repository name.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
name = 'invalid_repository!?'
- owner = repository.user.username
- changeset_revision = self.get_repository_tip( repository )
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = 'Ignoring repository dependency definition for tool shed %s, name %s, owner %s' % ( url, name, owner )
- strings_displayed += ', changeset revision %s because the name is invalid.' % changeset_revision
+ strings_displayed = 'Invalid repository name <b>%s</b> defined.' % name
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -95,15 +96,16 @@
strings_displayed=[ strings_displayed ] )
def test_0025_generate_complex_repository_dependency_invalid_owner_name( self ):
'''Generate and upload a complex repository definition that specifies an invalid owner.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
- name = repository.name
+ name = tool_repository.name
owner = 'invalid_owner!?'
- changeset_revision = self.get_repository_tip( repository )
+ changeset_revision = self.get_repository_tip( tool_repository )
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = [ 'Invalid owner %s defined for repository %s. Repository dependencies will be ignored.' % ( owner, name ) ]
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined' % owner ]
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -112,16 +114,16 @@
strings_displayed=strings_displayed )
def test_0030_generate_complex_repository_dependency_invalid_changeset_revision( self ):
'''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
- dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'invalid' ] )
xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
url = self.url
- name = repository.name
- owner = repository.user.username
+ name = tool_repository.name
+ owner = tool_repository.user.username
changeset_revision = '1234abcd'
self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
- strings_displayed = 'Ignoring repository dependency definition for tool shed %s, name %s, owner %s' % ( url, name, owner )
- strings_displayed += ', changeset revision %s because the changeset revision is invalid.' % changeset_revision
+ strings_displayed = 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision
self.upload_file( repository,
'tool_dependencies.xml',
valid_tools_only=False,
@@ -129,13 +131,21 @@
commit_message='Uploaded dependency on bwa_tool_0100 with invalid changeset revision.',
strings_displayed=[ strings_displayed ] )
def test_0035_generate_complex_repository_dependency( self ):
- '''Generate and upload a tool_dependencies.xml file that specifies a repository rather than a tool.'''
+ '''Generate and upload a valid tool_dependencies.xml file that specifies bwa_tool_repository_0100.'''
base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
- self.create_repository_complex_dependency( base_repository,
- self.get_filename( 'tool_dependencies.xml', filepath=dependency_path ),
- depends_on=dict( package='bwa', version='0.5.9', repositories=[ tool_repository ] ) )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ url = self.url
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_repository_dependency_xml( [ tool_repository ], xml_filename, complex=True, package='bwa', version='0.5.9' )
+ self.upload_file( base_repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=True,
+ filepath=dependency_path,
+ commit_message='Uploaded valid complex dependency on bwa_tool_0100.' )
self.check_repository_dependency( base_repository, tool_repository )
self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package' ] )
def test_0040_update_base_repository( self ):
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0110_invalid_simple_repository_dependencies.py
@@ -0,0 +1,129 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0110'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0110'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+category_name = 'Test 0110 Invalid Repository Dependencies'
+category_desc = 'Test 0110 Invalid Repository Dependencies'
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+ '''Testing emboss 5 with repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( name=category_name, description=category_desc )
+ def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
+ '''Create and populate the emboss_datatypes repository.'''
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0015_verify_datatypes_in_datatypes_repository( self ):
+ '''Verify that the emboss_datatypes repository contains datatype entries.'''
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+ def test_0020_create_emboss_5_repository_and_upload_files( self ):
+ '''Create and populate the emboss_5_0110 repository.'''
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
+ def test_0025_generate_repository_dependency_with_invalid_url( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid URL.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = 'http://http://this is not an url!'
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined for repository <b>%s</b>' % ( url, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid url.',
+ strings_displayed=strings_displayed )
+ def test_0030_generate_repository_dependency_with_invalid_name( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid name.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = '!?invalid?!'
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid name.',
+ strings_displayed=strings_displayed )
+ def test_0035_generate_repository_dependency_with_invalid_owner( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid owner.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = '!?invalid?!'
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined for repository <b>%s</b>' % ( owner, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid owner.',
+ strings_displayed=strings_displayed )
+ def test_0040_generate_repository_dependency_with_invalid_changeset_revision( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid changeset revision.'''
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple', 'invalid' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = '!?invalid?!'
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid changeset revision.',
+ strings_displayed=strings_displayed )
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -72,8 +72,8 @@
self.preview_repository_in_tool_shed( repository_name, common.test_user_1_name, strings_displayed=strings_displayed )
def test_0015_install_freebayes_repository( self ):
'''Install the freebayes repository without installing tool dependencies.'''
- strings_displayed=[ 'Never installed', 'can be automatically installed', 'Set the tool_dependency_dir' ]
- strings_displayed.extend( [ 'Handle', 'tool dependencies', 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ] )
+ strings_displayed=[ 'Never installed', 'dependencies can be automatically handled', 'Handle', 'tool dependencies' ]
+ strings_displayed.extend( [ 'freebayes', '0.9.4_9696d0ce8a9', 'samtools', '0.1.18' ] )
self.install_repository( repository_name,
common.test_user_1_name,
category_name,
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
--- a/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py
@@ -1,4 +1,4 @@
-from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os, logging
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
import tool_shed.base.test_db_util as test_db_util
column_repository_name = 'column_maker_1087'
@@ -12,8 +12,6 @@
category_name = 'Test 1087 Advanced Circular Dependencies'
category_description = 'Test circular dependency features'
-log = logging.getLogger( __name__ )
-
class TestRepositoryDependencies( ShedTwillTestCase ):
'''Test installing a repository, then updating it to include repository dependencies.'''
def test_0000_create_or_login_admin_user( self ):
@@ -91,7 +89,6 @@
convert_repository = test_db_util.get_repository_by_name_and_owner( convert_repository_name, common.test_user_1_name )
column_repository = test_db_util.get_repository_by_name_and_owner( column_repository_name, common.test_user_1_name )
self.check_repository_dependency( column_repository, convert_repository )
- log.debug( [ repository.id for repository in test_db_util.get_all_installed_repositories() ] )
def test_0030_reinstall_column_repository( self ):
'''Reinstall column_maker and verify that it now shows repository dependencies.'''
installed_column_repository = test_db_util.get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name )
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1100_install_repository_with_complex_dependencies.py
@@ -0,0 +1,228 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+bwa_base_repository_name = 'bwa_base_repository_0100'
+bwa_base_repository_description = "BWA Base"
+bwa_base_repository_long_description = "BWA tool that depends on bwa 0.5.9, with a complex repository dependency pointing at bwa_tool_repository_0100"
+
+bwa_tool_repository_name = 'bwa_tool_repository_0100'
+bwa_tool_repository_description = "BWA Tool"
+bwa_tool_repository_long_description = "BWA repository with a package tool dependency defined for BWA 0.5.9."
+
+category_name = 'Test 0100 Complex Repository Dependencies'
+category_description = 'Test 0100 Complex Repository Dependencies'
+running_standalone = False
+
+class TestInstallingComplexRepositoryDependencies( ShedTwillTestCase ):
+ '''Test features related to installing repositories with complex repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_bwa_tool_repository( self ):
+ '''Create and populate bwa_tool_0100.'''
+ global running_standalone
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=bwa_tool_repository_name,
+ description=bwa_tool_repository_description,
+ long_description=bwa_tool_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository,
+ 'bwa/complex/tool_dependencies.xml',
+ strings_displayed=[],
+ commit_message='Uploaded tool_dependencies.xml.' )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Tool dependencies', 'may not be', 'in this repository' ] )
+ def test_0010_create_bwa_base_repository( self ):
+ '''Create and populate bwa_base_0100.'''
+ global running_standalone
+ if running_standalone:
+ category = self.create_category( name=category_name, description=category_description )
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ repository = self.get_or_create_repository( name=bwa_base_repository_name,
+ description=bwa_base_repository_description,
+ long_description=bwa_base_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ self.upload_file( repository,
+ 'bwa/complex/bwa_base.tar',
+ strings_displayed=[],
+ commit_message='Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.' )
+ def test_0015_generate_complex_repository_dependency_invalid_shed_url( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid tool shed URL.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = 'http://http://this is not an url!'
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined' % url ]
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid url.',
+ strings_displayed=strings_displayed )
+ def test_0020_generate_complex_repository_dependency_invalid_repository_name( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid repository name.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = self.url
+ name = 'invalid_repository!?'
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = 'Invalid repository name <b>%s</b> defined.' % name
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid repository name.',
+ strings_displayed=[ strings_displayed ] )
+ def test_0025_generate_complex_repository_dependency_invalid_owner_name( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid owner.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = self.url
+ name = tool_repository.name
+ owner = 'invalid_owner!?'
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined' % owner ]
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid owner.',
+ strings_displayed=strings_displayed )
+ def test_0030_generate_complex_repository_dependency_invalid_changeset_revision( self ):
+ '''Generate and upload a complex repository definition that specifies an invalid changeset revision.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex', 'shed' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ url = self.url
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = '1234abcd'
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=True, package='bwa', version='0.5.9' )
+ strings_displayed = 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision
+ self.upload_file( repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on bwa_tool_0100 with invalid changeset revision.',
+ strings_displayed=[ strings_displayed ] )
+ def test_0035_generate_valid_complex_repository_dependency( self ):
+ '''Generate and upload a valid tool_dependencies.xml file that specifies bwa_tool_repository_0100.'''
+ global running_standalone
+ if running_standalone:
+ base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ dependency_path = self.generate_temp_path( 'test_0100', additional_paths=[ 'complex' ] )
+ xml_filename = self.get_filename( 'tool_dependencies.xml', filepath=dependency_path )
+ url = self.url
+ name = tool_repository.name
+ owner = tool_repository.user.username
+ changeset_revision = self.get_repository_tip( tool_repository )
+ self.generate_repository_dependency_xml( [ tool_repository ], xml_filename, complex=True, package='bwa', version='0.5.9' )
+ self.upload_file( base_repository,
+ 'tool_dependencies.xml',
+ valid_tools_only=True,
+ filepath=dependency_path,
+ commit_message='Uploaded valid complex dependency on bwa_tool_0100.' )
+ self.check_repository_dependency( base_repository, tool_repository )
+ self.display_manage_repository_page( base_repository, strings_displayed=[ 'bwa', '0.5.9', 'package' ] )
+ def test_0040_update_tool_repository( self ):
+ '''Upload a new tool_dependencies.xml to the tool repository, and verify that the base repository displays the new changeset.'''
+ global running_standalone
+ if running_standalone:
+ base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ previous_changeset = self.get_repository_tip( tool_repository )
+ self.upload_file( tool_repository,
+ 'bwa/complex/readme/tool_dependencies.xml',
+ strings_displayed=[],
+ commit_message='Uploaded new tool_dependencies.xml.' )
+ # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file.
+ self.display_manage_repository_page( base_repository,
+ strings_displayed=[ self.get_repository_tip( tool_repository ), 'bwa', '0.5.9', 'package' ],
+ strings_not_displayed=[ previous_changeset ] )
+ def test_0045_install_base_repository( self ):
+ '''Verify installation of the repository with complex repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ base_repository = test_db_util.get_repository_by_name_and_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_repository_by_name_and_owner( bwa_tool_repository_name, common.test_user_1_name )
+ preview_strings_displayed = [ tool_repository.name, self.get_repository_tip( tool_repository ) ]
+ self.install_repository( bwa_base_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=True,
+ preview_strings_displayed=preview_strings_displayed,
+ post_submit_strings_displayed=[ base_repository.name, tool_repository.name, 'new' ],
+ includes_tools=True )
+ def test_0050_verify_installed_repositories( self ):
+ '''Verify that the installed repositories are displayed properly.'''
+ base_repository = test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_installed_repository_by_name_owner( bwa_tool_repository_name, common.test_user_1_name )
+ strings_displayed = [ base_repository.name, base_repository.owner, base_repository.installed_changeset_revision ]
+ strings_displayed.extend( [ tool_repository.name, tool_repository.owner, tool_repository.installed_changeset_revision ] )
+ strings_displayed.append( self.url.replace( 'http://', '' ) )
+ self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed, strings_not_displayed=[] )
+ checks = [ ( tool_repository,
+ [ tool_repository.name, tool_repository.owner, tool_repository.installed_changeset_revision ],
+ [ 'Missing tool dependencies' ] ),
+ ( base_repository,
+ [ base_repository.name, base_repository.owner, base_repository.installed_changeset_revision, tool_repository.name,
+ tool_repository.owner, tool_repository.installed_changeset_revision ],
+ [ 'Missing tool dependencies' ] ) ]
+ for repository, strings_displayed, strings_not_displayed in checks:
+ self.display_installed_repository_manage_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed )
+ def test_0055_verify_complex_tool_dependency( self ):
+ '''Verify that the generated env.sh contains the right data.'''
+ base_repository = test_db_util.get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name )
+ tool_repository = test_db_util.get_installed_repository_by_name_owner( bwa_tool_repository_name, common.test_user_1_name )
+ env_sh_path = os.path.join( self.galaxy_tool_dependency_dir,
+ 'bwa',
+ '0.5.9',
+ base_repository.owner,
+ base_repository.name,
+ base_repository.installed_changeset_revision,
+ 'env.sh' )
+ assert os.path.exists( env_sh_path ), 'env.sh was not generated in %s for this dependency.' % env_sh_path
+ contents = file( env_sh_path, 'r' ).read()
+ if tool_repository.installed_changeset_revision not in contents or tool_repository.name not in contents:
+ raise AssertionError( 'The env.sh file was not correctly generated. Contents: %s' % contents )
+
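test_0055 above encodes the expected on-disk layout of an installed complex tool dependency: <tool_dependency_dir>/<package>/<version>/<owner>/<repository>/<installed_changeset_revision>/env.sh. A tiny illustration with made-up values (in the test these come from the installed repository record and GALAXY_TEST_TOOL_DEPENDENCY_DIR):

    import os

    env_sh_path = os.path.join( '/tmp/tool_dependencies', 'bwa', '0.5.9',
                                'user1', 'bwa_base_repository_0100',
                                'ac3195ac1025', 'env.sh' )
    print( env_sh_path )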
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
--- /dev/null
+++ b/test/tool_shed/functional/test_1110_install_repository_with_invalid_repository_dependency.py
@@ -0,0 +1,159 @@
+from tool_shed.base.twilltestcase import ShedTwillTestCase, common, os
+import tool_shed.base.test_db_util as test_db_util
+
+datatypes_repository_name = 'emboss_datatypes_0110'
+datatypes_repository_description = "Galaxy applicable data formats used by Emboss tools."
+datatypes_repository_long_description = "Galaxy applicable data formats used by Emboss tools. This repository contains no tools."
+
+emboss_repository_name = 'emboss_0110'
+emboss_repository_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+emboss_repository_long_description = 'Galaxy wrappers for Emboss version 5.0.0 tools'
+
+category_name = 'Test 0110 Invalid Repository Dependencies'
+category_desc = 'Test 0110 Invalid Repository Dependencies'
+running_standalone = False
+
+class TestBasicRepositoryDependencies( ShedTwillTestCase ):
+ '''Testing emboss 5 with repository dependencies.'''
+ def test_0000_initiate_users( self ):
+ """Create necessary user accounts and login as an admin user."""
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ test_user_1 = test_db_util.get_user( common.test_user_1_email )
+ assert test_user_1 is not None, 'Problem retrieving user with email %s from the database' % common.test_user_1_email
+ test_user_1_private_role = test_db_util.get_private_role( test_user_1 )
+ self.logout()
+ self.login( email=common.admin_email, username=common.admin_username )
+ admin_user = test_db_util.get_user( common.admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % common.admin_email
+ admin_user_private_role = test_db_util.get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category for this test suite"""
+ self.create_category( name=category_name, description=category_desc )
+ def test_0010_create_emboss_datatypes_repository_and_upload_tarball( self ):
+ '''Create and populate the emboss_datatypes repository.'''
+ global running_standalone
+ self.logout()
+ self.login( email=common.test_user_1_email, username=common.test_user_1_name )
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=datatypes_repository_name,
+ description=datatypes_repository_description,
+ long_description=datatypes_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ if self.repository_is_new( repository ):
+ running_standalone = True
+ self.upload_file( repository, 'emboss/datatypes/datatypes_conf.xml', commit_message='Uploaded datatypes_conf.xml.' )
+ def test_0015_verify_datatypes_in_datatypes_repository( self ):
+ '''Verify that the emboss_datatypes repository contains datatype entries.'''
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ self.display_manage_repository_page( repository, strings_displayed=[ 'Datatypes', 'equicktandem', 'hennig86', 'vectorstrip' ] )
+ def test_0020_create_emboss_5_repository_and_upload_files( self ):
+ '''Create and populate the emboss_5_0110 repository.'''
+ global running_standalone
+ if running_standalone:
+ category = test_db_util.get_category_by_name( category_name )
+ repository = self.get_or_create_repository( name=emboss_repository_name,
+ description=emboss_repository_description,
+ long_description=emboss_repository_long_description,
+ owner=common.test_user_1_name,
+ category_id=self.security.encode_id( category.id ),
+ strings_displayed=[] )
+ self.upload_file( repository, 'emboss/emboss.tar', commit_message='Uploaded emboss_5.tar' )
+ def test_0025_generate_repository_dependency_with_invalid_url( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid URL.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = 'http://http://this is not an url!'
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid tool shed <b>%s</b> defined for repository <b>%s</b>' % ( url, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid url.',
+ strings_displayed=strings_displayed )
+ def test_0030_generate_repository_dependency_with_invalid_name( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid name.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = '!?invalid?!'
+ owner = repository.user.username
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid repository name <b>%s</b> defined.' % name ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid name.',
+ strings_displayed=strings_displayed )
+ def test_0035_generate_repository_dependency_with_invalid_owner( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid owner.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = '!?invalid?!'
+ changeset_revision = self.get_repository_tip( repository )
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid owner <b>%s</b> defined for repository <b>%s</b>' % ( owner, repository.name ) ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid owner.',
+ strings_displayed=strings_displayed )
+ def test_0040_generate_repository_dependency_with_invalid_changeset_revision( self ):
+ '''Generate a repository dependency for emboss 5 with an invalid changeset revision.'''
+ global running_standalone
+ if running_standalone:
+ dependency_path = self.generate_temp_path( 'test_0110', additional_paths=[ 'simple', 'invalid' ] )
+ xml_filename = self.get_filename( 'repository_dependencies.xml', filepath=dependency_path )
+ repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ url = self.url
+ name = repository.name
+ owner = repository.user.username
+ changeset_revision = '!?invalid?!'
+ self.generate_invalid_dependency_xml( xml_filename, url, name, owner, changeset_revision, complex=False, description='This is invalid.' )
+ strings_displayed = [ 'Invalid changeset revision <b>%s</b> defined.' % changeset_revision ]
+ self.upload_file( emboss_repository,
+ 'repository_dependencies.xml',
+ valid_tools_only=False,
+ filepath=dependency_path,
+ commit_message='Uploaded dependency on emboss_datatypes_0110 with invalid changeset revision.',
+ strings_displayed=strings_displayed )
+ def test_0045_install_repository_with_invalid_repository_dependency( self ):
+ '''Install the repository and verify that galaxy detects invalid repository dependencies.'''
+ self.galaxy_logout()
+ self.galaxy_login( email=common.admin_email, username=common.admin_username )
+ repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ preview_strings_displayed = [ repository.name, self.get_repository_tip( repository ), 'will be ignored' ]
+ self.install_repository( emboss_repository_name,
+ common.test_user_1_name,
+ category_name,
+ install_tool_dependencies=False,
+ install_repository_dependencies=True,
+ preview_strings_displayed=preview_strings_displayed,
+ post_submit_strings_displayed=[ repository.name, repository.name, 'new' ],
+ includes_tools=True )
+ repository = test_db_util.get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name )
+ self.display_installed_repository_manage_page( repository, strings_not_displayed=[ 'Repository dependencies' ] )
diff -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 -r 596b4c29f84256767975caf9770d8c316d97386e test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -288,6 +288,7 @@
shed_tool_path = galaxy_shed_tool_path,
template_path = "templates",
tool_data_path = tool_data_path,
+ tool_dependency_dir = galaxy_tool_dependency_dir,
tool_path = tool_path,
tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
tool_sheds_config_file = galaxy_tool_sheds_conf_file,
@@ -391,18 +392,18 @@
galaxyapp.shutdown()
galaxyapp = None
log.info( "Embedded galaxy application stopped" )
- if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
- try:
- for dir in [ tool_shed_test_tmp_dir ]:
- if os.path.exists( dir ):
- log.info( "Cleaning up temporary files in %s" % dir )
- shutil.rmtree( dir )
- except:
- pass
- if success:
- return 0
- else:
- return 1
+# if 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
+# try:
+# for dir in [ tool_shed_test_tmp_dir ]:
+# if os.path.exists( dir ):
+# log.info( "Cleaning up temporary files in %s" % dir )
+# shutil.rmtree( dir )
+# except:
+# pass
+# if success:
+# return 0
+# else:
+# return 1
if __name__ == "__main__":
sys.exit( main() )
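Note a side effect of commenting out the cleanup/return block above: main() now falls off the end and returns None, and sys.exit( None ) is treated as a successful (zero) exit status, so the script's process exit code no longer reflects test failures while this (presumably temporary, debugging-oriented) change is in place. For example:

    import sys

    def main():
        success = False
        # With the early returns commented out, execution falls through
        # and the function implicitly returns None.

    try:
        sys.exit( main() )
    except SystemExit as e:
        # sys.exit( None ) means exit status 0, i.e. success.
        print( 'exit code:', e.code )  # -> exit code: None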
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7627567df99c/
changeset: 7627567df99c
user: jgoecks
date: 2013-02-04 16:04:41
summary: Use from_work_dir for all Tophat2 output datasets rather than copying datasets in tool wrapper.
affected #: 2 files
diff -r cef2e9ff0960bab52fd481ec77585b6ecca8dfce -r 7627567df99c879146560ed523f903ed17d12fd6 tools/ngs_rna/tophat2_wrapper.py
--- a/tools/ngs_rna/tophat2_wrapper.py
+++ b/tools/ngs_rna/tophat2_wrapper.py
@@ -260,11 +260,6 @@
if returncode != 0:
raise Exception, stderr
- # Copy output files from tmp directory to specified files.
- shutil.copyfile( os.path.join( "tophat_out", "junctions.bed" ), options.junctions_output_file )
- shutil.copyfile( os.path.join( "tophat_out", "accepted_hits.bam" ), options.accepted_hits_output_file )
-
- # TODO: look for errors in program output.
except Exception, e:
stop_err( 'Error in tophat:\n' + str( e ) )
diff -r cef2e9ff0960bab52fd481ec77585b6ecca8dfce -r 7627567df99c879146560ed523f903ed17d12fd6 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -356,7 +356,7 @@
</conditional>
</actions>
</data>
- <data format="bed" name="junctions" label="${tool.name} on ${on_string}: splice junctions">
+ <data format="bed" name="junctions" label="${tool.name} on ${on_string}: splice junctions" from_work_dir="tophat_out/junctions.bed">
<actions>
<conditional name="refGenomeSource.genomeSource">
<when value="indexed">
@@ -375,7 +375,7 @@
</conditional>
</actions>
</data>
- <data format="bam" name="accepted_hits" label="${tool.name} on ${on_string}: accepted_hits">
+ <data format="bam" name="accepted_hits" label="${tool.name} on ${on_string}: accepted_hits" from_work_dir="tophat_out/accepted_hits.bam">
<actions>
<conditional name="refGenomeSource.genomeSource">
<when value="indexed">
https://bitbucket.org/galaxy/galaxy-central/commits/3de3a5a2d9b1/
changeset: 3de3a5a2d9b1
user: jgoecks
date: 2013-02-04 16:06:13
summary: Use conditional so that from_work_dir is only active if file exists. This prevents spurious error messages when a tool fails.
affected #: 1 file
diff -r 7627567df99c879146560ed523f903ed17d12fd6 -r 3de3a5a2d9b1f467ad5a7d855de84b2e2e7888a8 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -34,11 +34,12 @@
if job_wrapper.dependency_shell_commands:
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
- # -- Append commands to copy job outputs based on from_work_dir attribute. --
+ # Append commands to copy job outputs based on from_work_dir attribute.
if include_work_dir_outputs:
work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
if work_dir_outputs:
- commands += "; " + "; ".join( [ "cp %s %s" % ( source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
+ commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
+ ( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
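The change above wraps each work-directory copy in a shell existence test, so that when a tool fails and never produces its outputs, the job's output is not polluted with "cp: cannot stat ..." noise on top of the real error. A standalone sketch of the command assembly (the paths and command prefix are illustrative, not Galaxy's actual runner state):

    work_dir_outputs = [ ( 'tophat_out/junctions.bed', '/galaxy/database/files/dataset_1.dat' ),
                         ( 'tophat_out/accepted_hits.bam', '/galaxy/database/files/dataset_2.dat' ) ]
    commands = 'tophat2 ...'
    if work_dir_outputs:
        # Guard each copy so a missing output is silently skipped instead
        # of producing a spurious cp error after a tool failure.
        commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
                                        ( source_file, source_file, destination )
                                        for ( source_file, destination ) in work_dir_outputs ] )
    print( commands )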
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cef2e9ff0960/
changeset: cef2e9ff0960
user: dan
date: 2013-02-04 12:48:48
summary: Add sentry_dsn to Tool Shed config.py
affected #: 1 file
diff -r 0ebd6f6f29553b4ac49f78d0106e412115948191 -r cef2e9ff0960bab52fd481ec77585b6ecca8dfce lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -92,6 +92,8 @@
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
+ # Error logging with sentry
+ self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
# Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
self.hgweb_config_dir = resolve_path( kwargs.get( 'hgweb_config_dir', '' ), self.root )
# Proxy features
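sentry_dsn follows the config class's usual pattern of pulling optional settings out of the parsed ini kwargs with a None default, so (presumably) Sentry error reporting stays disabled unless a DSN is configured. A minimal sketch of the pattern, with a stand-in Configuration class:

    class Configuration( object ):
        def __init__( self, **kwargs ):
            # Error logging with sentry; None leaves reporting disabled.
            self.sentry_dsn = kwargs.get( 'sentry_dsn', None )

    print( Configuration().sentry_dsn )  # -> None
    print( Configuration( sentry_dsn='https://key@sentry.example.org/1' ).sentry_dsn )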
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0ebd6f6f2955/
changeset: 0ebd6f6f2955
user: dan
date: 2013-02-04 12:43:35
summary: Backout 3189a1bf18af
affected #: 15 files
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -59,7 +59,6 @@
<datatype extension="bowtie_base_index" type="galaxy.datatypes.ngsindex:BowtieBaseIndex" mimetype="text/html" display_in_upload="False"/><datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/><datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream" max_optional_metadata_filesize="1048576" />
- <datatype extension="data_manager_json" type="galaxy.datatypes.data:Text" mimetype="application/json" subclass="True" display_in_upload="False"/><datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true"><converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/><converter file="fasta_to_bowtie_base_index_converter.xml" target_datatype="bowtie_base_index"/>
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -18,7 +18,6 @@
from galaxy.tools.genome_index import load_genome_index_tools
from galaxy.sample_tracking import external_service_types
from galaxy.openid.providers import OpenIDProviders
-from galaxy.tools.data_manager.manager import DataManagers
class UniverseApplication( object ):
"""Encapsulates the state of a Universe application"""
@@ -94,8 +93,6 @@
self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
# Search support for tools
self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
- #datamanager
- self.data_managers = DataManagers( self )
# If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed import update_manager
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -75,10 +75,6 @@
except:
self.hours_between_check = 12
self.update_integrated_tool_panel = kwargs.get( "update_integrated_tool_panel", True )
- self.enable_data_manager_user_view = string_as_bool( kwargs.get( "enable_data_manager_user_view", "False" ) )
- self.data_manager_config_file = resolve_path( kwargs.get(' data_manager_config_file', 'data_manager_conf.xml' ), self.root )
- self.data_manager_move_files = string_as_bool( kwargs.get( "data_manager_move_files", "False" ) )
- self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_dir', self.tool_data_path )
self.tool_secret = kwargs.get( "tool_secret", "" )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -44,7 +44,7 @@
log.debug( "%d workers ready", nworkers )
def run_next( self ):
- """Run the next job, waiting until one is available if necessary"""
+ """Run the next job, waiting until one is available if neccesary"""
while 1:
job_wrapper = self.queue.get()
if job_wrapper is self.STOP_SIGNAL:
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2996,20 +2996,6 @@
def set_item( self, visualization ):
self.visualization = visualization
-#Data Manager Classes
-class DataManagerHistoryAssociation( object ):
- def __init__( self, id=None, history=None, user=None ):
- self.id = id
- self.history = history
- self.user = user
-
-class DataManagerJobAssociation( object ):
- def __init__( self, id=None, job=None, data_manager_id=None ):
- self.id = id
- self.job = job
- self.data_manager_id = data_manager_id
-#end of Data Manager Classes
-
class UserPreference ( object ):
def __init__( self, name=None, value=None ):
self.name = name
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -930,23 +930,6 @@
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
-#Data Manager tables
-DataManagerHistoryAssociation.table = Table( "data_manager_history_association", metadata,
- Column( "id", Integer, primary_key=True),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
- Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
- )
-
-DataManagerJobAssociation.table = Table( "data_manager_job_association", metadata,
- Column( "id", Integer, primary_key=True),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
- Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
- Column( "data_manager_id", TEXT, index=True )
- )
-
# Tagging tables.
Tag.table = Table( "tag", metadata,
@@ -1916,17 +1899,6 @@
properties=dict( visualization=relation( Visualization ), user=relation( User ) )
)
-#Data Manager tables
-assign_mapper( context, DataManagerHistoryAssociation, DataManagerHistoryAssociation.table,
- properties=dict( history=relation( History ),
- user=relation( User, backref='data_manager_histories' )
- )
- )
-
-assign_mapper( context, DataManagerJobAssociation, DataManagerJobAssociation.table,
- properties=dict( job=relation( Job, backref=backref('data_manager_association', uselist=False ), uselist=False ) )
- )
-
# User tables.
assign_mapper( context, UserPreference, UserPreference.table,
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -6,7 +6,7 @@
pkg_resources.require( "simplejson" )
pkg_resources.require( "Mako" )
-import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback, re, pipes
+import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback, re
import simplejson
import binascii
from mako.template import Template
@@ -25,7 +25,6 @@
from galaxy.util.expressions import ExpressionContext
from galaxy.tools.test import ToolTestBuilder
from galaxy.tools.actions import DefaultToolAction
-from galaxy.tools.actions.data_manager import DataManagerToolAction
from galaxy.tools.deps import DependencyManager
from galaxy.model import directory_hash_id
from galaxy.model.orm import *
@@ -80,7 +79,6 @@
# In-memory dictionary that defines the layout of the tool panel.
self.tool_panel = odict()
self.index = 0
- self.data_manager_tools = odict()
# File that contains the XML section and tool tags from all tool panel config files integrated into a
# single file that defines the tool panel layout. This file can be changed by the Galaxy administrator
# (in a way similar to the single tool_conf.xml file in the past) to alter the layout of the tool panel.
@@ -510,7 +508,7 @@
self.integrated_tool_panel[ key ] = integrated_section
else:
self.integrated_tool_panel.insert( index, key, integrated_section )
- def load_tool( self, config_file, guid=None, **kwds ):
+ def load_tool( self, config_file, guid=None ):
"""Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
# Parse XML configuration file and get the root element
tree = util.parse_xml( config_file )
@@ -526,7 +524,7 @@
ToolClass = tool_types.get( root.get( 'tool_type' ) )
else:
ToolClass = Tool
- return ToolClass( config_file, root, self.app, guid=guid, **kwds )
+ return ToolClass( config_file, root, self.app, guid=guid )
def reload_tool_by_id( self, tool_id ):
"""
Attempt to reload the tool identified by 'tool_id', if successful
@@ -813,7 +811,6 @@
"""
tool_type = 'default'
- default_tool_action = DefaultToolAction
def __init__( self, config_file, root, app, guid=None ):
"""Load a tool from the config named by `config_file`"""
@@ -1054,7 +1051,7 @@
# Action
action_elem = root.find( "action" )
if action_elem is None:
- self.tool_action = self.default_tool_action()
+ self.tool_action = DefaultToolAction()
else:
module = action_elem.get( 'module' )
cls = action_elem.get( 'class' )
@@ -2573,24 +2570,18 @@
temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( hda.dataset.id ) )
try:
a_files = os.listdir( temp_file_path )
- print 'a_files',a_files
if len( a_files ) > 0:
for f in a_files:
- print 'f', f
self.app.object_store.update_from_file(hda.dataset,
extra_dir="dataset_%d_files" % hda.dataset.id,
alt_name = f,
file_name = os.path.join(temp_file_path, f),
- create = True,
- preserve_symlinks = True )
+ create = True)
# Clean up after being handled by object store.
# FIXME: If the object (e.g., S3) becomes async, this will
# cause issues so add it to the object store functionality?
- print 'before rmtree'
shutil.rmtree(temp_file_path)
- print 'after rm tree'
- except Exception, e:
- log.debug( "Error in collect_associated_files: %s" % ( e ) )
+ except:
continue
def collect_child_datasets( self, output, job_working_directory ):
"""
@@ -2815,64 +2806,7 @@
return tool_dict
- def get_default_history_by_trans( self, trans, create=False ):
- return trans.get_history( create=create )
-
-
-class OutputParameterJSONTool( Tool ):
- """
- Alternate implementation of Tool that provides parameters and other values
- JSONified within the contents of an output dataset
- """
- tool_type = 'output_parameter_json'
- def _prepare_json_list( self, param_list ):
- rval = []
- for value in param_list:
- if isinstance( value, dict ):
- rval.append( self._prepare_json_param_dict( value ) )
- elif isinstance( value, list ):
- rval.append( self._prepare_json_list( value ) )
- else:
- rval.append( str( value ) )
- return rval
- def _prepare_json_param_dict( self, param_dict ):
- rval = {}
- for key, value in param_dict.iteritems():
- if isinstance( value, dict ):
- rval[ key ] = self._prepare_json_param_dict( value )
- elif isinstance( value, list ):
- rval[ key ] = self._prepare_json_list( value )
- else:
- rval[ key ] = str( value )
- return rval
- def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
- if param_dict is None:
- param_dict = {}
- json_params = {}
- json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict ) #it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
- json_params[ 'output_data' ] = []
- json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
- json_filename = None
- for i, ( out_name, data ) in enumerate( out_data.iteritems() ):
- #use wrapped dataset to access certain values
- wrapped_data = param_dict.get( out_name )
- #allow multiple files to be created
- file_name = str( wrapped_data )
- extra_files_path = str( wrapped_data.files_path )
- data_dict = dict( out_data_name = out_name,
- ext = data.ext,
- dataset_id = data.dataset.id,
- hda_id = data.id,
- file_name = file_name,
- extra_files_path = extra_files_path )
- json_params[ 'output_data' ].append( data_dict )
- if json_filename is None:
- json_filename = file_name
- out = open( json_filename, 'w' )
- out.write( simplejson.dumps( json_params ) )
- out.close()
-
-class DataSourceTool( OutputParameterJSONTool ):
+class DataSourceTool( Tool ):
"""
Alternate implementation of Tool for data_source tools -- those that
allow the user to query and extract data from another web site.
@@ -2882,10 +2816,29 @@
def _build_GALAXY_URL_parameter( self ):
return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=%s" />' % self.id ) )
def parse_inputs( self, root ):
- super( DataSourceTool, self ).parse_inputs( root )
+ Tool.parse_inputs( self, root )
if 'GALAXY_URL' not in self.inputs:
self.inputs[ 'GALAXY_URL' ] = self._build_GALAXY_URL_parameter()
- self.inputs_by_page[0][ 'GALAXY_URL' ] = self.inputs[ 'GALAXY_URL' ]
+ def _prepare_datasource_json_list( self, param_list ):
+ rval = []
+ for value in param_list:
+ if isinstance( value, dict ):
+ rval.append( self._prepare_datasource_json_param_dict( value ) )
+ elif isinstance( value, list ):
+ rval.append( self._prepare_datasource_json_list( value ) )
+ else:
+ rval.append( str( value ) )
+ return rval
+ def _prepare_datasource_json_param_dict( self, param_dict ):
+ rval = {}
+ for key, value in param_dict.iteritems():
+ if isinstance( value, dict ):
+ rval[ key ] = self._prepare_datasource_json_param_dict( value )
+ elif isinstance( value, list ):
+ rval[ key ] = self._prepare_datasource_json_list( value )
+ else:
+ rval[ key ] = str( value )
+ return rval
def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
if param_dict is None:
param_dict = {}
@@ -2895,7 +2848,7 @@
name = param_dict.get( 'name' )
json_params = {}
- json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict ) #it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
+ json_params[ 'param_dict' ] = self._prepare_datasource_json_param_dict( param_dict ) #it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
json_params[ 'output_data' ] = []
json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
json_filename = None
@@ -2986,186 +2939,9 @@
class GenomeIndexTool( Tool ):
tool_type = 'index_genome'
-class DataManagerTool( OutputParameterJSONTool ):
- tool_type = 'manage_data'
- default_tool_action = DataManagerToolAction
-
- def __init__( self, config_file, root, app, guid=None, data_manager_id=None, **kwds ):
- self.data_manager_id = data_manager_id
- super( DataManagerTool, self ).__init__( config_file, root, app, guid=guid, **kwds )
- if self.data_manager_id is None:
- self.data_manager_id = self.id
-
- #def parse_inputs( self, root ):
- # super( DataManagerTool, self ).parse_inputs( root )
- # '''
- # if '__GALAXY_MOVE_OUTPUT_FILES__' not in self.inputs:
- # self.inputs[ '__GALAXY_MOVE_OUTPUT_FILES__' ] = ToolParameter.build( self, ElementTree.XML( '<param name="__GALAXY_MOVE_OUTPUT_FILES__" label="Move created data to cache destination" type="boolean" truevalue="MOVE" falsevalue="DO_NOT_MOVE" checked="%s" />' % self.app.config.data_manager_move_files ) )
- # print 'self.inputs_by_page',self.inputs_by_page
- # self.inputs_by_page[0][ '__GALAXY_MOVE_OUTPUT_FILES__' ] = self.inputs[ '__GALAXY_MOVE_OUTPUT_FILES__' ]
- # print 'self.inputs', self.inputs
- # '''
- # #self.inputs[ '__DATA_MANAGER_ID__' ] = ToolParameter.build( self, ElementTree.XML( '<param name="__DATA_MANAGER_ID__" type="hidden" value="%s" />' % ( self.data_manager_id ) ) )
- # #self.inputs_by_page[0][ '__DATA_MANAGER_ID__' ] = self.inputs[ '__DATA_MANAGER_ID__' ]
-
- def exec_after_process( self, app, inp_data, out_data, param_dict, job = None, **kwds ):
- #run original exec_after_process
- super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job = job, **kwds )
- #process results of tool
- print 'exect after', self.id
- print 'inp_data', inp_data
- print 'out_data', out_data
- print 'param_dict', param_dict
- print 'job', job, job.state
- if job and job.state == job.states.ERROR:
- return
- #print 'data_manager.output_ref',data_manager.output_ref
- #data_manager = self.app.data_managers.get( self.id, None ) #fix me to not only use tool ID!
- data_manager_id = job.data_manager_association.data_manager_id
- data_manager = self.app.data_managers.get( data_manager_id, None )
- #TODO: need to be able to handle using a data manager tool for more than one manager
- #manager id is currently same as tool id
- assert data_manager is not None, "Invalid data manager (%s) requested. It may have been removed before the job completed." % ( data_manager_id )
- data_manager_dicts = {}
- data_manager_dict = {}
- #TODO: fix this merging below
- for output_name, output_dataset in out_data.iteritems():
- try:
- output_dict = simplejson.loads( open( output_dataset.file_name ).read() )
- except Exception, e:
- log.warning( 'Error reading DataManagerTool json for "%s": %s' % ( output_name, e ) )
- continue
- data_manager_dicts[ output_name ] = output_dict
- print 'data_manager_dicts', data_manager_dicts
- for key, value in output_dict.iteritems():
- if key not in data_manager_dict:
- data_manager_dict[ key ] = {}
- print 'key', key
- print ' data_manager_dict[ key ]', data_manager_dict[ key ]
- print 'value', value
- data_manager_dict[ key ].update( value )
- data_manager_dict.update( output_dict )
-
- print 'data_manager_dicts',data_manager_dicts
- print 'data_manager_dict', data_manager_dict
- data_tables_dict = data_manager_dict.get( 'data_tables', {} )
- #for data_table_name, data_table_values in data_tables_dict.iteritems():
- for data_table_name, data_table_columns in data_manager.data_tables.iteritems():
- print 'data_table_name', data_table_name
- data_table_values = data_tables_dict.pop( data_table_name, None ) #data_tables_dict.get( data_table_name, [] )
- if not data_table_values:
- log.warning( 'No values for data table "%s" were returned by the data manager "%s".' % ( data_table_name, data_manager.id ) )
- continue #next data table
- data_table = app.tool_data_tables.get( data_table_name, None )
- if data_table is None:
- log.error( 'The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % ( data_manager.id, data_table_name, data_table_values, data_table_name, 'tool_data_table_conf.xml' ) )
- continue #next table name
- output_ref_values = {}
- if data_table_name in data_manager.output_ref_by_data_table:
- for data_table_column, output_ref in data_manager.output_ref_by_data_table[ data_table_name ].iteritems():
- output_ref_dataset = out_data.get( output_ref, None )
- assert output_ref_dataset is not None, "Referenced output was not found."
- output_ref_values[ data_table_column ] = output_ref_dataset
- print 'output_ref_values', output_ref_values
-
- final_data_table_values = []
- if not isinstance( data_table_values, list ):
- data_table_values = [ data_table_values ]
- columns = data_table.get_column_name_list()
-
- try:
- data_table_fh = open( data_table.filename, 'r+b' )
- except IOError, e:
- log.warning( 'Error opening data table file (%s) with r+b, assuming file does not exist and will open as wb: %s' % ( data_table.filename, e ) )
- data_table_fh = open( data_table.filename, 'wb' )
- if os.stat( data_table.filename )[6] != 0:
- # ensure last existing line ends with new line
- data_table_fh.seek( -1, 2 ) #last char in file
- last_char = data_table_fh.read()
- if last_char not in [ '\n', '\r' ]:
- data_table_fh.write( '\n' )
- for data_table_row in data_table_values:
- data_table_value = dict( **data_table_row ) #keep original values here
- for name, value in data_table_row.iteritems(): #FIXME: need to loop through here based upon order listed in data_manager config
- if name in output_ref_values:
- #TODO: Allow moving!
- #if param_dict[ '__GALAXY_MOVE_OUTPUT_FILES__' ]:
- # #FIXME: allow moving
- # log.error( "\n\nShould be moving output files directory, but not implemented yet.\n" )
- # base_path = output_ref_values[ name ].extra_files_path
- #else:
- # base_path = output_ref_values[ name ].extra_files_path
- moved = data_manager.process_move( data_table_name, name, output_ref_values[ name ].extra_files_path, **data_table_value )
- print 'moved', moved #should we always move?
- data_table_value[ name ] = data_manager.process_value_translation( data_table_name, name, **data_table_value )
- final_data_table_values.append( data_table_value )
- fields = []
- for column_name in columns:
- if column_name is None or column_name not in data_table_value:
- fields.append( data_table.get_empty_field_by_name( column_name ) )
- else:
- fields.append( data_table_value[ column_name ] )
- print 'fields', fields
- #should we add a comment to file about automatically generated value here?
- data_table_fh.write( "%s\n" % ( data_table.separator.join( self._replace_field_separators( fields, separator=data_table.separator ) ) ) ) #write out fields to disk
- data_table.data.append( fields ) #add fields to loaded data table
- print 'final_data_table_values', final_data_table_values
- print 'data_table.data', data_table.data
- data_table_fh.close()
- for data_table_name, data_table_values in data_tables_dict.iteritems():
- #tool returned extra data table entries, but data table was not declared in data manager
- #do not add these values, but do provide messages
- log.warning( 'The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % ( data_manager.id, data_table_name, data_table_values, data_table_name, self.app.data_managers.filename ) )
-
- def _replace_field_separators( self, fields, separator="\t", replace=None, comment_char=None ):
- #make sure none of the fields contain separator
- #make sure separator replace is different from comment_char,
- #due to possible leading replace
- if replace is None:
- if separator == " ":
- if comment_char == "\t":
- replace = "_"
- else:
- replace = "\t"
- else:
- if comment_char == " ":
- replace = "_"
- else:
- replace = " "
- return map( lambda x: x.replace( separator, replace ), fields )
-
- def get_default_history_by_trans( self, trans, create=False ):
- def _create_data_manager_history( user ):
- history = trans.app.model.History( name='Data Manager History (automatically created)', user=user )
- data_manager_association = trans.app.model.DataManagerHistoryAssociation( user=user, history=history )
- trans.sa_session.add_all( ( history, data_manager_association ) )
- trans.sa_session.flush()
- return history
- user = trans.user
- assert user, 'You must be logged in to use this tool.'
- history = user.data_manager_histories
- if not history:
- #create
- if create:
- history = _create_data_manager_history( user )
- else:
- history = None
- else:
- for history in reversed( history ):
- history = history.history
- if not history.deleted:
- break
- if history.deleted:
- if create:
- history = _create_data_manager_history( user )
- else:
- history = None
- return history
-
-
# Populate tool_type to ToolClass mappings
tool_types = {}
-for tool_class in [ Tool, DataDestinationTool, SetMetadataTool, DataSourceTool, AsyncDataSourceTool, DataManagerTool ]:
+for tool_class in [ Tool, DataDestinationTool, SetMetadataTool, DataSourceTool, AsyncDataSourceTool ]:
tool_types[ tool_class.tool_type ] = tool_class
# ---- Utility classes to be factored out -----------------------------------
@@ -3207,12 +2983,6 @@
"""
def __nonzero__( self ):
return bool( self.value )
- def get_display_text( self, quote=True ):
- print 'self.input',self.input
- print 'self.input.tool.app', self.input.tool.app
- print 'self.value', self.value
- print 'self.input.value_to_display_text( self.value, self.input.tool.app )', self.input.value_to_display_text( self.value, self.input.tool.app )
- return pipes.quote( self.input.value_to_display_text( self.value, self.input.tool.app ) )
class RawObjectWrapper( ToolParameterValueWrapper ):
"""
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -168,7 +168,7 @@
# Set history.
if not history:
- history = tool.get_default_history_by_trans( trans, create=True ) #trans..history
+ history = trans.history
out_data = odict()
# Collect any input datasets from the incoming parameters
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -28,11 +28,6 @@
return self.data_tables.__getitem__( key )
def __contains__( self, key ):
return self.data_tables.__contains__( key )
- def get( self, name, default=None ):
- try:
- return self[ name ]
- except KeyError:
- return default
def load_from_config_file( self, config_filename, tool_data_path, from_shed_config=False ):
"""
This method is called under 3 conditions:
@@ -130,8 +125,6 @@
def __init__( self, config_element, tool_data_path ):
self.name = config_element.get( 'name' )
self.comment_char = config_element.get( 'comment_char' )
- self.empty_field_value = config_element.get( 'empty_field_value', '' )
- self.empty_field_values = {}
for file_elem in config_element.findall( 'file' ):
# There should only be one file_elem.
if 'path' in file_elem.attrib:
@@ -141,8 +134,6 @@
self.tool_data_file = None
self.tool_data_path = tool_data_path
self.missing_index_file = None
- def get_empty_field_by_name( self, name ):
- return self.empty_field_values.get( name, self.empty_field_value )
class TabularToolDataTable( ToolDataTable ):
"""
@@ -185,7 +176,6 @@
if os.path.exists( filename ):
found = True
all_rows.extend( self.parse_file_fields( open( filename ) ) )
- self.filename = filename
else:
# Since the path attribute can include a hard-coded path to a specific directory
# (e.g., <file path="tool-data/cg_crr_files.loc" />) which may not be the same value
@@ -197,7 +187,6 @@
if os.path.exists( corrected_filename ):
found = True
all_rows.extend( self.parse_file_fields( open( corrected_filename ) ) )
- self.filename = corrected_filename
if not found:
self.missing_index_file = filename
log.warn( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
@@ -233,9 +222,6 @@
self.columns[name] = index
if index > self.largest_index:
self.largest_index = index
- empty_field_value = column_elem.get( 'empty_field_value', None )
- if empty_field_value is not None:
- self.empty_field_values[ name ] = empty_field_value
assert 'value' in self.columns, "Required 'value' column missing from column def"
if 'name' not in self.columns:
self.columns['name'] = self.columns['value']
@@ -254,19 +240,7 @@
fields = line.split( self.separator )
if self.largest_index < len( fields ):
rval.append( fields )
- return rval
- def get_column_name_list( self ):
- rval = []
- for i in range( self.largest_index + 1 ):
- found_column = False
- for name, index in self.columns.iteritems():
- if index == i:
- rval.append( name )
- found_column = True
- break
- if not found_column:
- rval.append( None )
- return rval
+ return rval
# Registry of tool data types by type_key
tool_data_table_types = dict( [ ( cls.type_key, cls ) for cls in [ TabularToolDataTable ] ] )
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -880,9 +880,6 @@
>>> print p.filter_value( "hg17" )
hg17
"""
- def __init__( self, *args, **kwds ):
- super( GenomeBuildParameter, self ).__init__( *args, **kwds )
- self.static_options = [ ( value, key, False ) for key, value in util.dbnames ]
def get_options( self, trans, other_values ):
if not trans.history:
yield 'unspecified', '?', False
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -567,22 +567,6 @@
return curdir
return join( *rel_list )
-def relativize_symlinks( path, start=None, followlinks=False):
- for root, dirs, files in os.walk( path, followlinks=followlinks ):
- rel_start = None
- for file_name in files:
- symlink_file_name = os.path.join( root, file_name )
- if os.path.islink( symlink_file_name ):
- symlink_target = os.readlink( symlink_file_name )
- if rel_start is None:
- if start is None:
- rel_start = root
- else:
- rel_start = start
- rel_path = relpath( symlink_target, rel_start )
- os.remove( symlink_file_name )
- os.symlink( rel_path, symlink_file_name )
-
def stringify_dictionary_keys( in_dict ):
#returns a new dictionary
#changes unicode keys into strings, only works on top level (does not recurse)
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -92,8 +92,6 @@
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
- # Error logging with sentry
- self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
# Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
self.hgweb_config_dir = resolve_path( kwargs.get( 'hgweb_config_dir', '' ), self.root )
# Proxy features
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -89,8 +89,7 @@
tool.input_translator.translate( params )
# We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
# so make sure to create a new history if we've never had one before.
- #history = trans.get_history( create=True )
- history = tool.get_default_history_by_trans( trans, create=True )
+ history = trans.get_history( create=True )
template, vars = tool.handle_input( trans, params.__dict__ )
if len( params ) > 0:
trans.log_event( "Tool params: %s" % ( str( params ) ), tool_id=tool_id )
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 templates/webapps/galaxy/admin/index.mako
--- a/templates/webapps/galaxy/admin/index.mako
+++ b/templates/webapps/galaxy/admin/index.mako
@@ -57,10 +57,8 @@
<div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Manage quotas</a></div><div class="toolTitle"><a href="${h.url_for( controller='library_admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
%if trans.app.config.enable_beta_job_managers:
- <div class="toolTitle"><a href="${h.url_for( controller='data_admin', action='manage_data' )}" target="galaxy_main">Manage old local data</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='data_admin', action='manage_data' )}" target="galaxy_main">Manage local data</a></div>
%endif
- ##how to name this?
- <div class="toolTitle"><a href="${h.url_for( controller='data_manager' )}" target="galaxy_main">Manage local (cached) data (beta)</a></div></div></div><div class="toolSectionPad"></div>
diff -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 -r 0ebd6f6f29553b4ac49f78d0106e412115948191 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml
+++ b/tools/data_source/upload.xml
@@ -33,7 +33,7 @@
</param><param name="async_datasets" type="hidden" value="None"/><upload_dataset name="files" title="Specify Files for Dataset" file_type_name="file_type" metadata_ref="files_metadata">
- <param name="file_data" type="file" size="30" label="File" ajax-upload="False" help="TIP: Due to browser limitations, uploading files larger than 2GB is guaranteed to fail. To upload large files, use the URL method (below) or FTP (if enabled by the site administrator).">
+ <param name="file_data" type="file" size="30" label="File" ajax-upload="true" help="TIP: Due to browser limitations, uploading files larger than 2GB is guaranteed to fail. To upload large files, use the URL method (below) or FTP (if enabled by the site administrator)."><validator type="expression" message="You will need to reselect the file you specified (%s)." substitute_value_in_message="True">not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )</validator><!-- use validator to post message to user about needing to reselect the file, since most browsers won't accept the value attribute for file inputs --></param><param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Add sentry_dsn to Tool Shed config.py' lib/galaxy/webapps/community/config.py
by Bitbucket 04 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3189a1bf18af/
changeset: 3189a1bf18af
user: dan
date: 2013-02-04 12:33:19
summary: Add sentry_dsn to Tool Shed config.py' lib/galaxy/webapps/community/config.py
affected #: 15 files
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -59,6 +59,7 @@
<datatype extension="bowtie_base_index" type="galaxy.datatypes.ngsindex:BowtieBaseIndex" mimetype="text/html" display_in_upload="False"/><datatype extension="csfasta" type="galaxy.datatypes.sequence:csFasta" display_in_upload="true"/><datatype extension="data" type="galaxy.datatypes.data:Data" mimetype="application/octet-stream" max_optional_metadata_filesize="1048576" />
+ <datatype extension="data_manager_json" type="galaxy.datatypes.data:Text" mimetype="application/json" subclass="True" display_in_upload="False"/><datatype extension="fasta" type="galaxy.datatypes.sequence:Fasta" display_in_upload="true"><converter file="fasta_to_tabular_converter.xml" target_datatype="tabular"/><converter file="fasta_to_bowtie_base_index_converter.xml" target_datatype="bowtie_base_index"/>
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -18,6 +18,7 @@
from galaxy.tools.genome_index import load_genome_index_tools
from galaxy.sample_tracking import external_service_types
from galaxy.openid.providers import OpenIDProviders
+from galaxy.tools.data_manager.manager import DataManagers
class UniverseApplication( object ):
"""Encapsulates the state of a Universe application"""
@@ -93,6 +94,8 @@
self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
# Search support for tools
self.toolbox_search = galaxy.tools.search.ToolBoxSearch( self.toolbox )
+ #datamanager
+ self.data_managers = DataManagers( self )
# If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
if self.config.get_bool( 'enable_tool_shed_check', False ):
from tool_shed import update_manager
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -75,6 +75,10 @@
except:
self.hours_between_check = 12
self.update_integrated_tool_panel = kwargs.get( "update_integrated_tool_panel", True )
+ self.enable_data_manager_user_view = string_as_bool( kwargs.get( "enable_data_manager_user_view", "False" ) )
+ self.data_manager_config_file = resolve_path( kwargs.get(' data_manager_config_file', 'data_manager_conf.xml' ), self.root )
+ self.data_manager_move_files = string_as_bool( kwargs.get( "data_manager_move_files", "False" ) )
+ self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_dir', self.tool_data_path )
self.tool_secret = kwargs.get( "tool_secret", "" )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -44,7 +44,7 @@
log.debug( "%d workers ready", nworkers )
def run_next( self ):
- """Run the next job, waiting until one is available if neccesary"""
+ """Run the next job, waiting until one is available if necessary"""
while 1:
job_wrapper = self.queue.get()
if job_wrapper is self.STOP_SIGNAL:
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2996,6 +2996,20 @@
def set_item( self, visualization ):
self.visualization = visualization
+#Data Manager Classes
+class DataManagerHistoryAssociation( object ):
+ def __init__( self, id=None, history=None, user=None ):
+ self.id = id
+ self.history = history
+ self.user = user
+
+class DataManagerJobAssociation( object ):
+ def __init__( self, id=None, job=None, data_manager_id=None ):
+ self.id = id
+ self.job = job
+ self.data_manager_id = data_manager_id
+#end of Data Manager Classes
+
class UserPreference ( object ):
def __init__( self, name=None, value=None ):
self.name = name
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -930,6 +930,23 @@
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
)
+#Data Manager tables
+DataManagerHistoryAssociation.table = Table( "data_manager_history_association", metadata,
+ Column( "id", Integer, primary_key=True),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+ Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True )
+ )
+
+DataManagerJobAssociation.table = Table( "data_manager_job_association", metadata,
+ Column( "id", Integer, primary_key=True),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+ Column( "job_id", Integer, ForeignKey( "job.id" ), index=True ),
+ Column( "data_manager_id", TEXT, index=True )
+ )
+
# Tagging tables.
Tag.table = Table( "tag", metadata,
@@ -1899,6 +1916,17 @@
properties=dict( visualization=relation( Visualization ), user=relation( User ) )
)
+#Data Manager tables
+assign_mapper( context, DataManagerHistoryAssociation, DataManagerHistoryAssociation.table,
+ properties=dict( history=relation( History ),
+ user=relation( User, backref='data_manager_histories' )
+ )
+ )
+
+assign_mapper( context, DataManagerJobAssociation, DataManagerJobAssociation.table,
+ properties=dict( job=relation( Job, backref=backref('data_manager_association', uselist=False ), uselist=False ) )
+ )
+
# User tables.
assign_mapper( context, UserPreference, UserPreference.table,
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -6,7 +6,7 @@
pkg_resources.require( "simplejson" )
pkg_resources.require( "Mako" )
-import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback, re
+import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback, re, pipes
import simplejson
import binascii
from mako.template import Template
@@ -25,6 +25,7 @@
from galaxy.util.expressions import ExpressionContext
from galaxy.tools.test import ToolTestBuilder
from galaxy.tools.actions import DefaultToolAction
+from galaxy.tools.actions.data_manager import DataManagerToolAction
from galaxy.tools.deps import DependencyManager
from galaxy.model import directory_hash_id
from galaxy.model.orm import *
@@ -79,6 +80,7 @@
# In-memory dictionary that defines the layout of the tool panel.
self.tool_panel = odict()
self.index = 0
+ self.data_manager_tools = odict()
# File that contains the XML section and tool tags from all tool panel config files integrated into a
# single file that defines the tool panel layout. This file can be changed by the Galaxy administrator
# (in a way similar to the single tool_conf.xml file in the past) to alter the layout of the tool panel.
@@ -508,7 +510,7 @@
self.integrated_tool_panel[ key ] = integrated_section
else:
self.integrated_tool_panel.insert( index, key, integrated_section )
- def load_tool( self, config_file, guid=None ):
+ def load_tool( self, config_file, guid=None, **kwds ):
"""Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
# Parse XML configuration file and get the root element
tree = util.parse_xml( config_file )
@@ -524,7 +526,7 @@
ToolClass = tool_types.get( root.get( 'tool_type' ) )
else:
ToolClass = Tool
- return ToolClass( config_file, root, self.app, guid=guid )
+ return ToolClass( config_file, root, self.app, guid=guid, **kwds )
def reload_tool_by_id( self, tool_id ):
"""
Attempt to reload the tool identified by 'tool_id', if successful
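These two hunks thread an arbitrary **kwds through ToolBox.load_tool and into the tool class constructor, which is what later allows a data_manager_id to reach DataManagerTool without the loader knowing about it. A stripped-down sketch of the passthrough (class and file names here are hypothetical):

class BaseTool( object ):
    def __init__( self, config_file, guid=None, **kwds ):
        self.config_file = config_file
        self.guid = guid

class ManagedTool( BaseTool ):
    def __init__( self, config_file, guid=None, data_manager_id=None, **kwds ):
        # The subclass-specific keyword is consumed here; the rest flows upward.
        self.data_manager_id = data_manager_id
        super( ManagedTool, self ).__init__( config_file, guid=guid, **kwds )

def load_tool( tool_class, config_file, guid=None, **kwds ):
    # The loader forwards **kwds untouched, so new tool types can grow
    # constructor arguments without changing this signature again.
    return tool_class( config_file, guid=guid, **kwds )

tool = load_tool( ManagedTool, 'manager.xml', data_manager_id='fetch_genomes' )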
@@ -811,6 +813,7 @@
"""
tool_type = 'default'
+ default_tool_action = DefaultToolAction
def __init__( self, config_file, root, app, guid=None ):
"""Load a tool from the config named by `config_file`"""
@@ -1051,7 +1054,7 @@
# Action
action_elem = root.find( "action" )
if action_elem is None:
- self.tool_action = DefaultToolAction()
+ self.tool_action = self.default_tool_action()
else:
module = action_elem.get( 'module' )
cls = action_elem.get( 'class' )
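Replacing the hard-coded DefaultToolAction() with self.default_tool_action() turns the action into a class-level hook: a subclass only rebinds one attribute to change how its jobs are created. A minimal sketch of the idiom, with hypothetical stand-ins for the action classes:

class DefaultAction( object ):
    def execute( self ):
        return 'default'

class DataManagerAction( DefaultAction ):
    def execute( self ):
        return 'data manager'

class Tool( object ):
    default_tool_action = DefaultAction  # class attribute, not an instance
    def __init__( self ):
        # Instantiated late, so subclasses can override the attribute.
        self.tool_action = self.default_tool_action()

class DataManagerTool( Tool ):
    default_tool_action = DataManagerAction

assert Tool().tool_action.execute() == 'default'
assert DataManagerTool().tool_action.execute() == 'data manager'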
@@ -2570,18 +2573,24 @@
temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( hda.dataset.id ) )
try:
a_files = os.listdir( temp_file_path )
+ print 'a_files',a_files
if len( a_files ) > 0:
for f in a_files:
+ print 'f', f
self.app.object_store.update_from_file(hda.dataset,
extra_dir="dataset_%d_files" % hda.dataset.id,
alt_name = f,
file_name = os.path.join(temp_file_path, f),
- create = True)
+ create = True,
+ preserve_symlinks = True )
# Clean up after being handled by object store.
# FIXME: If the object (e.g., S3) becomes async, this will
# cause issues so add it to the object store functionality?
+ print 'before rmtree'
shutil.rmtree(temp_file_path)
- except:
+ print 'after rm tree'
+ except Exception, e:
+ log.debug( "Error in collect_associated_files: %s" % ( e ) )
continue
def collect_child_datasets( self, output, job_working_directory ):
"""
@@ -2806,7 +2815,64 @@
return tool_dict
-class DataSourceTool( Tool ):
+ def get_default_history_by_trans( self, trans, create=False ):
+ return trans.get_history( create=create )
+
+
+class OutputParameterJSONTool( Tool ):
+ """
+ Alternate implementation of Tool that provides parameters and other values
+ JSONified within the contents of an output dataset
+ """
+ tool_type = 'output_parameter_json'
+ def _prepare_json_list( self, param_list ):
+ rval = []
+ for value in param_list:
+ if isinstance( value, dict ):
+ rval.append( self._prepare_json_param_dict( value ) )
+ elif isinstance( value, list ):
+ rval.append( self._prepare_json_list( value ) )
+ else:
+ rval.append( str( value ) )
+ return rval
+ def _prepare_json_param_dict( self, param_dict ):
+ rval = {}
+ for key, value in param_dict.iteritems():
+ if isinstance( value, dict ):
+ rval[ key ] = self._prepare_json_param_dict( value )
+ elif isinstance( value, list ):
+ rval[ key ] = self._prepare_json_list( value )
+ else:
+ rval[ key ] = str( value )
+ return rval
+ def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
+ if param_dict is None:
+ param_dict = {}
+ json_params = {}
+ json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict ) #it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
+ json_params[ 'output_data' ] = []
+ json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
+ json_filename = None
+ for i, ( out_name, data ) in enumerate( out_data.iteritems() ):
+ #use wrapped dataset to access certain values
+ wrapped_data = param_dict.get( out_name )
+ #allow multiple files to be created
+ file_name = str( wrapped_data )
+ extra_files_path = str( wrapped_data.files_path )
+ data_dict = dict( out_data_name = out_name,
+ ext = data.ext,
+ dataset_id = data.dataset.id,
+ hda_id = data.id,
+ file_name = file_name,
+ extra_files_path = extra_files_path )
+ json_params[ 'output_data' ].append( data_dict )
+ if json_filename is None:
+ json_filename = file_name
+ out = open( json_filename, 'w' )
+ out.write( simplejson.dumps( json_params ) )
+ out.close()
+
+class DataSourceTool( OutputParameterJSONTool ):
"""
Alternate implementation of Tool for data_source tools -- those that
allow the user to query and extract data from another web site.
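OutputParameterJSONTool's two _prepare_json_* helpers walk an arbitrarily nested parameter structure and coerce every leaf to a string before serialization, since many Galaxy parameter values are wrapper objects that only become meaningful through str(). A self-contained sketch of the same recursive walk:

import json

def prepare( value ):
    # Recursively stringify leaves while preserving dict/list structure.
    if isinstance( value, dict ):
        return dict( ( k, prepare( v ) ) for k, v in value.items() )
    if isinstance( value, list ):
        return [ prepare( v ) for v in value ]
    return str( value )

params = { 'dbkey': 'hg19', 'options': [ 1, 2, { 'nested': None } ] }
print( json.dumps( prepare( params ) ) )
# e.g. {"dbkey": "hg19", "options": ["1", "2", {"nested": "None"}]}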
@@ -2816,29 +2882,10 @@
def _build_GALAXY_URL_parameter( self ):
return ToolParameter.build( self, ElementTree.XML( '<param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=%s" />' % self.id ) )
def parse_inputs( self, root ):
- Tool.parse_inputs( self, root )
+ super( DataSourceTool, self ).parse_inputs( root )
if 'GALAXY_URL' not in self.inputs:
self.inputs[ 'GALAXY_URL' ] = self._build_GALAXY_URL_parameter()
- def _prepare_datasource_json_list( self, param_list ):
- rval = []
- for value in param_list:
- if isinstance( value, dict ):
- rval.append( self._prepare_datasource_json_param_dict( value ) )
- elif isinstance( value, list ):
- rval.append( self._prepare_datasource_json_list( value ) )
- else:
- rval.append( str( value ) )
- return rval
- def _prepare_datasource_json_param_dict( self, param_dict ):
- rval = {}
- for key, value in param_dict.iteritems():
- if isinstance( value, dict ):
- rval[ key ] = self._prepare_datasource_json_param_dict( value )
- elif isinstance( value, list ):
- rval[ key ] = self._prepare_datasource_json_list( value )
- else:
- rval[ key ] = str( value )
- return rval
+ self.inputs_by_page[0][ 'GALAXY_URL' ] = self.inputs[ 'GALAXY_URL' ]
def exec_before_job( self, app, inp_data, out_data, param_dict=None ):
if param_dict is None:
param_dict = {}
@@ -2848,7 +2895,7 @@
name = param_dict.get( 'name' )
json_params = {}
- json_params[ 'param_dict' ] = self._prepare_datasource_json_param_dict( param_dict ) #it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
+ json_params[ 'param_dict' ] = self._prepare_json_param_dict( param_dict ) #it would probably be better to store the original incoming parameters here, instead of the Galaxy modified ones?
json_params[ 'output_data' ] = []
json_params[ 'job_config' ] = dict( GALAXY_DATATYPES_CONF_FILE=param_dict.get( 'GALAXY_DATATYPES_CONF_FILE' ), GALAXY_ROOT_DIR=param_dict.get( 'GALAXY_ROOT_DIR' ), TOOL_PROVIDED_JOB_METADATA_FILE=jobs.TOOL_PROVIDED_JOB_METADATA_FILE )
json_filename = None
@@ -2939,9 +2986,186 @@
class GenomeIndexTool( Tool ):
tool_type = 'index_genome'
+class DataManagerTool( OutputParameterJSONTool ):
+ tool_type = 'manage_data'
+ default_tool_action = DataManagerToolAction
+
+ def __init__( self, config_file, root, app, guid=None, data_manager_id=None, **kwds ):
+ self.data_manager_id = data_manager_id
+ super( DataManagerTool, self ).__init__( config_file, root, app, guid=guid, **kwds )
+ if self.data_manager_id is None:
+ self.data_manager_id = self.id
+
+ #def parse_inputs( self, root ):
+ # super( DataManagerTool, self ).parse_inputs( root )
+ # '''
+ # if '__GALAXY_MOVE_OUTPUT_FILES__' not in self.inputs:
+ # self.inputs[ '__GALAXY_MOVE_OUTPUT_FILES__' ] = ToolParameter.build( self, ElementTree.XML( '<param name="__GALAXY_MOVE_OUTPUT_FILES__" label="Move created data to cache destination" type="boolean" truevalue="MOVE" falsevalue="DO_NOT_MOVE" checked="%s" />' % self.app.config.data_manager_move_files ) )
+ # print 'self.inputs_by_page',self.inputs_by_page
+ # self.inputs_by_page[0][ '__GALAXY_MOVE_OUTPUT_FILES__' ] = self.inputs[ '__GALAXY_MOVE_OUTPUT_FILES__' ]
+ # print 'self.inputs', self.inputs
+ # '''
+ # #self.inputs[ '__DATA_MANAGER_ID__' ] = ToolParameter.build( self, ElementTree.XML( '<param name="__DATA_MANAGER_ID__" type="hidden" value="%s" />' % ( self.data_manager_id ) ) )
+ # #self.inputs_by_page[0][ '__DATA_MANAGER_ID__' ] = self.inputs[ '__DATA_MANAGER_ID__' ]
+
+ def exec_after_process( self, app, inp_data, out_data, param_dict, job = None, **kwds ):
+ #run original exec_after_process
+ super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job = job, **kwds )
+ #process results of tool
+ print 'exect after', self.id
+ print 'inp_data', inp_data
+ print 'out_data', out_data
+ print 'param_dict', param_dict
+ print 'job', job, job.state
+ if job and job.state == job.states.ERROR:
+ return
+ #print 'data_manager.output_ref',data_manager.output_ref
+ #data_manager = self.app.data_managers.get( self.id, None ) #fix me to not only use tool ID!
+ data_manager_id = job.data_manager_association.data_manager_id
+ data_manager = self.app.data_managers.get( data_manager_id, None )
+ #TODO: need to be able to handle using a data manager tool for more than one manager
+ #manager id is currently same as tool id
+ assert data_manager is not None, "Invalid data manager (%s) requested. It may have been removed before the job completed." % ( data_manager_id )
+ data_manager_dicts = {}
+ data_manager_dict = {}
+ #TODO: fix this merging below
+ for output_name, output_dataset in out_data.iteritems():
+ try:
+ output_dict = simplejson.loads( open( output_dataset.file_name ).read() )
+ except Exception, e:
+ log.warning( 'Error reading DataManagerTool json for "%s": %s' % ( output_name, e ) )
+ continue
+ data_manager_dicts[ output_name ] = output_dict
+ print 'data_manager_dicts', data_manager_dicts
+ for key, value in output_dict.iteritems():
+ if key not in data_manager_dict:
+ data_manager_dict[ key ] = {}
+ print 'key', key
+ print ' data_manager_dict[ key ]', data_manager_dict[ key ]
+ print 'value', value
+ data_manager_dict[ key ].update( value )
+ data_manager_dict.update( output_dict )
+
+ print 'data_manager_dicts',data_manager_dicts
+ print 'data_manager_dict', data_manager_dict
+ data_tables_dict = data_manager_dict.get( 'data_tables', {} )
+ #for data_table_name, data_table_values in data_tables_dict.iteritems():
+ for data_table_name, data_table_columns in data_manager.data_tables.iteritems():
+ print 'data_table_name', data_table_name
+ data_table_values = data_tables_dict.pop( data_table_name, None ) #data_tables_dict.get( data_table_name, [] )
+ if not data_table_values:
+ log.warning( 'No values for data table "%s" were returned by the data manager "%s".' % ( data_table_name, data_manager.id ) )
+ continue #next data table
+ data_table = app.tool_data_tables.get( data_table_name, None )
+ if data_table is None:
+ log.error( 'The data manager "%s" returned an unknown data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % ( data_manager.id, data_table_name, data_table_values, data_table_name, 'tool_data_table_conf.xml' ) )
+ continue #next table name
+ output_ref_values = {}
+ if data_table_name in data_manager.output_ref_by_data_table:
+ for data_table_column, output_ref in data_manager.output_ref_by_data_table[ data_table_name ].iteritems():
+ output_ref_dataset = out_data.get( output_ref, None )
+ assert output_ref_dataset is not None, "Referenced output was not found."
+ output_ref_values[ data_table_column ] = output_ref_dataset
+ print 'output_ref_values', output_ref_values
+
+ final_data_table_values = []
+ if not isinstance( data_table_values, list ):
+ data_table_values = [ data_table_values ]
+ columns = data_table.get_column_name_list()
+
+ try:
+ data_table_fh = open( data_table.filename, 'r+b' )
+ except IOError, e:
+ log.warning( 'Error opening data table file (%s) with r+b, assuming file does not exist and will open as wb: %s' % ( data_table.filename, e ) )
+ data_table_fh = open( data_table.filename, 'wb' )
+ if os.stat( data_table.filename )[6] != 0:
+ # ensure last existing line ends with new line
+ data_table_fh.seek( -1, 2 ) #last char in file
+ last_char = data_table_fh.read()
+ if last_char not in [ '\n', '\r' ]:
+ data_table_fh.write( '\n' )
+ for data_table_row in data_table_values:
+ data_table_value = dict( **data_table_row ) #keep original values here
+ for name, value in data_table_row.iteritems(): #FIXME: need to loop through here based upon order listed in data_manager config
+ if name in output_ref_values:
+ #TODO: Allow moving!
+ #if param_dict[ '__GALAXY_MOVE_OUTPUT_FILES__' ]:
+ # #FIXME: allow moving
+ # log.error( "\n\nShould be moving output files directory, but not implemented yet.\n" )
+ # base_path = output_ref_values[ name ].extra_files_path
+ #else:
+ # base_path = output_ref_values[ name ].extra_files_path
+ moved = data_manager.process_move( data_table_name, name, output_ref_values[ name ].extra_files_path, **data_table_value )
+ print 'moved', moved #should we always move?
+ data_table_value[ name ] = data_manager.process_value_translation( data_table_name, name, **data_table_value )
+ final_data_table_values.append( data_table_value )
+ fields = []
+ for column_name in columns:
+ if column_name is None or column_name not in data_table_value:
+ fields.append( data_table.get_empty_field_by_name( column_name ) )
+ else:
+ fields.append( data_table_value[ column_name ] )
+ print 'fields', fields
+ #should we add a comment to file about automatically generated value here?
+ data_table_fh.write( "%s\n" % ( data_table.separator.join( self._replace_field_separators( fields, separator=data_table.separator ) ) ) ) #write out fields to disk
+ data_table.data.append( fields ) #add fields to loaded data table
+ print 'final_data_table_values', final_data_table_values
+ print 'data_table.data', data_table.data
+ data_table_fh.close()
+ for data_table_name, data_table_values in data_tables_dict.iteritems():
+ #tool returned extra data table entries, but data table was not declared in data manager
+ #do not add these values, but do provide messages
+ log.warning( 'The data manager "%s" returned an undeclared data table "%s" with new entries "%s". These entries will not be created. Please confirm that an entry for "%s" exists in your "%s" file.' % ( data_manager.id, data_table_name, data_table_values, data_table_name, self.app.data_managers.filename ) )
+
+ def _replace_field_separators( self, fields, separator="\t", replace=None, comment_char=None ):
+ #make sure none of the fields contain separator
+ #make sure separator replace is different from comment_char,
+ #due to possible leading replace
+ if replace is None:
+ if separator == " ":
+ if comment_char == "\t":
+ replace = "_"
+ else:
+ replace = "\t"
+ else:
+ if comment_char == " ":
+ replace = "_"
+ else:
+ replace = " "
+ return map( lambda x: x.replace( separator, replace ), fields )
+
+ def get_default_history_by_trans( self, trans, create=False ):
+ def _create_data_manager_history( user ):
+ history = trans.app.model.History( name='Data Manager History (automatically created)', user=user )
+ data_manager_association = trans.app.model.DataManagerHistoryAssociation( user=user, history=history )
+ trans.sa_session.add_all( ( history, data_manager_association ) )
+ trans.sa_session.flush()
+ return history
+ user = trans.user
+ assert user, 'You must be logged in to use this tool.'
+ history = user.data_manager_histories
+ if not history:
+ #create
+ if create:
+ history = _create_data_manager_history( user )
+ else:
+ history = None
+ else:
+ for history in reversed( history ):
+ history = history.history
+ if not history.deleted:
+ break
+ if history.deleted:
+ if create:
+ history = _create_data_manager_history( user )
+ else:
+ history = None
+ return history
+
+
# Populate tool_type to ToolClass mappings
tool_types = {}
-for tool_class in [ Tool, DataDestinationTool, SetMetadataTool, DataSourceTool, AsyncDataSourceTool ]:
+for tool_class in [ Tool, DataDestinationTool, SetMetadataTool, DataSourceTool, AsyncDataSourceTool, DataManagerTool ]:
tool_types[ tool_class.tool_type ] = tool_class
# ---- Utility classes to be factored out -----------------------------------
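The most self-contained piece of the large DataManagerTool hunk above is _replace_field_separators, which guarantees that no field value can smuggle the table's column separator into the .loc file, while also avoiding a replacement character that could collide with the comment character. A sketch of the same sanitization outside the class:

def replace_field_separators( fields, separator='\t', replace=None, comment_char=None ):
    # Choose a replacement distinct from both the separator and the
    # comment character, so a sanitized field can never begin a comment.
    if replace is None:
        if separator == ' ':
            replace = '_' if comment_char == '\t' else '\t'
        else:
            replace = '_' if comment_char == ' ' else ' '
    return [ field.replace( separator, replace ) for field in fields ]

row = [ 'hg19', 'Human\thg19', '/data/hg19.fa' ]
print( '\t'.join( replace_field_separators( row ) ) )
# the embedded tab in the display name is rewritten to a space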
@@ -2983,6 +3207,12 @@
"""
def __nonzero__( self ):
return bool( self.value )
+ def get_display_text( self, quote=True ):
+ print 'self.input',self.input
+ print 'self.input.tool.app', self.input.tool.app
+ print 'self.value', self.value
+ print 'self.input.value_to_display_text( self.value, self.input.tool.app )', self.input.value_to_display_text( self.value, self.input.tool.app )
+ return pipes.quote( self.input.value_to_display_text( self.value, self.input.tool.app ) )
class RawObjectWrapper( ToolParameterValueWrapper ):
"""
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -168,7 +168,7 @@
# Set history.
if not history:
- history = trans.history
+ history = tool.get_default_history_by_trans( trans, create=True ) #trans..history
out_data = odict()
# Collect any input datasets from the incoming parameters
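This one-line change is the hook that makes dedicated Data Manager histories possible: the action layer now asks the tool for its default history instead of assuming trans.history, and DataManagerTool overrides the method to return (or create) its own history. A minimal sketch of the dispatch, with a fake trans object standing in for the real transaction:

class FakeTrans( object ):
    def get_history( self, create=False ):
        return 'user history'
    def get_data_manager_history( self, create=False ):
        return 'data manager history'

class Tool( object ):
    def get_default_history_by_trans( self, trans, create=False ):
        # Ordinary tools keep the old behavior.
        return trans.get_history( create=create )

class DataManagerTool( Tool ):
    def get_default_history_by_trans( self, trans, create=False ):
        # Stand-in for the real lookup of the most recent,
        # non-deleted 'Data Manager History' for the user.
        return trans.get_data_manager_history( create=create )

def set_history( tool, trans ):
    # The action layer no longer cares which kind of tool it runs.
    return tool.get_default_history_by_trans( trans, create=True )

assert set_history( Tool(), FakeTrans() ) == 'user history'
assert set_history( DataManagerTool(), FakeTrans() ) == 'data manager history'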
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -28,6 +28,11 @@
return self.data_tables.__getitem__( key )
def __contains__( self, key ):
return self.data_tables.__contains__( key )
+ def get( self, name, default=None ):
+ try:
+ return self[ name ]
+ except KeyError:
+ return default
def load_from_config_file( self, config_filename, tool_data_path, from_shed_config=False ):
"""
This method is called under 3 conditions:
@@ -125,6 +130,8 @@
def __init__( self, config_element, tool_data_path ):
self.name = config_element.get( 'name' )
self.comment_char = config_element.get( 'comment_char' )
+ self.empty_field_value = config_element.get( 'empty_field_value', '' )
+ self.empty_field_values = {}
for file_elem in config_element.findall( 'file' ):
# There should only be one file_elem.
if 'path' in file_elem.attrib:
@@ -134,6 +141,8 @@
self.tool_data_file = None
self.tool_data_path = tool_data_path
self.missing_index_file = None
+ def get_empty_field_by_name( self, name ):
+ return self.empty_field_values.get( name, self.empty_field_value )
class TabularToolDataTable( ToolDataTable ):
"""
@@ -176,6 +185,7 @@
if os.path.exists( filename ):
found = True
all_rows.extend( self.parse_file_fields( open( filename ) ) )
+ self.filename = filename
else:
# Since the path attribute can include a hard-coded path to a specific directory
# (e.g., <file path="tool-data/cg_crr_files.loc" />) which may not be the same value
@@ -187,6 +197,7 @@
if os.path.exists( corrected_filename ):
found = True
all_rows.extend( self.parse_file_fields( open( corrected_filename ) ) )
+ self.filename = corrected_filename
if not found:
self.missing_index_file = filename
log.warn( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
@@ -222,6 +233,9 @@
self.columns[name] = index
if index > self.largest_index:
self.largest_index = index
+ empty_field_value = column_elem.get( 'empty_field_value', None )
+ if empty_field_value is not None:
+ self.empty_field_values[ name ] = empty_field_value
assert 'value' in self.columns, "Required 'value' column missing from column def"
if 'name' not in self.columns:
self.columns['name'] = self.columns['value']
@@ -240,7 +254,19 @@
fields = line.split( self.separator )
if self.largest_index < len( fields ):
rval.append( fields )
- return rval
+ return rval
+ def get_column_name_list( self ):
+ rval = []
+ for i in range( self.largest_index + 1 ):
+ found_column = False
+ for name, index in self.columns.iteritems():
+ if index == i:
+ rval.append( name )
+ found_column = True
+ break
+ if not found_column:
+ rval.append( None )
+ return rval
# Registry of tool data types by type_key
tool_data_table_types = dict( [ ( cls.type_key, cls ) for cls in [ TabularToolDataTable ] ] )
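get_column_name_list above inverts the columns name-to-index mapping into an index-ordered list. The same idea as a self-contained sketch with a worked example (function and variable names are illustrative):

def column_name_list( columns, largest_index ):
    # Invert name -> index into an index-ordered list of names,
    # leaving None for any index that no column name claims.
    names = [ None ] * ( largest_index + 1 )
    for name, index in columns.items():
        if names[ index ] is None:
            names[ index ] = name
    return names

print column_name_list( { 'value': 0, 'name': 1, 'path': 3 }, 3 )
# -> ['value', 'name', None, 'path']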
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -880,6 +880,9 @@
>>> print p.filter_value( "hg17" )
hg17
"""
+ def __init__( self, *args, **kwds ):
+ super( GenomeBuildParameter, self ).__init__( *args, **kwds )
+ self.static_options = [ ( value, key, False ) for key, value in util.dbnames ]
def get_options( self, trans, other_values ):
if not trans.history:
yield 'unspecified', '?', False
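The new __init__ above snapshots util.dbnames into select-list options once at construction time. A worked example of the resulting shape, assuming util.dbnames yields ( build_key, display_name ) pairs (the sample entries below are illustrative):

dbnames = [ ( 'hg17', 'Human May 2004 (NCBI35/hg17) (hg17)' ),
            ( 'mm9', 'Mouse July 2007 (NCBI37/mm9) (mm9)' ) ]
static_options = [ ( value, key, False ) for key, value in dbnames ]
print static_options[0]
# -> ('Human May 2004 (NCBI35/hg17) (hg17)', 'hg17', False)
# i.e. ( display_text, submitted_value, selected ) for each option.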
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -567,6 +567,22 @@
return curdir
return join( *rel_list )
+def relativize_symlinks( path, start=None, followlinks=False):
+ for root, dirs, files in os.walk( path, followlinks=followlinks ):
+ rel_start = None
+ for file_name in files:
+ symlink_file_name = os.path.join( root, file_name )
+ if os.path.islink( symlink_file_name ):
+ symlink_target = os.readlink( symlink_file_name )
+ if rel_start is None:
+ if start is None:
+ rel_start = root
+ else:
+ rel_start = start
+ rel_path = relpath( symlink_target, rel_start )
+ os.remove( symlink_file_name )
+ os.symlink( rel_path, symlink_file_name )
+
def stringify_dictionary_keys( in_dict ):
#returns a new dictionary
#changes unicode keys into strings, only works on top level (does not recurse)
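relativize_symlinks above rewrites each absolute symlink target relative to the link's own directory (or to start, when given), so a tree of links survives being moved. A hedged usage sketch; the paths are illustrative and it assumes Galaxy's lib directory is on sys.path:

import os
from galaxy.util import relativize_symlinks

# Build a tiny tree containing one absolute symlink.
os.makedirs( '/tmp/pkg/bin' )
open( '/tmp/pkg/bin/tool-1.0', 'w' ).close()
os.symlink( '/tmp/pkg/bin/tool-1.0', '/tmp/pkg/bin/tool' )

relativize_symlinks( '/tmp/pkg' )
print os.readlink( '/tmp/pkg/bin/tool' )
# -> tool-1.0, so the link still resolves if /tmp/pkg is relocated.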
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -92,6 +92,8 @@
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
+ # Error logging with sentry
+ self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
# Where the tool shed hgweb.config file is stored - the default is the Galaxy installation directory.
self.hgweb_config_dir = resolve_path( kwargs.get( 'hgweb_config_dir', '' ), self.root )
# Proxy features
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -89,7 +89,8 @@
tool.input_translator.translate( params )
# We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
# so make sure to create a new history if we've never had one before.
- history = trans.get_history( create=True )
+ #history = trans.get_history( create=True )
+ history = tool.get_default_history_by_trans( trans, create=True )
template, vars = tool.handle_input( trans, params.__dict__ )
if len( params ) > 0:
trans.log_event( "Tool params: %s" % ( str( params ) ), tool_id=tool_id )
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 templates/webapps/galaxy/admin/index.mako
--- a/templates/webapps/galaxy/admin/index.mako
+++ b/templates/webapps/galaxy/admin/index.mako
@@ -57,8 +57,10 @@
<div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas' )}" target="galaxy_main">Manage quotas</a></div><div class="toolTitle"><a href="${h.url_for( controller='library_admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
%if trans.app.config.enable_beta_job_managers:
- <div class="toolTitle"><a href="${h.url_for( controller='data_admin', action='manage_data' )}" target="galaxy_main">Manage local data</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='data_admin', action='manage_data' )}" target="galaxy_main">Manage old local data</a></div>
%endif
+ ##how to name this?
+ <div class="toolTitle"><a href="${h.url_for( controller='data_manager' )}" target="galaxy_main">Manage local (cached) data (beta)</a></div></div></div><div class="toolSectionPad"></div>
diff -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 -r 3189a1bf18af72f750e476caa54d7e3ae2d3cc36 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml
+++ b/tools/data_source/upload.xml
@@ -33,7 +33,7 @@
</param><param name="async_datasets" type="hidden" value="None"/><upload_dataset name="files" title="Specify Files for Dataset" file_type_name="file_type" metadata_ref="files_metadata">
- <param name="file_data" type="file" size="30" label="File" ajax-upload="true" help="TIP: Due to browser limitations, uploading files larger than 2GB is guaranteed to fail. To upload large files, use the URL method (below) or FTP (if enabled by the site administrator).">
+ <param name="file_data" type="file" size="30" label="File" ajax-upload="False" help="TIP: Due to browser limitations, uploading files larger than 2GB is guaranteed to fail. To upload large files, use the URL method (below) or FTP (if enabled by the site administrator)."><validator type="expression" message="You will need to reselect the file you specified (%s)." substitute_value_in_message="True">not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )</validator><!-- use validator to post message to user about needing to reselect the file, since most browsers won't accept the value attribute for file inputs --></param><param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: james_taylor: error middleware: return writable from start_response correctly
by Bitbucket 03 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d7f4d3a8d0b2/
changeset: d7f4d3a8d0b2
user: james_taylor
date: 2013-02-04 06:09:23
summary: error middleware: return writable from start_response correctly
affected #: 1 file
diff -r 0c1b71a50ccb5b6f5aff233b40355d84141960c6 -r d7f4d3a8d0b273163ae3d04872e211b7dcfbeba9 lib/galaxy/web/framework/middleware/error.py
--- a/lib/galaxy/web/framework/middleware/error.py
+++ b/lib/galaxy/web/framework/middleware/error.py
@@ -200,7 +200,9 @@
def __call__(self, *args):
self.response_started = True
- self.start_response(*args)
+ # Return whatever the wrapped start_response would have
+ # returned
+ return self.start_response(*args)
class CatchingIter(object):
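The fix above matters beyond Galaxy: PEP 333 specifies that start_response returns a write( body_data ) callable for legacy applications, so a middleware wrapper that swallows that return value silently breaks any app still using write(). A minimal sketch of a transparent wrapper (illustrative class, not the middleware's full code):

class TransparentStartResponse( object ):
    def __init__( self, start_response ):
        self.start_response = start_response
        self.response_started = False
    def __call__( self, status, headers, exc_info=None ):
        self.response_started = True
        # Pass through the legacy write callable from the server.
        return self.start_response( status, headers, exc_info )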
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: james_taylor: genetrack: missed one reference to display application
by Bitbucket 03 Feb '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0c1b71a50ccb/
changeset: 0c1b71a50ccb
user: james_taylor
date: 2013-02-04 05:39:06
summary: genetrack: missed one reference to display application
affected #: 1 file
diff -r 898c9e140da953fe18ebfda5a96b9810d372706d -r 0c1b71a50ccb5b6f5aff233b40355d84141960c6 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -114,7 +114,6 @@
<converter file="interval_to_tabix_converter.xml" target_datatype="tabix" depends_on="bgzip"/><converter file="interval_to_summary_tree_converter.xml" target_datatype="summary_tree"/><!-- <display file="ucsc/interval_as_bed.xml" inherit="True" /> -->
- <display file="genetrack.xml" inherit="True"/><display file="ensembl/ensembl_interval_as_bed.xml" inherit="True"/><display file="gbrowse/gbrowse_interval_as_bed.xml" inherit="True"/><display file="rviewer/bed.xml" inherit="True"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/