galaxy-commits
commit/galaxy-central: jgoecks: Fixes that (a) simplify data providers framework and (b) make it possible to view different trees in the same nexus file.
by Bitbucket 09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8269f76312af/
changeset: 8269f76312af
user: jgoecks
date: 2012-10-09 05:33:03
summary: Fixes that (a) simplify data providers framework and (b) make it possible to view different trees in the same nexus file.
affected #: 7 files
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -520,7 +520,7 @@
rval.append( payload )
- return { 'data': rval, 'message': message }
+ return { 'data': rval, 'dataset_type': self.dataset_type, 'message': message }
def write_data_to_file( self, regions, filename ):
out = open( filename, "w" )
@@ -550,8 +550,6 @@
for large datasets.
"""
- dataset_type = 'interval_index'
-
def get_iterator( self, chrom=None, start=None, end=None ):
# Read first line in order to match chrom naming format.
line = source.readline()
@@ -696,8 +694,6 @@
for large datasets.
"""
- dataset_type = 'tabix'
-
def get_iterator( self, chrom, start, end ):
# Read first line in order to match chrom naming format.
line = source.readline()
@@ -1278,7 +1274,7 @@
results.append( payload )
- return { 'data': results, 'message': message }
+ return { 'data': results, 'dataset_type': self.dataset_type, 'message': message }
class GtfTabixDataProvider( TabixDataProvider ):
"""
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/visualization/data_providers/phyloviz/__init__.py
--- a/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
+++ b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
@@ -7,36 +7,37 @@
class PhylovizDataProvider( BaseDataProvider ):
+ dataset_type = "phylo"
+
def __init__( self, original_dataset=None ):
super( PhylovizDataProvider, self ).__init__( original_dataset=original_dataset )
- def get_data( self ):
- """returns [trees], meta
- Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
+ def get_data( self, tree_index=0 ):
+ """
+ Returns trees.
+ Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
"""
- jsonDicts, meta = [], {}
file_ext = self.original_dataset.datatype.file_ext
file_name = self.original_dataset.file_name
- try:
- if file_ext == "nhx": # parses newick files
- newickParser = Newick_Parser()
- jsonDicts, parseMsg = newickParser.parseFile( file_name )
- elif file_ext == "phyloxml": # parses phyloXML files
- phyloxmlParser = Phyloxml_Parser()
- jsonDicts, parseMsg = phyloxmlParser.parseFile( file_name )
- elif file_ext == "nex": # parses nexus files
- nexusParser = Nexus_Parser()
- jsonDicts, parseMsg = nexusParser.parseFile( file_name )
- meta["trees"] = parseMsg
- else:
- raise Exception("File type is not supported")
+ parseMsg = None
+ jsonDicts = []
+ rval = { 'dataset_type': self.dataset_type }
- meta["msg"] = parseMsg
+ if file_ext == "nhx": # parses newick files
+ newickParser = Newick_Parser()
+ jsonDicts, parseMsg = newickParser.parseFile( file_name )
+ elif file_ext == "phyloxml": # parses phyloXML files
+ phyloxmlParser = Phyloxml_Parser()
+ jsonDicts, parseMsg = phyloxmlParser.parseFile( file_name )
+ elif file_ext == "nex": # parses nexus files
+ nexusParser = Nexus_Parser()
+ jsonDicts, parseMsg = nexusParser.parseFile( file_name )
+ jsonDicts = jsonDicts[ int( tree_index ) ]
+ rval["trees"] = parseMsg
- except Exception, e:
- raise e
- jsonDicts, meta["msg"] = [], "Parse failed"
+ rval[ "data" ] = jsonDicts
+ rval[ "msg"] = parseMsg
+
+ return rval
- return jsonDicts, meta
-
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/visualization/data_providers/registry.py
--- a/lib/galaxy/visualization/data_providers/registry.py
+++ b/lib/galaxy/visualization/data_providers/registry.py
@@ -1,5 +1,8 @@
from galaxy.visualization.data_providers.basic import ColumnDataProvider
from galaxy.visualization.data_providers.genome import *
+from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider
+from galaxy.datatypes.xml import Phyloxml
+from galaxy.datatypes.data import Newick, Nexus
class DataProviderRegistry( object ):
"""
@@ -45,6 +48,8 @@
data_provider_class = RawVcfDataProvider
elif isinstance( original_dataset.datatype, Tabular ):
data_provider_class = ColumnDataProvider
+ elif isinstance( original_dataset.datatype, ( Nexus, Newick, Phyloxml ) ):
+ data_provider_class = PhylovizDataProvider
data_provider = data_provider_class( original_dataset=original_dataset )
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -185,19 +185,7 @@
return msg
# Return data.
- data = None
data_provider = trans.app.data_provider_registry.get_data_provider( trans, raw=True, original_dataset=dataset )
-
- if isinstance( data_provider, ColumnDataProvider ):
- data = data_provider.get_data( **kwargs )
-
- else:
- # Default to genomic data.
- # FIXME: need better way to set dataset_type.
- low, high = int( kwargs.get( 'low' ) ), int( kwargs.get( 'high' ) )
- data = data_provider.get_data( start=low, end=high, **kwargs )
- data[ 'dataset_type' ] = 'interval_index'
- data[ 'extra_info' ] = None
- if isinstance( dataset.datatype, Vcf ):
- data[ 'dataset_type' ] = 'tabix'
+ data = data_provider.get_data( **kwargs )
+
return data
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -847,8 +847,7 @@
# Get data.
pd = PhylovizDataProvider( original_dataset=hda )
- json, config = pd.get_data()
- json = json[tree_index]
+ config = pd.get_data( tree_index=tree_index )
config["title"] = hda.display_name()
config["ext"] = hda.datatype.file_ext
@@ -857,7 +856,7 @@
config["saved_visualization"] = False
# Return viz.
- return trans.fill_template_mako( "visualization/phyloviz.mako", data = json, config=config )
+ return trans.fill_template_mako( "visualization/phyloviz.mako", data = config[ "data" ], config=config )
@web.json
def bookmarks_from_dataset( self, trans, hda_id=None, ldda_id=None ):
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 static/scripts/viz/phyloviz.js
--- a/static/scripts/viz/phyloviz.js
+++ b/static/scripts/viz/phyloviz.js
@@ -1,4 +1,4 @@
-define(['libs/d3', 'viz/visualization'], function(d3, visualization_mod) {
+define(['libs/d3', 'viz/visualization', 'mvc/data'], function(d3, visualization_mod, data_mod) {
var UserMenuBase = Backbone.View.extend({
/**
@@ -181,6 +181,12 @@
nodeAttrChangedTime : 0
},
+ initialize: function(options) {
+ this.set("dataset", new data_mod.Dataset({
+ id: options.dataset_id
+ }));
+ },
+
root : {}, // Root has to be its own independent object because it is not part of the viz_config
toggle : function (d) {
@@ -255,7 +261,7 @@
},
success: function(res){
var viz_id = res.url.split("id=")[1].split("&")[0],
- viz_url = "/phyloviz/visualization?id=" + viz_id;
+ viz_url = "/visualization?id=" + viz_id;
window.history.pushState({}, "", viz_url + window.location.hash);
hide_modal();
}
@@ -662,11 +668,11 @@
* Primes the Ajax URL to load another Nexus tree
*/
var self = this,
- treeIndex = $("#phylovizNexSelector :selected").val(),
- dataset_id = self.phyloTree.get("dataset_id"),
- url = "phyloviz/getJsonData?dataset_id=" + dataset_id + "&treeIndex=" + String(treeIndex);
- $.getJSON(url, function(packedJson){
- window.initPhyloViz(packedJson.data, packedJson.config);
+ treeIndex = $("#phylovizNexSelector :selected").val();
+ $.getJSON(self.phyloTree.get("dataset").url(), { tree_index: treeIndex, data_type: 'raw_data' }, function(packedJson){
+ self.data = packedJson.data;
+ self.config = packedJson;
+ self.render();
});
}
});
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 templates/visualization/phyloviz.mako
--- a/templates/visualization/phyloviz.mako
+++ b/templates/visualization/phyloviz.mako
@@ -165,7 +165,7 @@
// -- Render viz. --
phyloviz.render();
- }
+ };
$(function firstVizLoad(){ // calls when viz is loaded for the first time
var config = JSON.parse( '${ h.to_json_string( config )}');
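
For clarity, a minimal usage sketch (Python) of the refactored provider shown in the diff above; the hda argument and the load_tree helper are hypothetical, not part of this commit. After this change PhylovizDataProvider.get_data() takes a tree_index (for Nexus files holding several trees) and returns a single dict keyed by 'dataset_type', 'data', 'trees' and 'msg' rather than the old (jsonDicts, meta) tuple:

from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider

def load_tree( hda, tree_index=0 ):
    # hda: a hypothetical HistoryDatasetAssociation wrapping a nhx/phyloxml/nex file
    provider = PhylovizDataProvider( original_dataset=hda )
    result = provider.get_data( tree_index=tree_index )
    # result[ 'dataset_type' ] == 'phylo'
    # result[ 'data' ] is the selected tree (a JSON-able dict)
    # result[ 'msg' ] / result[ 'trees' ] carry the parser message
    return result[ 'data' ]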
commit/galaxy-central: jgoecks: Add interpreter and absolute path logic to version tag. Thanks to Björn Grüning.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/02fe49c3d251/
changeset: 02fe49c3d251
user: jgoecks
date: 2012-10-08 20:22:44
summary: Add interpreter and absolute path logic to version tag. Thanks to Björn Grüning.
affected #: 1 file
diff -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -960,6 +960,12 @@
version_cmd = root.find("version_command")
if version_cmd is not None:
self.version_string_cmd = version_cmd.text
+ version_cmd_interpreter = version_cmd.get( "interpreter", None )
+ if version_cmd_interpreter:
+ executable = self.version_string_cmd.split()[0]
+ abs_executable = os.path.abspath(os.path.join(self.tool_dir, executable))
+ command_line = self.version_string_cmd.replace(executable, abs_executable, 1)
+ self.version_string_cmd = self.interpreter + " " + command_line
# Parallelism for tasks, read from tool config.
parallelism = root.find("parallelism")
if parallelism is not None and parallelism.get("method"):
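
For reference, a standalone Python sketch of the version-command handling added above; tool_dir, the script name and the interpreter value are illustrative assumptions, not taken from a real tool config. The executable named in the version command is resolved against the tool's directory and the interpreter is prepended:

import os

tool_dir = '/galaxy/tools/my_tool'                # hypothetical tool directory
version_string_cmd = 'get_version.py --version'   # hypothetical <version_command> text
interpreter = 'python'                            # hypothetical interpreter attribute

executable = version_string_cmd.split()[0]
abs_executable = os.path.abspath( os.path.join( tool_dir, executable ) )
command_line = version_string_cmd.replace( executable, abs_executable, 1 )
version_string_cmd = interpreter + ' ' + command_line
# version_string_cmd is now 'python /galaxy/tools/my_tool/get_version.py --version'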
commit/galaxy-central: carlfeberhard: history.js: add persistant storage, show prev. opened datasets on page refresh; base-mvc.js: add PersistantStorage object adapter
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/724aaf15bcbe/
changeset: 724aaf15bcbe
user: carlfeberhard
date: 2012-10-08 19:27:11
summary: history.js: add persistant storage, show prev. opened datasets on page refresh; base-mvc.js: add PersistantStorage object adapter
affected #: 3 files
diff -r c456d67423b6988de9f9777d9758901845d02de8 -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 static/scripts/mvc/base-mvc.js
--- a/static/scripts/mvc/base-mvc.js
+++ b/static/scripts/mvc/base-mvc.js
@@ -41,6 +41,7 @@
}
});
+
//==============================================================================
/**
* Adds logging capabilities to your Models/Views
@@ -69,6 +70,7 @@
}
};
+
// =============================================================================
/** Global string localization object (and global short form alias)
* set with either:
@@ -78,8 +80,8 @@
* _l( original )
*/
//TODO: move to Galaxy.Localization (maybe galaxy.base.js)
-var GalaxyLocalization = jQuery.extend({}, {
- aliasName : '_l',
+var GalaxyLocalization = jQuery.extend( {}, {
+ ALIAS_NAME : '_l',
localizedStrings : {},
setLocalizedString : function( str_or_obj, localizedString ){
@@ -127,7 +129,7 @@
});
// global localization alias
-window[ GalaxyLocalization.aliasName ] = function( str ){ return GalaxyLocalization.localize( str ); };
+window[ GalaxyLocalization.ALIAS_NAME ] = function( str ){ return GalaxyLocalization.localize( str ); };
//TEST: setLocalizedString( string, string ), _l( string )
//TEST: setLocalizedString( hash ), _l( string )
@@ -135,203 +137,112 @@
//TEST: _l( non assigned string )
-
//==============================================================================
/**
- * Base class for template loaders:
- * The main interface is loader.getTemplates( templatesToLoad )
- * where templatesToLoad is in the form:
- * {
- * remoteTemplateFilename1: {
- * templateFunctionName1 : templateID1,
- * templateFunctionName2 : templateID2,
- * ...
- * },
- * remoteTemplateFilename2: {
- * templateFunctionName3 : templateID3,
- * templateFunctionName4 : templateID4,
- * ...
- * }
- * }
- * getTemplates will return a map of the templates in the form:
- * {
- * templateFunctionName1 : compiledTemplateFn1(),
- * templateFunctionName2 : compiledTemplateFn2(),
- * templateFunctionName3 : compiledTemplateFn3(),
- * ...
- * }
+ * @class PersistantStorage
+ * persistant storage adapter to:
+ * provide an easy interface to object based storage using method chaining
+ * allow easy change of the storage engine used (h5's local storage?)
*
- * Generally meant to be called for Backbone views, etc like this:
- * BackboneView.templates = CompiledTemplateLoader( templatesToLoad );
+ * @param {String} storageKey : the key the storage engine will place the storage object under
+ * @param {Object} storageDefaults : [optional] initial object to set up storage with
+ *
+ * @example :
+ * HistoryPanel.storage = new PersistanStorage( HistoryPanel.toString(), { visibleItems, {} })
+ * itemView.bind( 'toggleBodyVisibility', function( id, visible ){
+ * if( visible ){
+ * HistoryPanel.storage.get( 'visibleItems' ).set( id, true );
+ * } else {
+ * HistoryPanel.storage.get( 'visibleItems' ).deleteKey( id );
+ * }
+ * });
*/
-var TemplateLoader = _.extend( {}, LoggableMixin, {
- //TODO: incorporate caching of template functions (for use across objects)
- //TODO: only require and use 2 level (or some variation) map templatesToLoad for the remote loader
-
- // comment next line out to suppress logging
- //logger : console,
-
- //cachedTemplates : {},
-
- getTemplateLoadFn : function(){
- throw( "There is no templateLoadFn. Make sure you're using a subclass of TemplateLoader" );
- },
-
- // loop through templatesToLoad assuming it is a map in the form mentioned above
- getTemplates : function( templatesToLoad, forceReload ){
- forceReload = forceReload || false;
- this.log( this, 'getTemplates:', templatesToLoad, ', forceReload:', forceReload );
-
- //!TODO: cache templates here
- var templates = {},
- loader = this,
- templateLoadFn = this.getTemplateLoadFn();
-
- if( !templatesToLoad ){ return templates; }
- jQuery.each( templatesToLoad, function( templateFile, templateData ){
-
- //TODO: handle flatter map versions of templatesToLoad ({ name : id })
- jQuery.each( templateData, function( templateName, templateID ){
- loader.log( loader + ', templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID );
- templates[ templateName ] = templateLoadFn.call( loader, templateFile, templateName, templateID );
- });
- });
- return templates;
+var PersistantStorage = function( storageKey, storageDefaults ){
+ if( !storageKey ){
+ throw( "PersistantStorage needs storageKey argument" );
}
-});
+ storageDefaults = storageDefaults || {};
+ // ~constants for the current engine
+ //TODO:?? this would be greatly simplified if we're IE9+ only (setters/getters)
+ var STORAGE_ENGINE_GETTER = jQuery.jStorage.get,
+ STORAGE_ENGINE_SETTER = jQuery.jStorage.set,
+ STORAGE_ENGINE_KEY_DELETER = jQuery.jStorage.deleteKey;
-//..............................................................................
-/** find the compiled template in Handlebars.templates by templateName
- * and return the entire, requested templates map
- */
-var CompiledTemplateLoader = _.extend( {}, TemplateLoader, {
- getTemplateLoadFn : function(){ return this.loadCompiledHandlebarsTemplate; },
-
- // override if new compiler
- loadCompiledHandlebarsTemplate : function( templateFile, templateName, templateID ){
- //pre: compiled templates should have been loaded with the mako helper h.templates
- // (although these could be dynamically loaded as well?)
- this.log( 'getInDomTemplates, templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID );
-
- if( !Handlebars.templates || !Handlebars.templates[ templateID ] ){
- throw( 'Template not found: Handlebars.' + templateID
- + '. Check your h.templates() call in the mako file that rendered this page' );
- }
- this.log( 'found template function:', templateID );
- // really this is just a lookup
- return Handlebars.templates[ templateID ];
- }
-
- //TEST: Handlebars.full NOT runtime
- //TEST: no Handlebars
- //TEST: bad id
- //TEST: Handlebars.runtime, good id
-});
+ // recursion helper for method chaining access
+ var StorageRecursionHelper = function( data, parent ){
+ //console.debug( 'new StorageRecursionHelper. data:', data );
+ data = data || {};
+ parent = parent || null;
+ return {
+ // get a value from the storage obj named 'key',
+ // if it's an object - return a new StorageRecursionHelper wrapped around it
+ // if it's something simpler - return the value
+ // if this isn't passed a key - return the data at this level of recursion
+ get : function( key ){
+ //console.debug( this + '.get', key );
+ if( key === undefined ){
+ return data;
+ } else if( data.hasOwnProperty( key ) ){
+ return ( jQuery.type( data[ key ] ) === 'object' )?
+ ( new StorageRecursionHelper( data[ key ], this ) )
+ :( data[ key ] );
+ }
+ return undefined;
+ },
+ // set a value on the current data - then pass up to top to save current entire object in storage
+ set : function( key, value ){
+ //TODO: add parameterless variation setting the data somehow
+ // ??: difficult bc of obj by ref, closure
+ //console.debug( this + '.set', key, value );
+ data[ key ] = value;
+ this.save();
+ return this;
+ },
+ // remove a key at this level - then save entire (as 'set' above)
+ deleteKey : function( key ){
+ //console.debug( this + '.deleteKey', key );
+ delete data[ key ];
+ this.save();
+ return this;
+ },
+ // pass up the recursion chain (see below for base case)
+ save : function(){
+ //console.debug( this + '.save', parent );
+ return parent.save();
+ },
+ toString : function(){
+ return ( 'StorageRecursionHelper(' + data + ')' );
+ }
+ };
+ };
-//..............................................................................
-/** find the NON-compiled template templateID in the DOM, compile it (using Handlebars),
- * and return the entire, requested templates map
- * (Note: for use with Mako.include and multiple templates)
- */
-var InDomTemplateLoader = _.extend( {}, TemplateLoader, {
-
- // override or change if a new compiler (Underscore, etc.) is being used
- compileTemplate : function( templateText ){
- // we'll need the compiler
- if( !Handlebars || !Handlebars.compile ){
- throw( 'No Handlebars.compile found. You may only have Handlebars.runtime loaded.'
- + 'Include handlebars.full for this to work' );
- }
- // copy fn ref to this view under the templateName
- this.log( 'compiling template:', templateText );
- return Handlebars.compile( templateText );
- },
-
- findTemplateInDom : function( templateFile, templateName, templateID ){
- // assume the last is best
- return $( 'script#' + templateID ).last();
- },
-
- getTemplateLoadFn : function(){ return this.loadInDomTemplate; },
-
- loadInDomTemplate : function( templateFile, templateName, templateID ){
- this.log( 'getInDomTemplate, templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID );
-
- // find it in the dom by the id and compile
- var template = this.findTemplateInDom( templateFile, templateName, templateID );
- if( !template || !template.length ){
- throw( 'Template not found within the DOM: ' + templateID
- + '. Check that this template has been included in the page' );
- }
- this.log( 'found template in dom:', template.html() );
- return this.compileTemplate( template.html() );
+ //??: more readable to make another class?
+ var returnedStorage = {};
+ // attempt to get starting data from engine...
+ data = STORAGE_ENGINE_GETTER( storageKey );
+
+ // ...if that fails, use the defaults (and store them)
+ if( data === null ){
+ //console.debug( 'no previous data. using defaults...' );
+ data = jQuery.extend( true, {}, storageDefaults );
+ STORAGE_ENGINE_SETTER( storageKey, data );
}
- //TEST: no compiler
- //TEST: good url, good id, in DOM
- //TEST: good url, good id, NOT in DOM
-});
+ // the object returned by this constructor will be a modified StorageRecursionHelper
+ returnedStorage = new StorageRecursionHelper( data );
+ // the base case for save()'s upward recursion - save everything to storage
+ returnedStorage.save = function( newData ){
+ //console.debug( returnedStorage, '.save:', JSON.stringify( returnedStorage.get() ) );
+ STORAGE_ENGINE_SETTER( storageKey, returnedStorage.get() );
+ };
+ // delete function to remove the base data object from the storageEngine
+ returnedStorage.destroy = function(){
+ //console.debug( returnedStorage, '.destroy:' );
+ STORAGE_ENGINE_KEY_DELETER( storageKey );
+ };
+ returnedStorage.toString = function(){ return 'PersistantStorage(' + data + ')'; };
+
+ return returnedStorage;
+};
-//..............................................................................
-/** HTTP GET the NON-compiled templates, append into the DOM, compile them,
- * and return the entire, requested templates map
- * (for use with dynamically loaded views)
- */
-var RemoteTemplateLoader = _.extend( {}, InDomTemplateLoader, {
- templateBaseURL : 'static/scripts/templates/',
-
- getTemplateLoadFn : function(){ return this.loadViaHttpGet; },
-
- loadViaHttpGet : function( templateFile, templateName, templateID ){
- var templateBaseURL = 'static/scripts/templates/';
- this.log( 'loadViaHttpGet, templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID,
- 'templateBaseURL:', this.templateBaseURL );
-
- //??: possibly not the best pattern here...
- // try in-dom first (prevent loading the same templateFile for each of its templates)
- var template = null;
- try {
- template = this.loadInDomTemplate( templateFile, templateName, templateID );
-
- // if that didn't work, load the templateFile via GET,...
- } catch( exception ){
- this.log( 'getInDomTemplate exception:' + exception );
- // handle no compiler exception
- if( !Handlebars.compile ){ throw( exception ); }
- //TEST:
-
- this.log( "Couldn't locate template in DOM: " + templateID );
- var loader = this;
- var url = templateBaseURL + templateFile;
- //??: async : false may cause problems in the long run
- jQuery.ajax( url, {
- method : 'GET',
- async : false,
- success : function( data ){
- loader.log( templateFile + ' loaded via GET. Attempting compile...' );
- //...move the templateFile into the DOM and try that again
- $( 'body' ).append( data );
- template = loader.loadInDomTemplate( templateFile, templateName, templateID );
- },
- error : function( data, status, xhr ){
- throw( 'Failed to fetch ' + url + ':' + status );
- }
- });
- }
- if( !template ){
- throw( "Couldn't load or fetch template: " + templateID );
- }
- return template;
- }
-
- //TEST: no compiler
- //TEST: good url, good id, already local
- //TEST: good url, good id, remote load
- //TEST: good url, bad template id
- //TEST: bad url, error from ajax
-});
diff -r c456d67423b6988de9f9777d9758901845d02de8 -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 static/scripts/mvc/history.js
--- a/static/scripts/mvc/history.js
+++ b/static/scripts/mvc/history.js
@@ -6,6 +6,8 @@
Backbone.js implementation of history panel
TODO:
+ currently, adding a dataset (via tool execute, etc.) creates a new dataset and refreshes the page
+
meta:
require.js
convert function comments to jsDoc style, complete comments
@@ -142,8 +144,9 @@
className : "historyItemContainer",
// ................................................................................ SET UP
- initialize : function(){
+ initialize : function( attributes ){
this.log( this + '.initialize:', this, this.model );
+ this.visible = attributes.visible;
},
// ................................................................................ RENDER MAIN
@@ -209,6 +212,7 @@
return buttonDiv;
},
+ //TODO: ?? the three title buttons render for err'd datasets: is this normal?
_render_displayButton : function(){
// don't show display while uploading
if( this.model.get( 'state' ) === HistoryItem.STATES.UPLOAD ){ return null; }
@@ -608,6 +612,11 @@
if( this.model.get( 'bodyIsShown' ) === false ){
body.hide();
}
+ if( this.visible ){
+ body.show();
+ } else {
+ body.hide();
+ }
return body;
},
@@ -693,14 +702,15 @@
return false;
},
- toggleBodyVisibility : function(){
- this.log( this + '.toggleBodyVisibility' );
- this.$el.find( '.historyItemBody' ).toggle();
+ toggleBodyVisibility : function( visible ){
+ var $body = this.$el.find( '.historyItemBody' );
+ $body.toggle();
+ this.trigger( 'toggleBodyVisibility', this.model.get( 'id' ), $body.is( ':visible' ) );
},
// ................................................................................ UTILTIY
toString : function(){
- var modelString = ( this.model )?( this.model + '' ):( '' );
+ var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
return 'HistoryItemView(' + modelString + ')';
}
});
@@ -708,21 +718,18 @@
//------------------------------------------------------------------------------
//HistoryItemView.templates = InDomTemplateLoader.getTemplates({
-HistoryItemView.templates = CompiledTemplateLoader.getTemplates({
- 'common-templates.html' : {
- warningMsg : 'template-warningmessagesmall'
- },
- 'history-templates.html' : {
- messages : 'template-history-warning-messages',
- titleLink : 'template-history-titleLink',
- hdaSummary : 'template-history-hdaSummary',
- downloadLinks : 'template-history-downloadLinks',
- failedMetadata : 'template-history-failedMetaData',
- tagArea : 'template-history-tagArea',
- annotationArea : 'template-history-annotationArea',
- displayApps : 'template-history-displayApps'
- }
-});
+HistoryItemView.templates = {
+ warningMsg : Handlebars.templates[ 'template-warningmessagesmall' ],
+
+ messages : Handlebars.templates[ 'template-history-warning-messages' ],
+ titleLink : Handlebars.templates[ 'template-history-titleLink' ],
+ hdaSummary : Handlebars.templates[ 'template-history-hdaSummary' ],
+ downloadLinks : Handlebars.templates[ 'template-history-downloadLinks' ],
+ failedMetadata : Handlebars.templates[ 'template-history-failedMetaData' ],
+ tagArea : Handlebars.templates[ 'template-history-tagArea' ],
+ annotationArea : Handlebars.templates[ 'template-history-annotationArea' ],
+ displayApps : Handlebars.templates[ 'template-history-displayApps' ]
+};
//==============================================================================
var HistoryCollection = Backbone.Collection.extend({
@@ -867,22 +874,51 @@
});
//------------------------------------------------------------------------------
+// view for the HistoryCollection (as per current right hand panel)
+//var HistoryView = BaseView.extend( LoggableMixin ).extend( UsesStorageMixin ) .extend({
var HistoryView = BaseView.extend( LoggableMixin ).extend({
- // view for the HistoryCollection (as per current right hand panel)
// uncomment this out see log messages
//logger : console,
// direct attachment to existing element
el : 'body.historyPage',
-
+ //TODO: add id?
+
initialize : function(){
this.log( this + '.initialize:', this );
- this.itemViews = [];
- var parent = this;
+ // data that needs to be persistant over page refreshes
+ this.storage = new PersistantStorage(
+ 'HistoryView.' + this.model.get( 'id' ),
+ { visibleItems : {} }
+ );
+ // set up the individual history items/datasets
+ this.initializeItems();
+ },
+
+ initializeItems : function(){
+ this.itemViews = {};
+ var historyPanel = this;
this.model.items.each( function( item ){
- var itemView = new HistoryItemView({ model: item });
- parent.itemViews.push( itemView );
+ var itemId = item.get( 'id' ),
+ itemView = new HistoryItemView({
+ model: item, visible:
+ historyPanel.storage.get( 'visibleItems' ).get( itemId )
+ });
+ historyPanel.setUpItemListeners( itemView );
+ historyPanel.itemViews[ itemId ] = itemView;
+ });
+ },
+
+ setUpItemListeners : function( itemView ){
+ var HistoryPanel = this;
+ // use storage to maintain a list of items whose bodies are visible
+ itemView.bind( 'toggleBodyVisibility', function( id, visible ){
+ if( visible ){
+ HistoryPanel.storage.get( 'visibleItems' ).set( id, true );
+ } else {
+ HistoryPanel.storage.get( 'visibleItems' ).deleteKey( id );
+ }
});
},
@@ -907,8 +943,8 @@
var div = $( '<div/>' ),
view = this;
//NOTE!: render in reverse (newest on top) via prepend (instead of append)
- _.each( this.itemViews, function( itemView ){
- view.log( view + '.render_items:', itemView );
+ _.each( this.itemViews, function( itemView, viewId ){
+ view.log( view + '.render_items:', viewId, itemView );
div.prepend( itemView.render() );
});
return div;
@@ -919,12 +955,9 @@
return 'HistoryView(' + nameString + ')';
}
});
-//HistoryItemView.templates = InDomTemplateLoader.getTemplates({
-HistoryView.templates = CompiledTemplateLoader.getTemplates({
- 'history-templates.html' : {
- historyPanel : 'template-history-historyPanel'
- }
-});
+HistoryView.templates = {
+ historyPanel : Handlebars.templates[ 'template-history-historyPanel' ]
+};
diff -r c456d67423b6988de9f9777d9758901845d02de8 -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -344,7 +344,9 @@
${parent.javascripts()}
${h.js(
- "libs/jquery/jstorage", "libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging",
+ "libs/jquery/jstorage",
+ "libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging",
+ "libs/json2",
"mvc/base-mvc", "mvc/ui"
)}
@@ -395,10 +397,10 @@
if( pageData.hdaId ){
self.location = "#" + pageData.hdaId;
}
-
- glx_history = new History( pageData.history ).loadDatasetsAsHistoryItems( pageData.hdas );
- glx_history_view = new HistoryView({ model: glx_history });
+ var glx_history = new History( pageData.history ).loadDatasetsAsHistoryItems( pageData.hdas ),
+ glx_history_view = new HistoryView({ model: glx_history });
glx_history_view.render();
+ window.glx_history = glx_history; window.glx_history_view = glx_history_view;
return;
@@ -452,4 +454,4 @@
${_('Galaxy History')}
</%def>
-<body class="historyPage"></body>
\ No newline at end of file
+<body class="historyPage"></body>
commit/galaxy-central: greg: Categories method cleanup in the tool shed admin controller, and add the search feature to the Admin Repository grid in the tool shed.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c456d67423b6/
changeset: c456d67423b6
user: greg
date: 2012-10-08 17:02:16
summary: Categories method cleanup in the tool shed admin controller, and add the search feature to the Admin Repository grid in the tool shed.
affected #: 3 files
diff -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f -r c456d67423b6988de9f9777d9758901845d02de8 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -18,8 +18,7 @@
log = logging.getLogger( __name__ )
class UserListGrid( grids.Grid ):
- # TODO: move this to an admin_common controller since it is virtually the same
- # in the galaxy webapp.
+ # TODO: move this to an admin_common controller since it is virtually the same in the galaxy webapp.
class UserLoginColumn( grids.TextColumn ):
def get_value( self, trans, grid, user ):
return user.email
@@ -61,7 +60,6 @@
return query
return query.filter( and_( model.Tool.table.c.user_id == model.User.table.c.id,
model.User.table.c.email == column_filter ) )
- # Grid definition
title = "Users"
model_class = model.User
template='/admin/user/grid.mako'
@@ -146,8 +144,6 @@
if role.users:
return len( role.users )
return 0
-
- # Grid definition
title = "Roles"
model_class = model.Role
template='/admin/dataset_security/role/grid.mako'
@@ -231,19 +227,14 @@
if group.members:
return len( group.members )
return 0
-
- # Grid definition
title = "Groups"
model_class = model.Group
template='/admin/dataset_security/group/grid.mako'
default_sort_key = "name"
columns = [
NameColumn( "Name",
- #key="name",
link=( lambda item: dict( operation="Manage users and roles", id=item.id ) ),
- attach_popup=True
- #filterable="advanced"
- ),
+ attach_popup=True ),
UsersColumn( "Users", attach_popup=False ),
RolesColumn( "Roles", attach_popup=False ),
StatusColumn( "Status", attach_popup=False ),
@@ -301,6 +292,30 @@
]
class AdminRepositoryListGrid( RepositoryListGrid ):
+ columns = [ RepositoryListGrid.NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+ attach_popup=True ),
+ RepositoryListGrid.DescriptionColumn( "Synopsis",
+ key="description",
+ attach_popup=False ),
+ RepositoryListGrid.MetadataRevisionColumn( "Metadata Revisions" ),
+ RepositoryListGrid.UserColumn( "Owner",
+ model_class=model.User,
+ link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+ attach_popup=False,
+ key="User.username" ),
+ RepositoryListGrid.EmailAlertsColumn( "Alert", attach_popup=False ),
+ # Columns that are valid for filtering but are not visible.
+ grids.DeletedColumn( "Deleted",
+ key="deleted",
+ visible=False,
+ filterable="advanced" ) ]
+ columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+ cols_to_filter=[ columns[0], columns[1] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
operations = [ operation for operation in RepositoryListGrid.operations ]
operations.append( grids.GridOperation( "Delete",
allow_multiple=False,
@@ -507,41 +522,26 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
+ name = util.restore_text( params.get( 'name', '' ) ).strip()
+ description = util.restore_text( params.get( 'description', '' ) ).strip()
if params.get( 'create_category_button', False ):
- name = util.restore_text( params.name )
- description = util.restore_text( params.description )
- error = False
if not name or not description:
message = 'Enter a valid name and a description'
- error = True
- elif trans.sa_session.query( trans.app.model.Category ) \
- .filter( trans.app.model.Category.table.c.name==name ) \
- .first():
+ status = 'error'
+ elif get_category_by_name( trans, name ):
message = 'A category with that name already exists'
- error = True
- if error:
- return trans.fill_template( '/webapps/community/category/create_category.mako',
- name=name,
- description=description,
- message=message,
- status='error' )
+ status = 'error'
else:
# Create the category
category = trans.app.model.Category( name=name, description=description )
trans.sa_session.add( category )
+ trans.sa_session.flush()
message = "Category '%s' has been created" % category.name
- trans.sa_session.flush()
+ status = 'done'
trans.response.send_redirect( web.url_for( controller='admin',
action='manage_categories',
- message=util.sanitize_text( message ),
- status='done' ) )
- trans.response.send_redirect( web.url_for( controller='admin',
- action='create_category',
- message=util.sanitize_text( message ),
- status='error' ) )
- else:
- name = ''
- description = ''
+ message=message,
+ status=status ) )
return trans.fill_template( '/webapps/community/category/create_category.mako',
name=name,
description=description,
@@ -623,8 +623,7 @@
if not new_name:
message = 'Enter a valid name'
status = 'error'
- elif category.name != new_name and \
- trans.sa_session.query( trans.app.model.Category ).filter( trans.app.model.Category.table.c.name==new_name ).first():
+ elif category.name != new_name and get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -633,10 +632,11 @@
trans.sa_session.add( category )
trans.sa_session.flush()
message = "The information has been saved for category '%s'" % ( category.name )
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin',
action='manage_categories',
- message=util.sanitize_text( message ),
- status='done' ) )
+ message=message,
+ status=status ) )
return trans.fill_template( '/webapps/community/category/edit_category.mako',
category=category,
message=message,
@@ -649,20 +649,31 @@
# What we've done is rendered the search box for the RepositoryListGrid on the grid.mako
# template for the CategoryListGrid. See ~/templates/webapps/community/category/grid.mako.
# Since we are searching repositories and not categories, redirect to browse_repositories().
- return self.browse_repositories( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='browse_repositories',
+ **kwd ) )
if 'operation' in kwd:
operation = kwd['operation'].lower()
if operation == "create":
- return self.create_category( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='create_category',
+ **kwd ) )
elif operation == "delete":
- return self.mark_category_deleted( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='mark_category_deleted',
+ **kwd ) )
elif operation == "undelete":
- return self.undelete_category( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='undelete_category',
+ **kwd ) )
elif operation == "purge":
- return self.purge_category( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='purge_category',
+ **kwd ) )
elif operation == "edit":
- return self.edit_category( trans, **kwd )
- # Render the list view
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='edit_category',
+ **kwd ) )
return self.manage_category_list_grid( trans, **kwd )
@web.expose
@web.require_admin
diff -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f -r c456d67423b6988de9f9777d9758901845d02de8 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -76,8 +76,6 @@
viewable_repositories += 1
return viewable_repositories
return 0
-
- # Grid definition
title = "Categories of valid repositories"
model_class = model.Category
template='/webapps/community/category/valid_grid.mako'
@@ -146,15 +144,6 @@
if column_filter == "All":
return query
return query.filter( model.Category.name == column_filter )
- class DeletedColumn( grids.DeletedColumn ):
- def get_accepted_filters( self ):
- """ Returns a list of accepted filters for this column. """
- accepted_filter_labels_and_vals = { "Active" : "False", "Deactivated or uninstalled" : "True", "All": "All" }
- accepted_filters = []
- for label, val in accepted_filter_labels_and_vals.items():
- args = { self.key: val }
- accepted_filters.append( grids.GridColumnFilter( label, args) )
- return accepted_filters
class UserColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
if repository.user:
@@ -179,8 +168,7 @@
columns = [
NameColumn( "Name",
key="name",
- link=( lambda item: dict( operation="view_or_manage_repository",
- id=item.id ) ),
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
attach_popup=True ),
DescriptionColumn( "Synopsis",
key="description",
@@ -207,10 +195,10 @@
model_class=model.Category,
key="Category.name",
visible=False ),
- DeletedColumn( "Status",
- key="deleted",
- visible=False,
- filterable="advanced" )
+ grids.DeletedColumn( "Deleted",
+ key="deleted",
+ visible=False,
+ filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name, description",
cols_to_filter=[ columns[0], columns[1] ],
diff -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f -r c456d67423b6988de9f9777d9758901845d02de8 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -55,6 +55,15 @@
class ToolShedColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
return tool_shed_repository.tool_shed
+ class DeletedColumn( grids.DeletedColumn ):
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = { "Active" : "False", "Deactivated or uninstalled" : "True", "All": "All" }
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
# Grid definition
title = "Installed tool shed repositories"
model_class = model.ToolShedRepository
@@ -74,10 +83,10 @@
filterable="advanced" ),
ToolShedColumn( "Tool shed" ),
# Columns that are valid for filtering but are not visible.
- grids.DeletedColumn( "Deleted",
- key="deleted",
- visible=False,
- filterable="advanced" )
+ DeletedColumn( "Status",
+ key="deleted",
+ visible=False,
+ filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name",
cols_to_filter=[ columns[0] ],
commit/galaxy-central: greg: Make the DeletedColumn in the Repositories grid a subclass so that the Advanced search labels clarify the status of the filtered repositories.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/04290e284245/
changeset: 04290e284245
user: greg
date: 2012-10-08 16:30:43
summary: Make the DeletedColumn in the Repositories grid a subclass so that the Advanced search labels clarify the status of the filtered repositories.
affected #: 1 file
diff -r 685a17af92dfd6a2e3d7e3c9a3a4b119c78a6f96 -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -40,8 +40,6 @@
viewable_repositories += 1
return viewable_repositories
return 0
-
- # Grid definition
title = "Categories"
model_class = model.Category
template='/webapps/community/category/grid.mako'
@@ -148,6 +146,15 @@
if column_filter == "All":
return query
return query.filter( model.Category.name == column_filter )
+ class DeletedColumn( grids.DeletedColumn ):
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = { "Active" : "False", "Deactivated or uninstalled" : "True", "All": "All" }
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
class UserColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
if repository.user:
@@ -200,10 +207,10 @@
model_class=model.Category,
key="Category.name",
visible=False ),
- grids.DeletedColumn( "Deleted",
- key="deleted",
- visible=False,
- filterable="advanced" )
+ DeletedColumn( "Status",
+ key="deleted",
+ visible=False,
+ filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name, description",
cols_to_filter=[ columns[0], columns[1] ],
@@ -2182,7 +2189,10 @@
cntrller = params.get( 'cntrller', 'repository' )
repository = get_repository( trans, id )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- metadata = repository_metadata.metadata
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ else:
+ metadata = None
if metadata and 'readme' in metadata:
readme_file = str( metadata[ 'readme' ] )
repo_files_dir = repository.repo_path
commit/galaxy-central: natefoo: Add logical operators to the compute tool whitelist.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/685a17af92df/
changeset: 685a17af92df
user: natefoo
date: 2012-10-08 16:28:29
summary: Add logical operators to the compute tool whitelist.
affected #: 1 file
diff -r 26dfa56403e1011445854560b3e9818180b3c8c2 -r 685a17af92dfd6a2e3d7e3c9a3a4b119c78a6f96 tools/stats/column_maker.py
--- a/tools/stats/column_maker.py
+++ b/tools/stats/column_maker.py
@@ -46,9 +46,10 @@
for key, value in mapped_str.items():
expr = expr.replace( key, value )
+operators = 'is|not|or|and'
builtin_and_math_functions = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'
string_and_list_methods = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
-whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,: ]|%s|%s)*$" % (builtin_and_math_functions, '|'.join(string_and_list_methods))
+whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,:! ]|%s|%s|%s)*$" % (operators, builtin_and_math_functions, '|'.join(string_and_list_methods))
if not re.compile(whitelist).match(expr):
stop_err("Invalid expression")
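
To illustrate what the extended whitelist accepts, a self-contained Python sketch; the regex lines are copied from the commit, while the two sample expressions are made-up examples. Expressions built only from column references, numbers, the listed operators/functions and string/list methods pass; anything else is rejected before the tool evaluates it:

import re

operators = 'is|not|or|and'
builtin_and_math_functions = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'
string_and_list_methods = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,:! ]|%s|%s|%s)*$" % (operators, builtin_and_math_functions, '|'.join(string_and_list_methods))

for expr in ( "c1 and c2 > 0", "__import__('os').system('ls')" ):
    if re.compile( whitelist ).match( expr ):
        print( "%s -> allowed" % expr )
    else:
        print( "%s -> Invalid expression" % expr )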
commit/galaxy-central: greg: Another fix needed due to the elimination of the get_webapp() method in the old base controller.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/26dfa56403e1/
changeset: 26dfa56403e1
user: greg
date: 2012-10-08 15:13:18
summary: Another fix needed due to the elimination of the get_webapp() method in the old base controller.
affected #: 1 file
diff -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 -r 26dfa56403e1011445854560b3e9818180b3c8c2 lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -711,7 +711,7 @@
tool_dependencies_dict = {}
repository_name = elem.get( 'name' )
changeset_revision = elem.get( 'changeset_revision' )
- url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&webapp=install_manager' % \
+ url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
( tool_shed_url, repository_name, changeset_revision )
response = urllib2.urlopen( url )
text = response.read()
commit/galaxy-central: jgoecks: Enable circster to accept a dataset to add to visualization. Also, make visualization creation more flexible and some small refactoring for JS visualization objects.
by Bitbucket 05 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/05fc04a70a3b/
changeset: 05fc04a70a3b
user: jgoecks
date: 2012-10-05 22:01:55
summary: Enable circster to accept a dataset to add to visualization. Also, make visualization creation more flexible and some small refactoring for JS visualization objects.
affected #: 6 files
diff -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -335,16 +335,18 @@
viz_types = [ "trackster", "circster" ]
- def create_visualization( self, trans, title, slug, type, dbkey, annotation=None, config={} ):
+ def create_visualization( self, trans, type, title="Untitled Genome Vis", slug=None, dbkey=None, annotation=None, config={}, save=True ):
""" Create visualiation and first revision. """
- visualization = self._create_visualization( trans, title, type, dbkey, slug, annotation )
+ visualization = self._create_visualization( trans, title, type, dbkey, slug, annotation, save )
# Create and save first visualization revision
revision = trans.model.VisualizationRevision( visualization=visualization, title=title, config=config, dbkey=dbkey )
visualization.latest_revision = revision
- session = trans.sa_session
- session.add( revision )
- session.flush()
+
+ if save:
+ session = trans.sa_session
+ session.add( revision )
+ session.flush()
return visualization
@@ -440,7 +442,7 @@
""" Returns a visualization's configuration. Only works for trackster visualizations right now. """
config = None
- if visualization.type == 'trackster':
+ if visualization.type in [ 'trackster', 'genome' ]:
# Unpack Trackster config.
latest_revision = visualization.latest_revision
bookmarks = latest_revision.config.get( 'bookmarks', [] )
@@ -566,7 +568,7 @@
# -- Helper functions --
- def _create_visualization( self, trans, title, type, dbkey, slug=None, annotation=None ):
+ def _create_visualization( self, trans, title, type, dbkey=None, slug=None, annotation=None, save=True ):
""" Create visualization but not first revision. Returns Visualization object. """
user = trans.get_user()
@@ -593,9 +595,10 @@
annotation = sanitize_html( annotation, 'utf-8', 'text/html' )
self.add_item_annotation( trans.sa_session, trans.user, visualization, annotation )
- session = trans.sa_session
- session.add( visualization )
- session.flush()
+ if save:
+ session = trans.sa_session
+ session.add( visualization )
+ session.flush()
return visualization
diff -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -741,20 +741,36 @@
return self.save_visualization( trans, config, type, id, title, dbkey, annotation )
@web.expose
- def circster( self, trans, id, **kwargs ):
+ def circster( self, trans, id=None, hda_ldda=None, dataset_id=None, dbkey=None ):
"""
Display a circster visualization.
"""
- vis = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+
+ # Get/create vis.
+ if id:
+ # Display existing viz.
+ vis = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+ else:
+ # Create new viz.
+ vis = self.create_visualization( trans, type="genome", dbkey=dbkey, save=False )
+
+ # Get the vis config and work with it from here on out. Working with the
+ # config is only possible because the config structure of trackster/genome
+ # visualizations is well known.
viz_config = self.get_visualization_config( trans, vis )
+ # Add dataset if specified.
+ if dataset_id:
+ dataset = self.get_hda_or_ldda( trans, hda_ldda, dataset_id )
+ viz_config[ 'tracks' ].append( self.get_new_track_config( trans, dataset ) )
+
# Get genome info.
dbkey = viz_config[ 'dbkey' ]
chroms_info = self.app.genomes.chroms( trans, dbkey=dbkey )
genome = { 'dbkey': dbkey, 'chroms_info': chroms_info }
# Add genome-wide summary tree data to each track in viz.
- tracks = viz_config[ 'tracks' ]
+ tracks = viz_config.get( 'tracks', [] )
for track in tracks:
# Get dataset and indexed datatype.
dataset = self.get_hda_or_ldda( trans, track[ 'hda_ldda'], track[ 'dataset_id' ] )
diff -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 static/scripts/viz/visualization.js
--- a/static/scripts/viz/visualization.js
+++ b/static/scripts/viz/visualization.js
@@ -351,7 +351,6 @@
if (!extra_params) { extra_params = {}; }
// Use additional parameters to get more detailed data.
- var mode;
if (cur_data.dataset_type === 'bigwig') {
extra_params.num_samples = cur_data.data.length * detail_multiplier;
}
@@ -642,21 +641,10 @@
*/
var Visualization = Backbone.RelationalModel.extend({
defaults: {
- id: '',
title: '',
- type: '',
- dbkey: '',
- tracks: null
+ type: ''
},
- relations: [
- {
- type: Backbone.HasMany,
- key: 'tracks',
- relatedModel: BackboneTrack
- }
- ],
-
// Use function because visualization_url changes depending on viz.
// FIXME: all visualizations should save to the same URL (and hence
// this function won't be needed).
@@ -682,13 +670,27 @@
});
/**
- * A Genome space visualization.
+ * A visualization of genome data.
*/
var GenomeVisualization = Visualization.extend({
defaults: _.extend({}, Visualization.prototype.defaults, {
+ dbkey: '',
+ tracks: null,
bookmarks: null,
viewport: null
- })
+ }),
+
+ relations: [
+ {
+ type: Backbone.HasMany,
+ key: 'tracks',
+ relatedModel: BackboneTrack
+ }
+ ],
+
+ add_track: function(track) {
+ this.get('tracks').push(track);
+ }
});
/**
@@ -727,6 +729,7 @@
});
return {
+ BackboneTrack: BackboneTrack,
BrowserBookmark: BrowserBookmark,
BrowserBookmarkCollection: BrowserBookmarkCollection,
Cache: Cache,
diff -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -236,7 +236,10 @@
}
return action;
},
- params = {dataset_id: dataset_id};
+ params = {
+ dataset_id: dataset_id,
+ hda_ldda: 'hda'
+ };
// Add dbkey to params if it exists.
if (dbkey) { params['dbkey'] = dbkey; }
diff -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -226,11 +226,7 @@
## Visualization icon + visualizations. Using anchor attributes is a HACK to encode needed
## information--URL base, dataset id, dbkey, visualizations--in anchor.
<%
- visualizations = data.get_visualizations()
- ## HACK: if there are visualizations, only provide a subset for now
- ## since others are not ready. - comment out to see all WIP visualizations
- if visualizations:
- visualizations = [ vis for vis in visualizations if vis in [ 'trackster', 'phyloviz', 'scatterplot' ] ]
+ visualizations = data.get_visualizations()
%>
%if visualizations:
<a href="${h.url_for( controller='visualization' )}"
diff -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd -r 05fc04a70a3bcbfaeedfcf6f2ec16a7e38fc7c94 templates/visualization/circster.mako
--- a/templates/visualization/circster.mako
+++ b/templates/visualization/circster.mako
@@ -33,7 +33,7 @@
}
});
- require( [ "libs/d3", "viz/visualization", "viz/circster" ], function( d3, visualization, circster ) {
+ require( [ "viz/visualization", "viz/circster" ], function(visualization, circster ) {
$(function() {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b5bda7a5c345/
changeset: b5bda7a5c345
user: natefoo
date: 2012-10-05 21:49:26
summary: Fix the Compute tool to only allow for execution of a limited set of expressions.
affected #: 1 file
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r b5bda7a5c34535ada63722941f2a2d62524b1faa tools/stats/column_maker.py
--- a/tools/stats/column_maker.py
+++ b/tools/stats/column_maker.py
@@ -46,6 +46,12 @@
for key, value in mapped_str.items():
expr = expr.replace( key, value )
+builtin_and_math_functions = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'
+string_and_list_methods = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
+whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,: ]|%s|%s)*$" % (builtin_and_math_functions, '|'.join(string_and_list_methods))
+if not re.compile(whitelist).match(expr):
+ stop_err("Invalid expression")
+
# Prepare the column variable names and wrappers for column data types
cols, type_casts = [], []
for col in range( 1, in_columns + 1 ):
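
The hunk above is the entire fix: the Compute tool now rejects any expression containing tokens outside a whitelist of column references (c1, c2, ...), numeric and string literals, operators, and approved function and method names. Below is a standalone sketch of the same whitelist check; the builtin list is trimmed to names that exist in Python 3 and the tool's stop_err() helper is replaced with a plain ValueError, so this is an illustration rather than the tool's code.

    import re

    # Approved builtin/math names (Python-2-only names such as cmp, long,
    # unichr and unicode are omitted here) plus public str/list methods.
    BUILTIN_AND_MATH = ('abs|all|any|bin|chr|complex|divmod|float|hex|int|len|max|min|'
                        'oct|ord|pow|range|reversed|round|sorted|str|sum|type|'
                        'log|exp|sqrt|ceil|floor')
    STR_LIST_METHODS = [name for name in dir('') + dir([]) if not name.startswith('_')]
    WHITELIST = r"^([c0-9\+\-\*\/\(\)\.\'\"><=,: ]|%s|%s)*$" % (
        BUILTIN_AND_MATH, '|'.join(STR_LIST_METHODS))

    def check_expression(expr):
        # Reject any expression with a token that falls outside the whitelist.
        if not re.match(WHITELIST, expr):
            raise ValueError('Invalid expression: %r' % expr)
        return expr

    check_expression("c1 + c2 * 2.5")             # accepted
    try:
        check_expression("__import__('os')")      # rejected: '_' is not whitelisted
    except ValueError as err:
        print(err)
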
https://bitbucket.org/galaxy/galaxy-central/changeset/3b63335f5b1b/
changeset: 3b63335f5b1b
user: natefoo
date: 2012-10-05 21:52:50
summary: Merge from galaxy-dist
affected #: 1 file
diff -r cf93e8aa50f96e4d43d39e817d9754dc41a5886e -r 3b63335f5b1b33a2d6f74c2dfd38b85d97e9eebd tools/stats/column_maker.py
--- a/tools/stats/column_maker.py
+++ b/tools/stats/column_maker.py
@@ -46,6 +46,12 @@
for key, value in mapped_str.items():
expr = expr.replace( key, value )
+builtin_and_math_functions = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'
+string_and_list_methods = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
+whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,: ]|%s|%s)*$" % (builtin_and_math_functions, '|'.join(string_and_list_methods))
+if not re.compile(whitelist).match(expr):
+ stop_err("Invalid expression")
+
# Prepare the column variable names and wrappers for column data types
cols, type_casts = [], []
for col in range( 1, in_columns + 1 ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: When finding applicable visualizations, check to ensure that there are column_types before trying to iterate over them.
by Bitbucket 05 Oct '12
05 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/cf93e8aa50f9/
changeset: cf93e8aa50f9
user: jgoecks
date: 2012-10-05 15:43:04
summary: When finding applicable visualizations, check to ensure that there are column_types before trying to iterate over them.
affected #: 1 file
diff -r 210c39f4bf7f2d91c68df84dd9ccecb2ff9f93aa -r cf93e8aa50f96e4d43d39e817d9754dc41a5886e lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -309,9 +309,10 @@
# Can visualize tabular data as scatterplot if there are 2+ numerical
# columns.
num_numerical_cols = 0
- for col_type in dataset.metadata.column_types:
- if col_type in [ 'int', 'float' ]:
- num_numerical_cols += 1
+ if dataset.metadata.column_types:
+ for col_type in dataset.metadata.column_types:
+ if col_type in [ 'int', 'float' ]:
+ num_numerical_cols += 1
vizs = super( Tabular, self ).get_visualizations( dataset )
if num_numerical_cols >= 2:
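
The added guard simply skips the count when column_types metadata is absent (None) instead of failing on iteration. A minimal standalone version of the guarded count, with hypothetical inputs:

    def count_numerical_columns(column_types):
        # Mirror of the guarded loop above: tolerate missing metadata (None)
        # rather than raising a TypeError when iterating over it.
        num_numerical_cols = 0
        if column_types:
            for col_type in column_types:
                if col_type in ('int', 'float'):
                    num_numerical_cols += 1
        return num_numerical_cols

    assert count_numerical_columns(None) == 0                      # previously an error
    assert count_numerical_columns(['str', 'int', 'float']) == 2   # scatterplot needs >= 2
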
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: smcmanus: Moved galaxy.web.controllers references to galaxy.webapps.galaxy.controllers
by Bitbucket 05 Oct '12
05 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/210c39f4bf7f/
changeset: 210c39f4bf7f
user: smcmanus
date: 2012-10-05 03:43:15
summary: Moved galaxy.web.controllers references to galaxy.webapps.galaxy.controllers
affected #: 5 files
diff -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 -r 210c39f4bf7f2d91c68df84dd9ccecb2ff9f93aa templates/admin/requests/rename_datasets.mako
--- a/templates/admin/requests/rename_datasets.mako
+++ b/templates/admin/requests/rename_datasets.mako
@@ -1,7 +1,7 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
-<% from galaxy.web.controllers.requests_admin import build_rename_datasets_for_sample_select_field %>
+<% from galaxy.webapps.galaxy.controllers.requests_admin import build_rename_datasets_for_sample_select_field %><h3>Rename datasets for Sample "${sample.name}"</h3>
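
The remaining hunks in this changeset repeat the same mechanical edit: template-level imports move from galaxy.web.controllers.* to galaxy.webapps.galaxy.controllers.*. As a side note, a hypothetical compatibility shim (not part of the commit, and only meaningful inside a Galaxy checkout) could tolerate both locations while such a migration is in progress:

    # Hypothetical shim, not from the commit: prefer the new package location
    # and fall back to the legacy one so templates keep working mid-migration.
    try:
        from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
    except ImportError:
        from galaxy.web.controllers.library_common import branch_deleted
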
diff -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 -r 210c39f4bf7f2d91c68df84dd9ccecb2ff9f93aa templates/library/common/browse_library_opt.mako
--- a/templates/library/common/browse_library_opt.mako
+++ b/templates/library/common/browse_library_opt.mako
@@ -214,7 +214,7 @@
## children, which are always lddas ). We also need to make sure we're displaying the latest version of this
## library_dataset, so we display the attributes from the ldda.
- from galaxy.web.controllers.library_common import branch_deleted
+ from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
is_admin = trans.user_is_admin() and cntrller == 'library_admin'
current_version = ( ldda == library_dataset.library_dataset_dataset_association )
@@ -312,7 +312,7 @@
<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False, simple=False )"><%
- from galaxy.web.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, map_library_datasets_to_lddas, branch_deleted, datasets_for_lddas
+ from galaxy.webapps.galaxy.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, map_library_datasets_to_lddas, branch_deleted, datasets_for_lddas
# SM: DELETEME
from datetime import datetime, timedelta
@@ -472,7 +472,7 @@
<%def name="render_content(simple=False)"><%
from galaxy import util
- from galaxy.web.controllers.library_common import branch_deleted
+ from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
from time import strftime
import logging
log = logging.getLogger( __name__ )
diff -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 -r 210c39f4bf7f2d91c68df84dd9ccecb2ff9f93aa templates/library/common/ldda_info.mako
--- a/templates/library/common/ldda_info.mako
+++ b/templates/library/common/ldda_info.mako
@@ -3,7 +3,7 @@
<%namespace file="/common/template_common.mako" import="render_template_fields" /><%
from galaxy import util
- from galaxy.web.controllers.library_common import branch_deleted, get_containing_library_from_library_dataset
+ from galaxy.webapps.galaxy.controllers.library_common import branch_deleted, get_containing_library_from_library_dataset
from galaxy.web.framework.helpers import time_ago
if ldda == ldda.library_dataset.library_dataset_dataset_association:
diff -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 -r 210c39f4bf7f2d91c68df84dd9ccecb2ff9f93aa templates/library/common/library_dataset_search_results.mako
--- a/templates/library/common/library_dataset_search_results.mako
+++ b/templates/library/common/library_dataset_search_results.mako
@@ -66,8 +66,7 @@
<%def name="render_content()"><%
from galaxy import util
- from galaxy.web.controllers.library_common import branch_deleted
- #from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
+ from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
from time import strftime
class RowCounter( object ):
diff -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 -r 210c39f4bf7f2d91c68df84dd9ccecb2ff9f93aa templates/webapps/reports/dataset_info.mako
--- a/templates/webapps/reports/dataset_info.mako
+++ b/templates/webapps/reports/dataset_info.mako
@@ -4,6 +4,7 @@
<%
from galaxy.web.framework.helpers import time_ago
from galaxy.web.controllers.library_common import get_containing_library_from_library_dataset
+ from galaxy.webapps.galaxy.controllers.library_common import get_containing_library_from_library_dataset
%>
%if message:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: smcmanus: Cleanup: take out traceback and debug
by Bitbucket 05 Oct '12
05 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/2565580bb7cf/
changeset: 2565580bb7cf
user: smcmanus
date: 2012-10-05 02:48:46
summary: Cleanup: take out traceback and debug
affected #: 3 files
diff -r 029818b7e546a3b6835c49ef4f8694c5ac695dfb -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -143,8 +143,6 @@
message=message,
status=status )
except Exception, e:
- import traceback
- log.debug( "traceback: %s" % traceback.format_exc() )
message = 'Error attempting to display contents of library (%s): %s.' % ( str( library.name ), str( e ) )
status = 'error'
default_action = params.get( 'default_action', None )
diff -r 029818b7e546a3b6835c49ef4f8694c5ac695dfb -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 lib/galaxy/webapps/galaxy/controllers/requests_admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/requests_admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/requests_admin.py
@@ -284,7 +284,6 @@
request_id = trans.security.encode_id( sample.request.id )
library_id = trans.security.encode_id( sample.library.id )
self.datatx_grid.title = 'Manage "%s" datasets' % sample.name
- log.debug( "!!!!!!!!!!!!!!!!!!!!!!@@@@@@@@@@@@@@@@@@@@@@@##################" )
self.datatx_grid.global_actions = [ grids.GridAction( "Browse target data library",
dict( controller='library_common',
action='browse_library',
diff -r 029818b7e546a3b6835c49ef4f8694c5ac695dfb -r 2565580bb7cffd28c237ecafab1fbb46fe6185e9 templates/library/common/library_dataset_search_results.mako
--- a/templates/library/common/library_dataset_search_results.mako
+++ b/templates/library/common/library_dataset_search_results.mako
@@ -67,6 +67,7 @@
<%
from galaxy import util
from galaxy.web.controllers.library_common import branch_deleted
+ #from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
from time import strftime
class RowCounter( object ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: smcmanus: The browse_library.mako template referred to galaxy.web.controllers.library_common, which has been updated to galaxy.webapps.galaxy.controllers.library_common. The Data Library browsing should be fixed.
by Bitbucket 05 Oct '12
05 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/029818b7e546/
changeset: 029818b7e546
user: smcmanus
date: 2012-10-05 02:43:55
summary: The browse_library.mako template referred to galaxy.web.controllers.library_common, which has been updated to galaxy.webapps.galaxy.controllers.library_common. The Data Library browsing should be fixed.
affected #: 3 files
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 029818b7e546a3b6835c49ef4f8694c5ac695dfb lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -131,7 +131,7 @@
comptypes = get_comptypes( trans )
try:
# SM: TODO: Add configuration variable asap.
- return trans.fill_template( '/library/common/browse_library.mako',
+ return trans.fill_template( 'library/common/browse_library.mako',
cntrller=cntrller,
use_panels=use_panels,
library=library,
@@ -143,6 +143,8 @@
message=message,
status=status )
except Exception, e:
+ import traceback
+ log.debug( "traceback: %s" % traceback.format_exc() )
message = 'Error attempting to display contents of library (%s): %s.' % ( str( library.name ), str( e ) )
status = 'error'
default_action = params.get( 'default_action', None )
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 029818b7e546a3b6835c49ef4f8694c5ac695dfb lib/galaxy/webapps/galaxy/controllers/requests_admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/requests_admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/requests_admin.py
@@ -284,6 +284,7 @@
request_id = trans.security.encode_id( sample.request.id )
library_id = trans.security.encode_id( sample.library.id )
self.datatx_grid.title = 'Manage "%s" datasets' % sample.name
+ log.debug( "!!!!!!!!!!!!!!!!!!!!!!@@@@@@@@@@@@@@@@@@@@@@@##################" )
self.datatx_grid.global_actions = [ grids.GridAction( "Browse target data library",
dict( controller='library_common',
action='browse_library',
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 029818b7e546a3b6835c49ef4f8694c5ac695dfb templates/library/common/browse_library.mako
--- a/templates/library/common/browse_library.mako
+++ b/templates/library/common/browse_library.mako
@@ -213,7 +213,7 @@
## children, which are always lddas ). We also need to make sure we're displaying the latest version of this
## library_dataset, so we display the attributes from the ldda.
- from galaxy.web.controllers.library_common import branch_deleted
+ from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
is_admin = trans.user_is_admin() and cntrller == 'library_admin'
@@ -312,7 +312,7 @@
<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False, simple=False )"><%
- from galaxy.web.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, branch_deleted
+ from galaxy.webapps.galaxy.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, branch_deleted
is_admin = trans.user_is_admin() and cntrller == 'library_admin'
has_accessible_library_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, folder, trans.user, current_user_roles, search_downward=False )
@@ -472,7 +472,7 @@
<%def name="render_content(simple=False)"><%
from galaxy import util
- from galaxy.web.controllers.library_common import branch_deleted
+ from galaxy.webapps.galaxy.controllers.library_common import branch_deleted
from time import strftime
is_admin = trans.user_is_admin() and cntrller == 'library_admin'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Pack scripts and add compiled template.
by Bitbucket 04 Oct '12
04 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f3b183e756f9/
changeset: f3b183e756f9
user: jgoecks
date: 2012-10-04 18:35:16
summary: Pack scripts and add compiled template.
affected #: 3 files
diff -r 89c27f6b20f8f261a8e45e33abcc2ed9c1509a71 -r f3b183e756f9b209ef0904718ed547e04c74ab7a static/scripts/packed/templates/compiled/template-visualization-scatterplotControlForm.js
--- /dev/null
+++ b/static/scripts/packed/templates/compiled/template-visualization-scatterplotControlForm.js
@@ -0,0 +1,1 @@
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-visualization-scatterplotControlForm"]=b(function(g,n,f,m,l){f=f||g.helpers;var j="",d,i,h="function",k=this.escapeExpression,o=this;function e(t,s){var q="",r,p;q+='\n <option value="';p=f.index;if(p){r=p.call(t,{hash:{}})}else{r=t.index;r=typeof r===h?r():r}q+=k(r)+'">';p=f.name;if(p){r=p.call(t,{hash:{}})}else{r=t.name;r=typeof r===h?r():r}q+=k(r)+"</option>\n ";return q}function c(t,s){var q="",r,p;q+='\n <option value="';p=f.index;if(p){r=p.call(t,{hash:{}})}else{r=t.index;r=typeof r===h?r():r}q+=k(r)+'">';p=f.name;if(p){r=p.call(t,{hash:{}})}else{r=t.name;r=typeof r===h?r():r}q+=k(r)+"</option>\n ";return q}j+='\n<div id="loading-indicator" style="display: none;">\n <img class="loading-img" src="';i=f.loadingIndicatorImagePath;if(i){d=i.call(n,{hash:{}})}else{d=n.loadingIndicatorImagePath;d=typeof d===h?d():d}j+=k(d)+'" />\n <span class="loading-message"></span>\n</div>\n\n';j+='\n<div id="chart-settings">\n\n ';j+='\n <div id="x-column-input">\n <label for="">Data column for X: </label>\n <select name="x-column">\n ';d=n.availableColumns;d=f.each.call(n,d,{hash:{},inverse:o.noop,fn:o.program(1,e,l)});if(d||d===0){j+=d}j+='\n </select>\n </div>\n <div id="y-column-input">\n <label for="">Data column for Y: </label>\n <select name="y-column">\n ';d=n.availableColumns;d=f.each.call(n,d,{hash:{},inverse:o.noop,fn:o.program(3,c,l)});if(d||d===0){j+=d}j+='\n </select>\n </div>\n \n <input id="render-button" type="button" value="Draw" />\n <div class="clear"></div>\n</div>';return j})})();
\ No newline at end of file
diff -r 89c27f6b20f8f261a8e45e33abcc2ed9c1509a71 -r f3b183e756f9b209ef0904718ed547e04c74ab7a static/scripts/packed/viz/phyloviz.js
--- a/static/scripts/packed/viz/phyloviz.js
+++ b/static/scripts/packed/viz/phyloviz.js
@@ -1,1 +1,1 @@
-var UserMenuBase=Backbone.View.extend({className:"UserMenuBase",isAcceptableValue:function(e,c,a){var b=this,f=e.val(),g=e.attr("displayLabel")||e.attr("id").replace("phyloViz","");function d(h){return !isNaN(parseFloat(h))&&isFinite(h)}if(!d(f)){alert(g+" is not a number!");return false}if(f>a){alert(g+" is too large.");return false}else{if(f<c){alert(g+" is too small.");return false}}return true},hasIllegalJsonCharacters:function(a){if(a.val().search(/"|'|\\/)!==-1){alert("Named fields cannot contain these illegal characters: double quote(\"), single guote('), or back slash(\\). ");return true}return false}});function PhyloTreeLayout(){var j=this,e=d3.layout.hierarchy().sort(null).value(null),i=360,d="Linear",h=18,f=200,g=0,c=0.5,a=50;j.leafHeight=function(k){if(typeof k==="undefined"){return h}else{h=k;return j}};j.layoutMode=function(k){if(typeof k==="undefined"){return d}else{d=k;return j}};j.layoutAngle=function(k){if(typeof k==="undefined"){return i}if(isNaN(k)||k<0||k>360){return j}else{i=k;return j}};j.separation=function(k){if(typeof k==="undefined"){return f}else{f=k;return j}};j.links=function(k){return d3.layout.tree().links(k)};j.nodes=function(n,l){var m=e.call(j,n,l),k=[],p=0,o=0;m.forEach(function(q){var r=q.data;r.depth=q.depth;p=r.depth>p?r.depth:p;k.push(r)});k.forEach(function(q){if(!q.children){o+=1;q.depth=p}});h=d==="Circular"?i/o:h;g=0;b(k[0],p,h,null);return k};function b(o,q,n,m){var l=o.children,k=0;var p=o.dist||c;p=p>1?1:p;o.dist=p;if(m!==null){o.y0=m.y0+p*f}else{o.y0=a}if(!l){o.x0=g++*n}else{l.forEach(function(r){r.parent=o;k+=b(r,q,n,o)});o.x0=k/l.length}o.x=o.x0;o.y=o.y0;return o.x0}return j}var PhyloTree=Visualization.extend({defaults:{layout:"Linear",separation:250,leafHeight:18,type:"phyloviz",title:"Title",scaleFactor:1,translate:[0,0],fontSize:12,selectedNode:null,nodeAttrChangedTime:0},root:{},toggle:function(a){if(typeof a==="undefined"){return}if(a.children){a._children=a.children;a.children=null}else{a.children=a._children;a._children=null}},toggleAll:function(a){if(a.children&&a.children.length!==0){a.children.forEach(this.toggleAll);toggle(a)}},getData:function(){return this.root},save:function(){var a=this.root;b(a);this.set("root",a);function b(d){delete d.parent;if(d._selected){delete d._selected}d.children?d.children.forEach(b):0;d._children?d._children.forEach(b):0}var c=jQuery.extend(true,{},this.attributes);c.selectedNode=null;show_message("Saving to Galaxy","progress");return $.ajax({url:this.url(),type:"POST",dataType:"json",data:{vis_json:JSON.stringify(c)},success:function(d){var e=d.url.split("id=")[1].split("&")[0],f="/phyloviz/visualization?id="+e;window.history.pushState({},"",f+window.location.hash);hide_modal()}})}});var PhylovizLayoutBase=Backbone.View.extend({defaults:{nodeRadius:4.5},stdInit:function(b){var a=this;a.model.on("change:separation change:leafHeight change:fontSize change:nodeAttrChangedTime",a.updateAndRender,a);a.vis=b.vis;a.i=0;a.maxDepth=-1;a.width=b.width;a.height=b.height},updateAndRender:function(c){var b=d3.select(".vis"),a=this;c=c||a.model.root;a.renderNodes(c);a.renderLinks(c);a.addTooltips()},renderLinks:function(a){var j=this;var b=j.diagonal;var c=j.duration;var e=j.layoutMode;var g=j.vis.selectAll("g.completeLink").data(j.tree.links(j.nodes),function(k){return k.target.id});var i=function(k){k.pos0=k.source.y0+" "+k.source.x0;k.pos1=k.source.y0+" "+k.target.x0;k.pos2=k.target.y0+" "+k.target.x0};var 
h=g.enter().insert("svg:g","g.node").attr("class","completeLink");h.append("svg:path").attr("class","link").attr("d",function(k){i(k);return"M "+k.pos0+" L "+k.pos1});var f=g.transition().duration(500);f.select("path.link").attr("d",function(k){i(k);return"M "+k.pos0+" L "+k.pos1+" L "+k.pos2});var d=g.exit().remove()},selectNode:function(b){var a=this;d3.selectAll("g.node").classed("selectedHighlight",function(c){if(b.id===c.id){if(b._selected){delete b._selected;return false}else{b._selected=true;return true}}return false});a.model.set("selectedNode",b);$("#phyloVizSelectedNodeName").val(b.name);$("#phyloVizSelectedNodeDist").val(b.dist);$("#phyloVizSelectedNodeAnnotation").val(b.annotation||"")},addTooltips:function(){$(".bs-tooltip").remove();$(".node").attr("data-original-title",function(){var b=this.__data__,a=b.annotation||"None";return b?(b.name?b.name+"<br/>":"")+"Dist: "+b.dist+" <br/>Annotation: "+a:""}).tooltip({placement:"top",trigger:"hover"})}});var PhylovizLinearView=PhylovizLayoutBase.extend({initialize:function(b){var a=this;a.margins=b.margins;a.layoutMode="Linear";a.stdInit(b);a.layout();a.updateAndRender(a.model.root)},layout:function(){var a=this;a.tree=new PhyloTreeLayout().layoutMode("Linear");a.diagonal=d3.svg.diagonal().projection(function(b){return[b.y,b.x]})},renderNodes:function(a){var h=this,i=h.model.get("fontSize")+"px";h.tree.separation(h.model.get("separation")).leafHeight(h.model.get("leafHeight"));var d=500,b=h.tree.separation(h.model.get("separation")).nodes(h.model.root);var c=h.vis.selectAll("g.node").data(b,function(j){return j.name+j.id||(j.id=++h.i)});h.nodes=b;h.duration=d;var e=c.enter().append("svg:g").attr("class","node").on("dblclick",function(){d3.event.stopPropagation()}).on("click",function(j){if(d3.event.altKey){h.selectNode(j)}else{if(j.children&&j.children.length===0){return}h.model.toggle(j);h.updateAndRender(j)}});e.attr("transform",function(j){return"translate("+a.y0+","+a.x0+")"});e.append("svg:circle").attr("r",0.000001).style("fill",function(j){return j._children?"lightsteelblue":"#fff"});e.append("svg:text").attr("class","nodeLabel").attr("x",function(j){return j.children||j._children?-10:10}).attr("dy",".35em").attr("text-anchor",function(j){return j.children||j._children?"end":"start"}).style("fill-opacity",0.000001);var f=c.transition().duration(d);f.attr("transform",function(j){return"translate("+j.y+","+j.x+")"});f.select("circle").attr("r",h.defaults.nodeRadius).style("fill",function(j){return j._children?"lightsteelblue":"#fff"});f.select("text").style("fill-opacity",1).style("font-size",i).text(function(j){return j.name});var g=c.exit().transition().duration(d).remove();g.select("circle").attr("r",0.000001);g.select("text").style("fill-opacity",0.000001);b.forEach(function(j){j.x0=j.x;j.y0=j.y})}});var PhylovizView=Backbone.View.extend({className:"phyloviz",initialize:function(b){var a=this;a.MIN_SCALE=0.05;a.MAX_SCALE=5;a.MAX_DISPLACEMENT=500;a.margins=[10,60,10,80];a.width=$("#PhyloViz").width();a.height=$("#PhyloViz").height();a.radius=a.width;a.data=b.data;$(window).resize(function(){a.width=$("#PhyloViz").width();a.height=$("#PhyloViz").height();a.render()});a.phyloTree=new PhyloTree(b.config);a.phyloTree.root=a.data;a.zoomFunc=d3.behavior.zoom().scaleExtent([a.MIN_SCALE,a.MAX_SCALE]);a.zoomFunc.translate(a.phyloTree.get("translate"));a.zoomFunc.scale(a.phyloTree.get("scaleFactor"));a.navMenu=new HeaderButtons(a);a.settingsMenu=new SettingsMenu({phyloTree:a.phyloTree});a.nodeSelectionView=new 
NodeSelectionView({phyloTree:a.phyloTree});a.search=new PhyloVizSearch();setTimeout(function(){a.zoomAndPan()},1000)},render:function(){var b=this;$("#PhyloViz").empty();b.mainSVG=d3.select("#PhyloViz").append("svg:svg").attr("width",b.width).attr("height",b.height).attr("pointer-events","all").call(b.zoomFunc.on("zoom",function(){b.zoomAndPan()}));b.boundingRect=b.mainSVG.append("svg:rect").attr("class","boundingRect").attr("width",b.width).attr("height",b.height).attr("stroke","black").attr("fill","white");b.vis=b.mainSVG.append("svg:g").attr("class","vis");b.layoutOptions={model:b.phyloTree,width:b.width,height:b.height,vis:b.vis,margins:b.margins};$("#title").text("Phylogenetic Tree from "+b.phyloTree.get("title")+":");var a=new PhylovizLinearView(b.layoutOptions)},zoomAndPan:function(a){if(typeof a!=="undefined"){var g=a.zoom,c=a.translate}var j=this,e=j.zoomFunc.scale(),i=j.zoomFunc.translate(),f="",h="";switch(g){case"reset":e=1;i=[0,0];break;case"+":e*=1.1;break;case"-":e*=0.9;break;default:if(typeof g==="number"){e=g}else{if(d3.event!==null){e=d3.event.scale}}}if(e<j.MIN_SCALE||e>j.MAX_SCALE){return}j.zoomFunc.scale(e);f="translate("+j.margins[3]+","+j.margins[0]+") scale("+e+")";if(d3.event!==null){h="translate("+d3.event.translate+")"}else{if(typeof c!=="undefined"){var d=c.split(",")[0];var b=c.split(",")[1];if(!isNaN(d)&&!isNaN(b)){i=[i[0]+parseFloat(d),i[1]+parseFloat(b)]}}j.zoomFunc.translate(i);h="translate("+i+")"}j.phyloTree.set("scaleFactor",e);j.phyloTree.set("translate",i);j.vis.attr("transform",h+f)},reloadViz:function(){var b=this,d=$("#phylovizNexSelector :selected").val(),a=b.phyloTree.get("dataset_id"),c="phyloviz/getJsonData?dataset_id="+a+"&treeIndex="+String(d);$.getJSON(c,function(e){window.initPhyloViz(e.data,e.config)})}});var HeaderButtons=Backbone.View.extend({initialize:function(b){var a=this;a.phylovizView=b;$("#panelHeaderRightBtns").empty();$("#phyloVizNavBtns").empty();$("#phylovizNexSelector").off();a.initNavBtns();a.initRightHeaderBtns();$("#phylovizNexSelector").off().on("change",function(){a.phylovizView.reloadViz()})},initRightHeaderBtns:function(){var a=this;rightMenu=create_icon_buttons_menu([{icon_class:"gear",title:"PhyloViz Settings",on_click:function(){$("#SettingsMenu").show();a.settingsMenu.updateUI()}},{icon_class:"disk",title:"Save visualization",on_click:function(){var b=$("#phylovizNexSelector option:selected").text();if(b){a.phylovizView.phyloTree.set("title",b)}a.phylovizView.phyloTree.save()}},{icon_class:"chevron-expand",title:"Search / Edit Nodes",on_click:function(){$("#nodeSelectionView").show()}},{icon_class:"information",title:"Phyloviz Help",on_click:function(){window.open("http://wiki.g2.bx.psu.edu/Learn/Visualization/PhylogeneticTree")}}],{tooltip_config:{placement:"bottom"}});$("#panelHeaderRightBtns").append(rightMenu.$el)},initNavBtns:function(){var a=this,b=create_icon_buttons_menu([{icon_class:"zoom-in",title:"Zoom in",on_click:function(){a.phylovizView.zoomAndPan({zoom:"+"})}},{icon_class:"zoom-out",title:"Zoom out",on_click:function(){a.phylovizView.zoomAndPan({zoom:"-"})}},{icon_class:"arrow-circle",title:"Reset Zoom/Pan",on_click:function(){a.phylovizView.zoomAndPan({zoom:"reset"})}}],{tooltip_config:{placement:"bottom"}});$("#phyloVizNavBtns").append(b.$el)}});var SettingsMenu=UserMenuBase.extend({className:"Settings",initialize:function(b){var 
a=this;a.phyloTree=b.phyloTree;a.el=$("#SettingsMenu");a.inputs={separation:$("#phyloVizTreeSeparation"),leafHeight:$("#phyloVizTreeLeafHeight"),fontSize:$("#phyloVizTreeFontSize")};$("#settingsCloseBtn").off().on("click",function(){a.el.hide()});$("#phylovizResetSettingsBtn").off().on("click",function(){a.resetToDefaults()});$("#phylovizApplySettingsBtn").off().on("click",function(){a.apply()})},apply:function(){var a=this;if(!a.isAcceptableValue(a.inputs.separation,50,2500)||!a.isAcceptableValue(a.inputs.leafHeight,5,30)||!a.isAcceptableValue(a.inputs.fontSize,5,20)){return}$.each(a.inputs,function(b,c){a.phyloTree.set(b,c.val())})},updateUI:function(){var a=this;$.each(a.inputs,function(b,c){c.val(a.phyloTree.get(b))})},resetToDefaults:function(){$(".bs-tooltip").remove();var a=this;$.each(a.phyloTree.defaults,function(b,c){a.phyloTree.set(b,c)});a.updateUI()},render:function(){}});var NodeSelectionView=UserMenuBase.extend({className:"Settings",initialize:function(b){var a=this;a.el=$("#nodeSelectionView");a.phyloTree=b.phyloTree;a.UI={enableEdit:$("#phylovizEditNodesCheck"),saveChanges:$("#phylovizNodeSaveChanges"),cancelChanges:$("#phylovizNodeCancelChanges"),name:$("#phyloVizSelectedNodeName"),dist:$("#phyloVizSelectedNodeDist"),annotation:$("#phyloVizSelectedNodeAnnotation")};a.valuesOfConcern={name:null,dist:null,annotation:null};$("#nodeSelCloseBtn").off().on("click",function(){a.el.hide()});a.UI.saveChanges.off().on("click",function(){a.updateNodes()});a.UI.cancelChanges.off().on("click",function(){a.cancelChanges()});(function(c){c.fn.enable=function(d){return c(this).each(function(){if(d){c(this).removeAttr("disabled")}else{c(this).attr("disabled","disabled")}})}})(jQuery);a.UI.enableEdit.off().on("click",function(){a.toggleUI()})},toggleUI:function(){var a=this,b=a.UI.enableEdit.is(":checked");!b?a.cancelChanges():"";$.each(a.valuesOfConcern,function(c,d){a.UI[c].enable(b)});if(b){a.UI.saveChanges.show();a.UI.cancelChanges.show()}else{a.UI.saveChanges.hide();a.UI.cancelChanges.hide()}},cancelChanges:function(){var a=this,b=a.phyloTree.get("selectedNode");if(b){$.each(a.valuesOfConcern,function(c,d){a.UI[c].val(b[c])})}},updateNodes:function(){var a=this,b=a.phyloTree.get("selectedNode");if(b){if(!a.isAcceptableValue(a.UI.dist,0,1)||a.hasIllegalJsonCharacters(a.UI.name)||a.hasIllegalJsonCharacters(a.UI.annotation)){return}$.each(a.valuesOfConcern,function(c,d){(b[c])=a.UI[c].val()});a.phyloTree.set("nodeAttrChangedTime",new Date())}else{alert("No node selected")}}});var PhyloVizSearch=UserMenuBase.extend({initialize:function(){var a=this;$("#phyloVizSearchBtn").on("click",function(){var c=$("#phyloVizSearchTerm"),d=$("#phyloVizSearchCondition").val().split("-"),b=d[0],e=d[1];a.hasIllegalJsonCharacters(c);if(b==="dist"){a.isAcceptableValue(c,0,1)}a.searchTree(b,e,c.val())})},searchTree:function(a,c,b){d3.selectAll("g.node").classed("searchHighlight",function(f){var e=f[a];if(typeof e!=="undefined"&&e!==null){if(a==="dist"){switch(c){case"greaterEqual":return e>=+b;case"lesserEqual":return e<=+b;default:return}}else{if(a==="name"||a==="annotation"){return e.toLowerCase().indexOf(b.toLowerCase())!==-1}}}})}});
\ No newline at end of file
+define(["libs/d3","viz/visualization"],function(l,f){var k=Backbone.View.extend({className:"UserMenuBase",isAcceptableValue:function(q,o,m){var n=this,r=q.val(),s=q.attr("displayLabel")||q.attr("id").replace("phyloViz","");function p(t){return !isNaN(parseFloat(t))&&isFinite(t)}if(!p(r)){alert(s+" is not a number!");return false}if(r>m){alert(s+" is too large.");return false}else{if(r<o){alert(s+" is too small.");return false}}return true},hasIllegalJsonCharacters:function(m){if(m.val().search(/"|'|\\/)!==-1){alert("Named fields cannot contain these illegal characters: double quote(\"), single guote('), or back slash(\\). ");return true}return false}});function g(){var v=this,q=l.layout.hierarchy().sort(null).value(null),u=360,p="Linear",t=18,r=200,s=0,o=0.5,m=50;v.leafHeight=function(w){if(typeof w==="undefined"){return t}else{t=w;return v}};v.layoutMode=function(w){if(typeof w==="undefined"){return p}else{p=w;return v}};v.layoutAngle=function(w){if(typeof w==="undefined"){return u}if(isNaN(w)||w<0||w>360){return v}else{u=w;return v}};v.separation=function(w){if(typeof w==="undefined"){return r}else{r=w;return v}};v.links=function(w){return l.layout.tree().links(w)};v.nodes=function(z,x){var y=q.call(v,z,x),w=[],B=0,A=0;y.forEach(function(C){var D=C.data;D.depth=C.depth;B=D.depth>B?D.depth:B;w.push(D)});w.forEach(function(C){if(!C.children){A+=1;C.depth=B}});t=p==="Circular"?u/A:t;s=0;n(w[0],B,t,null);return w};function n(A,C,z,y){var x=A.children,w=0;var B=A.dist||o;B=B>1?1:B;A.dist=B;if(y!==null){A.y0=y.y0+B*r}else{A.y0=m}if(!x){A.x0=s++*z}else{x.forEach(function(D){D.parent=A;w+=n(D,C,z,A)});A.x0=w/x.length}A.x=A.x0;A.y=A.y0;return A.x0}return v}var b=f.Visualization.extend({defaults:{layout:"Linear",separation:250,leafHeight:18,type:"phyloviz",title:"Title",scaleFactor:1,translate:[0,0],fontSize:12,selectedNode:null,nodeAttrChangedTime:0},root:{},toggle:function(m){if(typeof m==="undefined"){return}if(m.children){m._children=m.children;m.children=null}else{m.children=m._children;m._children=null}},toggleAll:function(m){if(m.children&&m.children.length!==0){m.children.forEach(this.toggleAll);toggle(m)}},getData:function(){return this.root},save:function(){var m=this.root;n(m);this.set("root",m);function n(p){delete p.parent;if(p._selected){delete p._selected}if(p.children){p.children.forEach(n)}if(p._children){p._children.forEach(n)}}var o=jQuery.extend(true,{},this.attributes);o.selectedNode=null;show_message("Saving to Galaxy","progress");return $.ajax({url:this.url(),type:"POST",dataType:"json",data:{vis_json:JSON.stringify(o)},success:function(p){var q=p.url.split("id=")[1].split("&")[0],r="/phyloviz/visualization?id="+q;window.history.pushState({},"",r+window.location.hash);hide_modal()}})}});var d=Backbone.View.extend({defaults:{nodeRadius:4.5},stdInit:function(n){var m=this;m.model.on("change:separation change:leafHeight change:fontSize change:nodeAttrChangedTime",m.updateAndRender,m);m.vis=n.vis;m.i=0;m.maxDepth=-1;m.width=n.width;m.height=n.height},updateAndRender:function(o){var n=l.select(".vis"),m=this;o=o||m.model.root;m.renderNodes(o);m.renderLinks(o);m.addTooltips()},renderLinks:function(m){var v=this;var n=v.diagonal;var o=v.duration;var q=v.layoutMode;var s=v.vis.selectAll("g.completeLink").data(v.tree.links(v.nodes),function(w){return w.target.id});var u=function(w){w.pos0=w.source.y0+" "+w.source.x0;w.pos1=w.source.y0+" "+w.target.x0;w.pos2=w.target.y0+" "+w.target.x0};var 
t=s.enter().insert("svg:g","g.node").attr("class","completeLink");t.append("svg:path").attr("class","link").attr("d",function(w){u(w);return"M "+w.pos0+" L "+w.pos1});var r=s.transition().duration(500);r.select("path.link").attr("d",function(w){u(w);return"M "+w.pos0+" L "+w.pos1+" L "+w.pos2});var p=s.exit().remove()},selectNode:function(n){var m=this;l.selectAll("g.node").classed("selectedHighlight",function(o){if(n.id===o.id){if(n._selected){delete n._selected;return false}else{n._selected=true;return true}}return false});m.model.set("selectedNode",n);$("#phyloVizSelectedNodeName").val(n.name);$("#phyloVizSelectedNodeDist").val(n.dist);$("#phyloVizSelectedNodeAnnotation").val(n.annotation||"")},addTooltips:function(){$(".bs-tooltip").remove();$(".node").attr("data-original-title",function(){var n=this.__data__,m=n.annotation||"None";return n?(n.name?n.name+"<br/>":"")+"Dist: "+n.dist+" <br/>Annotation: "+m:""}).tooltip({placement:"top",trigger:"hover"})}});var a=d.extend({initialize:function(n){var m=this;m.margins=n.margins;m.layoutMode="Linear";m.stdInit(n);m.layout();m.updateAndRender(m.model.root)},layout:function(){var m=this;m.tree=new g().layoutMode("Linear");m.diagonal=l.svg.diagonal().projection(function(n){return[n.y,n.x]})},renderNodes:function(m){var t=this,u=t.model.get("fontSize")+"px";t.tree.separation(t.model.get("separation")).leafHeight(t.model.get("leafHeight"));var p=500,n=t.tree.separation(t.model.get("separation")).nodes(t.model.root);var o=t.vis.selectAll("g.node").data(n,function(v){return v.name+v.id||(v.id=++t.i)});t.nodes=n;t.duration=p;var q=o.enter().append("svg:g").attr("class","node").on("dblclick",function(){l.event.stopPropagation()}).on("click",function(v){if(l.event.altKey){t.selectNode(v)}else{if(v.children&&v.children.length===0){return}t.model.toggle(v);t.updateAndRender(v)}});q.attr("transform",function(v){return"translate("+m.y0+","+m.x0+")"});q.append("svg:circle").attr("r",0.000001).style("fill",function(v){return v._children?"lightsteelblue":"#fff"});q.append("svg:text").attr("class","nodeLabel").attr("x",function(v){return v.children||v._children?-10:10}).attr("dy",".35em").attr("text-anchor",function(v){return v.children||v._children?"end":"start"}).style("fill-opacity",0.000001);var r=o.transition().duration(p);r.attr("transform",function(v){return"translate("+v.y+","+v.x+")"});r.select("circle").attr("r",t.defaults.nodeRadius).style("fill",function(v){return v._children?"lightsteelblue":"#fff"});r.select("text").style("fill-opacity",1).style("font-size",u).text(function(v){return v.name});var s=o.exit().transition().duration(p).remove();s.select("circle").attr("r",0.000001);s.select("text").style("fill-opacity",0.000001);n.forEach(function(v){v.x0=v.x;v.y0=v.y})}});var i=Backbone.View.extend({className:"phyloviz",initialize:function(n){var m=this;m.MIN_SCALE=0.05;m.MAX_SCALE=5;m.MAX_DISPLACEMENT=500;m.margins=[10,60,10,80];m.width=$("#PhyloViz").width();m.height=$("#PhyloViz").height();m.radius=m.width;m.data=n.data;$(window).resize(function(){m.width=$("#PhyloViz").width();m.height=$("#PhyloViz").height();m.render()});m.phyloTree=new b(n.config);m.phyloTree.root=m.data;m.zoomFunc=l.behavior.zoom().scaleExtent([m.MIN_SCALE,m.MAX_SCALE]);m.zoomFunc.translate(m.phyloTree.get("translate"));m.zoomFunc.scale(m.phyloTree.get("scaleFactor"));m.navMenu=new c(m);m.settingsMenu=new h({phyloTree:m.phyloTree});m.nodeSelectionView=new e({phyloTree:m.phyloTree});m.search=new j();setTimeout(function(){m.zoomAndPan()},1000)},render:function(){var 
n=this;$("#PhyloViz").empty();n.mainSVG=l.select("#PhyloViz").append("svg:svg").attr("width",n.width).attr("height",n.height).attr("pointer-events","all").call(n.zoomFunc.on("zoom",function(){n.zoomAndPan()}));n.boundingRect=n.mainSVG.append("svg:rect").attr("class","boundingRect").attr("width",n.width).attr("height",n.height).attr("stroke","black").attr("fill","white");n.vis=n.mainSVG.append("svg:g").attr("class","vis");n.layoutOptions={model:n.phyloTree,width:n.width,height:n.height,vis:n.vis,margins:n.margins};$("#title").text("Phylogenetic Tree from "+n.phyloTree.get("title")+":");var m=new a(n.layoutOptions)},zoomAndPan:function(m){var s,o;if(typeof m!=="undefined"){s=m.zoom;o=m.translate}var v=this,q=v.zoomFunc.scale(),u=v.zoomFunc.translate(),r="",t="";switch(s){case"reset":q=1;u=[0,0];break;case"+":q*=1.1;break;case"-":q*=0.9;break;default:if(typeof s==="number"){q=s}else{if(l.event!==null){q=l.event.scale}}}if(q<v.MIN_SCALE||q>v.MAX_SCALE){return}v.zoomFunc.scale(q);r="translate("+v.margins[3]+","+v.margins[0]+") scale("+q+")";if(l.event!==null){t="translate("+l.event.translate+")"}else{if(typeof o!=="undefined"){var p=o.split(",")[0];var n=o.split(",")[1];if(!isNaN(p)&&!isNaN(n)){u=[u[0]+parseFloat(p),u[1]+parseFloat(n)]}}v.zoomFunc.translate(u);t="translate("+u+")"}v.phyloTree.set("scaleFactor",q);v.phyloTree.set("translate",u);v.vis.attr("transform",t+r)},reloadViz:function(){var n=this,p=$("#phylovizNexSelector :selected").val(),m=n.phyloTree.get("dataset_id"),o="phyloviz/getJsonData?dataset_id="+m+"&treeIndex="+String(p);$.getJSON(o,function(q){window.initPhyloViz(q.data,q.config)})}});var c=Backbone.View.extend({initialize:function(n){var m=this;m.phylovizView=n;$("#panelHeaderRightBtns").empty();$("#phyloVizNavBtns").empty();$("#phylovizNexSelector").off();m.initNavBtns();m.initRightHeaderBtns();$("#phylovizNexSelector").off().on("change",function(){m.phylovizView.reloadViz()})},initRightHeaderBtns:function(){var m=this;rightMenu=create_icon_buttons_menu([{icon_class:"gear",title:"PhyloViz Settings",on_click:function(){$("#SettingsMenu").show();m.settingsMenu.updateUI()}},{icon_class:"disk",title:"Save visualization",on_click:function(){var n=$("#phylovizNexSelector option:selected").text();if(n){m.phylovizView.phyloTree.set("title",n)}m.phylovizView.phyloTree.save()}},{icon_class:"chevron-expand",title:"Search / Edit Nodes",on_click:function(){$("#nodeSelectionView").show()}},{icon_class:"information",title:"Phyloviz Help",on_click:function(){window.open("http://wiki.g2.bx.psu.edu/Learn/Visualization/PhylogeneticTree")}}],{tooltip_config:{placement:"bottom"}});$("#panelHeaderRightBtns").append(rightMenu.$el)},initNavBtns:function(){var m=this,n=create_icon_buttons_menu([{icon_class:"zoom-in",title:"Zoom in",on_click:function(){m.phylovizView.zoomAndPan({zoom:"+"})}},{icon_class:"zoom-out",title:"Zoom out",on_click:function(){m.phylovizView.zoomAndPan({zoom:"-"})}},{icon_class:"arrow-circle",title:"Reset Zoom/Pan",on_click:function(){m.phylovizView.zoomAndPan({zoom:"reset"})}}],{tooltip_config:{placement:"bottom"}});$("#phyloVizNavBtns").append(n.$el)}});var h=k.extend({className:"Settings",initialize:function(n){var 
m=this;m.phyloTree=n.phyloTree;m.el=$("#SettingsMenu");m.inputs={separation:$("#phyloVizTreeSeparation"),leafHeight:$("#phyloVizTreeLeafHeight"),fontSize:$("#phyloVizTreeFontSize")};$("#settingsCloseBtn").off().on("click",function(){m.el.hide()});$("#phylovizResetSettingsBtn").off().on("click",function(){m.resetToDefaults()});$("#phylovizApplySettingsBtn").off().on("click",function(){m.apply()})},apply:function(){var m=this;if(!m.isAcceptableValue(m.inputs.separation,50,2500)||!m.isAcceptableValue(m.inputs.leafHeight,5,30)||!m.isAcceptableValue(m.inputs.fontSize,5,20)){return}$.each(m.inputs,function(n,o){m.phyloTree.set(n,o.val())})},updateUI:function(){var m=this;$.each(m.inputs,function(n,o){o.val(m.phyloTree.get(n))})},resetToDefaults:function(){$(".bs-tooltip").remove();var m=this;$.each(m.phyloTree.defaults,function(n,o){m.phyloTree.set(n,o)});m.updateUI()},render:function(){}});var e=k.extend({className:"Settings",initialize:function(n){var m=this;m.el=$("#nodeSelectionView");m.phyloTree=n.phyloTree;m.UI={enableEdit:$("#phylovizEditNodesCheck"),saveChanges:$("#phylovizNodeSaveChanges"),cancelChanges:$("#phylovizNodeCancelChanges"),name:$("#phyloVizSelectedNodeName"),dist:$("#phyloVizSelectedNodeDist"),annotation:$("#phyloVizSelectedNodeAnnotation")};m.valuesOfConcern={name:null,dist:null,annotation:null};$("#nodeSelCloseBtn").off().on("click",function(){m.el.hide()});m.UI.saveChanges.off().on("click",function(){m.updateNodes()});m.UI.cancelChanges.off().on("click",function(){m.cancelChanges()});(function(o){o.fn.enable=function(p){return o(this).each(function(){if(p){o(this).removeAttr("disabled")}else{o(this).attr("disabled","disabled")}})}})(jQuery);m.UI.enableEdit.off().on("click",function(){m.toggleUI()})},toggleUI:function(){var m=this,n=m.UI.enableEdit.is(":checked");if(!n){m.cancelChanges()}$.each(m.valuesOfConcern,function(o,p){m.UI[o].enable(n)});if(n){m.UI.saveChanges.show();m.UI.cancelChanges.show()}else{m.UI.saveChanges.hide();m.UI.cancelChanges.hide()}},cancelChanges:function(){var m=this,n=m.phyloTree.get("selectedNode");if(n){$.each(m.valuesOfConcern,function(o,p){m.UI[o].val(n[o])})}},updateNodes:function(){var m=this,n=m.phyloTree.get("selectedNode");if(n){if(!m.isAcceptableValue(m.UI.dist,0,1)||m.hasIllegalJsonCharacters(m.UI.name)||m.hasIllegalJsonCharacters(m.UI.annotation)){return}$.each(m.valuesOfConcern,function(o,p){(n[o])=m.UI[o].val()});m.phyloTree.set("nodeAttrChangedTime",new Date())}else{alert("No node selected")}}});var j=k.extend({initialize:function(){var m=this;$("#phyloVizSearchBtn").on("click",function(){var o=$("#phyloVizSearchTerm"),p=$("#phyloVizSearchCondition").val().split("-"),n=p[0],q=p[1];m.hasIllegalJsonCharacters(o);if(n==="dist"){m.isAcceptableValue(o,0,1)}m.searchTree(n,q,o.val())})},searchTree:function(m,o,n){l.selectAll("g.node").classed("searchHighlight",function(q){var p=q[m];if(typeof p!=="undefined"&&p!==null){if(m==="dist"){switch(o){case"greaterEqual":return p>=+n;case"lesserEqual":return p<=+n;default:return}}else{if(m==="name"||m==="annotation"){return p.toLowerCase().indexOf(n.toLowerCase())!==-1}}}})}});return{PhylovizView:i}});
\ No newline at end of file
diff -r 89c27f6b20f8f261a8e45e33abcc2ed9c1509a71 -r f3b183e756f9b209ef0904718ed547e04c74ab7a static/scripts/packed/viz/scatterplot.js
--- a/static/scripts/packed/viz/scatterplot.js
+++ b/static/scripts/packed/viz/scatterplot.js
@@ -1,1 +1,1 @@
-define(["../libs/underscore","../libs/d3","../mvc/base-mvc"],function(){function b(f){var i=this,d=10,h=7,g=10,e=8,c=5;this.log=function(){if(this.debugging&&console&&console.debug){var j=Array.prototype.slice.call(arguments);j.unshift(this.toString());console.debug.apply(null,j)}};this.log("new TwoVarScatterplot:",f);this.defaults={id:"TwoVarScatterplot",containerSelector:"body",maxDataPoints:30000,bubbleRadius:4,entryAnimDuration:500,xNumTicks:10,yNumTicks:10,xAxisLabelBumpY:40,yAxisLabelBumpX:-35,width:500,height:500,marginTop:50,marginRight:50,marginBottom:50,marginLeft:50,xMin:null,xMax:null,yMin:null,yMax:null,xLabel:"X",yLabel:"Y"};this.config=_.extend({},this.defaults,f);this.updateConfig=function(j){_.extend(this.config,j)};this.toString=function(){return this.config.id};this.translateStr=function(j,k){return"translate("+j+","+k+")"};this.rotateStr=function(k,j,l){return"rotate("+k+","+j+","+l+")"};this.svg=d3.select(this.config.containerSelector).append("svg:svg").attr("class","chart").style("display","none");this.content=this.svg.append("svg:g").attr("class","content");this.xAxis=this.content.append("g").attr("class","axis").attr("id","x-axis");this.xAxisLabel=this.xAxis.append("text").attr("class","axis-label").attr("id","x-axis-label");this.yAxis=this.content.append("g").attr("class","axis").attr("id","y-axis");this.yAxisLabel=this.yAxis.append("text").attr("class","axis-label").attr("id","y-axis-label");this.log("built svg:",d3.selectAll("svg"));this.adjustChartDimensions=function(m,k,j,l){m=m||0;k=k||0;j=j||0;l=l||0;this.svg.attr("width",this.config.width+(this.config.marginRight+k)+(this.config.marginLeft+l)).attr("height",this.config.height+(this.config.marginTop+m)+(this.config.marginBottom+j)).style("display","block");this.content=this.svg.select("g.content").attr("transform",this.translateStr(this.config.marginLeft+l,this.config.marginTop+m))};this.preprocessData=function(j){return(j.length>this.config.maxDataPoints)?(j.slice(0,this.config.maxDataPoints)):(j)};this.setUpDomains=function(j,l,k){this.log("setUpDomains");this.xMin=this.config.xMin||(k)?(k[0].min):(d3.min(j));this.xMax=this.config.xMax||(k)?(k[0].max):(d3.max(j));this.yMin=this.config.yMin||(k)?(k[1].min):(d3.min(l));this.yMax=this.config.yMax||(k)?(k[1].max):(d3.max(l))};this.setUpScales=function(){this.xScale=d3.scale.linear().domain([this.xMin,this.xMax]).range([0,this.config.width]),this.yScale=d3.scale.linear().domain([this.yMin,this.yMax]).range([this.config.height,0])};this.setUpXAxis=function(){this.xAxisFn=d3.svg.axis().scale(this.xScale).ticks(this.config.xNumTicks).orient("bottom");this.xAxis.attr("transform",this.translateStr(0,this.config.height)).call(this.xAxisFn);this.xLongestLabel=d3.max(_.map([this.xMin,this.xMax],function(j){return(String(j)).length}));if(this.xLongestLabel>=c){this.xAxis.selectAll("g").filter(":nth-child(odd)").style("display","none")}this.xAxisLabel.attr("x",this.config.width/2).attr("y",this.config.xAxisLabelBumpY).attr("text-anchor","middle").text(this.config.xLabel)};this.setUpYAxis=function(){this.yAxisFn=d3.svg.axis().scale(this.yScale).ticks(this.config.yNumTicks).orient("left");this.yAxis.call(this.yAxisFn);this.log("yAxis:",this.yAxis);var j=this.yAxis.selectAll("text").filter(function(n,m){return m!==0});this.yLongestLabel=d3.max(j[0].map(function(n,m){return(d3.select(n).text()).length}))||0;var k=d+(this.yLongestLabel*h)+e+g;this.config.yAxisLabelBumpX=-(k-g);if(this.config.marginLeft<k){var 
l=(k)-this.config.marginLeft;l=(l<0)?(0):(l);this.log("adjusting:",l);this.adjustChartDimensions(0,0,0,l)}this.yAxisLabel.attr("x",this.config.yAxisLabelBumpX).attr("y",this.config.height/2).attr("text-anchor","middle").attr("transform",this.rotateStr(-90,this.config.yAxisLabelBumpX,this.config.height/2)).text(this.config.yLabel)};this.renderGrid=function(){this.vGridLines=this.content.selectAll("line.v-grid-line").data(this.xScale.ticks(this.xAxisFn.ticks()[0]));this.vGridLines.enter().append("svg:line").classed("grid-line v-grid-line",true);this.vGridLines.attr("x1",this.xScale).attr("y1",0).attr("x2",this.xScale).attr("y2",this.config.height);this.vGridLines.exit().remove();this.hGridLines=this.content.selectAll("line.h-grid-line").data(this.yScale.ticks(this.yAxisFn.ticks()[0]));this.hGridLines.enter().append("svg:line").classed("grid-line h-grid-line",true);this.hGridLines.attr("x1",0).attr("y1",this.yScale).attr("x2",this.config.width).attr("y2",this.yScale);this.hGridLines.exit().remove()};this.glyphEnterState=function(j){};this.glyphFinalState=function(j){};this.glyphExitState=function(j){};this.renderDatapoints=function(j,m){var l=function(o,n){return i.xScale(j[n])};var k=function(o,n){return i.yScale(m[n])};this.datapoints=this.content.selectAll(".glyph").data(j);this.datapoints.enter().append("svg:circle").attr("class","glyph").attr("cx",l).attr("cy",0).attr("r",0);this.datapoints.transition().duration(this.config.entryAnimDuration).attr("cx",l).attr("cy",k).attr("r",this.config.bubbleRadius);this.datapoints.exit().transition().duration(this.config.entryAnimDuration).attr("cy",this.config.height).attr("r",0).style("fill-opacity",0).remove()};this.render=function(k,l){var j=k[0],m=k[1];this.log("renderScatterplot",j.length,m.length,this.config);j=this.preprocessData(j);m=this.preprocessData(m);this.setUpDomains(j,m,l);this.log("xMin, xMax, yMin, yMax:",this.xMin,this.xMax,this.yMin,this.yMax);this.setUpScales();this.adjustChartDimensions();this.setUpXAxis();this.setUpYAxis();this.renderGrid();this.renderDatapoints(j,m)}}var a=BaseView.extend(LoggableMixin).extend({tagName:"form",className:"scatterplot-settings-form",loadingIndicatorImagePath:(galaxy_paths.get("image_path")+"/loading_large_white_bg.gif"),events:{"click #render-button":"renderScatterplot"},initialize:function(c){if(!c||!c.dataset){throw ("ScatterplotView requires a dataset")}else{this.dataset=c.dataset}this.apiDatasetsURL=c.apiDatasetsURL;this.chartConfig=c.chartConfig||{};this.log("this.chartConfig:",this.chartConfig);this.plot=new b(this.chartConfig)},render:function(){var c=this,e="",d="";this.dataset.metadata_column_types=this.dataset.metadata_column_types.split(", ");_.each(this.dataset.metadata_column_types,function(h,g){if(h==="int"||h==="float"){var f="column "+g;if(c.dataset.metadata_column_names){f=c.dataset.metadata_column_names[g]}d+='<option value="'+g+'">'+f+"</option>"}});e+='<div id="loading-indicator" style="display: none;">';e+='<img class="loading-img" src='+this.loadingIndicatorImagePath+" />";e+='<span class="loading-message"></span>';e+="</div>";e+='<div id="x-column-input">';e+='<label for="">Data column for X: </label><select name="x-column">'+d+"</select>";e+="</div>";e+='<div id="y-column-input">';e+='<label for="">Data column for Y: </label><select name="y-column">'+d+"</select>";e+="</div>";e+='<input id="render-button" type="button" value="Draw" />';e+='<div class="clear"></div>';this.$el.append(e);this.$el.find("#render-button");return 
this},showLoadingIndicator:function(c){c=c||"";this.$el.find("div#loading-indicator").children(".loading-message").text(c);this.$el.find("div#loading-indicator").show("fast")},hideLoadingIndicator:function(){this.$el.find("div#loading-indicator").hide("fast")},renderScatterplot:function(){var d=this,e=this.apiDatasetsURL+"/"+this.dataset.id+"?data_type=raw_data&",i=this.$el.find('[name="x-column"]'),j=i.val(),g=i.children('[value="'+j+'"]').text(),h=this.$el.find('[name="y-column"]'),f=h.val(),c=h.children('[value="'+f+'"]').text();this.log(g,c);this.chartConfig.xLabel=g;this.chartConfig.yLabel=c;d.plot.updateConfig(this.chartConfig);e+=jQuery.param({columns:"["+[j,f]+"]"});this.log("url:",e);this.showLoadingIndicator("Fetching data...");jQuery.ajax({url:e,dataType:"json",success:function(k){d.showLoadingIndicator("Rendering...");d.endpoint=k.endpoint;d.plot.render(k.data,k.meta);d.hideLoadingIndicator()},error:function(m,k,l){d.hideLoadingIndicator();alert("ERROR:"+k+"\n"+l)}})}});return{ScatterplotView:a}});
\ No newline at end of file
+define(["../libs/underscore","../libs/d3","../mvc/base-mvc","../templates/compiled/template-visualization-scatterplotControlForm"],function(){function a(f){var i=this,d=10,h=7,g=10,e=8,c=5;this.log=function(){if(this.debugging&&console&&console.debug){var j=Array.prototype.slice.call(arguments);j.unshift(this.toString());console.debug.apply(null,j)}};this.log("new TwoVarScatterplot:",f);this.defaults={id:"TwoVarScatterplot",containerSelector:"body",maxDataPoints:30000,bubbleRadius:4,entryAnimDuration:500,xNumTicks:10,yNumTicks:10,xAxisLabelBumpY:40,yAxisLabelBumpX:-35,width:500,height:500,marginTop:50,marginRight:50,marginBottom:50,marginLeft:50,xMin:null,xMax:null,yMin:null,yMax:null,xLabel:"X",yLabel:"Y"};this.config=_.extend({},this.defaults,f);this.updateConfig=function(j){_.extend(this.config,j)};this.toString=function(){return this.config.id};this.translateStr=function(j,k){return"translate("+j+","+k+")"};this.rotateStr=function(k,j,l){return"rotate("+k+","+j+","+l+")"};this.svg=d3.select(this.config.containerSelector).append("svg:svg").attr("class","chart").style("display","none");this.content=this.svg.append("svg:g").attr("class","content");this.xAxis=this.content.append("g").attr("class","axis").attr("id","x-axis");this.xAxisLabel=this.xAxis.append("text").attr("class","axis-label").attr("id","x-axis-label");this.yAxis=this.content.append("g").attr("class","axis").attr("id","y-axis");this.yAxisLabel=this.yAxis.append("text").attr("class","axis-label").attr("id","y-axis-label");this.log("built svg:",d3.selectAll("svg"));this.adjustChartDimensions=function(m,k,j,l){m=m||0;k=k||0;j=j||0;l=l||0;this.svg.attr("width",this.config.width+(this.config.marginRight+k)+(this.config.marginLeft+l)).attr("height",this.config.height+(this.config.marginTop+m)+(this.config.marginBottom+j)).style("display","block");this.content=this.svg.select("g.content").attr("transform",this.translateStr(this.config.marginLeft+l,this.config.marginTop+m))};this.preprocessData=function(j){return(j.length>this.config.maxDataPoints)?(j.slice(0,this.config.maxDataPoints)):(j)};this.setUpDomains=function(j,l,k){this.log("setUpDomains");this.xMin=this.config.xMin||(k)?(k[0].min):(d3.min(j));this.xMax=this.config.xMax||(k)?(k[0].max):(d3.max(j));this.yMin=this.config.yMin||(k)?(k[1].min):(d3.min(l));this.yMax=this.config.yMax||(k)?(k[1].max):(d3.max(l))};this.setUpScales=function(){this.xScale=d3.scale.linear().domain([this.xMin,this.xMax]).range([0,this.config.width]),this.yScale=d3.scale.linear().domain([this.yMin,this.yMax]).range([this.config.height,0])};this.setUpXAxis=function(){this.xAxisFn=d3.svg.axis().scale(this.xScale).ticks(this.config.xNumTicks).orient("bottom");this.xAxis.attr("transform",this.translateStr(0,this.config.height)).call(this.xAxisFn);this.xLongestLabel=d3.max(_.map([this.xMin,this.xMax],function(j){return(String(j)).length}));if(this.xLongestLabel>=c){this.xAxis.selectAll("g").filter(":nth-child(odd)").style("display","none")}this.xAxisLabel.attr("x",this.config.width/2).attr("y",this.config.xAxisLabelBumpY).attr("text-anchor","middle").text(this.config.xLabel)};this.setUpYAxis=function(){this.yAxisFn=d3.svg.axis().scale(this.yScale).ticks(this.config.yNumTicks).orient("left");this.yAxis.call(this.yAxisFn);this.log("yAxis:",this.yAxis);var j=this.yAxis.selectAll("text").filter(function(n,m){return m!==0});this.yLongestLabel=d3.max(j[0].map(function(n,m){return(d3.select(n).text()).length}))||0;var 
k=d+(this.yLongestLabel*h)+e+g;this.config.yAxisLabelBumpX=-(k-g);if(this.config.marginLeft<k){var l=(k)-this.config.marginLeft;l=(l<0)?(0):(l);this.log("adjusting:",l);this.adjustChartDimensions(0,0,0,l)}this.yAxisLabel.attr("x",this.config.yAxisLabelBumpX).attr("y",this.config.height/2).attr("text-anchor","middle").attr("transform",this.rotateStr(-90,this.config.yAxisLabelBumpX,this.config.height/2)).text(this.config.yLabel)};this.renderGrid=function(){this.vGridLines=this.content.selectAll("line.v-grid-line").data(this.xScale.ticks(this.xAxisFn.ticks()[0]));this.vGridLines.enter().append("svg:line").classed("grid-line v-grid-line",true);this.vGridLines.attr("x1",this.xScale).attr("y1",0).attr("x2",this.xScale).attr("y2",this.config.height);this.vGridLines.exit().remove();this.hGridLines=this.content.selectAll("line.h-grid-line").data(this.yScale.ticks(this.yAxisFn.ticks()[0]));this.hGridLines.enter().append("svg:line").classed("grid-line h-grid-line",true);this.hGridLines.attr("x1",0).attr("y1",this.yScale).attr("x2",this.config.width).attr("y2",this.yScale);this.hGridLines.exit().remove()};this.glyphEnterState=function(j){};this.glyphFinalState=function(j){};this.glyphExitState=function(j){};this.renderDatapoints=function(j,m){var l=function(o,n){return i.xScale(j[n])};var k=function(o,n){return i.yScale(m[n])};this.datapoints=this.content.selectAll(".glyph").data(j);this.datapoints.enter().append("svg:circle").attr("class","glyph").attr("cx",l).attr("cy",0).attr("r",0);this.datapoints.transition().duration(this.config.entryAnimDuration).attr("cx",l).attr("cy",k).attr("r",this.config.bubbleRadius);this.datapoints.exit().transition().duration(this.config.entryAnimDuration).attr("cy",this.config.height).attr("r",0).style("fill-opacity",0).remove()};this.render=function(k,l){var j=k[0],m=k[1];this.log("renderScatterplot",j.length,m.length,this.config);j=this.preprocessData(j);m=this.preprocessData(m);this.setUpDomains(j,m,l);this.log("xMin, xMax, yMin, yMax:",this.xMin,this.xMax,this.yMin,this.yMax);this.setUpScales();this.adjustChartDimensions();this.setUpXAxis();this.setUpYAxis();this.renderGrid();this.renderDatapoints(j,m)}}var b=BaseView.extend(LoggableMixin).extend({tagName:"form",className:"scatterplot-settings-form",loadingIndicatorImagePath:(galaxy_paths.get("image_path")+"/loading_large_white_bg.gif"),events:{"click #render-button":"renderScatterplot"},initialize:function(c){if(!c||!c.dataset){throw ("ScatterplotView requires a dataset")}else{this.dataset=c.dataset}this.apiDatasetsURL=c.apiDatasetsURL;this.chartConfig=c.chartConfig||{};this.log("this.chartConfig:",this.chartConfig);this.plot=new a(this.chartConfig)},render:function(){var c=this,d="";var e={loadingIndicatorImagePath:this.loadingIndicatorImagePath,config:this.chartConfig,availableColumns:[]};_.each(this.dataset.metadata_column_types.split(", "),function(h,g){if(h==="int"||h==="float"){var f="column "+g;if(c.dataset.metadata_column_names){f=c.dataset.metadata_column_names[g]}e.availableColumns.push({index:g,name:f})}});d=b.templates.form(e);this.$el.append(d);return this},showLoadingIndicator:function(c){c=c||"";this.$el.find("div#loading-indicator").children(".loading-message").text(c);this.$el.find("div#loading-indicator").show("fast")},hideLoadingIndicator:function(){this.$el.find("div#loading-indicator").hide("fast")},renderScatterplot:function(){var 
d=this,e=this.apiDatasetsURL+"/"+this.dataset.id+"?data_type=raw_data&",i=this.$el.find('[name="x-column"]'),j=i.val(),g=i.children('[value="'+j+'"]').text(),h=this.$el.find('[name="y-column"]'),f=h.val(),c=h.children('[value="'+f+'"]').text();this.log(g,c);this.chartConfig.xLabel=g;this.chartConfig.yLabel=c;d.plot.updateConfig(this.chartConfig);e+=jQuery.param({columns:"["+[j,f]+"]"});this.log("url:",e);this.showLoadingIndicator("Fetching data...");jQuery.ajax({url:e,dataType:"json",success:function(k){d.endpoint=k.endpoint;d.showLoadingIndicator("Rendering...");d.plot.render(k.data,k.meta);d.hideLoadingIndicator()},error:function(m,k,l){d.hideLoadingIndicator();alert("ERROR:"+k+"\n"+l)}})}});b.templates=CompiledTemplateLoader.getTemplates({"visualization-templates.html":{form:"template-visualization-scatterplotControlForm"}});return{TwoVarScatterplot:a,ScatterplotControlForm:b}});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: (a) JSLint cleanup for phyloviz.js and (b) make phyloviz, scatterplot available.
by Bitbucket 04 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/89c27f6b20f8/
changeset: 89c27f6b20f8
user: jgoecks
date: 2012-10-04 17:07:29
summary: (a) JSLint cleanup for phyloviz.js and (b) make phyloviz, scatterplot available.
affected #: 2 files
diff -r d7eba083859770e676f173b75f2053f575305113 -r 89c27f6b20f8f261a8e45e33abcc2ed9c1509a71 static/scripts/viz/phyloviz.js
--- a/static/scripts/viz/phyloviz.js
+++ b/static/scripts/viz/phyloviz.js
@@ -233,12 +233,16 @@
// removing unnecessary attributes
if (node._selected){ delete node._selected;}
- node.children ? node.children.forEach(cleanTree) : 0;
- node._children ? node._children.forEach(cleanTree) : 0;
+ if (node.children) {
+ node.children.forEach(cleanTree);
+ }
+ if (node._children) {
+ node._children.forEach(cleanTree);
+ }
}
var config = jQuery.extend(true, {}, this.attributes);
- config["selectedNode"] = null;
+ config.selectedNode = null;
show_message("Saving to Galaxy", "progress");
@@ -598,9 +602,11 @@
* Function to zoom and pan the svg element which the entire tree is contained within
* Uses d3.zoom events, and extend them to allow manual updates and keeping states in model
*/
+ var zoomParams,
+ translateParams;
if (typeof event !== "undefined") {
- var zoomParams = event.zoom,
- translateParams = event.translate;
+ zoomParams = event.zoom;
+ translateParams = event.translate;
}
var self = this,
@@ -761,9 +767,9 @@
* Applying user values to phylotree model.
*/
var self = this;
- if (!self.isAcceptableValue(self.inputs["separation"], 50, 2500) ||
- !self.isAcceptableValue(self.inputs["leafHeight"], 5, 30) ||
- !self.isAcceptableValue(self.inputs["fontSize"], 5, 20)){
+ if (!self.isAcceptableValue(self.inputs.separation, 50, 2500) ||
+ !self.isAcceptableValue(self.inputs.leafHeight, 5, 30) ||
+ !self.isAcceptableValue(self.inputs.fontSize, 5, 20)){
return;
}
$.each(self.inputs, function(key, $input){
@@ -854,7 +860,7 @@
var self = this,
checked = self.UI.enableEdit.is(':checked');
- !checked ? self.cancelChanges() : "";
+ if (!checked) { self.cancelChanges(); }
$.each(self.valuesOfConcern, function(key, value) {
self.UI[key].enable(checked);
diff -r d7eba083859770e676f173b75f2053f575305113 -r 89c27f6b20f8f261a8e45e33abcc2ed9c1509a71 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -227,10 +227,10 @@
## information--URL base, dataset id, dbkey, visualizations--in anchor.
<%
visualizations = data.get_visualizations()
- ## HACK: if there are visualizations, only provide trackster for now
+ ## HACK: if there are visualizations, only provide a subset for now
## since others are not ready. - comment out to see all WIP visualizations
- #if visualizations:
- # visualizations = [ vis for vis in visualizations if vis in [ 'trackster' ] ]
+ if visualizations:
+ visualizations = [ vis for vis in visualizations if vis in [ 'trackster', 'phyloviz', 'scatterplot' ] ]
%>
%if visualizations:
<a href="${h.url_for( controller='visualization' )}"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Make Phyloviz into an AMD module and use requireJS for Phyloviz.
by Bitbucket 04 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d7eba0838597/
changeset: d7eba0838597
user: jgoecks
date: 2012-10-04 16:51:10
summary: Make Phyloviz into an AMD module and use requireJS for Phyloviz.
affected #: 2 files
diff -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 -r d7eba083859770e676f173b75f2053f575305113 static/scripts/viz/phyloviz.js
--- a/static/scripts/viz/phyloviz.js
+++ b/static/scripts/viz/phyloviz.js
@@ -1,3 +1,5 @@
+define(['libs/d3', 'viz/visualization'], function(d3, visualization_mod) {
+
var UserMenuBase = Backbone.View.extend({
/**
* Base class of any menus that takes in user interaction. Contains checking methods.
@@ -165,7 +167,7 @@
/**
* -- PhyloTree Model --
*/
-var PhyloTree = Visualization.extend({
+var PhyloTree = visualization_mod.Visualization.extend({
defaults : {
layout: "Linear",
separation : 250, // px dist between nodes of different depth to represent 1 evolutionary until
@@ -588,7 +590,7 @@
$("#title").text("Phylogenetic Tree from " + self.phyloTree.get("title") + ":");
// -- Create Linear view instance --
- var linearView = new PhylovizLinearView(self.layoutOptions)
+ var linearView = new PhylovizLinearView(self.layoutOptions);
},
zoomAndPan : function(event){
@@ -952,4 +954,10 @@
}
});
}
+});
+
+return {
+ PhylovizView: PhylovizView
+};
+
});
\ No newline at end of file
diff -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 -r d7eba083859770e676f173b75f2053f575305113 templates/visualization/phyloviz.mako
--- a/templates/visualization/phyloviz.mako
+++ b/templates/visualization/phyloviz.mako
@@ -138,7 +138,43 @@
<%def name="javascripts()">
${parent.javascripts()}
- ${h.js( "galaxy.panels", "libs/d3", "mvc/data", "viz/visualization", "viz/phyloviz")}
+ ${h.js( "libs/require" )}
+
+ <script type="text/javascript">
+
+ require.config({
+ baseUrl: "${h.url_for('/static/scripts')}",
+ shim: {
+ "libs/underscore": { exports: "_" },
+ "libs/d3": { exports: "d3" }
+ }
+ });
+
+ require(["viz/phyloviz"], function(phyloviz_mod) {
+
+ function initPhyloViz(data, config) {
+ var phyloviz;
+
+ // -- Initialization code |-->
+ phyloviz = new phyloviz_mod.PhylovizView({
+ data: data,
+ layout : "Linear",
+ config : config
+ });
+
+ // -- Render viz. --
+ phyloviz.render();
+
+ }
+
+ $(function firstVizLoad(){ // calls when viz is loaded for the first time
+ var config = JSON.parse( '${ h.to_json_string( config )}');
+ var data = JSON.parse('${h.to_json_string(data)}');
+ initPhyloViz(data, config);
+ });
+ });
+
+ </script></%def>
@@ -290,31 +326,6 @@
<div id="PhyloViz" ></div>
- <script type="text/javascript">
-
- function initPhyloViz(data, config) {
- var phyloviz;
-
- // -- Initialization code |-->
- phyloviz = new PhylovizView({
- data: data,
- layout : "Linear",
- config : config
- });
-
- // -- Render viz. --
- phyloviz.render();
-
- }
-
- $(function firstVizLoad(){ // calls when viz is loaded for the first time
- var config = JSON.parse( '${ h.to_json_string( config )}');
- var data = JSON.parse('${h.to_json_string(data)}');
- initPhyloViz(data, config);
- });
-
- </script>
-
</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Server-side refactoring for PhyloViz: move parsers + data providers into data_providers directory and remove phyloviz controller.
by Bitbucket 04 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6f9ce8692bb6/
changeset: 6f9ce8692bb6
user: jgoecks
date: 2012-10-04 16:34:57
summary: Server-side refactoring for PhyloViz: move parsers + data providers into data_providers directory and remove phyloviz controller.
affected #: 14 files
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/data_providers/phyloviz/__init__.py
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
@@ -0,0 +1,42 @@
+""" Data providers code for PhyloViz """
+
+from galaxy.visualization.data_providers.basic import BaseDataProvider
+from galaxy.visualization.data_providers.phyloviz.nexusparser import Nexus_Parser
+from galaxy.visualization.data_providers.phyloviz.newickparser import Newick_Parser
+from galaxy.visualization.data_providers.phyloviz.phyloxmlparser import Phyloxml_Parser
+
+class PhylovizDataProvider( BaseDataProvider ):
+
+ def __init__( self, original_dataset=None ):
+ super( PhylovizDataProvider, self ).__init__( original_dataset=original_dataset )
+
+ def get_data( self ):
+ """returns [trees], meta
+ Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
+ """
+
+ jsonDicts, meta = [], {}
+ file_ext = self.original_dataset.datatype.file_ext
+ file_name = self.original_dataset.file_name
+ try:
+ if file_ext == "nhx": # parses newick files
+ newickParser = Newick_Parser()
+ jsonDicts, parseMsg = newickParser.parseFile( file_name )
+ elif file_ext == "phyloxml": # parses phyloXML files
+ phyloxmlParser = Phyloxml_Parser()
+ jsonDicts, parseMsg = phyloxmlParser.parseFile( file_name )
+ elif file_ext == "nex": # parses nexus files
+ nexusParser = Nexus_Parser()
+ jsonDicts, parseMsg = nexusParser.parseFile( file_name )
+ meta["trees"] = parseMsg
+ else:
+ raise Exception("File type is not supported")
+
+ meta["msg"] = parseMsg
+
+ except Exception, e:
+ raise e
+ jsonDicts, meta["msg"] = [], "Parse failed"
+
+ return jsonDicts, meta
+
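As a rough orientation sketch (not part of this changeset), the new provider could be driven as below. It assumes that BaseDataProvider's constructor stores the object passed as original_dataset, and the stub class stands in for a Galaxy HDA by exposing only the two attributes get_data() actually reads (datatype.file_ext and file_name); the newick content is invented and temp-file cleanup is omitted.

import tempfile
from collections import namedtuple

from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider

# Write a tiny, made-up newick file to a temporary path.
with tempfile.NamedTemporaryFile(suffix=".nhx", delete=False) as handle:
    handle.write("(A:0.1,B:0.2,(D:0.3,E:0.4)C:0.5)root;")
    newick_path = handle.name

# Stand-in for an HDA: only the attributes read by get_data() are provided.
Datatype = namedtuple("Datatype", ["file_ext"])

class StubDataset(object):
    def __init__(self, file_name, file_ext):
        self.file_name = file_name
        self.datatype = Datatype(file_ext)

provider = PhylovizDataProvider(original_dataset=StubDataset(newick_path, "nhx"))
trees, meta = provider.get_data()
# trees is a list of json-able tree dicts; meta["msg"] carries the parser's message,
# and for nexus input meta["trees"] additionally lists (name, index) pairs.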
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py
@@ -0,0 +1,125 @@
+import json
+
+class Node(object):
+ """Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree"""
+ def __init__(self, nodeName, **kwargs):
+ """Creates a node and adds in the typical annotations"""
+ self.name, self.id = nodeName, kwargs.get("id", 0)
+ self.depth = kwargs.get("depth", 0)
+ self.children = []
+
+ self.isInternal = kwargs.get("isInternal", 0)
+ self.length, self.bootstrap = kwargs.get("length", 0), kwargs.get("bootstrap", None)
+ self.events = kwargs.get("events", "")
+
+ # clean up bootstrap values
+ if self.bootstrap == -1:
+ self.bootstrap = None
+
+ def addChildNode(self, child):
+ """Adds a child node to the current node"""
+ if isinstance(child, Node):
+ self.children.append(child)
+ else:
+ self.children += child
+
+
+ def __str__(self):
+ return self.name + " id:" + str(self.id) + ", depth: " + str(self.depth)
+
+
+ def toJson(self):
+ """Converts the data in the node to a dict representation of json"""
+ thisJson = {
+ "name" : self.name,
+ "id" : self.id,
+ "depth" : self.depth,
+ "dist" : self.length
+ }
+ thisJson = self.addChildrenToJson(thisJson)
+ thisJson = self.addMiscToJson(thisJson)
+ return thisJson
+
+ def addChildrenToJson(self, jsonDict):
+ """Needs a special method to addChildren, such that the key does not appear in the Jsondict when the children is empty
+ this requirement is due to the layout algorithm used by d3 layout for hiding subtree """
+ if len(self.children) > 0:
+ children = [ node.toJson() for node in self.children]
+ jsonDict["children"] = children
+ return jsonDict
+
+
+ def addMiscToJson(self, jsonDict):
+ """Adds other misc attributes to json if they are present"""
+ if not self.events == "":
+ jsonDict["events"] = self.events
+ if not self.bootstrap == None:
+ jsonDict["bootstrap"] = self.bootstrap
+ return jsonDict
+
+
+
+class PhyloTree(object):
+ """Standardized python based class to represent the phylogenetic tree parsed from different
+ phylogenetic file formats."""
+
+ def __init__(self):
+ self.root, self.rootAttr = None, {}
+ self.nodes = {}
+ self.title = None
+ self.id = 1
+
+ def addAttributesToRoot(self, attrDict):
+ """Adds attributes to root, but first we put it in a temp store and bind it with root when .toJson is called"""
+ for key, value in attrDict.items():
+ self.rootAttr[key] = value
+
+ def makeNode(self, nodeName, **kwargs):
+ """Called to make a node within PhyloTree, arbitrary kwargs can be passed to annotate nodes
+ Tracks the number of nodes via internally incremented id"""
+ kwargs["id"] = self.id
+ self.id += 1
+ return Node(nodeName, **kwargs)
+
+ def addRoot(self, root):
+ """Creates a root for phyloTree"""
+ assert isinstance(root, Node)
+ root.parent = None
+ self.root = root
+
+ def generateJsonableDict(self):
+ """Changes itself into a dictonary by recurssively calling the tojson on all its nodes. Think of it
+ as a dict in an array of dict in an array of dict and so on..."""
+ jsonTree = ""
+ if self.root:
+ assert isinstance(self.root, Node)
+ jsonTree = self.root.toJson()
+ for key, value in self.rootAttr.items():
+ # transfer temporary stored attr to root
+ jsonTree[key] = value
+ else:
+ raise Exception("Root is not assigned!")
+ return jsonTree
+
+
+
+class Base_Parser(object):
+ """Base parsers contain all the methods to handle phylogeny tree creation and
+ converting the data to json that all parsers should have"""
+
+ def __init__(self):
+ self.phyloTrees = []
+
+ def parseFile(self, filePath):
+ """Base method that all phylogeny file parser should have"""
+ raise Exception("Base method for phylogeny file parsers is not implemented")
+
+ def toJson(self, jsonDict):
+ """Convenience method to get a json string from a python json dict"""
+ return json.dumps(jsonDict)
+
+ def _writeJsonToFile(self, filepath, json):
+ """Writes the file out to the system"""
+ f = open(filepath, "w")
+ f.writelines(json)
+ f.close()
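For illustration only (not from the changeset), a small sketch of the Node/PhyloTree API added above; the import path assumes the new data_providers location, and the tree contents are made up. It builds a two-leaf tree and produces the json-able dict that the parsers return:

from galaxy.visualization.data_providers.phyloviz.baseparser import PhyloTree

tree = PhyloTree()
root = tree.makeNode("root", depth=0, isInternal=True)
root.addChildNode(tree.makeNode("A", depth=1, length=0.1))
root.addChildNode(tree.makeNode("B", depth=1, length=0.2))
tree.addRoot(root)
tree.addAttributesToRoot({"treeName": "demo"})

json_dict = tree.generateJsonableDict()
# json_dict carries name/id/depth/dist for every node, a "children" list for
# non-leaf nodes, and the stored root attributes (here, treeName).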
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/newickparser.py
@@ -0,0 +1,185 @@
+from baseparser import Base_Parser, PhyloTree
+import re
+
+class Newick_Parser(Base_Parser):
+ """For parsing trees stored in the newick format (.nhx)
+ It is necessarily more complex because this parser is later extended by Nexus for parsing newick as well.."""
+
+
+ def __init__(self):
+ super(Newick_Parser, self).__init__()
+
+
+ def parseFile(self, filePath):
+ """Parses a newick file to obtain the string inside. Returns: jsonableDict"""
+ with open(filePath, "r") as newickFile:
+ newickString = newickFile.read()
+ newickString = newickString.replace("\n", "").replace("\r", "")
+ return [self.parseData(newickString)], "Success"
+
+
+ def parseData(self, newickString):
+ """To be called on a newickString directly to parse it. Returns: jsonableDict"""
+ return self._parseNewickToJson(newickString)
+
+
+ def _parseNewickToJson(self, newickString, treeName=None, nameMap=None):
+ """parses a newick representation of a tree into a PhyloTree data structure,
+ which can be easily converted to json"""
+ self.phyloTree = PhyloTree()
+ newickString = self.cleanNewickString(newickString)
+ if nameMap:
+ newickString = self._mapName(newickString, nameMap)
+
+ self.phyloTree.root = self.parseNode(newickString, 0)
+ if nameMap:
+ self.phyloTree.addAttributesToRoot({"treeName": treeName})
+
+ return self.phyloTree.generateJsonableDict()
+
+
+ def cleanNewickString(self, rawNewick):
+ """removing semi colon, and illegal json characters (\,',") and white spaces"""
+ return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
+
+
+ def _makeNodesFromString(self, string, depth):
+ """elements separated by comma could be empty"""
+
+ if string.find("(") != -1:
+ raise Exception("Tree is not well form, location: " + string)
+
+ childrenString = string.split(",")
+ childrenNodes = []
+
+ for childString in childrenString:
+ if len(childString) == 0:
+ continue
+ nodeInfo = childString.split(":")
+ name, length, bootstrap = "", None, -1
+ if len(nodeInfo) == 2: # has length info
+ length = nodeInfo[1]
+ # checking for bootstrap values
+ name = nodeInfo[0]
+ try: # Nexus may store bootstrap values in the name position
+ name = float(name)
+ if 0<= name <= 1:
+ bootstrap = name
+ elif 1 <= name <= 100:
+ bootstrap = name / 100
+ name = ""
+ except ValueError:
+ name = nodeInfo[0]
+ else:
+ name = nodeInfo[0] # string only contains name
+ node = self.phyloTree.makeNode(name, length=length, depth=depth, bootstrap= bootstrap)
+ childrenNodes += [node]
+ return childrenNodes
+
+
+
+ def _mapName(self, newickString, nameMap):
+ """
+ Necessary to replace the names of terms inside the nexus representation.
+ Also, it's here because Mailaud's doesn't deal with id_strings outside of quotes (" ")
+ """
+ newString = ""
+ start = 0
+ end = 0
+
+ for i in xrange(len(newickString)):
+ if newickString[i] == "(" or newickString[i] == ",":
+ if re.match(r"[,(]", newickString[i+1:]):
+ continue
+ else:
+ end = i + 1
+ # i now refers to the starting position of the term to be replaced,
+ # we will next find j which is the ending pos of the term
+ for j in xrange(i+1, len(newickString)):
+ enclosingSymbol = newickString[j] # the immediate symbol after a comma or left bracket, which denotes the end of a term
+ if enclosingSymbol == ")" or enclosingSymbol == ":" or enclosingSymbol == ",":
+ termToReplace = newickString[end:j]
+
+ newString += newickString[start : end] + nameMap[termToReplace] #+ "'" "'" +
+ start = j
+ break
+
+ newString += newickString[start:]
+ return newString
+
+
+ def parseNode(self, string, depth):
+ """ Recursive method for parsing newick string, works by stripping down the string into substring
+ of newick contained with brackers, which is used to call itself.
+ Eg ... ( A, B, (D, E)C, F, G ) ...
+ We will make the preceeding nodes first A, B, then the internal node C, its children D, E,
+ and finally the succeeding nodes F, G"""
+
+ # Base case where there is only an empty string
+ if string == "":
+ return
+ # Base case where it's only an internal clade
+ if string.find("(") == -1:
+ return self._makeNodesFromString(string, depth)
+
+ nodes, children = [], [] # nodes refer to the nodes on this level, children refers to the children of the current internal node
+ start = 0
+ lenOfPreceedingInternalNodeString = 0
+ bracketStack = []
+
+ for j in xrange(len(string)):
+ if string[j] == "(": #finding the positions of all the open brackets
+ bracketStack.append(j)
+ continue
+ if string[j] == ")": #finding the positions of all the closed brackets to extract claude
+ i = bracketStack.pop()
+
+ if len(bracketStack) == 0: # is child of current node
+
+ InternalNode = None
+
+ #First flat call to make nodes of the same depth but from the preceding string.
+ startSubstring = string[start + lenOfPreceedingInternalNodeString: i]
+ preceedingNodes = self._makeNodesFromString(startSubstring, depth)
+ nodes += preceedingNodes
+
+ # Then we will check whether the substring has an internal node; if so, make it, then make the nodes preceding and succeeding it.
+ if j + 1 < len(string):
+ stringRightOfBracket = string[j+1:] # Eg. '(b:0.4,a:0.3)c:0.3, stringRightOfBracket = c:0.3
+ match = re.search(r"[\)\,\(]", stringRightOfBracket)
+ if match:
+ indexOfNextSymbol = match.start()
+ stringRepOfInternalNode = stringRightOfBracket[:indexOfNextSymbol]
+ internalNodes = self._makeNodesFromString( stringRepOfInternalNode, depth)
+ if len(internalNodes) > 0:
+ InternalNode = internalNodes[0]
+ lenOfPreceedingInternalNodeString = len(stringRepOfInternalNode)
+ else: # sometimes the node can be the last element of a string
+ InternalNode = self._makeNodesFromString(string[j+1:], depth)[0]
+ lenOfPreceedingInternalNodeString = len(string) - j
+ if InternalNode == None: #creating a generic node if it is unnamed
+ InternalNode = self.phyloTree.makeNode( "", depth=depth, isInternal=True ) #"internal-" + str(depth)
+ lenOfPreceedingInternalNodeString = 0
+
+ # recursive call to make the internal clade
+ childSubString = string[ i + 1 : j ]
+ InternalNode.addChildNode(self.parseNode(childSubString, depth + 1))
+
+ nodes.append(InternalNode) # we append the internal node later to preserve order
+
+ start = j + 1
+ continue
+
+ if depth == 0: # if it's the root node, we do nothing more and return
+ return nodes[0]
+
+ # Adding last most set of children
+ endString = string[start:]
+ if string[start-1] == ")": # if the symbol belongs to an internal node which is created previously, then we remove it from the string left to parse
+ match = re.search(r"[\)\,\(]", endString)
+ if match:
+ endOfNodeName = start + match.start() + 1
+ endString = string[endOfNodeName:]
+ nodes += self._makeNodesFromString(endString, depth)
+
+ return nodes
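As a rough illustration (again not part of the changeset), the recursive parser above can be exercised on a string directly through parseData(); the import path assumes the new module location and the newick string is invented:

from galaxy.visualization.data_providers.phyloviz.newickparser import Newick_Parser

parser = Newick_Parser()
tree_dict = parser.parseData("(A:0.1,B:0.2,(D:0.3,E:0.4)C:0.5)root;")
# tree_dict is a nested, json-able dict (name, id, depth, dist, children) rooted at
# the outermost clade, in the same shape produced by PhyloTree.generateJsonableDict().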
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/data_providers/phyloviz/nexusparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/nexusparser.py
@@ -0,0 +1,107 @@
+from newickparser import Newick_Parser
+import re
+
+MAX_READLINES = 200000
+
+
+class Nexus_Parser(Newick_Parser):
+
+ def __init__(self):
+ super(Nexus_Parser, self).__init__()
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Nexus content."""
+ return self.parseNexus(filePath)
+
+
+ def parseNexus(self, filename):
+ """ Nexus data is stored in blocks between a line starting with begin and another line starting with end;
+ Comments inside square brackets are to be ignored,
+ For more information: http://wiki.christophchamp.com/index.php/NEXUS_file_format
+ Nexus can store multiple trees
+ """
+
+ with open( filename, "rt") as nex_file:
+ nexlines = nex_file.readlines()
+
+ rowCount = 0
+ inTreeBlock = False # sentinel to check if we are in a tree block
+ intranslateBlock = False # sentinel to check if we are in the translate region of the tree. Stores synonyms of the labellings
+ self.inCommentBlock = False
+ self.nameMapping = None # stores mapping representation used in nexus format
+ treeNames = []
+
+ for line in nexlines:
+ line = line.replace(";\n", "")
+ lline = line.lower()
+
+ if rowCount > MAX_READLINES or (not nex_file) :
+ break
+ rowCount +=1
+ # We are only interested in the tree block.
+ if "begin" in lline and "tree" in lline and not inTreeBlock:
+ inTreeBlock = True
+ continue
+ if inTreeBlock and "end" in lline[:3]:
+ inTreeBlock, currPhyloTree = False, None
+ continue
+
+ if inTreeBlock:
+
+ if "title" in lline: # Adding title to the tree
+ titleLoc = lline.find("title")
+ title = line[titleLoc + 5:].replace(" ", "")
+
+ continue
+
+ if "translate" in lline:
+ intranslateBlock = True
+ self.nameMapping = {}
+ continue
+
+ if intranslateBlock:
+ mappingLine = self.splitLinebyWhitespaces(line)
+ key, value = mappingLine[1], mappingLine[2].replace(",", "").replace("'","") #replacing illegal json characters
+ self.nameMapping[key] = value
+
+ # Extracting newick Trees
+ if "tree" in lline:
+ intranslateBlock = False
+
+ treeLineCols = self.splitLinebyWhitespaces(line)
+ treeName, newick = treeLineCols[2], treeLineCols[-1]
+
+ if newick == "": # Empty lines can be found in tree blocks
+ continue
+
+ currPhyloTree = self._parseNewickToJson(newick, treeName, nameMap=self.nameMapping)
+
+ self.phyloTrees.append(currPhyloTree)
+ treeIndex = len(self.phyloTrees) - 1
+ treeNames.append( (treeName, treeIndex) ) # appending name of tree, and its index
+ continue
+
+ return self.phyloTrees, treeNames
+
+
+ def splitLinebyWhitespaces(self, line):
+ """replace tabs and write spaces to a single write space, so we can properly split it."""
+ return re.split(r"\s+", line)
+
+
+ def checkComments(self, line):
+ """Check to see if the line/lines is a comment."""
+ if not self.inCommentBlock:
+ if "[" in line:
+ if "]" not in line:
+ self.inCommentBlock = True
+ else:
+ return "Nextline" # need to move on to the nextline after getting out of comment
+ else :
+ if "]" in line:
+ if line.rfind("[") > line.rfind("]"):
+ pass # a comment block is closed but another is open.
+ else:
+ self.inCommentBlock = False
+ return "Nextline" # need to move on to the nextline after getting out of comment
+ return ""
\ No newline at end of file
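A hypothetical nexus trees block, written to a temporary file and run through the parser above as a sketch (not part of the changeset); the block contents are invented for illustration and temp-file cleanup is omitted:

import tempfile

from galaxy.visualization.data_providers.phyloviz.nexusparser import Nexus_Parser

nexus = (
    "#NEXUS\n"
    "begin trees;\n"
    "    translate\n"
    "        1 Alpha,\n"
    "        2 Beta;\n"
    "    tree tree1 = (1:0.1,2:0.2)root;\n"
    "end;\n"
)
with tempfile.NamedTemporaryFile(suffix=".nex", delete=False) as handle:
    handle.write(nexus)
    nexus_path = handle.name

trees, tree_names = Nexus_Parser().parseFile(nexus_path)
# trees holds one json-able dict per tree; tree_names is a list of (name, index)
# pairs, which the data provider above surfaces as meta["trees"].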
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/data_providers/phyloviz/phyloxmlparser.py
--- /dev/null
+++ b/lib/galaxy/visualization/data_providers/phyloviz/phyloxmlparser.py
@@ -0,0 +1,133 @@
+from baseparser import Base_Parser, PhyloTree, Node
+from xml.etree import ElementTree
+
+class Phyloxml_Parser(Base_Parser):
+ """Parses a phyloxml file into a json file that will be passed to PhyloViz for display"""
+
+ def __init__(self):
+ super(Phyloxml_Parser, self).__init__()
+ self.phyloTree = PhyloTree()
+ self.tagsOfInterest = {
+ "clade": "",
+ "name" : "name",
+ "branch_length" : "length",
+ "confidence" : "bootstrap",
+ "events" : "events"
+ }
+
+ def parseFile(self, filePath):
+ """passes a file and extracts its Phylogeny Tree content."""
+ phyloXmlFile = open(filePath, "r")
+
+ xmlTree = ElementTree.parse(phyloXmlFile)
+ xmlRoot = xmlTree.getroot()[0]
+ self.nameSpaceIndex = xmlRoot.tag.rfind("}") + 1 # used later by the clean tag method to remove the name space in every element.tag
+
+ phyloRoot = None
+ for child in xmlRoot:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "clade":
+ phyloRoot = child
+ elif childTag == "name":
+ self.phyloTree.title = child.text
+
+ self.phyloTree.root = self.parseNode(phyloRoot, 0)
+ jsonDict = self.phyloTree.generateJsonableDict()
+ return [jsonDict], "Success"
+
+
+ def parseNode(self, node, depth):
+ """Parses any node within a phyloxml tree and looks out for claude, which signals the creation of
+ nodes - internal OR leaf"""
+
+ tag = self.cleanTag(node.tag)
+ if not tag == "clade":
+ return None
+ hasInnerClade = False
+
+ # peeking once for parent and once for child to check if the node is internal
+ for child in node:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "clade":
+ hasInnerClade = True
+ break
+
+ if hasInnerClade: # this node is an internal node
+ currentNode = self._makeInternalNode(node, depth= depth)
+ for child in node:
+ child = self.parseNode(child, depth + 1)
+ if isinstance(child, Node):
+ currentNode.addChildNode(child)
+
+ else: # this node is a leaf node
+ currentNode = self._makeLeafNode(node, depth=depth+1)
+
+ return currentNode
+
+
+ def _makeLeafNode(self, leafNode, depth = 0 ):
+ """Makes leaf nodes by calling Phylotree methods"""
+ node = {}
+ for child in leafNode:
+ childTag = self.cleanTag(child.tag)
+ if childTag in self.tagsOfInterest:
+ key = self.tagsOfInterest[childTag] # need to map phyloxml terms to ours
+ node[key] = child.text
+
+ node["depth"] = depth
+ return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
+
+ def _getNodeName(self, node, depth=-1):
+ """Gets the name of a claude. It handles the case where a taxonomy node is involved"""
+
+ def getTagFromTaxonomyNode(node):
+ """Returns the name of a taxonomy node. A taxonomy node have to be treated differently as the name
+ is embedded one level deeper"""
+ phyloxmlTaxoNames = {
+ "common_name" : "",
+ "scientific_name" : "",
+ "code" : ""
+ }
+ for child in node:
+ childTag = self.cleanTag(child.tag)
+ if childTag in phyloxmlTaxoNames:
+ return child.text
+ return ""
+
+ nodeName = ""
+ for child in node:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "name" :
+ nodeName = child.text
+ break
+ elif childTag == "taxonomy":
+ nodeName = getTagFromTaxonomyNode(child)
+ break
+
+ return nodeName
+
+
+ def _makeInternalNode(self, internalNode, depth=0):
+ """ Makes an internal node from an element object that is guranteed to be a parent node.
+ Gets the value of interests like events and appends it to a custom node object that will be passed to PhyloTree to make nodes
+ """
+ node = {}
+ for child in internalNode:
+ childTag = self.cleanTag(child.tag)
+ if childTag == "clade":
+ continue
+ elif childTag in self.tagsOfInterest:
+ if childTag == "events": # events is nested 1 more level deeper than others
+ key, text = "events", self.cleanTag(child[0].tag)
+ else:
+ key = self.tagsOfInterest[childTag]
+ text = child.text
+ node[key] = text
+
+
+ return self.phyloTree.makeNode(self._getNodeName(internalNode, depth), **node)
+
+
+ def cleanTag(self, tagString):
+ return tagString[self.nameSpaceIndex:]
+
\ No newline at end of file
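And a matching sketch for the phyloxml parser (not part of the changeset), using a minimal, made-up document that exercises the clade/name/branch_length tags it looks for; temp-file cleanup is again omitted:

import tempfile

from galaxy.visualization.data_providers.phyloviz.phyloxmlparser import Phyloxml_Parser

phyloxml = (
    "<phyloxml>"
    "  <phylogeny rooted=\"true\">"
    "    <name>demo tree</name>"
    "    <clade>"
    "      <clade><name>A</name><branch_length>0.1</branch_length></clade>"
    "      <clade><name>B</name><branch_length>0.2</branch_length></clade>"
    "    </clade>"
    "  </phylogeny>"
    "</phyloxml>"
)
with tempfile.NamedTemporaryFile(suffix=".phyloxml", delete=False) as handle:
    handle.write(phyloxml)
    xml_path = handle.name

trees, msg = Phyloxml_Parser().parseFile(xml_path)
# trees is a single-element list holding the json-able dict for the tree, and the
# phylogeny's <name> is stored as the parser's phyloTree.title.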
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/phyloviz/__init__.py
--- a/lib/galaxy/visualization/phyloviz/__init__.py
+++ /dev/null
@@ -1,1 +0,0 @@
-__author__ = 'Tomithy'
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/phyloviz/baseparser.py
--- a/lib/galaxy/visualization/phyloviz/baseparser.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import json
-
-class Node(object):
- """Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree"""
- def __init__(self, nodeName, **kwargs):
- """Creates a node and adds in the typical annotations"""
- self.name, self.id = nodeName, kwargs.get("id", 0)
- self.depth = kwargs.get("depth", 0)
- self.children = []
-
- self.isInternal = kwargs.get("isInternal", 0)
- self.length, self.bootstrap = kwargs.get("length", 0), kwargs.get("bootstrap", None)
- self.events = kwargs.get("events", "")
-
- # clean up boot strap values
- if self.bootstrap == -1:
- self.bootstrap = None
-
- def addChildNode(self, child):
- """Adds a child node to the current node"""
- if isinstance(child, Node):
- self.children.append(child)
- else:
- self.children += child
-
-
- def __str__(self):
- return self.name + " id:" + str(self.id) + ", depth: " + str(self.depth)
-
-
- def toJson(self):
- """Converts the data in the node to a dict representation of json"""
- thisJson = {
- "name" : self.name,
- "id" : self.id,
- "depth" : self.depth,
- "dist" : self.length
- }
- thisJson = self.addChildrenToJson(thisJson)
- thisJson = self.addMiscToJson(thisJson)
- return thisJson
-
- def addChildrenToJson(self, jsonDict):
- """Needs a special method to addChildren, such that the key does not appear in the Jsondict when the children is empty
- this requirement is due to the layout algorithm used by d3 layout for hiding subtree """
- if len(self.children) > 0:
- children = [ node.toJson() for node in self.children]
- jsonDict["children"] = children
- return jsonDict
-
-
- def addMiscToJson(self, jsonDict):
- """Adds other misc attributes to json if they are present"""
- if not self.events == "":
- jsonDict["events"] = self.events
- if not self.bootstrap == None:
- jsonDict["bootstrap"] = self.bootstrap
- return jsonDict
-
-
-
-class PhyloTree(object):
- """Standardized python based class to represent the phylogenetic tree parsed from different
- phylogenetic file formats."""
-
- def __init__(self):
- self.root, self.rootAttr = None, {}
- self.nodes = {}
- self.title = None
- self.id = 1
-
- def addAttributesToRoot(self, attrDict):
- """Adds attributes to root, but first we put it in a temp store and bind it with root when .toJson is called"""
- for key, value in attrDict.items():
- self.rootAttr[key] = value
-
- def makeNode(self, nodeName, **kwargs):
- """Called to make a node within PhyloTree, arbitrary kwargs can be passed to annotate nodes
- Tracks the number of nodes via internally incremented id"""
- kwargs["id"] = self.id
- self.id += 1
- return Node(nodeName, **kwargs)
-
- def addRoot(self, root):
- """Creates a root for phyloTree"""
- assert isinstance(root, Node)
- root.parent = None
- self.root = root
-
- def generateJsonableDict(self):
- """Changes itself into a dictonary by recurssively calling the tojson on all its nodes. Think of it
- as a dict in an array of dict in an array of dict and so on..."""
- jsonTree = ""
- if self.root:
- assert isinstance(self.root, Node)
- jsonTree = self.root.toJson()
- for key, value in self.rootAttr.items():
- # transfer temporary stored attr to root
- jsonTree[key] = value
- else:
- raise Exception("Root is not assigned!")
- return jsonTree
-
-
-
-class Base_Parser(object):
- """Base parsers contain all the methods to handle phylogeny tree creation and
- converting the data to json that all parsers should have"""
-
- def __init__(self):
- self.phyloTrees = []
-
- def parseFile(self, filePath):
- """Base method that all phylogeny file parser should have"""
- raise Exception("Base method for phylogeny file parsers is not implemented")
-
- def toJson(self, jsonDict):
- """Convenience method to get a json string from a python json dict"""
- return json.dumps(jsonDict)
-
- def _writeJsonToFile(self, filepath, json):
- """Writes the file out to the system"""
- f = open(filepath, "w")
- f.writelines(json)
- f.close()
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/phyloviz/newickparser.py
--- a/lib/galaxy/visualization/phyloviz/newickparser.py
+++ /dev/null
@@ -1,185 +0,0 @@
-from baseparser import Base_Parser, PhyloTree
-import re
-
-class Newick_Parser(Base_Parser):
- """For parsing trees stored in the newick format (.nhx)
- It is necessarily more complex because this parser is later extended by Nexus for parsing newick as well.."""
-
-
- def __init__(self):
- super(Newick_Parser, self).__init__()
-
-
- def parseFile(self, filePath):
- """Parses a newick file to obtain the string inside. Returns: jsonableDict"""
- with open(filePath, "r") as newickFile:
- newickString = newickFile.read()
- newickString = newickString.replace("\n", "").replace("\r", "")
- return [self.parseData(newickString)], "Success"
-
-
- def parseData(self, newickString):
- """To be called on a newickString directly to parse it. Returns: jsonableDict"""
- return self._parseNewickToJson(newickString)
-
-
- def _parseNewickToJson(self, newickString, treeName=None, nameMap=None):
- """parses a newick representation of a tree into a PhyloTree data structure,
- which can be easily converted to json"""
- self.phyloTree = PhyloTree()
- newickString = self.cleanNewickString(newickString)
- if nameMap:
- newickString = self._mapName(newickString, nameMap)
-
- self.phyloTree.root = self.parseNode(newickString, 0)
- if nameMap:
- self.phyloTree.addAttributesToRoot({"treeName": treeName})
-
- return self.phyloTree.generateJsonableDict()
-
-
- def cleanNewickString(self, rawNewick):
- """removing semi colon, and illegal json characters (\,',") and white spaces"""
- return re.sub(r'\s|;|\"|\'|\\', '', rawNewick)
-
-
- def _makeNodesFromString(self, string, depth):
- """elements separated by comma could be empty"""
-
- if string.find("(") != -1:
- raise Exception("Tree is not well form, location: " + string)
-
- childrenString = string.split(",")
- childrenNodes = []
-
- for childString in childrenString:
- if len(childString) == 0:
- continue
- nodeInfo = childString.split(":")
- name, length, bootstrap = "", None, -1
- if len(nodeInfo) == 2: # has length info
- length = nodeInfo[1]
- # checking for bootstap values
- name = nodeInfo[0]
- try: # Nexus may bootstrap in names position
- name = float(name)
- if 0<= name <= 1:
- bootstrap = name
- elif 1 <= name <= 100:
- bootstrap = name / 100
- name = ""
- except ValueError:
- name = nodeInfo[0]
- else:
- name = nodeInfo[0] # string only contains name
- node = self.phyloTree.makeNode(name, length=length, depth=depth, bootstrap= bootstrap)
- childrenNodes += [node]
- return childrenNodes
-
-
-
- def _mapName(self, newickString, nameMap):
- """
- Necessary to replace names of terms inside nexus representation
- Also, its here because Mailaud's doesnt deal with id_strings outside of quotes(" ")
- """
- newString = ""
- start = 0
- end = 0
-
- for i in xrange(len(newickString)):
- if newickString[i] == "(" or newickString[i] == ",":
- if re.match(r"[,(]", newickString[i+1:]):
- continue
- else:
- end = i + 1
- # i now refers to the starting position of the term to be replaced,
- # we will next find j which is the ending pos of the term
- for j in xrange(i+1, len(newickString)):
- enclosingSymbol = newickString[j] # the immediate symbol after a common or left bracket which denotes the end of a term
- if enclosingSymbol == ")" or enclosingSymbol == ":" or enclosingSymbol == ",":
- termToReplace = newickString[end:j]
-
- newString += newickString[start : end] + nameMap[termToReplace] #+ "'" "'" +
- start = j
- break
-
- newString += newickString[start:]
- return newString
-
-
- def parseNode(self, string, depth):
- """ Recursive method for parsing newick string, works by stripping down the string into substring
- of newick contained with brackers, which is used to call itself.
- Eg ... ( A, B, (D, E)C, F, G ) ...
- We will make the preceeding nodes first A, B, then the internal node C, its children D, E,
- and finally the succeeding nodes F, G"""
-
- # Base case where there is only an empty string
- if string == "":
- return
- # Base case there its only an internal claude
- if string.find("(") == -1:
- return self._makeNodesFromString(string, depth)
-
- nodes, children = [], [] # nodes refer to the nodes on this level, children refers to the child of the
- start = 0
- lenOfPreceedingInternalNodeString = 0
- bracketStack = []
-
- for j in xrange(len(string)):
- if string[j] == "(": #finding the positions of all the open brackets
- bracketStack.append(j)
- continue
- if string[j] == ")": #finding the positions of all the closed brackets to extract claude
- i = bracketStack.pop()
-
- if len(bracketStack) == 0: # is child of current node
-
- InternalNode = None
-
- #First flat call to make nodes of the same depth but from the preceeding string.
- startSubstring = string[start + lenOfPreceedingInternalNodeString: i]
- preceedingNodes = self._makeNodesFromString(startSubstring, depth)
- nodes += preceedingNodes
-
- # Then We will try to see if the substring has any internal nodes first, make it then make nodes preceeding it and succeeding it.
- if j + 1 < len(string):
- stringRightOfBracket = string[j+1:] # Eg. '(b:0.4,a:0.3)c:0.3, stringRightOfBracket = c:0.3
- match = re.search(r"[\)\,\(]", stringRightOfBracket)
- if match:
- indexOfNextSymbol = match.start()
- stringRepOfInternalNode = stringRightOfBracket[:indexOfNextSymbol]
- internalNodes = self._makeNodesFromString( stringRepOfInternalNode, depth)
- if len(internalNodes) > 0:
- InternalNode = internalNodes[0]
- lenOfPreceedingInternalNodeString = len(stringRepOfInternalNode)
- else: # sometimes the node can be the last element of a string
- InternalNode = self._makeNodesFromString(string[j+1:], depth)[0]
- lenOfPreceedingInternalNodeString = len(string) - j
- if InternalNode == None: #creating a generic node if it is unnamed
- InternalNode = self.phyloTree.makeNode( "", depth=depth, isInternal=True ) #"internal-" + str(depth)
- lenOfPreceedingInternalNodeString = 0
-
- # recussive call to make the internal claude
- childSubString = string[ i + 1 : j ]
- InternalNode.addChildNode(self.parseNode(childSubString, depth + 1))
-
- nodes.append(InternalNode) # we append the internal node later to preserve order
-
- start = j + 1
- continue
-
- if depth == 0: # if its the root node, we do nothing about it and return
- return nodes[0]
-
- # Adding last most set of children
- endString = string[start:]
- if string[start-1] == ")": # if the symbol belongs to an internal node which is created previously, then we remove it from the string left to parse
- match = re.search(r"[\)\,\(]", endString)
- if match:
- endOfNodeName = start + match.start() + 1
- endString = string[endOfNodeName:]
- nodes += self._makeNodesFromString(endString, depth)
-
- return nodes
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/phyloviz/nexusparser.py
--- a/lib/galaxy/visualization/phyloviz/nexusparser.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from newickparser import Newick_Parser
-import re
-
-MAX_READLINES = 200000
-
-
-class Nexus_Parser(Newick_Parser):
-
- def __init__(self):
- super(Nexus_Parser, self).__init__()
-
- def parseFile(self, filePath):
- """passes a file and extracts its Nexus content."""
- return self.parseNexus(filePath)
-
-
- def parseNexus(self, filename):
- """ Nexus data is stored in blocks between a line starting with begin and another line starting with end;
- Commends inside square brackets are to be ignored,
- For more information: http://wiki.christophchamp.com/index.php/NEXUS_file_format
- Nexus can store multiple trees
- """
-
- with open( filename, "rt") as nex_file:
- nexlines = nex_file.readlines()
-
- rowCount = 0
- inTreeBlock = False # sentinel to check if we are in a tree block
- intranslateBlock = False # sentinel to check if we are in the translate region of the tree. Stores synonyms of the labellings
- self.inCommentBlock = False
- self.nameMapping = None # stores mapping representation used in nexus format
- treeNames = []
-
- for line in nexlines:
- line = line.replace(";\n", "")
- lline = line.lower()
-
- if rowCount > MAX_READLINES or (not nex_file) :
- break
- rowCount +=1
- # We are only interested in the tree block.
- if "begin" in lline and "tree" in lline and not inTreeBlock:
- inTreeBlock = True
- continue
- if inTreeBlock and "end" in lline[:3]:
- inTreeBlock, currPhyloTree = False, None
- continue
-
- if inTreeBlock:
-
- if "title" in lline: # Adding title to the tree
- titleLoc = lline.find("title")
- title = line[titleLoc + 5:].replace(" ", "")
-
- continue
-
- if "translate" in lline:
- intranslateBlock = True
- self.nameMapping = {}
- continue
-
- if intranslateBlock:
- mappingLine = self.splitLinebyWhitespaces(line)
- key, value = mappingLine[1], mappingLine[2].replace(",", "").replace("'","") #replacing illegal json characters
- self.nameMapping[key] = value
-
- # Extracting newick Trees
- if "tree" in lline:
- intranslateBlock = False
-
- treeLineCols = self.splitLinebyWhitespaces(line)
- treeName, newick = treeLineCols[2], treeLineCols[-1]
-
- if newick == "": # Empty lines can be found in tree blocks
- continue
-
- currPhyloTree = self._parseNewickToJson(newick, treeName, nameMap=self.nameMapping)
-
- self.phyloTrees.append(currPhyloTree)
- treeIndex = len(self.phyloTrees) - 1
- treeNames.append( (treeName, treeIndex) ) # appending name of tree, and its index
- continue
-
- return self.phyloTrees, treeNames
-
-
- def splitLinebyWhitespaces(self, line):
- """replace tabs and write spaces to a single write space, so we can properly split it."""
- return re.split(r"\s+", line)
-
-
- def checkComments(self, line):
- """Check to see if the line/lines is a comment."""
- if not self.inCommentBlock:
- if "[" in line:
- if "]" not in line:
- self.inCommentBlock = True
- else:
- return "Nextline" # need to move on to the nextline after getting out of comment
- else :
- if "]" in line:
- if line.rfind("[") > line.rfind("]"):
- pass # a comment block is closed but another is open.
- else:
- self.inCommentBlock = False
- return "Nextline" # need to move on to the nextline after getting out of comment
- return ""
\ No newline at end of file
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- a/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from newickparser import Newick_Parser
-from nexusparser import Nexus_Parser
-from phyloxmlparser import Phyloxml_Parser
-from galaxy.visualization.data_providers.basic import BaseDataProvider
-
-class Phyloviz_DataProvider( BaseDataProvider ):
-
- def __init__( self, original_dataset=None ):
- super( BaseDataProvider, self ).__init__( original_dataset=original_dataset )
-
- def get_data( self, **kwargs ):
- """returns [trees], meta
- Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
- """
- jsonDicts, meta = [], {}
- try:
- if fileExt == "nhx": # parses newick files
- newickParser = Newick_Parser()
- jsonDicts, parseMsg = newickParser.parseFile(filepath)
- elif fileExt == "phyloxml": # parses phyloXML files
- phyloxmlParser = Phyloxml_Parser()
- jsonDicts, parseMsg = phyloxmlParser.parseFile(filepath)
- elif fileExt == "nex": # parses nexus files
- nexusParser = Nexus_Parser()
- jsonDicts, parseMsg = nexusParser.parseFile(filepath)
- meta["trees"] = parseMsg
- else:
- raise Exception("File type is not supported")
-
- meta["msg"] = parseMsg
-
- except Exception:
- jsonDicts, meta["msg"] = [], "Parse failed"
-
- return jsonDicts, meta
-
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/visualization/phyloviz/phyloxmlparser.py
--- a/lib/galaxy/visualization/phyloviz/phyloxmlparser.py
+++ /dev/null
@@ -1,134 +0,0 @@
-from baseparser import Base_Parser, PhyloTree, Node
-from xml.etree import ElementTree
-
-class Phyloxml_Parser(Base_Parser):
- """Parses a phyloxml file into a json file that will be passed to PhyloViz for display"""
-
- def __init__(self):
- super(Phyloxml_Parser, self).__init__()
- self.phyloTree = PhyloTree()
- self.tagsOfInterest = {
- "clade": "",
- "name" : "name",
- "branch_length" : "length",
- "confidence" : "bootstrap",
- "events" : "events"
- }
-
- def parseFile(self, filePath):
- """passes a file and extracts its Phylogeny Tree content."""
- phyloXmlFile = open(filePath, "r")
-
- xmlTree = ElementTree.parse(phyloXmlFile)
- xmlRoot = xmlTree.getroot()[0]
- self.nameSpaceIndex = xmlRoot.tag.rfind("}") + 1 # used later by the clean tag method to remove the name space in every element.tag
-
- phyloRoot = None
- for child in xmlRoot:
- childTag = self.cleanTag(child.tag)
- if childTag == "clade":
- phyloRoot = child
- elif childTag == "name":
- self.phyloTree.title = child.text
-
- self.phyloTree.root = self.parseNode(phyloRoot, 0)
- jsonDict = self.phyloTree.generateJsonableDict()
- return [jsonDict], "Success"
-
-
- def parseNode(self, node, depth):
- """Parses any node within a phyloxml tree and looks out for claude, which signals the creation of
- nodes - internal OR leaf"""
- assert isinstance(node, etree._Element)
-
- tag = self.cleanTag(node.tag)
- if not tag == "clade":
- return None
- hasInnerClade = False
-
- # peeking once for parent and once for child to check if the node is internal
- for child in node:
- childTag = self.cleanTag(child.tag)
- if childTag == "clade":
- hasInnerClade = True
- break
-
- if hasInnerClade: # this node is an internal node
- currentNode = self._makeInternalNode(node, depth= depth)
- for child in node:
- child = self.parseNode(child, depth + 1)
- if isinstance(child, Node):
- currentNode.addChildNode(child)
-
- else: # this node is a leaf node
- currentNode = self._makeLeafNode(node, depth=depth+1)
-
- return currentNode
-
-
- def _makeLeafNode(self, leafNode, depth = 0 ):
- """Makes leaf nodes by calling Phylotree methods"""
- node = {}
- for child in leafNode:
- childTag = self.cleanTag(child.tag)
- if childTag in self.tagsOfInterest:
- key = self.tagsOfInterest[childTag] # need to map phyloxml terms to ours
- node[key] = child.text
-
- node["depth"] = depth
- return self.phyloTree.makeNode(self._getNodeName(leafNode), **node)
-
- def _getNodeName(self, node, depth=-1):
- """Gets the name of a claude. It handles the case where a taxonomy node is involved"""
-
- def getTagFromTaxonomyNode(node):
- """Returns the name of a taxonomy node. A taxonomy node have to be treated differently as the name
- is embedded one level deeper"""
- phyloxmlTaxoNames = {
- "common_name" : "",
- "scientific_name" : "",
- "code" : ""
- }
- for child in node:
- childTag = self.cleanTag(child.tag)
- if childTag in phyloxmlTaxoNames:
- return child.text
- return ""
-
- nodeName = ""
- for child in node:
- childTag = self.cleanTag(child.tag)
- if childTag == "name" :
- nodeName = child.text
- break
- elif childTag == "taxonomy":
- nodeName = getTagFromTaxonomyNode(child)
- break
-
- return nodeName
-
-
- def _makeInternalNode(self, internalNode, depth=0):
- """ Makes an internal node from an element object that is guranteed to be a parent node.
- Gets the value of interests like events and appends it to a custom node object that will be passed to PhyloTree to make nodes
- """
- node = {}
- for child in internalNode:
- childTag = self.cleanTag(child.tag)
- if childTag == "clade":
- continue
- elif childTag in self.tagsOfInterest:
- if childTag == "events": # events is nested 1 more level deeper than others
- key, text = "events", self.cleanTag(child[0].tag)
- else:
- key = self.tagsOfInterest[childTag]
- text = child.text
- node[key] = text
-
-
- return self.phyloTree.makeNode(self._getNodeName(internalNode, depth), **node)
-
-
- def cleanTag(self, tagString):
- return tagString[self.nameSpaceIndex:]
-
\ No newline at end of file
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/webapps/galaxy/controllers/phyloviz.py
--- a/lib/galaxy/webapps/galaxy/controllers/phyloviz.py
+++ /dev/null
@@ -1,97 +0,0 @@
-import pkg_resources
-pkg_resources.require( "bx-python" )
-
-from galaxy.util.json import to_json_string, from_json_string
-from galaxy.web.base.controller import *
-from galaxy.visualization.phyloviz.phyloviz_dataprovider import Phyloviz_DataProvider
-
-
-class PhyloVizController( BaseUIController, UsesVisualizationMixin, UsesHistoryDatasetAssociationMixin, SharableMixin ):
- """
- Controller for phyloViz browser interface.
- """
- def __init__(self, app ):
- BaseUIController.__init__( self, app )
-
- @web.expose
- @web.require_login()
- def index( self, trans, dataset_id = None, **kwargs ):
- """
- The index method is called using phyloviz/ with a dataset id passed in.
- The relevant data set is then retrieved via get_json_from_datasetId which interfaces with the parser
- The json representation of the phylogenetic tree along with the config is then written in the .mako template and passed back to the user
- """
- json, config = self.get_json_from_datasetId(trans, dataset_id)
- config["saved_visualization"] = False
- return trans.fill_template( "visualization/phyloviz.mako", data = json, config=config)
-
-
- @web.expose
- def visualization(self, trans, id):
- """
- Called using a viz_id (id) to retrieved stored visualization data (in json format) and all the viz_config
- """
- viz = self.get_visualization(trans, id)
- config = self.get_visualization_config(trans, viz)
- config["saved_visualization"] = True
- data = config["root"]
-
- return trans.fill_template( "visualization/phyloviz.mako", data = data, config=config)
-
-
- @web.expose
- @web.json
- def load_visualization_json(self, trans, viz_id):
- """
- Though not used in current implementation, this provides user with a convenient method to retrieve the viz_data & viz_config via json.
- """
- viz = self.get_visualization(trans, viz_id)
- viz_config = self.get_visualization_config(trans, viz)
- viz_config["saved_visualization"] = True
- return {
- "data" : viz_config["root"],
- "config" : viz_config
- }
-
-
- @web.expose
- @web.json
- def getJsonData(self, trans, dataset_id, treeIndex=0):
- """
- Method to retrieve data asynchronously via json format. Retriving from here rather than
- making a direct datasets/ call allows for some processing and event capturing
- """
- treeIndex = int(treeIndex)
- json, config = self.get_json_from_datasetId(trans, dataset_id, treeIndex)
- packedJson = {
- "data" : json,
- "config" : config
- }
-
- return packedJson
-
-
- def get_json_from_datasetId(self, trans, dataset_id, treeIndex=0):
- """
- For interfacing phyloviz controllers with phyloviz visualization data provider (parsers)
- """
- dataset = self.get_dataset(trans, dataset_id)
- fileExt, filepath = dataset.ext, dataset.file_name # .name stores the name of the dataset from the orginal upload
- json, config = "", {} # config contains properties of the tree and file
-
- if fileExt == "json":
- something, json = self.get_data(dataset)
- else:
- try:
- pd = Phyloviz_DataProvider()
- json, config = pd.get_data(filepath, fileExt)
- json = json[treeIndex]
- except Exception:
- pass
-
- config["title"] = dataset.display_name()
- config["ext"] = fileExt
- config["dataset_id"] = dataset_id
- config["treeIndex"] = treeIndex
-
- return json, config
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -7,7 +7,7 @@
from galaxy.util.sanitize_html import sanitize_html
from galaxy.visualization.genomes import decode_dbkey
from galaxy.visualization.genome.visual_analytics import get_dataset_job
-from galaxy.visualization.data_providers.basic import ColumnDataProvider
+from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider
from .library import LibraryListGrid
@@ -824,6 +824,25 @@
historyID=history_id,
kwargs=kwargs )
+ @web.expose
+ def phyloviz( self, trans, dataset_id, tree_index=0, **kwargs ):
+ # Get HDA.
+ hda = self.get_dataset( trans, dataset_id, check_ownership=False, check_accessible=True )
+
+ # Get data.
+ pd = PhylovizDataProvider( original_dataset=hda )
+ json, config = pd.get_data()
+ json = json[tree_index]
+
+ config["title"] = hda.display_name()
+ config["ext"] = hda.datatype.file_ext
+ config["dataset_id"] = dataset_id
+ config["treeIndex"] = tree_index
+ config["saved_visualization"] = False
+
+ # Return viz.
+ return trans.fill_template_mako( "visualization/phyloviz.mako", data = json, config=config )
+
@web.json
def bookmarks_from_dataset( self, trans, hda_id=None, ldda_id=None ):
if hda_id:
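For orientation, a minimal sketch of the URL a client would request to reach the new phyloviz action added above; the host and the encoded dataset id are placeholders, and the exact path depends on how Galaxy's routing maps controller actions.

    # Hypothetical illustration only: build the query string for the new
    # phyloviz action. The host and ENCODED_DATASET_ID are placeholders.
    base_url = "http://localhost:8080/visualization/phyloviz"
    params = {"dataset_id": "ENCODED_DATASET_ID", "tree_index": 1}
    query = "&".join("%s=%s" % (k, v) for k, v in sorted(params.items()))
    print(base_url + "?" + query)
    # -> http://localhost:8080/visualization/phyloviz?dataset_id=ENCODED_DATASET_ID&tree_index=1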
diff -r 569f7d07801074a4e5207493afe72354ccb8d3ad -r 6f9ce8692bb6aae38d720abd489f8e0edfd5f8c3 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -229,8 +229,8 @@
visualizations = data.get_visualizations()
## HACK: if there are visualizations, only provide trackster for now
## since others are not ready. - comment out to see all WIP visualizations
- if visualizations:
- visualizations = [ vis for vis in visualizations if vis in [ 'trackster' ] ]
+ #if visualizations:
+ # visualizations = [ vis for vis in visualizations if vis in [ 'trackster' ] ]
%>
%if visualizations:
<a href="${h.url_for( controller='visualization' )}"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Refactor PhyloViz components to work in new visualization framework and add sniffers for datatypes.
by Bitbucket 04 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/569f7d078010/
changeset: 569f7d078010
user: jgoecks
date: 2012-10-04 15:11:04
summary: Refactor PhyloViz components to work in new visualization framework and add sniffers for datatypes.
affected #: 6 files
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r 569f7d07801074a4e5207493afe72354ccb8d3ad datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -252,6 +252,7 @@
<sniffer type="galaxy.datatypes.binary:TwoBit"/><sniffer type="galaxy.datatypes.binary:Bam"/><sniffer type="galaxy.datatypes.binary:Sff"/>
+ <sniffer type="galaxy.datatypes.xml:Phyloxml"/><sniffer type="galaxy.datatypes.xml:GenericXml"/><sniffer type="galaxy.datatypes.sequence:Maf"/><sniffer type="galaxy.datatypes.sequence:Lav"/>
@@ -272,6 +273,8 @@
<sniffer type="galaxy.datatypes.tabular:Pileup"/><sniffer type="galaxy.datatypes.interval:Interval"/><sniffer type="galaxy.datatypes.tabular:Sam"/>
+ <sniffer type="galaxy.datatypes.data:Newick"/>
+ <sniffer type="galaxy.datatypes.data:Nexus"/><sniffer type="galaxy.datatypes.images:Jpg"/><sniffer type="galaxy.datatypes.images:Png"/><sniffer type="galaxy.datatypes.images:Tiff"/>
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r 569f7d07801074a4e5207493afe72354ccb8d3ad lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -734,38 +734,39 @@
"""New Hampshire/Newick Format"""
file_ext = "nhx"
- MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
-
def __init__(self, **kwd):
"""Initialize foobar datatype"""
- Text.__init__(self, **kwd)
+ Text.__init__( self, **kwd )
def init_meta( self, dataset, copy_from=None ):
Text.init_meta( self, dataset, copy_from=copy_from )
-
def sniff( self, filename ):
""" Returning false as the newick format is too general and cannot be sniffed."""
return False
+ def get_visualizations( self, dataset ):
+ """
+ Returns a list of visualizations for datatype.
+ """
+
+ return [ 'phyloviz' ]
+
class Nexus( Text ):
"""Nexus format as used By Paup, Mr Bayes, etc"""
file_ext = "nex"
- MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True )
-
def __init__(self, **kwd):
"""Initialize foobar datatype"""
- Text.__init__(self, **kwd)
+ Text.__init__( self, **kwd )
def init_meta( self, dataset, copy_from=None ):
Text.init_meta( self, dataset, copy_from=copy_from )
-
def sniff( self, filename ):
"""All Nexus Files Simply puts a '#NEXUS' in its first line"""
- f = open(filename, "r")
+ f = open( filename, "r" )
firstline = f.readline().upper()
f.close()
@@ -774,6 +775,13 @@
else:
return False
+ def get_visualizations( self, dataset ):
+ """
+ Returns a list of visualizations for datatype.
+ """
+
+ return [ 'phyloviz' ]
+
# ------------- Utility methods --------------
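To make the sniffing convention above concrete, here is a standalone sketch (not part of the diff) of the first-line check a Nexus sniffer performs; the file path in the comment is a placeholder.

    # Standalone sketch: a Nexus file announces itself with '#NEXUS' on its
    # first line, so sniffing only needs to inspect that one line.
    def looks_like_nexus(filename):
        with open(filename, "r") as handle:
            first_line = handle.readline().upper()
        return first_line.startswith("#NEXUS")

    # looks_like_nexus("example.nex")  # placeholder path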
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r 569f7d07801074a4e5207493afe72354ccb8d3ad lib/galaxy/datatypes/xml.py
--- a/lib/galaxy/datatypes/xml.py
+++ b/lib/galaxy/datatypes/xml.py
@@ -91,9 +91,18 @@
def sniff( self, filename ):
""""Checking for keyword - 'phyloxml' always in lowercase in the first few lines"""
- f = open(filename, "r")
- firstlines = "".join(f.readlines(5))
+
+ f = open( filename, "r" )
+ firstlines = "".join( f.readlines(5) )
f.close()
+
if "phyloxml" in firstlines:
return True
- return False
\ No newline at end of file
+ return False
+
+ def get_visualizations( self, dataset ):
+ """
+ Returns a list of visualizations for datatype.
+ """
+
+ return [ 'phyloviz' ]
\ No newline at end of file
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r 569f7d07801074a4e5207493afe72354ccb8d3ad lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
--- a/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
+++ b/lib/galaxy/visualization/phyloviz/phyloviz_dataprovider.py
@@ -3,15 +3,12 @@
from phyloxmlparser import Phyloxml_Parser
from galaxy.visualization.data_providers.basic import BaseDataProvider
-# TODO: bring this class into line with BaseDataProvider by
-# using BaseDataProvider.init() and providing original dataset
-# and then reading from dataset rather than filepath.
class Phyloviz_DataProvider( BaseDataProvider ):
- def __init__( self ):
- pass
+ def __init__( self, original_dataset=None ):
+ super( Phyloviz_DataProvider, self ).__init__( original_dataset=original_dataset )
- def get_data( self, filepath, fileExt ):
+ def get_data( self, **kwargs ):
"""returns [trees], meta
Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
"""
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r 569f7d07801074a4e5207493afe72354ccb8d3ad templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -388,17 +388,6 @@
_.each( $(".visualize-icon"), function(icon) {
init_viz_icon(icon);
});
-
- function init_phyloviz_links() {
- // PhyloViz links
- // Add to trackster browser functionality
- $(".phyloviz-add").live("click", function() {
- var dataset = this,
- dataset_jquery = $(this);
- window.parent.location = dataset_jquery.attr("new-url");
- });
- }
- init_phyloviz_links();
// History rename functionality.
async_save_text("history-name-container", "history-name", "${h.url_for( controller="/history", action="rename_async", id=trans.security.encode_id(history.id) )}", "new_name", 18);
diff -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b -r 569f7d07801074a4e5207493afe72354ccb8d3ad templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -29,9 +29,6 @@
## Render the dataset `data` as history item, using `hid` as the displayed id
<%def name="render_dataset( data, hid, show_deleted_on_refresh = False, for_editing = True, display_structured = False )"><%
-
- from galaxy.datatypes.xml import Phyloxml
- from galaxy.datatypes.data import Newick, Nexus
dataset_id = trans.security.encode_id( data.id )
if data.state in ['no state','',None]:
@@ -245,14 +242,6 @@
%endif
visualizations="${','.join(visualizations)}"></a>
%endif
- <%
- isPhylogenyData = isinstance(data.datatype, (Phyloxml, Nexus, Newick))
- %>
- %if isPhylogenyData:
- <a href="javascript:void(0)" class="icon-button chart_curve phyloviz-add"
- action-url="${h.url_for( controller='phyloviz', action='-', dataset_id=dataset_id)}"
- new-url="${h.url_for( controller='phyloviz', action='index', dataset_id=dataset_id)}" title="View in Phyloviz"></a>
- %endif
%if trans.user:
%if not display_structured:
<div style="float: right">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Fix issues with genomes API: handle periods in keys, fix typo, and list genomes when querying.
by Bitbucket 03 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e8fc8e57dadc/
changeset: e8fc8e57dadc
user: jgoecks
date: 2012-10-03 23:49:30
summary: Fix issues with genomes API: handle periods in keys, fix typo, and list genomes when querying.
affected #: 2 files
diff -r f78de6cddd0f240aede5f02911379db7102981e2 -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -236,7 +236,7 @@
if not len_ds:
genome = self.genomes[ dbkey ]
else:
- gneome = Genome( dbkey, len_file=len_ds.file_name )
+ genome = Genome( dbkey, len_file=len_ds.file_name )
return genome.to_dict( num=num, chrom=chrom, low=low )
diff -r f78de6cddd0f240aede5f02911379db7102981e2 -r e8fc8e57dadc8a1254d994e4e05f40271d7e856b lib/galaxy/webapps/galaxy/api/genomes.py
--- a/lib/galaxy/webapps/galaxy/api/genomes.py
+++ b/lib/galaxy/webapps/galaxy/api/genomes.py
@@ -5,6 +5,12 @@
def is_true ( a_str ):
return a_str == True or a_str in [ 'True', 'true', 'T', 't' ]
+def get_id( base, format ):
+ if format:
+ return "%s.%s" % ( base, format )
+ else:
+ return base
+
class GenomesController( BaseAPIController ):
"""
RESTful controller for interactions with genome data.
@@ -16,7 +22,7 @@
GET /api/genomes: returns a list of installed genomes
"""
- return []
+ return self.app.genomes.get_dbkeys_with_chrom_info( trans )
@web.json
def show( self, trans, id, num=None, chrom=None, low=None, high=None, **kwd ):
@@ -27,6 +33,7 @@
"""
# Process kwds.
+ id = get_id( id, kwd.get( 'format', None ) )
reference = is_true( kwd.get( 'reference', False ) )
# Return info.
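For reference, a standalone sketch of what the new get_id() helper above accomplishes: the API's routing apparently splits a trailing ".<format>" off the id, so a dbkey that contains a period arrives in two pieces and must be re-joined. The dbkey "bacteria.30" below is hypothetical.

    # Copy of the helper from the diff, plus a hypothetical usage example.
    def get_id(base, format):
        if format:
            return "%s.%s" % (base, format)
        else:
            return base

    print(get_id("bacteria", "30"))   # -> bacteria.30
    print(get_id("hg19", None))       # -> hg19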
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: smcmanus: Added tool exit code to the UI. Modified stdout, stderr, and exit_code retrieval to reference same method.
by Bitbucket 03 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f78de6cddd0f/
changeset: f78de6cddd0f
user: smcmanus
date: 2012-10-03 23:07:40
summary: Added tool exit code to the UI. Modified stdout, stderr, and exit_code retrieval to reference same method.
affected #: 2 files
diff -r e4e1c621b025b4f0b09846e9d4410d2630394470 -r f78de6cddd0f240aede5f02911379db7102981e2 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -154,12 +154,24 @@
stored_list_grid = HistoryDatasetAssociationListGrid()
+ def _get_job_for_dataset( self, trans, dataset_id ):
+ '''
+ Return the job for the given dataset. This will throw an error if the
+ dataset is either nonexistent or inaccessible to the user. This looks
+ up the job by mapping the dataset to an HDA and then mapping the HDA
+ to its job. This will throw exceptions so that the caller can determine
+ the appropriate response.
+ '''
+ hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
+ assert hda and trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset )
+ return hda.creating_job_associations[0].job
+
@web.expose
def errors( self, trans, id ):
hda = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id )
return trans.fill_template( "dataset/errors.mako", hda=hda )
@web.expose
- def stdout( self, trans, dataset_id=None, **kwargs ):
+ def stdoutX( self, trans, dataset_id=None, **kwargs ):
trans.response.set_content_type( 'text/plain' )
try:
hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
@@ -168,16 +180,40 @@
except:
return "Invalid dataset ID or you are not allowed to access this dataset"
return job.stdout
+
@web.expose
+ def stdout( self, trans, dataset_id=None, **kwargs ):
+ trans.response.set_content_type( 'text/plain' )
+ stdout = ""
+ try:
+ job = self._get_job_for_dataset( trans, dataset_id )
+ stdout = job.stdout
+ except:
+ stdout = "Invalid dataset ID or you are not allowed to access this dataset"
+ return stdout
+
+ @web.expose
+ # TODO: Migrate stderr and stdout to use _get_job_for_dataset; it wasn't tested.
def stderr( self, trans, dataset_id=None, **kwargs ):
trans.response.set_content_type( 'text/plain' )
+ stderr = ""
try:
- hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
- assert hda and trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset )
- job = hda.creating_job_associations[0].job
+ job = self._get_job_for_dataset( trans, dataset_id )
+ stderr = job.stderr
except:
- return "Invalid dataset ID or you are not allowed to access this dataset"
- return job.stderr
+ stderr = "Invalid dataset ID or you are not allowed to access this dataset"
+ return stderr
+
+ @web.expose
+ def exit_code( self, trans, dataset_id=None, **kwargs ):
+ trans.response.set_content_type( 'text/plain' )
+ exit_code = ""
+ try:
+ job = self._get_job_for_dataset( trans, dataset_id )
+ exit_code = job.exit_code
+ except:
+ exit_code = "Invalid dataset ID or you are not allowed to access this dataset"
+ return exit_code
@web.expose
def report_error( self, trans, id, email='', message="" ):
smtp_server = trans.app.config.smtp_server
@@ -986,7 +1022,7 @@
pass
inherit_chain = source_dataset_chain(hda, [])
- return trans.fill_template( "show_params.mako", inherit_chain=inherit_chain, history=trans.get_history(), hda=hda, tool=tool, params_objects=params_objects )
+ return trans.fill_template( "show_params.mako", inherit_chain=inherit_chain, history=trans.get_history(), hda=hda, job=job, tool=tool, params_objects=params_objects )
@web.expose
def copy_datasets( self, trans, source_history=None, source_dataset_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
diff -r e4e1c621b025b4f0b09846e9d4410d2630394470 -r f78de6cddd0f240aede5f02911379db7102981e2 templates/show_params.mako
--- a/templates/show_params.mako
+++ b/templates/show_params.mako
@@ -60,6 +60,7 @@
<tr><td>Tool Version:</td><td>${hda.tool_version}</td></tr>
<tr><td>Tool Standard Output:</td><td><a href="${h.url_for( controller='dataset', action='stdout')}">stdout</a></td></tr>
<tr><td>Tool Standard Error:</td><td><a href="${h.url_for( controller='dataset', action='stderr')}">stderr</a></td></tr>
+ <tr><td>Tool Exit Code:</td><td>${job.exit_code}</td></tr>
%if trans.user_is_admin() or trans.app.config.expose_dataset_path:
<tr><td>Full Path:</td><td>${hda.file_name}</td></tr>
%endif
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Only include the username in the job name when not running as a system user.
by Bitbucket 03 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e4e1c621b025/
changeset: e4e1c621b025
user: natefoo
date: 2012-10-03 19:43:38
summary: Only include the username in the job name when not running as a system user.
affected #: 1 file
diff -r f16e60e79638a9ebe18f9daedb3d82ce405a1986 -r e4e1c621b025b4f0b09846e9d4410d2630394470 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -184,7 +184,9 @@
ofile = "%s.drmout" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
efile = "%s.drmerr" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
ecfile = "%s.drmec" % os.path.join(job_wrapper.working_directory, job_wrapper.get_id_tag())
- job_name = "g%s_%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id, job_wrapper.user )
+ job_name = "g%s_%s" % ( job_wrapper.job_id, job_wrapper.tool.id )
+ if self.external_runJob_script is None:
+ job_name = "%s_%s" % ( job_name, job_wrapper.user )
job_name = ''.join( map( lambda x: x if x in ( string.letters + string.digits + '_' ) else '_', job_name ) )
jt = self.ds.createJobTemplate()
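As an aside, a standalone sketch of the job-name sanitization shown in the diff above: any character outside letters, digits and '_' is replaced with '_'. The sample job name is hypothetical, and string.ascii_letters stands in for the Python 2 string.letters used in the runner.

    import string

    # Sketch of the sanitization step applied to the DRMAA job name.
    def sanitize_job_name(job_name):
        allowed = string.ascii_letters + string.digits + '_'
        return ''.join(c if c in allowed else '_' for c in job_name)

    print(sanitize_job_name("g123_bowtie_wrapper_user@example.org"))
    # -> g123_bowtie_wrapper_user_example_org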
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Fix reporting in the set_user_disk_usage.py script.
by Bitbucket 03 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f16e60e79638/
changeset: f16e60e79638
user: natefoo
date: 2012-10-03 19:40:54
summary: Fix reporting in the set_user_disk_usage.py script.
affected #: 1 file
diff -r 82ef383af0e1135d300afe67aa767f73a3a781ae -r f16e60e79638a9ebe18f9daedb3d82ce405a1986 scripts/set_user_disk_usage.py
--- a/scripts/set_user_disk_usage.py
+++ b/scripts/set_user_disk_usage.py
@@ -90,10 +90,10 @@
if new in ( current, None ):
print 'none'
else:
- op = '-'
if new > current:
- op = '+'
- print '%s%s' % ( op, nice_size( new ) )
+ print '+%s' % ( nice_size( new - current ) )
+ else:
+ print '-%s' % ( nice_size( current - new ) )
if not options.dryrun and engine != 'postgres':
user.set_disk_usage( new )
sa_session.add( user )
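For clarity, a standalone sketch of the corrected delta reporting above; nice_size here is a simplified stand-in for galaxy.util.nice_size, and the byte values are made up.

    def nice_size(n_bytes):
        # Simplified stand-in for galaxy.util.nice_size.
        for unit in ('bytes', 'KB', 'MB', 'GB', 'TB'):
            if n_bytes < 1024 or unit == 'TB':
                return '%.1f %s' % (n_bytes, unit)
            n_bytes /= 1024.0

    def report_usage_change(current, new):
        # Mirrors the fixed logic: print the signed difference between the
        # old and new usage, not just a sign and the new absolute value.
        if new in (current, None):
            return 'none'
        if new > current:
            return '+%s' % nice_size(new - current)
        return '-%s' % nice_size(current - new)

    print(report_usage_change(1024, 4096))   # -> +3.0 KB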
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/82ef383af0e1/
changeset: 82ef383af0e1
user: natefoo
date: 2012-10-03 19:05:35
summary: Remove BWA from Main tool_conf.
affected #: 1 file
diff -r 62fbfc687692dc173a9db4c9f6b86f1691afb281 -r 82ef383af0e1135d300afe67aa767f73a3a781ae tool_conf.xml.main
--- a/tool_conf.xml.main
+++ b/tool_conf.xml.main
@@ -257,14 +257,12 @@
<section name="NGS: Mapping" id="ngs_mapping"><label text="Illumina" id="illumina"/><tool file="sr_mapping/bowtie_wrapper.xml" />
- <tool file="sr_mapping/bwa_wrapper.xml" /><label text="Roche-454" id="roche_454"/><tool file="sr_mapping/lastz_wrapper.xml" /><tool file="metag_tools/megablast_wrapper.xml" /><tool file="metag_tools/megablast_xml_parser.xml" /><label text="AB-SOLiD" id="ab_solid"/><tool file="sr_mapping/bowtie_color_wrapper.xml" />
- <tool file="sr_mapping/bwa_color_wrapper.xml" /></section><section name="NGS: SAM Tools" id="samtools"><tool file="samtools/sam_bitwise_flag_filter.xml" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Fix passing of job name when running jobs as a system user.
by Bitbucket 03 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/62fbfc687692/
changeset: 62fbfc687692
user: natefoo
date: 2012-10-03 17:51:07
summary: Fix passing of job name when running jobs as a system user.
affected #: 2 files
diff -r 5b6ed441a320a133a6931fab43227f40321a53b0 -r 62fbfc687692dc173a9db4c9f6b86f1691afb281 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -68,7 +68,7 @@
return inspect.currentframe().f_back.f_code.co_filename
DRMAA_jobTemplate_attributes = [ 'args', 'remoteCommand', 'outputPath', 'errorPath', 'nativeSpecification',
- 'name','email','project' ]
+ 'jobName','email','project' ]
class DRMAAJobState( object ):
def __init__( self ):
diff -r 5b6ed441a320a133a6931fab43227f40321a53b0 -r 62fbfc687692dc173a9db4c9f6b86f1691afb281 scripts/drmaa_external_runner.py
--- a/scripts/drmaa_external_runner.py
+++ b/scripts/drmaa_external_runner.py
@@ -25,7 +25,7 @@
import drmaa
DRMAA_jobTemplate_attributes = [ 'args', 'remoteCommand', 'outputPath', 'errorPath', 'nativeSpecification',
- 'name','email','project' ]
+ 'jobName','email','project' ]
def load_job_template_from_file(jt, filename):
f = open(filename,'r')
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/74c2c49d5180/
changeset: 74c2c49d5180
user: jmchilton
date: 2012-09-20 06:41:22
summary: Allow tool shed upload URLs that start with hg:// or hgs://. For URLs
of this form, replace hg with http (or hgs with https) and copy the
external mercurial repository's contents into the tool shed repository,
essentially as if it were a tar file.
The tool shed doesn't display a mercurial URL to push to until an
initial upload has occurred, and pasting in bitbucket download URLs
(e.g. https://bitbucket.org/<user>/<repo>/get/<rev>.tar.gz) results in
an extra directory being added to the top level of the tool shed
repository. Hence this mechanism enables the rapid creation of a tool
shed repository from an existing bitbucket repository.
affected #: 1 file
diff -r be1f6ebb8d6bd3864e5ca08c59f8edaa4f471004 -r 74c2c49d51808d0f3f8a4b41b385be2d38245bc3 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -42,11 +42,19 @@
# receive them. One scenario occurs when the first change set is produced for the repository.
# See the handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new
+ uploaded_directory = None
if params.get( 'upload_button', False ):
if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
uploaded_file = None
+ elif url and url.startswith("hg"):
+ # Use mercurial clone to fetch the repository; its contents will
+ # then be copied over.
+ uploaded_directory = tempfile.mkdtemp()
+ repo_url = "http%s" % url[len("hg"):]
+ repo_url = repo_url.encode('ascii', 'replace')
+ commands.clone(get_configured_ui(), repo_url, uploaded_directory)
elif url:
valid_url = True
try:
@@ -72,29 +80,34 @@
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
- if uploaded_file:
+ if uploaded_file or uploaded_directory:
+ ok = True
isgzip = False
isbz2 = False
- if uncompress_file:
- isgzip = is_gzip( uploaded_file_name )
- if not isgzip:
- isbz2 = is_bz2( uploaded_file_name )
- ok = True
- if isempty:
- tar = None
- istar = False
- else:
- # Determine what we have - a single file or an archive
- try:
- if ( isgzip or isbz2 ) and uncompress_file:
- # Open for reading with transparent compression.
- tar = tarfile.open( uploaded_file_name, 'r:*' )
- else:
- tar = tarfile.open( uploaded_file_name )
- istar = True
- except tarfile.ReadError, e:
+ if uploaded_file:
+
+ if uncompress_file:
+ isgzip = is_gzip( uploaded_file_name )
+ if not isgzip:
+ isbz2 = is_bz2( uploaded_file_name )
+ if isempty:
tar = None
istar = False
+ else:
+ # Determine what we have - a single file or an archive
+ try:
+ if ( isgzip or isbz2 ) and uncompress_file:
+ # Open for reading with transparent compression.
+ tar = tarfile.open( uploaded_file_name, 'r:*' )
+ else:
+ tar = tarfile.open( uploaded_file_name )
+ istar = True
+ except tarfile.ReadError, e:
+ tar = None
+ istar = False
+ else:
+ # Uploaded directory
+ istar = False
if istar:
ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_tar( trans,
repository,
@@ -104,6 +117,14 @@
remove_repo_files_not_in_tar,
commit_message,
new_repo_alert )
+ elif uploaded_directory:
+ ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_directory( trans,
+ repository,
+ uploaded_directory,
+ upload_point,
+ remove_repo_files_not_in_tar,
+ commit_message,
+ new_repo_alert )
else:
if ( isgzip or isbz2 ) and uncompress_file:
uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
@@ -144,7 +165,13 @@
uncompress_str = ' uncompressed and '
else:
uncompress_str = ' '
- message = "The file '%s' has been successfully%suploaded to the repository. " % ( uploaded_file_filename, uncompress_str )
+ if uploaded_directory:
+ source_type = "repository"
+ source = url
+ else:
+ source_type = "file"
+ source = uploaded_file_filename
+ message = "The %s '%s' has been successfully%suploaded to the repository. " % ( source_type, source, uncompress_str )
if istar and ( undesirable_dirs_removed or undesirable_files_removed ):
items_removed = undesirable_dirs_removed + undesirable_files_removed
message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed
@@ -177,19 +204,54 @@
remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
message=message,
status=status )
+ def upload_directory( self, trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ undesirable_dirs_removed = 0
+ undesirable_files_removed = 0
+
+ if upload_point is not None:
+ full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
+ else:
+ full_path = os.path.abspath( repo_dir )
+
+ filenames_in_archive = []
+ for root, dirs, files in os.walk( uploaded_directory ):
+ for uploaded_file in files:
+ relative_path = os.path.normpath(os.path.join(os.path.relpath(root, uploaded_directory), uploaded_file))
+ ok = os.path.basename( uploaded_file ) not in undesirable_files
+ if ok:
+ for file_path_item in relative_path.split( '/' ):
+ if file_path_item in undesirable_dirs:
+ undesirable_dirs_removed += 1
+ ok = False
+ break
+ else:
+ undesirable_files_removed += 1
+ if ok:
+ repo_path = os.path.join(full_path, relative_path)
+ repo_basedir = os.path.normpath(os.path.join(repo_path, os.path.pardir))
+ if not os.path.exists(repo_basedir):
+ os.makedirs(repo_basedir)
+ if os.path.exists(repo_path):
+ if os.path.isdir(repo_path):
+ shutil.rmtree(repo_path)
+ else:
+ os.remove(repo_path)
+ shutil.move(os.path.join(uploaded_directory, relative_path), repo_path)
+ filenames_in_archive.append( relative_path )
+ return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- files_to_remove = []
- content_alert_str = ''
undesirable_dirs_removed = 0
undesirable_files_removed = 0
ok, message = self.__check_archive( tar )
if not ok:
tar.close()
uploaded_file.close()
- return ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ return ok, message, [], '', undesirable_dirs_removed, undesirable_files_removed
else:
if upload_point is not None:
full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
@@ -208,70 +270,76 @@
undesirable_files_removed += 1
if ok:
filenames_in_archive.append( tarinfo_obj.name )
- filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
# Extract the uploaded tar to the load_point within the repository hierarchy.
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- if remove_repo_files_not_in_tar and not repository.is_new:
- # We have a repository that is not new (it contains files), so discover
- # those files that are in the repository, but not in the uploaded archive.
- for root, dirs, files in os.walk( full_path ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
- if undesirable_dir in dirs:
- dirs.remove( undesirable_dir )
- undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
- if undesirable_file in files:
- files.remove( undesirable_file )
- undesirable_files_removed += 1
- for name in files:
- full_name = os.path.join( root, name )
- if full_name not in filenames_in_archive:
- files_to_remove.append( full_name )
- for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
- try:
- commands.remove( repo.ui, repo, repo_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = check_file_contents( trans )
- for filename_in_archive in filenames_in_archive:
- # Check file content to ensure it is appropriate.
- if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.__check_file_content( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
- if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
- if error:
- return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- admin_only = len( repository.downloadable_revisions ) != 1
- handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
- return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
+ def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ content_alert_str = ''
+ files_to_remove = []
+ filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+ if remove_repo_files_not_in_tar and not repository.is_new:
+ # We have a repository that is not new (it contains files), so discover
+ # those files that are in the repository, but not in the uploaded archive.
+ for root, dirs, files in os.walk( full_path ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ for undesirable_dir in undesirable_dirs:
+ if undesirable_dir in dirs:
+ dirs.remove( undesirable_dir )
+ undesirable_dirs_removed += 1
+ for undesirable_file in undesirable_files:
+ if undesirable_file in files:
+ files.remove( undesirable_file )
+ undesirable_files_removed += 1
+ for name in files:
+ full_name = os.path.join( root, name )
+ if full_name not in filenames_in_archive:
+ files_to_remove.append( full_name )
+ for repo_file in files_to_remove:
+ # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ # See if any admin users have chosen to receive email alerts when a repository is
+ # updated. If so, check every uploaded file to ensure content is appropriate.
+ check_contents = check_file_contents( trans )
+ for filename_in_archive in filenames_in_archive:
+ # Check file content to ensure it is appropriate.
+ if check_contents and os.path.isfile( filename_in_archive ):
+ content_alert_str += self.__check_file_content( filename_in_archive )
+ commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary.
+ error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ if error:
+ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ admin_only = len( repository.downloadable_revisions ) != 1
+ handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
if isgzip:
self.__handle_gzip( repository, uploaded_file_name )
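To make the URL handling described in the commit message concrete, here is a standalone sketch of the rewrite: a leading "hg" is replaced with "http", so hg:// becomes http:// and hgs:// becomes https://. The repository URL below is a placeholder.

    # Sketch of the hg://-to-http:// rewrite performed before cloning.
    def rewrite_hg_url(url):
        if url.startswith("hg"):
            return "http%s" % url[len("hg"):]
        return url

    print(rewrite_hg_url("hgs://bitbucket.org/someuser/somerepo"))
    # -> https://bitbucket.org/someuser/somerepo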
https://bitbucket.org/galaxy/galaxy-central/changeset/5b6ed441a320/
changeset: 5b6ed441a320
user: greg
date: 2012-10-03 16:29:09
summary: Merged in jmchilton/galaxy-central-tool-shed-hg-urls (pull request #69)
affected #: 1 file
diff -r ae683c38bdf2aa61403dc220e91561945a855158 -r 5b6ed441a320a133a6931fab43227f40321a53b0 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -42,11 +42,19 @@
# receive them. One scenario occurs when the first change set is produced for the repository.
# See the handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new
+ uploaded_directory = None
if params.get( 'upload_button', False ):
if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
uploaded_file = None
+ elif url and url.startswith("hg"):
+ # Use mercurial clone to fetch the repository; its contents will
+ # then be copied over.
+ uploaded_directory = tempfile.mkdtemp()
+ repo_url = "http%s" % url[len("hg"):]
+ repo_url = repo_url.encode('ascii', 'replace')
+ commands.clone(get_configured_ui(), repo_url, uploaded_directory)
elif url:
valid_url = True
try:
@@ -72,29 +80,34 @@
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
- if uploaded_file:
+ if uploaded_file or uploaded_directory:
+ ok = True
isgzip = False
isbz2 = False
- if uncompress_file:
- isgzip = is_gzip( uploaded_file_name )
- if not isgzip:
- isbz2 = is_bz2( uploaded_file_name )
- ok = True
- if isempty:
- tar = None
- istar = False
- else:
- # Determine what we have - a single file or an archive
- try:
- if ( isgzip or isbz2 ) and uncompress_file:
- # Open for reading with transparent compression.
- tar = tarfile.open( uploaded_file_name, 'r:*' )
- else:
- tar = tarfile.open( uploaded_file_name )
- istar = True
- except tarfile.ReadError, e:
+ if uploaded_file:
+
+ if uncompress_file:
+ isgzip = is_gzip( uploaded_file_name )
+ if not isgzip:
+ isbz2 = is_bz2( uploaded_file_name )
+ if isempty:
tar = None
istar = False
+ else:
+ # Determine what we have - a single file or an archive
+ try:
+ if ( isgzip or isbz2 ) and uncompress_file:
+ # Open for reading with transparent compression.
+ tar = tarfile.open( uploaded_file_name, 'r:*' )
+ else:
+ tar = tarfile.open( uploaded_file_name )
+ istar = True
+ except tarfile.ReadError, e:
+ tar = None
+ istar = False
+ else:
+ # Uploaded directory
+ istar = False
if istar:
ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_tar( trans,
repository,
@@ -104,6 +117,14 @@
remove_repo_files_not_in_tar,
commit_message,
new_repo_alert )
+ elif uploaded_directory:
+ ok,message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = self.upload_directory( trans,
+ repository,
+ uploaded_directory,
+ upload_point,
+ remove_repo_files_not_in_tar,
+ commit_message,
+ new_repo_alert )
else:
if ( isgzip or isbz2 ) and uncompress_file:
uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
@@ -144,7 +165,13 @@
uncompress_str = ' uncompressed and '
else:
uncompress_str = ' '
- message = "The file '%s' has been successfully%suploaded to the repository. " % ( uploaded_file_filename, uncompress_str )
+ if uploaded_directory:
+ source_type = "repository"
+ source = url
+ else:
+ source_type = "file"
+ source = uploaded_file_filename
+ message = "The %s '%s' has been successfully%suploaded to the repository. " % ( source_type, source, uncompress_str )
if istar and ( undesirable_dirs_removed or undesirable_files_removed ):
items_removed = undesirable_dirs_removed + undesirable_files_removed
message += " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) were removed from the archive. " % items_removed
@@ -176,19 +203,54 @@
remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
message=message,
status=status )
+ def upload_directory( self, trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ undesirable_dirs_removed = 0
+ undesirable_files_removed = 0
+
+ if upload_point is not None:
+ full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
+ else:
+ full_path = os.path.abspath( repo_dir )
+
+ filenames_in_archive = []
+ for root, dirs, files in os.walk( uploaded_directory ):
+ for uploaded_file in files:
+ relative_path = os.path.normpath(os.path.join(os.path.relpath(root, uploaded_directory), uploaded_file))
+ ok = os.path.basename( uploaded_file ) not in undesirable_files
+ if ok:
+ for file_path_item in relative_path.split( '/' ):
+ if file_path_item in undesirable_dirs:
+ undesirable_dirs_removed += 1
+ ok = False
+ break
+ else:
+ undesirable_files_removed += 1
+ if ok:
+ repo_path = os.path.join(full_path, relative_path)
+ repo_basedir = os.path.normpath(os.path.join(repo_path, os.path.pardir))
+ if not os.path.exists(repo_basedir):
+ os.makedirs(repo_basedir)
+ if os.path.exists(repo_path):
+ if os.path.isdir(repo_path):
+ shutil.rmtree(repo_path)
+ else:
+ os.remove(repo_path)
+ shutil.move(os.path.join(uploaded_directory, relative_path), repo_path)
+ filenames_in_archive.append( relative_path )
+ return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- files_to_remove = []
- content_alert_str = ''
undesirable_dirs_removed = 0
undesirable_files_removed = 0
ok, message = self.__check_archive( tar )
if not ok:
tar.close()
uploaded_file.close()
- return ok, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ return ok, message, [], '', undesirable_dirs_removed, undesirable_files_removed
else:
if upload_point is not None:
full_path = os.path.abspath( os.path.join( repo_dir, upload_point ) )
@@ -207,70 +269,76 @@
undesirable_files_removed += 1
if ok:
filenames_in_archive.append( tarinfo_obj.name )
- filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
# Extract the uploaded tar to the load_point within the repository hierarchy.
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- if remove_repo_files_not_in_tar and not repository.is_new:
- # We have a repository that is not new (it contains files), so discover
- # those files that are in the repository, but not in the uploaded archive.
- for root, dirs, files in os.walk( full_path ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
- if undesirable_dir in dirs:
- dirs.remove( undesirable_dir )
- undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
- if undesirable_file in files:
- files.remove( undesirable_file )
- undesirable_files_removed += 1
- for name in files:
- full_name = os.path.join( root, name )
- if full_name not in filenames_in_archive:
- files_to_remove.append( full_name )
- for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
- try:
- commands.remove( repo.ui, repo, repo_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = check_file_contents( trans )
- for filename_in_archive in filenames_in_archive:
- # Check file content to ensure it is appropriate.
- if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.__check_file_content( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
- if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
- if error:
- return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- admin_only = len( repository.downloadable_revisions ) != 1
- handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
- return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
+ def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ content_alert_str = ''
+ files_to_remove = []
+ filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+ if remove_repo_files_not_in_tar and not repository.is_new:
+ # We have a repository that is not new (it contains files), so discover
+ # those files that are in the repository, but not in the uploaded archive.
+ for root, dirs, files in os.walk( full_path ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ for undesirable_dir in undesirable_dirs:
+ if undesirable_dir in dirs:
+ dirs.remove( undesirable_dir )
+ undesirable_dirs_removed += 1
+ for undesirable_file in undesirable_files:
+ if undesirable_file in files:
+ files.remove( undesirable_file )
+ undesirable_files_removed += 1
+ for name in files:
+ full_name = os.path.join( root, name )
+ if full_name not in filenames_in_archive:
+ files_to_remove.append( full_name )
+ for repo_file in files_to_remove:
+ # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ # See if any admin users have chosen to receive email alerts when a repository is
+ # updated. If so, check every uploaded file to ensure content is appropriate.
+ check_contents = check_file_contents( trans )
+ for filename_in_archive in filenames_in_archive:
+ # Check file content to ensure it is appropriate.
+ if check_contents and os.path.isfile( filename_in_archive ):
+ content_alert_str += self.__check_file_content( filename_in_archive )
+ commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary.
+ error, message = handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ if error:
+ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ admin_only = len( repository.downloadable_revisions ) != 1
+ handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
if isgzip:
self.__handle_gzip( repository, uploaded_file_name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/