galaxy-commits
Threads by month
- ----- 2025 -----
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions

commit/galaxy-central: jgoecks: Create and use mixin method for getting dataset's genome data.
by Bitbucket 10 Oct '12
by Bitbucket 10 Oct '12
10 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/220b6683e8da/
changeset: 220b6683e8da
user: jgoecks
date: 2012-10-10 05:49:01
summary: Create and use mixin method for getting dataset's genome data.
affected #: 4 files
diff -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 -r 220b6683e8dad3028939ff6e5a7acd76d9e994b1 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -186,7 +186,6 @@
chrom_data = self.get_data( chrom, 0, chrom_len, **kwargs )
if chrom_data:
chrom_data[ 'region' ] = "%s:%i-%i" % ( chrom, 0, chrom_len )
- chrom_data[ 'dataset_type' ] = self.dataset_type
genome_data.append( chrom_data )
return {
diff -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 -r 220b6683e8dad3028939ff6e5a7acd76d9e994b1 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -602,6 +602,33 @@
return visualization
+ def _get_genome_data( self, trans, dataset, dbkey=None ):
+ """
+ Returns genome-wide data for dataset if available; if not, message is returned.
+ """
+ rval = None
+
+ # Get data sources.
+ data_sources = dataset.get_datasources( trans )
+ query_dbkey = dataset.dbkey
+ if query_dbkey == "?":
+ query_dbkey = dbkey
+ chroms_info = self.app.genomes.chroms( trans, dbkey=query_dbkey )
+
+ # If there are no messages (messages indicate data is not ready/available), preload data.
+ messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
+ message = get_highest_priority_msg( messages_list )
+ if message:
+ rval = message
+ else:
+ data_provider = trans.app.data_provider_registry.get_data_provider( trans,
+ original_dataset=dataset,
+ source='index' )
+ # HACK: pass in additional params, which are only used for summary tree data, not BBI data.
+ rval = data_provider.get_genome_data( chroms_info, level=4, detail_cutoff=0, draw_cutoff=0 )
+
+ return rval
+
class UsesStoredWorkflowMixin( SharableItemSecurityMixin ):
""" Mixin for controllers that use StoredWorkflow objects. """
def get_stored_workflow( self, trans, id, check_ownership=True, check_accessible=False ):
diff -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 -r 220b6683e8dad3028939ff6e5a7acd76d9e994b1 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -51,6 +51,8 @@
rval = self._raw_data( trans, dataset, **kwd )
elif data_type == 'track_config':
rval = self.get_new_track_config( trans, dataset )
+ elif data_type == 'genome_data':
+ rval = self._get_genome_data( self, trans, dataset, kwd.get('dbkey', None) )
else:
# Default: return dataset as API value.
rval = dataset.get_api_value()
diff -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 -r 220b6683e8dad3028939ff6e5a7acd76d9e994b1 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -772,18 +772,10 @@
# Add genome-wide summary tree data to each track in viz.
tracks = viz_config.get( 'tracks', [] )
for track in tracks:
- # Get dataset and data sources.
dataset = self.get_hda_or_ldda( trans, track[ 'hda_ldda'], track[ 'dataset_id' ] )
- data_sources = dataset.get_datasources( trans )
-
- # If there are no messages (messages indicate data is not ready/available), preload data.
- messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
- if not get_highest_priority_msg( messages_list ):
- data_provider = trans.app.data_provider_registry.get_data_provider( trans,
- original_dataset=dataset,
- source='index' )
- # HACK: pass in additional params, which are only used for summary tree data, not BBI data.
- track[ 'preloaded_data' ] = data_provider.get_genome_data( chroms_info, level=4, detail_cutoff=0, draw_cutoff=0 )
+ genome_data = self._get_genome_data( trans, dataset, dbkey )
+ if not isinstance( genome_data, str ):
+ track[ 'preloaded_data' ] = genome_data
return trans.fill_template( 'visualization/circster.mako', viz_config=viz_config, genome=genome )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: carlfeberhard: history.js: fix to HistoryView tagging functionality
by Bitbucket 09 Oct '12
by Bitbucket 09 Oct '12
09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3231b9ca8d82/
changeset: 3231b9ca8d82
user: carlfeberhard
date: 2012-10-10 01:10:30
summary: history.js: fix to HistoryView tagging functionality
affected #: 4 files
diff -r dcb22217c17527427c17f4ca3edeb5d6a98f6f4d -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 static/scripts/mvc/history.js
--- a/static/scripts/mvc/history.js
+++ b/static/scripts/mvc/history.js
@@ -558,6 +558,14 @@
parent.append( this._render_displayApps() );
parent.append( this._render_peek() );
+
+ //TODO??: still needed?
+ //// If Mozilla, hide scrollbars in hidden items since they cause animation bugs
+ //if ( $.browser.mozilla ) {
+ // $( "div.historyItemBody" ).each( function() {
+ // if ( !$(this).is(":visible") ) { $(this).find( "pre.peek" ).css( "overflow", "hidden" ); }
+ // });
+ //}
},
_render_body : function(){
@@ -704,7 +712,13 @@
toggleBodyVisibility : function( visible ){
var $body = this.$el.find( '.historyItemBody' );
- $body.toggle();
+ if( visible === undefined ){
+ $body.toggle();
+ } else if( visible ){
+ $body.show();
+ } else {
+ $body.hide();
+ }
this.trigger( 'toggleBodyVisibility', this.model.get( 'id' ), $body.is( ':visible' ) );
},
@@ -715,9 +729,7 @@
}
});
-
//------------------------------------------------------------------------------
-//HistoryItemView.templates = InDomTemplateLoader.getTemplates({
HistoryItemView.templates = {
warningMsg : Handlebars.templates[ 'template-warningmessagesmall' ],
@@ -875,7 +887,6 @@
//------------------------------------------------------------------------------
// view for the HistoryCollection (as per current right hand panel)
-//var HistoryView = BaseView.extend( LoggableMixin ).extend( UsesStorageMixin ) .extend({
var HistoryView = BaseView.extend( LoggableMixin ).extend({
// uncomment this out see log messages
@@ -894,16 +905,20 @@
);
// set up the individual history items/datasets
this.initializeItems();
+
},
initializeItems : function(){
this.itemViews = {};
var historyPanel = this;
+
+ // set up a view for each item, init with model and listeners, cache to map ( model.id : view )
this.model.items.each( function( item ){
var itemId = item.get( 'id' ),
+ visible = historyPanel.storage.get( 'visibleItems' ).get( itemId ),
itemView = new HistoryItemView({
- model: item, visible:
- historyPanel.storage.get( 'visibleItems' ).get( itemId )
+ model: item,
+ visible: visible
});
historyPanel.setUpItemListeners( itemView );
historyPanel.itemViews[ itemId ] = itemView;
@@ -912,6 +927,7 @@
setUpItemListeners : function( itemView ){
var HistoryPanel = this;
+
// use storage to maintain a list of items whose bodies are visible
itemView.bind( 'toggleBodyVisibility', function( id, visible ){
if( visible ){
@@ -930,7 +946,34 @@
this.itemsDiv = this.$el.find( '#' + this.model.get( 'id' ) + '-datasets' );
//TODO: set up widgets, tooltips, etc.
+ async_save_text(
+ "history-name-container",
+ "history-name",
+ this.model.get( 'renameURL' ),
+ "new_name",
+ 18
+ );
+ this.$el.find( '.tooltip' ).tooltip();
+ var historyAnnotationArea = this.$el.find( '#history-annotation-area' );
+ $( '#history-annotate' ).click( function() {
+ if ( historyAnnotationArea.is( ":hidden" ) ) {
+ historyAnnotationArea.slideDown( "fast" );
+ } else {
+ historyAnnotationArea.slideUp( "fast" );
+ }
+ return false;
+ });
+ async_save_text(
+ "history-annotation-container",
+ "history-annotation",
+ this.model.get( 'annotateURL' ),
+ "new_annotation",
+ 18,
+ true,
+ 4
+ );
+
if( this.model.items.length ){
// render to temp, move all at once, remove temp holder
var tempDiv = this._render_items();
@@ -950,6 +993,53 @@
return div;
},
+ events : {
+ 'click #history-collapse-all' : 'hideAllItemBodies',
+ 'click #history-tag' : 'loadAndDisplayTags'
+ },
+
+ hideAllItemBodies : function(){
+ _.each( this.itemViews, function( item ){
+ item.toggleBodyVisibility( false );
+ });
+ },
+
+ loadAndDisplayTags : function( event ){
+ //BUG: broken with latest
+ //TODO: this is a drop in from history.mako - should use MV as well
+ this.log( this + '.loadAndDisplayTags', event );
+ var tagArea = this.$el.find( '#history-tag-area' ),
+ tagElt = tagArea.find( '.tag-elt' );
+ this.log( '\t tagArea', tagArea, ' tagElt', tagElt );
+
+ // Show or hide tag area; if showing tag area and it's empty, fill it.
+ if( tagArea.is( ":hidden" ) ){
+ if( !jQuery.trim( tagElt.html() ) ){
+ var view = this;
+ // Need to fill tag element.
+ $.ajax({
+ //TODO: the html from this breaks a couple of times
+ url: this.model.get( 'tagURL' ),
+ error: function() { alert( "Tagging failed" ); },
+ success: function(tag_elt_html) {
+ view.log( view + ' tag elt html (ajax)', tag_elt_html );
+ tagElt.html(tag_elt_html);
+ tagElt.find(".tooltip").tooltip();
+ tagArea.slideDown("fast");
+ }
+ });
+ } else {
+ // Tag element is filled; show.
+ tagArea.slideDown("fast");
+ }
+
+ } else {
+ // Hide.
+ tagArea.slideUp("fast");
+ }
+ return false;
+ },
+
toString : function(){
var nameString = this.model.get( 'name' ) || '';
return 'HistoryView(' + nameString + ')';
diff -r dcb22217c17527427c17f4ca3edeb5d6a98f6f4d -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 static/scripts/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/templates/compiled/template-history-historyPanel.js
@@ -23,21 +23,13 @@
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(6, program6, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\"\n class=\"icon-button tags tooltip\" target=\"galaxy_main\" href=\"";
- foundHelper = helpers.tagURL;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{}}); }
- else { stack1 = depth0.tagURL; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- buffer += escapeExpression(stack1) + "\"></a>\n <a id=\"history-annotate\" title=\"";
+ buffer += "\"\n class=\"icon-button tags tooltip\" target=\"galaxy_main\" href=\"javascript:void(0)\"></a>\n <a id=\"history-annotate\" title=\"";
foundHelper = helpers.local;
if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(8, program8, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(8, program8, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\"\n class=\"icon-button annotate tooltip\" target=\"galaxy_main\" href=\"";
- foundHelper = helpers.annotateURL;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{}}); }
- else { stack1 = depth0.annotateURL; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- buffer += escapeExpression(stack1) + "\"></a>\n </div>\n ";
+ buffer += "\"\n class=\"icon-button annotate tooltip\" target=\"galaxy_main\" href=\"javascript:void(0)\"></a>\n </div>\n ";
return buffer;}
function program6(depth0,data) {
@@ -143,9 +135,9 @@
var buffer = "", stack1;
buffer += "\n";
- buffer += "\n<div style=\"margin: 0px 5px 10px 5px\">\n\n <div id=\"history-tag-area\" style=\"display: none\">\n <b>Tags:</b>\n ";
+ buffer += "\n<div style=\"margin: 0px 5px 10px 5px\">\n\n <div id=\"history-tag-area\" style=\"display: none\">\n ";
buffer += "\n ";
- buffer += "\n </div>\n\n <div id=\"history-annotation-area\" style=\"display: none\">\n <strong>Annotation / Notes:</strong>\n <div id=\"history-annotation-container\">\n <div id=\"history-annotation\" class=\"tooltip editable-text\" title=\"Click to edit annotation\">\n ";
+ buffer += "\n <strong>Tags:</strong>\n <div class=\"tag-elt\"></div>\n </div>\n\n <div id=\"history-annotation-area\" style=\"display: none\">\n <strong>Annotation / Notes:</strong>\n <div id=\"history-annotation-container\">\n <div id=\"history-annotation\" class=\"tooltip editable-text\" title=\"Click to edit annotation\">\n ";
stack1 = depth0.annotation;
stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.program(27, program27, data),fn:self.program(25, program25, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
@@ -228,7 +220,7 @@
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(3, program3, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "' class='icon-button toggle tooltip' href='#' style=\"display: none\"></a>\n ";
+ buffer += "' id=\"history-collapse-all\"\n class='icon-button toggle tooltip' href='javascript:void(0);'></a>\n ";
stack1 = depth0.userRoles;
stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(5, program5, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
@@ -252,7 +244,6 @@
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n\n";
buffer += "\n";
- buffer += "\n";
stack1 = depth0.userRoles;
stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(24, program24, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
diff -r dcb22217c17527427c17f4ca3edeb5d6a98f6f4d -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 static/scripts/templates/history-templates.html
--- a/static/scripts/templates/history-templates.html
+++ b/static/scripts/templates/history-templates.html
@@ -103,13 +103,14 @@
<script type="text/template" class="template-history" id="template-history-historyPanel"><div id="top-links" class="historyLinks"><a title="{{#local}}refresh{{/local}}" class="icon-button arrow-circle tooltip" href="{{baseURL}}"></a>
- <a title='{{#local}}collapse all{{/local}}' class='icon-button toggle tooltip' href='#' style="display: none"></a>
+ <a title='{{#local}}collapse all{{/local}}' id="history-collapse-all"
+ class='icon-button toggle tooltip' href='javascript:void(0);'></a>
{{#if userRoles}}
<div style="width: 40px; float: right; white-space: nowrap;"><a id="history-tag" title="{{#local}}Edit history tags{{/local}}"
- class="icon-button tags tooltip" target="galaxy_main" href="{{tagURL}}"></a>
+ class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a><a id="history-annotate" title="{{#local}}Edit history annotation{{/local}}"
- class="icon-button annotate tooltip" target="galaxy_main" href="{{annotateURL}}"></a>
+ class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a></div>
{{/if}}
</div>
@@ -148,15 +149,15 @@
{{/if}}
{{! tags and annotations }}
-{{! TODO: wire these to js events }}
{{#if userRoles}}
{{! TODO: move inline styles out }}
<div style="margin: 0px 5px 10px 5px"><div id="history-tag-area" style="display: none">
- <b>Tags:</b>
{{! load via js render_individual_tagging_element }}
{{! render_individual_tagging_element(user=trans.get_user(), tagged_item=history, elt_context="history.mako", use_toggle_link=False, input_size="20") }}
+ <strong>Tags:</strong>
+ <div class="tag-elt"></div></div><div id="history-annotation-area" style="display: none">
@@ -202,6 +203,3 @@
</div>
{{/if}}
</script>
-
-
-
diff -r dcb22217c17527427c17f4ca3edeb5d6a98f6f4d -r 3231b9ca8d82219cbef87cfeb4fc57013c11f3c5 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -291,9 +291,10 @@
##TODO: move to API
for_editing = True
+ encoded_history_id = trans.security.encode_id( history.id )
context_dict = {
'history' : {
- 'id' : trans.security.encode_id( history.id ),
+ 'id' : encoded_history_id,
'name' : history.name,
'status' : status,
@@ -324,11 +325,13 @@
'annotation' : h.to_unicode( annotation ),
##TODO: broken
- 'baseURL' : h.url_for( 'history', show_deleted=show_deleted ),
- 'hideDeletedURL' : h.url_for( 'history', show_deleted=False ),
- 'hideHiddenURL' : h.url_for( 'history', show_hidden=False ),
- 'tagURL' : h.url_for( controller='history', action='tag' ),
- 'annotateURL' : h.url_for( controller='history', action='annotate' )
+ 'baseURL' : h.url_for( controller="/history", show_deleted=show_deleted ),
+ 'hideDeletedURL' : h.url_for( controller="/history", show_deleted=False ),
+ 'hideHiddenURL' : h.url_for( controller="/history", show_hidden=False ),
+ 'renameURL' : h.url_for( controller="/history", action="rename_async", id=encoded_history_id ),
+ 'tagURL' : h.url_for( controller='tag', action='get_tagging_elt_async',
+ item_class=history.__class__.__name__, item_id=encoded_history_id ),
+ 'annotateURL' : h.url_for( controller="/history", action="annotate_async", id=encoded_history_id )
},
'hdas' : [ prep_hda( hda, for_editing ) for hda in datasets ],
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9635e2cc9a24/
changeset: 9635e2cc9a24
user: jgoecks
date: 2012-10-09 18:33:02
summary: Trackster: accurately calculate track/tile heights by clearing previously set height before calculating new height.
affected #: 1 file
diff -r 79bb2432133f9b18450e142fc6b80ddab364b57e -r 9635e2cc9a249aea5d939c0f4ca955c5faa3dd58 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -3020,13 +3020,10 @@
* an existing tile rather than reshowing it.
*/
show_tile: function(tile, parent_element, w_scale) {
- var
- track = this,
+ var track = this,
tile_element = tile.html_elt;
- //
- // Show/move tile element.
- //
+ // -- Show/move tile element. --
tile.predisplay_actions();
@@ -3046,14 +3043,12 @@
// Showing new tile.
parent_element.append(tile_element);
}
+
+ // -- Update track, tile heights based on new tile. --
- track.after_show_tile(tile);
- },
-
- /**
- * Actions to be taken after showing tile.
- */
- after_show_tile: function(tile) {
+ // Clear any previous height settings for tile.
+ tile.html_elt.height('auto');
+
// Update max height based on current tile.
this.max_height_px = Math.max(this.max_height_px, tile.html_elt.height());
https://bitbucket.org/galaxy/galaxy-central/changeset/437b0d9ccb60/
changeset: 437b0d9ccb60
user: jgoecks
date: 2012-10-09 20:16:16
summary: For Circster: only preload data if indexing is complete.
affected #: 1 file
diff -r 9635e2cc9a249aea5d939c0f4ca955c5faa3dd58 -r 437b0d9ccb60141d9378642893ce13bd50187cea lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -772,14 +772,18 @@
# Add genome-wide summary tree data to each track in viz.
tracks = viz_config.get( 'tracks', [] )
for track in tracks:
- # Get dataset and indexed datatype.
+ # Get dataset and data sources.
dataset = self.get_hda_or_ldda( trans, track[ 'hda_ldda'], track[ 'dataset_id' ] )
data_sources = dataset.get_datasources( trans )
- data_provider = trans.app.data_provider_registry.get_data_provider( trans,
- original_dataset=dataset,
- source='index' )
- # HACK: pass in additional params, which are only used for summary tree data, not BBI data.
- track[ 'preloaded_data' ] = data_provider.get_genome_data( chroms_info, level=4, detail_cutoff=0, draw_cutoff=0 )
+
+ # If there are no messages (messages indicate data is not ready/available), preload data.
+ messages_list = [ data_source_dict[ 'message' ] for data_source_dict in data_sources.values() ]
+ if not get_highest_priority_msg( messages_list ):
+ data_provider = trans.app.data_provider_registry.get_data_provider( trans,
+ original_dataset=dataset,
+ source='index' )
+ # HACK: pass in additional params, which are only used for summary tree data, not BBI data.
+ track[ 'preloaded_data' ] = data_provider.get_genome_data( chroms_info, level=4, detail_cutoff=0, draw_cutoff=0 )
return trans.fill_template( 'visualization/circster.mako', viz_config=viz_config, genome=genome )
https://bitbucket.org/galaxy/galaxy-central/changeset/dcb22217c175/
changeset: dcb22217c175
user: jgoecks
date: 2012-10-09 20:16:27
summary: Merge
affected #: 2 files
diff -r 437b0d9ccb60141d9378642893ce13bd50187cea -r dcb22217c17527427c17f4ca3edeb5d6a98f6f4d lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -345,13 +345,13 @@
def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd ):
if values is None and context:
- column_range = range( 1, context.columns+1, 1 )
+ column_range = range( 1, ( context.columns or 0 ) + 1, 1 )
values = zip( column_range, column_range )
return RangeParameter.get_html_field( self, value=value, context=context, other_values=other_values, values=values, **kwd )
def get_html( self, value, context={}, other_values={}, values=None, **kwd ):
if values is None and context:
- column_range = range( 1, context.columns+1, 1 )
+ column_range = range( 1, ( context.columns or 0 ) + 1, 1 )
values = zip( column_range, column_range )
return RangeParameter.get_html( self, value, context=context, other_values=other_values, values=values, **kwd )
diff -r 437b0d9ccb60141d9378642893ce13bd50187cea -r dcb22217c17527427c17f4ca3edeb5d6a98f6f4d lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -274,12 +274,18 @@
column_names = dataset.metadata.column_names
elif hasattr(dataset.datatype, 'column_names'):
column_names = dataset.datatype.column_names
+ column_types = dataset.metadata.column_types
+ if not column_types:
+ column_types = []
+ column_number = dataset.metadata.columns
+ if column_number is None:
+ column_number = 'null'
return trans.fill_template( "/dataset/tabular_chunked.mako",
dataset = dataset,
chunk = self.get_chunk(trans, dataset, 0),
- column_number = dataset.metadata.columns,
+ column_number = column_number,
column_names = column_names,
- column_types = dataset.metadata.column_types)
+ column_types = column_types )
def set_peek( self, dataset, line_count=None, is_multi_byte=False):
super(Tabular, self).set_peek( dataset, line_count=line_count, is_multi_byte=is_multi_byte)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Fix for displaying metadata ColumnParameter type when columns is None.
by Bitbucket 09 Oct '12
by Bitbucket 09 Oct '12
09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6f017bc991a6/
changeset: 6f017bc991a6
user: dan
date: 2012-10-09 19:57:19
summary: Fix for displaying metadata ColumnParameter type when columns is None.
affected #: 1 file
diff -r 8ce7bbba361a93a1a4a50029a3fd6a6266c1934d -r 6f017bc991a6e4617488768532adb56d49c1a688 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -345,13 +345,13 @@
def get_html_field( self, value=None, context={}, other_values={}, values=None, **kwd ):
if values is None and context:
- column_range = range( 1, context.columns+1, 1 )
+ column_range = range( 1, ( context.columns or 0 ) + 1, 1 )
values = zip( column_range, column_range )
return RangeParameter.get_html_field( self, value=value, context=context, other_values=other_values, values=values, **kwd )
def get_html( self, value, context={}, other_values={}, values=None, **kwd ):
if values is None and context:
- column_range = range( 1, context.columns+1, 1 )
+ column_range = range( 1, ( context.columns or 0 ) + 1, 1 )
values = zip( column_range, column_range )
return RangeParameter.get_html( self, value, context=context, other_values=other_values, values=values, **kwd )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Fix for preview display of tabular items when column_number is None.
by Bitbucket 09 Oct '12
by Bitbucket 09 Oct '12
09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8ce7bbba361a/
changeset: 8ce7bbba361a
user: dan
date: 2012-10-09 19:50:47
summary: Fix for preview display of tabular items when column_number is None.
affected #: 1 file
diff -r 259a4e01f105b6f7396292db3ae0971d049b6ce7 -r 8ce7bbba361a93a1a4a50029a3fd6a6266c1934d lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -277,10 +277,13 @@
column_types = dataset.metadata.column_types
if not column_types:
column_types = []
+ column_number = dataset.metadata.columns
+ if column_number is None:
+ column_number = 'null'
return trans.fill_template( "/dataset/tabular_chunked.mako",
dataset = dataset,
chunk = self.get_chunk(trans, dataset, 0),
- column_number = dataset.metadata.columns,
+ column_number = column_number,
column_names = column_names,
column_types = column_types )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Fix for preview display of tabular items when column_types is None.
by Bitbucket 09 Oct '12
by Bitbucket 09 Oct '12
09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/259a4e01f105/
changeset: 259a4e01f105
user: dan
date: 2012-10-09 19:46:19
summary: Fix for preview display of tabular items when column_types is None.
affected #: 1 file
diff -r 79bb2432133f9b18450e142fc6b80ddab364b57e -r 259a4e01f105b6f7396292db3ae0971d049b6ce7 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -274,12 +274,15 @@
column_names = dataset.metadata.column_names
elif hasattr(dataset.datatype, 'column_names'):
column_names = dataset.datatype.column_names
+ column_types = dataset.metadata.column_types
+ if not column_types:
+ column_types = []
return trans.fill_template( "/dataset/tabular_chunked.mako",
dataset = dataset,
chunk = self.get_chunk(trans, dataset, 0),
column_number = dataset.metadata.columns,
column_names = column_names,
- column_types = dataset.metadata.column_types)
+ column_types = column_types )
def set_peek( self, dataset, line_count=None, is_multi_byte=False):
super(Tabular, self).set_peek( dataset, line_count=line_count, is_multi_byte=is_multi_byte)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/79bb2432133f/
changeset: 79bb2432133f
user: jgoecks
date: 2012-10-09 17:59:59
summary: Fix typo in datasets API controller.
affected #: 1 file
diff -r 04229b267b3e8dd112d3e66fd6826d2f66404424 -r 79bb2432133f9b18450e142fc6b80ddab364b57e lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -46,7 +46,7 @@
elif data_type == 'data':
rval = self._data( trans, dataset, **kwd )
elif data_type == 'features':
- rval = self._search_features( trans, dataset, kwd.get( 'query ' ) )
+ rval = self._search_features( trans, dataset, kwd.get( 'query' ) )
elif data_type == 'raw_data':
rval = self._raw_data( trans, dataset, **kwd )
elif data_type == 'track_config':
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/04229b267b3e/
changeset: 04229b267b3e
user: dan
date: 2012-10-09 16:27:54
summary: Some cleanup for shed_util.py
affected #: 1 file
diff -r 429e895ee1b3bfb2fea1632d34a01d54faeac866 -r 04229b267b3e8dd112d3e66fd6826d2f66404424 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -387,8 +387,6 @@
else:
tmp_url = repository_clone_url
return tmp_url
-def tool_shed_from_repository_clone_url( repository_clone_url ):
- return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def clean_tool_shed_url( tool_shed_url ):
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
@@ -1772,42 +1770,6 @@
config_elems.remove( config_elem )
# Persist the altered in-memory version of the tool config.
config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
-def update_in_shed_tool_config( app, repository ):
- # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
- # of config_elems instead of using the in-memory list.
- shed_conf_dict = repository.get_shed_config_dict( app )
- shed_tool_conf = shed_conf_dict[ 'config_filename' ]
- tool_path = shed_conf_dict[ 'tool_path' ]
-
- #hack for 'trans.app' used in lots of places. These places should just directly use app
- trans = util.bunch.Bunch()
- trans.app = app
-
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
- repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
- tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
- owner = repository.owner
- if not owner:
- owner = get_repository_owner( cleaned_repository_clone_url )
- guid_to_tool_elem_dict = {}
- for tool_config_filename, guid, tool in repository_tools_tups:
- guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
- config_elems = []
- tree = util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
@@ -2016,6 +1978,8 @@
elif c not in [ '\r' ]:
translated.append( '' )
return ''.join( translated )
+def tool_shed_from_repository_clone_url( repository_clone_url ):
+ return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def translate_string( raw_text, to_html=True ):
if raw_text:
if to_html:
@@ -2086,6 +2050,42 @@
sa_session.delete( tool_dependency )
sa_session.flush()
return new_tool_dependency
+def update_in_shed_tool_config( app, repository ):
+ # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ # of config_elems instead of using the in-memory list.
+ shed_conf_dict = repository.get_shed_config_dict( app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+
+ #hack for 'trans.app' used in lots of places. These places should just directly use app
+ trans = util.bunch.Bunch()
+ trans.app = app
+
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
+ tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
+ config_elems = []
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/921a612db28a/
changeset: 921a612db28a
user: dan
date: 2012-10-09 16:24:58
summary: Enhance Galaxy's model.ToolShedRepository to handle shed_conf_dict easier.
affected #: 1 file
diff -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 -r 921a612db28a69a33dc1ea8bef5ed20ffa63bba2 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2987,6 +2987,78 @@
return relative_path
return None
@property
+ def tool_shed_path_name( self ):
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ return tool_shed_url.rstrip( '/' )
+ def get_tool_relative_path( self, app ):
+ shed_conf_dict = self.get_shed_config_dict( app )
+ tool_path = None
+ relative_path = None
+ if shed_conf_dict:
+ tool_path = shed_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( self.tool_shed_path_name, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ return tool_path, relative_path
+ def get_shed_config_filename( self ):
+ shed_config_filename = None
+ if self.metadata:
+ shed_config_filename = self.metadata.get( 'shed_config_filename', shed_config_filename )
+ return shed_config_filename
+ def set_shed_config_filename( self, value ):
+ self.metadata[ 'shed_config_filename' ] = value
+ shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
+ def guess_shed_config( self, app, default=None ):
+ tool_ids = []
+ metadata = self.metadata or {}
+ for tool in metadata.get( 'tools', [] ):
+ tool_ids.append( tool.get( 'guid' ) )
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ name = shed_tool_conf_dict[ 'config_filename' ]
+ for elem in shed_tool_conf_dict[ 'config_elems' ]:
+ if elem.tag == 'tool':
+ for sub_elem in elem.findall( 'id' ):
+ tool_id = sub_elem.text.strip()
+ if tool_id in tool_ids:
+ self.shed_config_filename = name
+ return shed_tool_conf_dict
+ elif elem.tag == "section":
+ for tool_elem in elem.findall( 'tool' ):
+ for sub_elem in tool_elem.findall( 'id' ):
+ tool_id = sub_elem.text.strip()
+ if tool_id in tool_ids:
+ self.shed_config_filename = name
+ return shed_tool_conf_dict
+ if self.includes_datatypes:
+ #we need to search by filepaths here, which is less desirable
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ tool_shed = tool_shed_url.rstrip( '/' )
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ if os.path.exists( relative_path ):
+ self.shed_config_filename = shed_tool_conf_dict[ 'config_filename' ]
+ return shed_tool_conf_dict
+ #if self.dist_to_shed:
+ # #return ./migrated_tools.xml
+ return default
+ def get_shed_config_dict( self, app, default=None ):
+ """
+ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
+ in the shed_tool_conf_dict.
+ """
+ if not self.shed_config_filename:
+ self.guess_shed_config( app, default=default )
+ if self.shed_config_filename:
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
+ return shed_tool_conf_dict
+ return default
+ @property
def can_install( self ):
return self.status == self.installation_status.NEW
@property
https://bitbucket.org/galaxy/galaxy-central/changeset/0263d37b08e6/
changeset: 0263d37b08e6
user: dan
date: 2012-10-09 16:24:58
summary: Add a helper method to the toolbox to provide a shed_config_dict based upon the provided filename.
affected #: 1 file
diff -r 921a612db28a69a33dc1ea8bef5ed20ffa63bba2 -r 0263d37b08e610e572b5f08940c19a6f19f0931d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -150,6 +150,11 @@
tool_path=tool_path,
config_elems=config_elems )
self.shed_tool_confs.append( shed_tool_conf_dict )
+ def get_shed_config_dict_by_filename( self, filename, default=None ):
+ for shed_config_dict in self.shed_tool_confs:
+ if shed_config_dict[ 'config_filename' ] == filename:
+ return shed_config_dict
+ return default
def __add_tool_to_tool_panel( self, tool_id, panel_component, section=False ):
# See if a version of this tool is already loaded into the tool panel. The value of panel_component
# will be a ToolSection (if the value of section=True) or self.tool_panel (if section=False).
https://bitbucket.org/galaxy/galaxy-central/changeset/97eb5bde48da/
changeset: 97eb5bde48da
user: dan
date: 2012-10-09 16:24:59
summary: Enhance Galaxy's handling of Tool Shed Repositories to work with hierarchical relative paths.
affected #: 3 files
diff -r 0263d37b08e610e572b5f08940c19a6f19f0931d -r 97eb5bde48da4575e8de4fed64fe910a4df45dce lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -33,9 +33,12 @@
root = tree.getroot()
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = REPOSITORY_OWNER
+ index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config )
for repository_elem in root:
self.install_repository( repository_elem, install_dependencies )
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
+ if self.shed_config_dict.get( 'tool_path' ):
+ relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
found = False
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
@@ -122,6 +125,10 @@
def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ):
"""Generate the metadata for the installed tool shed repository, among other things."""
tool_panel_dict_for_display = odict()
+ if self.tool_path:
+ repo_install_dir = os.path.join( self.tool_path, relative_install_dir )
+ else:
+ repo_install_dir = relative_install_dir
for tool_elem in repository_elem:
# The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
tool_config = tool_elem.get( 'file' )
@@ -135,6 +142,7 @@
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = self.shed_config_dict,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -150,7 +158,7 @@
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files )
+ copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
@@ -163,14 +171,14 @@
repository_tools_tups,
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied )
+ copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Install tool dependencies.
update_tool_shed_repository_status( self.app,
tool_shed_repository,
self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -195,10 +203,10 @@
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, relative_install_dir, override=False )
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
@@ -224,13 +232,15 @@
description = repository_elem.get( 'description' )
installed_changeset_revision = repository_elem.get( 'changeset_revision' )
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
- clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
+ relative_clone_dir = os.path.join( self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
+ clone_dir = os.path.join( self.tool_path, relative_clone_dir )
if self.__isinstalled( clone_dir ):
print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
else:
tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
- relative_install_dir = os.path.join( clone_dir, name )
+ relative_install_dir = os.path.join( relative_clone_dir, name )
+ install_dir = os.path.join( clone_dir, name )
ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
tool_shed_repository = create_or_update_tool_shed_repository( app=self.app,
@@ -245,7 +255,7 @@
owner=self.repository_owner,
dist_to_shed=True )
update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
diff -r 0263d37b08e610e572b5f08940c19a6f19f0931d -r 97eb5bde48da4575e8de4fed64fe910a4df45dce lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -387,6 +387,8 @@
else:
tmp_url = repository_clone_url
return tmp_url
+def tool_shed_from_repository_clone_url( repository_clone_url ):
+ return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def clean_tool_shed_url( tool_shed_url ):
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
@@ -421,7 +423,7 @@
# Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
if not os.path.exists( os.path.join( dest_path, copied_file ) ):
shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
-def copy_sample_files( app, sample_files, sample_files_copied=None, dest_path=None ):
+def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
are contained in sample_files_copied. The default value for dest_path is ~/tool-data.
@@ -429,6 +431,8 @@
sample_files_copied = util.listify( sample_files_copied )
for filename in sample_files:
if filename not in sample_files_copied:
+ if tool_path:
+ filename=os.path.join( tool_path, filename )
copy_sample_file( app, filename, dest_path=dest_path )
def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
repo_info_dict = {}
@@ -504,6 +508,9 @@
def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
# Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
tool_dependency_objects = []
+ shed_config_dict = tool_shed_repository.get_shed_config_dict( app )
+ if shed_config_dict.get( 'tool_path' ):
+ relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
try:
@@ -601,7 +608,7 @@
else:
tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
return tool_dependencies_dict
-def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, relative_install_dir=None, repository_files_dir=None,
+def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
resetting_all_metadata_on_repository=False, updating_installed_repository=False ):
"""
Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than the repository tip,
@@ -616,7 +623,7 @@
else:
original_repository_metadata = None
readme_file_names = get_readme_file_names( repository.name )
- metadata_dict = {}
+ metadata_dict = { 'shed_config_filename': shed_config_dict.get( 'config_filename' ) }
invalid_file_tups = []
invalid_tool_configs = []
tool_dependencies_config = None
@@ -637,6 +644,8 @@
work_dir = tempfile.mkdtemp()
# All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
files_dir = relative_install_dir
+ if shed_config_dict.get( 'tool_path' ):
+ files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
app.config.tool_data_path = work_dir
app.config.tool_data_table_config_path = work_dir
# Handle proprietary datatypes, if any.
@@ -645,6 +654,7 @@
metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
# Get the relative path to all sample files included in the repository for storage in the repository's metadata.
sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
+ tool_path=shed_config_dict.get( 'tool_path' ),
relative_install_dir=relative_install_dir,
resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
if sample_file_metadata_paths:
@@ -674,6 +684,8 @@
relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
else:
relative_path_to_readme = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
metadata_dict[ 'readme' ] = relative_path_to_readme
# See if we have a tool config.
elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
@@ -711,6 +723,8 @@
relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
else:
relative_path_to_tool_config = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
else:
for tup in invalid_files_and_errors_tups:
@@ -844,13 +858,35 @@
else:
metadata_dict[ 'tools' ] = [ tool_dict ]
return metadata_dict
+def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
+ if tool_section is not None:
+ tool_elem = SubElement( tool_section, 'tool' )
+ else:
+ tool_elem = Element( 'tool' )
+ tool_elem.attrib[ 'file' ] = tool_file_path
+ tool_elem.attrib[ 'guid' ] = tool.guid
+ tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = SubElement( tool_elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ id_elem = SubElement( tool_elem, 'id' )
+ id_elem.text = tool.id
+ version_elem = SubElement( tool_elem, 'version' )
+ version_elem.text = tool.version
+ return tool_elem
+
def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner='' ):
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list = []
tool_elem = None
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ cleaned_repository_clone_url = clean_repository_clone_url( repository_clone_url )
if not owner:
- owner = get_repository_owner( tmp_url )
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ tool_shed = cleaned_repository_clone_url.split( 'repos' )[ 0 ].rstrip( '/' )
for guid, tool_section_dicts in tool_panel_dict.items():
for tool_section_dict in tool_section_dicts:
tool_section = None
@@ -874,23 +910,9 @@
if tup_guid == guid:
break
if inside_section:
- tool_elem = SubElement( tool_section, 'tool' )
+ tool_elem = generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section )
else:
- tool_elem = Element( 'tool' )
- tool_elem.attrib[ 'file' ] = tool_file_path
- tool_elem.attrib[ 'guid' ] = guid
- tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
- tool_shed_elem.text = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
- repository_name_elem = SubElement( tool_elem, 'repository_name' )
- repository_name_elem.text = repository_name
- repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
- repository_owner_elem.text = owner
- changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
- changeset_revision_elem.text = changeset_revision
- id_elem = SubElement( tool_elem, 'id' )
- id_elem.text = tool.id
- version_elem = SubElement( tool_elem, 'version' )
- version_elem.text = tool.version
+ tool_elem = generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, None )
if inside_section:
if section_in_elem_list:
elem_list[ index ] = tool_section
@@ -1253,18 +1275,21 @@
return get_repository_owner( tmp_url )
def get_repository_tools_tups( app, metadata_dict ):
repository_tools_tups = []
+ index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
if 'tools' in metadata_dict:
for tool_dict in metadata_dict[ 'tools' ]:
- relative_path = tool_dict.get( 'tool_config', None )
+ load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
+ if shed_conf_dict.get( 'tool_path' ):
+ load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
guid = tool_dict.get( 'guid', None )
if relative_path and guid:
- tool = app.toolbox.load_tool( os.path.abspath( relative_path ), guid=guid )
+ tool = app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid )
else:
tool = None
if tool:
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
-def get_sample_files_from_disk( repository_files_dir, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
+def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
if resetting_all_metadata_on_repository:
# Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
work_dir = repository_files_dir
@@ -1287,6 +1312,9 @@
else:
relative_path_to_sample_file = os.path.join( root, name )
sample_file_copy_paths.append( relative_path_to_sample_file )
+ if tool_path and relative_install_dir:
+ if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
+ relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
def get_shed_tool_conf_dict( app, shed_tool_conf ):
@@ -1327,46 +1355,16 @@
partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
relative_install_dir = None
- for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
- shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
- if repository.dist_to_shed:
- # The repository is owned by devteam and contains tools migrated from the Galaxy distribution to the tool shed, so
- # the reserved tool panel config is migrated_tools_conf.xml, to which app.config.migrated_tools_config refers.
- if shed_tool_conf == app.config.migrated_tools_config:
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if tool_path and relative_install_dir:
- return shed_tool_conf, tool_path, relative_install_dir
- elif repository.uninstalled:
- # Since the repository is uninstalled we don't know what tool panel config was originally used to
- # define the tools in the repository, so we'll just make sure not to use the reserved migrated_tools_conf.xml.
- if shed_tool_conf != app.config.migrated_tools_config:
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if tool_path and relative_install_dir:
- return shed_tool_conf, tool_path, relative_install_dir
- else:
- if repository.includes_tools:
- metadata = repository.metadata
- for tool_dict in metadata[ 'tools' ]:
- # Parse the tool panel config to get the entire set of config_elems. # We'll check config_elems until we
- # find an element that matches one of the tools in the repository's metadata.
- tool_panel_config = shed_tool_conf_dict[ 'config_filename' ]
- tree = util.parse_xml( tool_panel_config )
- root = tree.getroot()
- tool_path, relative_install_dir = get_tool_path_install_dir( partial_install_dir,
- shed_tool_conf_dict,
- tool_dict,
- root )
- if tool_path and relative_install_dir:
- return shed_tool_conf, tool_path, relative_install_dir
- else:
- # Nothing will be loaded into the tool panel, so look for the installed repository on disk.
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if tool_path and relative_install_dir and os.path.isdir( relative_install_dir ):
- return shed_tool_conf, tool_path, relative_install_dir
- return None, None, None
+ shed_config_dict = repository.get_shed_config_dict( app )
+ if not shed_config_dict:
+ #just pick a semi-random shed config
+ for shed_config_dict in app.toolbox.shed_tool_confs:
+ if ( repository.dist_to_shed and shed_config_dict['config_filename'] == app.config.migrated_tools_config ) or ( not repository.dist_to_shed and shed_config_dict['config_filename'] != app.config.migrated_tools_config ):
+ break
+ shed_tool_conf = shed_config_dict[ 'config_filename' ]
+ tool_path = shed_config_dict[ 'tool_path' ]
+ relative_install_dir = partial_install_dir
+ return shed_tool_conf, tool_path, relative_install_dir
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
for elem in config_elems:
if elem.tag == 'tool':
@@ -1774,6 +1772,42 @@
config_elems.remove( config_elem )
# Persist the altered in-memory version of the tool config.
config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
+def update_in_shed_tool_config( app, repository ):
+ # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ # of config_elems instead of using the in-memory list.
+ shed_conf_dict = repository.get_shed_config_dict( app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+
+ #hack for 'trans.app' used in lots of places. These places should just directly use app
+ trans = util.bunch.Bunch()
+ trans.app = app
+
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
+ tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
+ config_elems = []
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
diff -r 0263d37b08e610e572b5f08940c19a6f19f0931d -r 97eb5bde48da4575e8de4fed64fe910a4df45dce lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -655,15 +655,17 @@
update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
- relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ relative_clone_dir = self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ clone_dir = os.path.join( tool_path, relative_clone_dir )
+ relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+ install_dir = os.path.join( tool_path, relative_install_dir )
+ cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
if current_ctx_rev != ctx_rev:
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( relative_install_dir ) )
+ repo = hg.repository( get_configured_ui(), path=os.path.abspath( install_dir ) )
pull_repository( repo, repository_clone_url, current_changeset_revision )
update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
@@ -704,7 +706,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -736,6 +738,7 @@
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf ),
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -749,7 +752,7 @@
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files )
+ copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
@@ -762,7 +765,7 @@
repository_tools_tups,
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( trans.app, sample_files, sample_files_copied=sample_files_copied )
+ copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
add_to_tool_panel( app=trans.app,
repository_name=tool_shed_repository.name,
repository_clone_url=repository_clone_url,
@@ -805,6 +808,8 @@
repository_id = kwd[ 'id' ]
operation = kwd.get( 'operation', None )
repository = get_repository( trans, repository_id )
+ if not repository:
+ return trans.show_error_message( 'Invalid repository specified.' )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='monitor_repository_installation',
@@ -822,7 +827,7 @@
description = util.restore_text( params.get( 'description', repository.description ) )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
else:
repo_files_dir = None
if repository.in_error_state:
@@ -1447,20 +1452,27 @@
repository = get_repository( trans, id )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
repository_clone_url = self.__generate_clone_url( trans, repository )
- relative_install_dir = repository.repo_path( trans.app )
+ tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
+ original_metadata_dict = repository.metadata
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
updating_installed_repository=False )
repository.metadata = metadata_dict
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
- status = 'done'
+ if metadata_dict != original_metadata_dict:
+ update_in_shed_tool_config( trans.app, repository )#def update_in_shed_tool_config( trans, shed_tool_conf_dict, elem_list ):
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
+ status = 'done'
+ else:
+ message = 'Metadata did not need to be reset on repository <b>%s</b>.' % repository.name
+ status = 'done'
else:
message = 'Error locating installation directory for repository <b>%s</b>.' % repository.name
status = 'error'
@@ -1618,7 +1630,10 @@
else:
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
+ if tool_path:
+ repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
+ else:
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
repo = hg.repository( get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
pull_repository( repo, repository_clone_url, latest_ctx_rev )
@@ -1628,6 +1643,7 @@
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -1667,8 +1683,13 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
metadata = repository.metadata
+ shed_config_dict = repository.get_shed_config_dict( trans.app )
+ tool_path = shed_config_dict.get( 'tool_path', None )
if metadata and 'readme' in metadata:
- f = open( metadata[ 'readme' ], 'r' )
+ readme_filename = metadata[ 'readme' ]
+ if tool_path:
+ readme_filename = os.path.join( tool_path, readme_filename )
+ f = open( readme_filename, 'r' )
raw_text = f.read()
f.close()
readme_text = translate_string( raw_text, to_html=True )
@@ -1691,6 +1712,7 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, repository_id )
repository_metadata = repository.metadata
+ shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
tool_lineage = []
tool = None
@@ -1698,7 +1720,10 @@
for tool_metadata_dict in repository_metadata[ 'tools' ]:
if tool_metadata_dict[ 'id' ] == tool_id:
tool_metadata = tool_metadata_dict
- tool = trans.app.toolbox.load_tool( os.path.abspath( tool_metadata[ 'tool_config' ] ), guid=tool_metadata[ 'guid' ] )
+ tool_config = tool_metadata[ 'tool_config' ]
+ if shed_config_dict and shed_config_dict.get( 'tool_path' ):
+ tool_config = os.path.join( shed_config_dict.get( 'tool_path' ), tool_config )
+ tool = trans.app.toolbox.load_tool( os.path.abspath( tool_config ), guid=tool_metadata[ 'guid' ] )
if tool:
tool_lineage = self.get_versions_of_tool( trans.app, tool.id )
break
https://bitbucket.org/galaxy/galaxy-central/changeset/429e895ee1b3/
changeset: 429e895ee1b3
user: dan
date: 2012-10-09 16:24:59
summary: Enhance manage_repository.mako to handle null repository metadata.
affected #: 1 file
diff -r 97eb5bde48da4575e8de4fed64fe910a4df45dce -r 429e895ee1b3bfb2fea1632d34a01d54faeac866 templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -95,7 +95,7 @@
<div class="toolFormTitle">${repository.name}</div><div class="toolFormBody"><%
- metadata = repository.metadata
+ metadata = repository.metadata or {}
missing_tool_dependencies = repository.missing_tool_dependencies
installed_tool_dependencies = repository.installed_tool_dependencies
%>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/40b252052087/
changeset: 40b252052087
user: chapmanb
date: 2012-10-04 21:31:16
summary: Correctly set history and handle output datasets for error cases in tool API. Allow specification of dataset name during uploads, exposing through API
affected #: 3 files
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 40b252052087bc06fd1adc47f9633a496a7dd07c lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -209,7 +209,7 @@
dataset_name = get_file_name( data_file['filename'] )
if not dataset_info:
dataset_info = 'uploaded file'
- return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
#return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
except:
# The uploaded file should've been persisted by the upload tool action
@@ -227,14 +227,13 @@
if line:
if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
continue # non-url line, ignore
- precreated_name = line
dataset_name = override_name
if not dataset_name:
dataset_name = line
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- yield Bunch( type='url', path=line, name=precreated_name )
+ yield Bunch( type='url', path=line, name=dataset_name )
#yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 40b252052087bc06fd1adc47f9633a496a7dd07c lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -55,6 +55,17 @@
tool = trans.app.toolbox.get_tool( tool_id )
if not tool:
return { "message": { "type": "error", "text" : messages.NO_TOOL } }
+
+ # Set running history from payload parameters.
+ # History not set correctly as part of this API call for
+ # dataset upload.
+ history_id = payload.get("history_id", None)
+ if history_id:
+ target_history = trans.sa_session.query(trans.app.model.History).get(
+ trans.security.decode_id(history_id))
+ trans.galaxy_session.current_history = target_history
+ else:
+ target_history = None
# Set up inputs.
inputs = payload[ 'inputs' ]
@@ -62,10 +73,10 @@
inputs['runtool_btn'] = 'Execute'
# TODO: encode data ids and decode ids.
params = util.Params( inputs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__ )
-
+ template, vars = tool.handle_input( trans, params.__dict__, history=target_history)
+
# TODO: check for errors and ensure that output dataset(s) are available.
- output_datasets = vars[ 'out_data' ].values()
+ output_datasets = vars.get('out_data', {}).values()
rval = {
"outputs": []
}
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 40b252052087bc06fd1adc47f9633a496a7dd07c tools/data_source/upload.xml
--- a/tools/data_source/upload.xml
+++ b/tools/data_source/upload.xml
@@ -41,6 +41,7 @@
<param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."><option value="Yes">Yes</option></param>
+ <param name="NAME" type="hidden" help="Name for dataset in upload"></param></upload_dataset><param name="dbkey" type="genomebuild" label="Genome" /><conditional name="files_metadata" title="Specify metadata" value_from="self:app.datatypes_registry.get_upload_metadata_params" value_ref="file_type" value_ref_in_group="False" />
https://bitbucket.org/galaxy/galaxy-central/changeset/413cf15e4065/
changeset: 413cf15e4065
user: jgoecks
date: 2012-10-09 05:44:02
summary: Merged in chapmanb/galaxy-central-apiupload (pull request #74)
affected #: 3 files
diff -r 8269f76312af60e356707bc660b6e9903e402106 -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -209,7 +209,7 @@
dataset_name = get_file_name( data_file['filename'] )
if not dataset_info:
dataset_info = 'uploaded file'
- return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
#return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
except:
# The uploaded file should've been persisted by the upload tool action
@@ -227,14 +227,13 @@
if line:
if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
continue # non-url line, ignore
- precreated_name = line
dataset_name = override_name
if not dataset_name:
dataset_name = line
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- yield Bunch( type='url', path=line, name=precreated_name )
+ yield Bunch( type='url', path=line, name=dataset_name )
#yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
diff -r 8269f76312af60e356707bc660b6e9903e402106 -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -55,6 +55,17 @@
tool = trans.app.toolbox.get_tool( tool_id )
if not tool:
return { "message": { "type": "error", "text" : messages.NO_TOOL } }
+
+ # Set running history from payload parameters.
+ # History not set correctly as part of this API call for
+ # dataset upload.
+ history_id = payload.get("history_id", None)
+ if history_id:
+ target_history = trans.sa_session.query(trans.app.model.History).get(
+ trans.security.decode_id(history_id))
+ trans.galaxy_session.current_history = target_history
+ else:
+ target_history = None
# Set up inputs.
inputs = payload[ 'inputs' ]
@@ -62,10 +73,10 @@
inputs['runtool_btn'] = 'Execute'
# TODO: encode data ids and decode ids.
params = util.Params( inputs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__ )
-
+ template, vars = tool.handle_input( trans, params.__dict__, history=target_history)
+
# TODO: check for errors and ensure that output dataset(s) are available.
- output_datasets = vars[ 'out_data' ].values()
+ output_datasets = vars.get('out_data', {}).values()
rval = {
"outputs": []
}
diff -r 8269f76312af60e356707bc660b6e9903e402106 -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml
+++ b/tools/data_source/upload.xml
@@ -41,6 +41,7 @@
<param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."><option value="Yes">Yes</option></param>
+ <param name="NAME" type="hidden" help="Name for dataset in upload"></param></upload_dataset><param name="dbkey" type="genomebuild" label="Genome" /><conditional name="files_metadata" title="Specify metadata" value_from="self:app.datatypes_registry.get_upload_metadata_params" value_ref="file_type" value_ref_in_group="False" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0