galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
December 2012
- 1 participant
- 142 discussions
commit/galaxy-central: greg: Fix for retrieving tool dependencies and missing tool dependencies for display in Galaxy.
by Bitbucket 19 Dec '12
by Bitbucket 19 Dec '12
19 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ddff35a4a9ab/
changeset: ddff35a4a9ab
user: greg
date: 2012-12-20 00:59:07
summary: Fix for retrieving tool dependencies and missing tool dependencies for display in Galaxy.
affected #: 1 file
diff -r 10b6b20bfa0731813d17a0d246f5f57c7f5098da -r ddff35a4a9ab3cf22dad10c3b413f919d8d68022 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -490,15 +490,22 @@
if all_tool_dependencies:
tool_dependencies = {}
missing_tool_dependencies = {}
- for td_key, td_info_dict in all_tool_dependencies.items():
- name = td_info_dict[ 'name' ]
- version = td_info_dict[ 'version' ]
- type = td_info_dict[ 'type' ]
- tool_dependency = get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type )
- if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED:
- tool_dependencies[ td_key ] = td_info_dict
+ for td_key, val in all_tool_dependencies.items():
+ if td_key in [ 'set_environment' ]:
+ for td_info_dict in val:
+ name = td_info_dict[ 'name' ]
+ version = None
+ type = td_info_dict[ 'type' ]
+ tool_dependency = get_tool_dependency_by_name_type_repository( trans, repository, name, type )
else:
- missing_tool_dependencies[ td_key ] = td_info_dict
+ name = val[ 'name' ]
+ version = val[ 'version' ]
+ type = val[ 'type' ]
+ tool_dependency = get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type )
+ if tool_dependency and tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED:
+ tool_dependencies[ td_key ] = val
+ else:
+ missing_tool_dependencies[ td_key ] = val
else:
tool_dependencies = None
missing_tool_dependencies = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: history_panel: do not re-render panel when an hda is deleted
by Bitbucket 19 Dec '12
by Bitbucket 19 Dec '12
19 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/10b6b20bfa07/
changeset: 10b6b20bfa07
user: carlfeberhard
date: 2012-12-19 23:08:21
summary: history_panel: do not re-render panel when an hda is deleted
affected #: 4 files
diff -r cfab4734097b388776035fdd2741ccf6a370b95c -r 10b6b20bfa0731813d17a0d246f5f57c7f5098da static/scripts/mvc/dataset/hda-base.js
--- a/static/scripts/mvc/dataset/hda-base.js
+++ b/static/scripts/mvc/dataset/hda-base.js
@@ -546,9 +546,12 @@
}
},
-
- remove : function(){
-
+ remove : function( callback ){
+ var hdaView = this;
+ this.$el.fadeOut( 'fast', function(){
+ hdaView.$el.remove();
+ if( callback ){ callback(); }
+ });
},
// ......................................................................... MISC
diff -r cfab4734097b388776035fdd2741ccf6a370b95c -r 10b6b20bfa0731813d17a0d246f5f57c7f5098da static/scripts/mvc/history/history-panel.js
--- a/static/scripts/mvc/history/history-panel.js
+++ b/static/scripts/mvc/history/history-panel.js
@@ -166,20 +166,18 @@
// when a hda model is (un)deleted or (un)hidden, re-render entirely
//TODO??: purged
//TODO??: could be more selective here
- this.model.hdas.bind( 'change:deleted change:hidden', this.render, this );
+ this.model.hdas.bind( 'change:deleted', this.handleHdaDeletionChange, this );
+ this.model.hdas.bind( 'change:hidden', this.render, this );
// if an a hidden hda is created (gen. by a workflow), moves thru the updater to the ready state,
// then: remove it from the collection if the panel is set to NOT show hidden datasets
- this.model.hdas.bind( 'change:state',
- function( hda, newState, changedList ){
- //TODO: magic string here - somehow use HDA.states
- if( ( hda.inReadyState() )
- && ( !hda.get( 'visible' ) )
- && ( !this.storage.get( 'show_hidden' ) ) ){
- this.removeHda( hda );
- }
- },
- this );
+ this.model.hdas.bind( 'change:state', function( hda, newState, changedList ){
+ if( ( hda.inReadyState() )
+ && ( !hda.get( 'visible' ) )
+ && ( !this.storage.get( 'show_hidden' ) ) ){
+ this.removeHdaView( hda.get( 'id' ) );
+ }
+ }, this );
//this.bind( 'all', function(){
// this.log( arguments );
@@ -253,16 +251,27 @@
this.render();
},
- /** Remove a view from the panel and the assoc. model from the collection
- * @param {HistoryDataAssociation} the hda to remove
+ /** If this hda is deleted and we're not showing deleted hdas, remove the view
+ * @param {HistoryDataAssociation} the hda to check
*/
- removeHda : function( hdaModel, callback ){
- var hdaView = this.hdaViews[ hdaModel.get( 'id' ) ];
- hdaView.$el.fadeOut( 'fast', function(){
- hdaView.$el.remove();
- if( callback ){ callback(); }
- });
- this.model.hdas.remove( hdaModel );
+ handleHdaDeletionChange : function( hda ){
+ if( hda.get( 'deleted' ) && !this.storage.get( 'show_deleted' ) ){
+ this.removeHdaView( hda.get( 'id' ) );
+ } // otherwise, the hdaView rendering should handle it
+ },
+
+ /** Remove a view from the panel and if the panel is now empty, re-render
+ * @param {Int} the id of the hdaView to remove
+ */
+ removeHdaView : function( id, callback ){
+ var hdaView = this.hdaViews[ id ];
+ if( !hdaView ){ return; }
+
+ hdaView.remove( callback );
+ delete this.hdaViews[ id ];
+ if( _.isEmpty( this.hdaViews ) ){
+ this.render();
+ }
},
// ......................................................................... RENDERING
diff -r cfab4734097b388776035fdd2741ccf6a370b95c -r 10b6b20bfa0731813d17a0d246f5f57c7f5098da static/scripts/packed/mvc/dataset/hda-base.js
--- a/static/scripts/packed/mvc/dataset/hda-base.js
+++ b/static/scripts/packed/mvc/dataset/hda-base.js
@@ -1,1 +1,1 @@
-var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urlTemplates=a.urlTemplates;this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);this.urls=this._renderUrls(this.urlTemplates,this.model.toJSON());a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this._setUpBehaviors(a);this.body=$(this._render_body());a.append(this.body);this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{try{c[f]=_.template(e,a)}catch(g){throw (b+"._renderUrls error: "+g+"\n rendering:"+e+"\n with "+JSON.stringify(a))}}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},_setUpBehaviors:function(a){a=a||this.$el;make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"})},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: 
hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((!this.model.inReadyState())||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){if(!this.model.get("peek")){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(this.model.get("peek")))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: none");if(this.expanded){this._render_body_html(a);a.show()}return a},_render_body_html:function(a){a.html("");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NEW:break;case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.PAUSED:this._render_body_paused(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+this.model.get("state")+'".</div>'))}a.append('<div style="clear: both"></div>');this._setUpBehaviors(a)},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have 
permission to view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_paused:function(a){a.append($("<div>"+_l("Job is paused. Use the history menu to unpause")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred running this job")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(this.model.toJSON())));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this;this.expanded=(a===undefined)?(!this.body.is(":visible")):(a);if(this.expanded){b._render_body_html(b.body);this.body.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{this.body.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},remove:function(){},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetadata"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
+var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urlTemplates=a.urlTemplates;this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);this.urls=this._renderUrls(this.urlTemplates,this.model.toJSON());a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this._setUpBehaviors(a);this.body=$(this._render_body());a.append(this.body);this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{try{c[f]=_.template(e,a)}catch(g){throw (b+"._renderUrls error: "+g+"\n rendering:"+e+"\n with "+JSON.stringify(a))}}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},_setUpBehaviors:function(a){a=a||this.$el;make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"})},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: 
hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((!this.model.inReadyState())||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){if(!this.model.get("peek")){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(this.model.get("peek")))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: none");if(this.expanded){this._render_body_html(a);a.show()}return a},_render_body_html:function(a){a.html("");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NEW:break;case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.PAUSED:this._render_body_paused(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+this.model.get("state")+'".</div>'))}a.append('<div style="clear: both"></div>');this._setUpBehaviors(a)},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have 
permission to view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_paused:function(a){a.append($("<div>"+_l("Job is paused. Use the history menu to unpause")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred running this job")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(this.model.toJSON())));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this;this.expanded=(a===undefined)?(!this.body.is(":visible")):(a);if(this.expanded){b._render_body_html(b.body);this.body.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{this.body.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},remove:function(b){var a=this;this.$el.fadeOut("fast",function(){a.$el.remove();if(b){b()}})},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetadata"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
diff -r cfab4734097b388776035fdd2741ccf6a370b95c -r 10b6b20bfa0731813d17a0d246f5f57c7f5098da static/scripts/packed/mvc/history/history-panel.js
--- a/static/scripts/packed/mvc/history/history-panel.js
+++ b/static/scripts/packed/mvc/history/history-panel.js
@@ -1,1 +1,1 @@
-var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-tag":"loadAndDisplayTags"},initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.model.hdas.bind("change:deleted change:hidden",this.render,this);this.model.hdas.bind("change:state",function(c,d,b){if((c.inReadyState())&&(!c.get("visible"))&&(!this.storage.get("show_hidden"))){this.removeHda(c)}},this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log(this+" (prev) storage:",JSON.stringify(this.storage.get(),null,2));if(b){this.storage.set("exandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log(this+" (init'd) storage:",this.storage.get())},add:function(a){this.render()},addAll:function(){this.render()},removeHda:function(a,c){var b=this.hdaViews[a.get("id")];b.$el.fadeOut("fast",function(){b.$el.remove();if(c){c()}});this.model.hdas.remove(a)},render:function(){var 
b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates,logger:a.logger});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return 
false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render();return this.storage.get("show_deleted")},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render();return this.storage.get("show_hidden")},collapseAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
+var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-tag":"loadAndDisplayTags"},initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.model.hdas.bind("change:deleted",this.handleHdaDeletionChange,this);this.model.hdas.bind("change:hidden",this.render,this);this.model.hdas.bind("change:state",function(c,d,b){if((c.inReadyState())&&(!c.get("visible"))&&(!this.storage.get("show_hidden"))){this.removeHdaView(c.get("id"))}},this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log(this+" (prev) storage:",JSON.stringify(this.storage.get(),null,2));if(b){this.storage.set("exandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log(this+" (init'd) storage:",this.storage.get())},add:function(a){this.render()},addAll:function(){this.render()},handleHdaDeletionChange:function(a){if(a.get("deleted")&&!this.storage.get("show_deleted")){this.removeHdaView(a.get("id"))}},removeHdaView:function(c,b){var a=this.hdaViews[c];if(!a){return}a.remove(b);delete 
this.hdaViews[c];if(_.isEmpty(this.hdaViews)){this.render()}},render:function(){var b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates,logger:a.logger});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return 
false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render();return this.storage.get("show_deleted")},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render();return this.storage.get("show_hidden")},collapseAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Fix for displaying an installed repository that includes one or more readme files.
by Bitbucket 19 Dec '12
by Bitbucket 19 Dec '12
19 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/cfab4734097b/
changeset: cfab4734097b
user: inithello
date: 2012-12-19 22:48:09
summary: Fix for displaying an installed repository that includes one or more readme files.
affected #: 3 files
diff -r 759d96f950b8825bcce157cdf5efbabaa0221b11 -r cfab4734097b388776035fdd2741ccf6a370b95c lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -57,13 +57,16 @@
requirements_dict[ 'install_dir' ] = install_dir
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
-def build_readme_files_dict( metadata ):
+def build_readme_files_dict( metadata, tool_path=None ):
"""Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received repository_metadata."""
readme_files_dict = {}
if metadata and 'readme_files' in metadata:
for relative_path_to_readme_file in metadata[ 'readme_files' ]:
readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
- full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
+ if tool_path:
+ full_path_to_readme_file = os.path.abspath( os.path.join( tool_path, relative_path_to_readme_file ) )
+ else:
+ full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
try:
f = open( full_path_to_readme_file, 'r' )
text = f.read()
@@ -103,7 +106,7 @@
containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
# Readme files container.
if readme_files_dict:
- folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
if repository_dependencies:
diff -r 759d96f950b8825bcce157cdf5efbabaa0221b11 -r cfab4734097b388776035fdd2741ccf6a370b95c lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -881,7 +881,7 @@
datatypes = metadata.get( 'datatypes', None )
invalid_tools = metadata.get( 'invalid_tools', None )
if repository.has_readme_files:
- readme_files_dict = suc.build_readme_files_dict( repository.metadata )
+ readme_files_dict = suc.build_readme_files_dict( repository.metadata, tool_path )
else:
readme_files_dict = None
repository_dependencies = metadata.get( 'repository_dependencies', None )
@@ -1515,17 +1515,19 @@
if repository_dependencies:
# We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
# shed repository metadata.
- root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
- repository.name,
- repository.owner,
- repository.installed_changeset_revision )
+ root_key = container_util.generate_repository_dependencies_key_for_repository( tool_shed_repository.tool_shed,
+ tool_shed_repository.name,
+ tool_shed_repository.owner,
+ tool_shed_repository.installed_changeset_revision )
rd_tups_for_display = []
rd_tups = repository_dependencies[ 'repository_dependencies' ]
repository_dependencies_dict_for_display[ 'root_key' ] = root_key
repository_dependencies_dict_for_display[ root_key ] = rd_tups
repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
all_tool_dependencies = metadata.get( 'tool_dependencies', None )
- tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans,
+ tool_shed_repository,
+ all_tool_dependencies )
valid_tools = metadata.get( 'tools', None )
workflows = metadata.get( 'workflows', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
diff -r 759d96f950b8825bcce157cdf5efbabaa0221b11 -r cfab4734097b388776035fdd2741ccf6a370b95c templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -388,7 +388,7 @@
<a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a></div>
%else:
- <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a>
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">${tool.name | h}</a>
%endif
%else:
${tool.name | h}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add support for installing and administering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
by Bitbucket 19 Dec '12
by Bitbucket 19 Dec '12
19 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/759d96f950b8/
changeset: 759d96f950b8
user: greg
date: 2012-12-19 21:57:09
summary: Add support for installing and administering simple repository dependencies for tool shed repositories installed into a Galaxy instance.
affected #: 27 files
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -23,6 +23,7 @@
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+ self.name = 'galaxy'
self.new_installation = False
# Read config file and check for errors
self.config = config.Configuration( **kwargs )
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3148,6 +3148,9 @@
def can_reinstall_or_activate( self ):
return self.deleted
@property
+ def has_repository_dependencies( self ):
+ return self.metadata and 'repository_dependencies' in self.metadata
+ @property
def includes_tools( self ):
return self.metadata and 'tools' in self.metadata
@property
@@ -3211,6 +3214,15 @@
uninstalled_tool_dependencies.append( tool_dependency )
return uninstalled_tool_dependencies
+class RepositoryRepositoryDependencyAssociation( object ):
+ def __init__( self, tool_shed_repository_id=None, repository_dependency_id=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+ self.repository_dependency_id = repository_dependency_id
+
+class RepositoryDependency( object ):
+ def __init__( self, tool_shed_repository_id=None ):
+ self.tool_shed_repository_id = tool_shed_repository_id
+
class ToolDependency( object ):
installation_status = Bunch( NEVER_INSTALLED='Never installed',
INSTALLING='Installing',
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -409,6 +409,19 @@
Column( "status", TrimmedString( 255 ) ),
Column( "error_message", TEXT ) )
+RepositoryRepositoryDependencyAssociation.table = Table( 'repository_repository_dependency_association', metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+ Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+RepositoryDependency.table = Table( "repository_dependency", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
ToolDependency.table = Table( "tool_dependency", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -1744,7 +1757,19 @@
tool_dependencies=relation( ToolDependency,
primaryjoin=( ToolShedRepository.table.c.id == ToolDependency.table.c.tool_shed_repository_id ),
order_by=ToolDependency.table.c.name,
- backref='tool_shed_repository' ) ) )
+ backref='tool_shed_repository' ),
+ repository_dependencies=relation( RepositoryRepositoryDependencyAssociation,
+ primaryjoin=( ToolShedRepository.table.c.id == RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id ) ) ) )
+
+assign_mapper( context, RepositoryRepositoryDependencyAssociation, RepositoryRepositoryDependencyAssociation.table,
+ properties=dict( repository=relation( ToolShedRepository,
+ primaryjoin=( RepositoryRepositoryDependencyAssociation.table.c.tool_shed_repository_id == ToolShedRepository.table.c.id ) ),
+ repository_dependency=relation( RepositoryDependency,
+ primaryjoin=( RepositoryRepositoryDependencyAssociation.table.c.repository_dependency_id == RepositoryDependency.table.c.id ) ) ) )
+
+assign_mapper( context, RepositoryDependency, RepositoryDependency.table,
+ properties=dict( repository=relation( ToolShedRepository,
+ primaryjoin=( RepositoryDependency.table.c.tool_shed_repository_id == ToolShedRepository.table.c.id ) ) ) )
assign_mapper( context, ToolDependency, ToolDependency.table )
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0109_add_repository_dependency_tables.py
@@ -0,0 +1,58 @@
+"""
+Migration script to add the repository_dependency and repository_repository_dependency_association tables.
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+import sys, logging
+from galaxy.model.custom_types import *
+from sqlalchemy.exc import *
+import datetime
+now = datetime.datetime.utcnow
+
+log = logging.getLogger( __name__ )
+log.setLevel( logging.DEBUG )
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+
+RepositoryDependency_table = Table( "repository_dependency", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) )
+
+RepositoryRepositoryDependencyAssociation_table = Table( "repository_repository_dependency_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ),
+ Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ try:
+ RepositoryDependency_table.create()
+ except Exception, e:
+ log.debug( "Creating repository_dependency table failed: %s" % str( e ) )
+ try:
+ RepositoryRepositoryDependencyAssociation_table.create()
+ except Exception, e:
+ log.debug( "Creating repository_repository_dependency_association table failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ try:
+ RepositoryRepositoryDependencyAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping repository_repository_dependency_association table failed: %s" % str( e ) )
+ try:
+ RepositoryDependency_table.drop()
+ except Exception, e:
+ log.debug( "Dropping repository_dependency table failed: %s" % str( e ) )
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -283,19 +283,19 @@
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
relative_install_dir = os.path.join( relative_clone_dir, name )
install_dir = os.path.join( clone_dir, name )
- ctx_rev = shed_util.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
+ ctx_rev = suc.get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
- tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=self.app,
- name=name,
- description=description,
- installed_changeset_revision=installed_changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=self.app.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=None,
- owner=self.repository_owner,
- dist_to_shed=True )
+ tool_shed_repository = suc.create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=installed_changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=self.app.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=self.repository_owner,
+ dist_to_shed=True )
shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -32,7 +32,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
-import galaxy.util.shed_util
+import galaxy.util.shed_util_common
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
@@ -890,11 +890,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return galaxy.util.shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return galaxy.util.shed_util_common.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,6 +1,9 @@
import os, tempfile, shutil, logging, urllib2
from galaxy.datatypes import checkers
+from galaxy.web import url_for
from galaxy import util
+from galaxy.util.json import from_json_string, to_json_string
+from galaxy.webapps.community.util import container_util
import shed_util_common as suc
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
@@ -171,6 +174,79 @@
# Attempt to ensure we're copying an appropriate file.
if is_data_index_sample_file( filename ):
suc.copy_sample_file( app, filename, dest_path=dest_path )
+def create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False ):
+ """
+ Discover all repository dependencies and make sure all tool_shed_repository and associated repository_dependency records exist as well as
+ the dependency relationships between installed repositories. This method is called when new repositories are being installed into a Galaxy
+ instance and when uninstalled repositories are being reinstalled.
+ """
+ message = ''
+ created_or_updated_tool_shed_repositories = []
+ # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
+ filtered_repo_info_dicts = []
+ # Discover all repository dependencies and retrieve information for installing them.
+ all_repo_info_dicts = get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
+ for repo_info_dict in all_repo_info_dicts:
+ for name, repo_info_tuple in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ clone_dir = os.path.join( tool_path, generate_tool_path( repository_clone_url, changeset_revision ) )
+ relative_install_dir = os.path.join( clone_dir, name )
+ # Make sure the repository was not already installed.
+ installed_tool_shed_repository, installed_changeset_revision = \
+ repository_was_previously_installed( trans, tool_shed_url, name, repo_info_tuple, clone_dir )
+ if installed_tool_shed_repository:
+ if reinstalling:
+ if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ can_update = True
+ name = installed_tool_shed_repository.name
+ description = installed_tool_shed_repository.description
+ installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
+ metadata_dict = installed_tool_shed_repository.metadata
+ dist_to_shed = installed_tool_shed_repository.dist_to_shed
+ else:
+ # There is a repository already installed which is a dependency of the repository being reinstalled.
+ can_update = False
+ else:
+ # An attempt is being made to install a tool shed repository into a Galaxy instance when the same repository was previously installed.
+ message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner )
+ if installed_changeset_revision != changeset_revision:
+ message += "was previously installed using changeset revision <b>%s</b>. " % installed_changeset_revision
+ else:
+ message += "was previously installed. "
+ if installed_tool_shed_repository.uninstalled:
+ message += "The repository has been uninstalled, however, so reinstall the original repository instead of installing it again. "
+ elif installed_tool_shed_repository.deleted:
+ message += "The repository has been deactivated, however, so activate the original repository instead of installing it again. "
+ if installed_changeset_revision != changeset_revision:
+ message += "You can get the latest updates for the repository using the <b>Get updates</b> option from the repository's "
+ message += "<b>Repository Actions</b> pop-up menu. "
+ if len( repo_info_dicts ) == 1:
+ return created_or_updated_tool_shed_repositories, all_repo_info_dicts, filtered_repo_info_dicts, message
+ else:
+ # A tool shed repository is being installed into a Galaxy instance for the first time. We may have the case where a repository
+ # is being reinstalled, because the repository being newly installed here may be a dependency of the repository being reinstalled.
+ can_update = True
+ installed_changeset_revision = changeset_revision
+ metadata_dict={}
+ dist_to_shed = False
+ if can_update:
+ log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
+ tool_shed_repository = suc.create_or_update_tool_shed_repository( app=trans.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=trans.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=changeset_revision,
+ owner=repository_owner,
+ dist_to_shed=False )
+ created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ return created_or_updated_tool_shed_repositories, all_repo_info_dicts, filtered_repo_info_dicts, message
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
repository_name=name,
@@ -179,62 +255,8 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
-def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
- status, current_changeset_revision=None, owner='', dist_to_shed=False ):
- # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
- # to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
- if current_changeset_revision is None:
- # The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository
- # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior
- # to it being uninstalled.
- current_changeset_revision = installed_changeset_revision
- sa_session = app.model.context.current
- tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
- if not owner:
- owner = get_repository_owner_from_clone_url( repository_clone_url )
- includes_datatypes = 'datatypes' in metadata_dict
- if status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
- deleted = True
- uninstalled = False
- elif status in [ app.model.ToolShedRepository.installation_status.UNINSTALLED ]:
- deleted = True
- uninstalled = True
- else:
- deleted = False
- uninstalled = False
- tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app,
- tool_shed,
- name,
- owner,
- installed_changeset_revision )
- if tool_shed_repository:
- tool_shed_repository.description = description
- tool_shed_repository.changeset_revision = current_changeset_revision
- tool_shed_repository.ctx_rev = ctx_rev
- tool_shed_repository.metadata = metadata_dict
- tool_shed_repository.includes_datatypes = includes_datatypes
- tool_shed_repository.deleted = deleted
- tool_shed_repository.uninstalled = uninstalled
- tool_shed_repository.status = status
- else:
- tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
- name=name,
- description=description,
- owner=owner,
- installed_changeset_revision=installed_changeset_revision,
- changeset_revision=current_changeset_revision,
- ctx_rev=ctx_rev,
- metadata=metadata_dict,
- includes_datatypes=includes_datatypes,
- dist_to_shed=dist_to_shed,
- deleted=deleted,
- uninstalled=uninstalled,
- status=status )
- sa_session.add( tool_shed_repository )
- sa_session.flush()
- return tool_shed_repository
def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
- # Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
+ """Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository."""
tool_dependency_objects = []
shed_config_dict = tool_shed_repository.get_shed_config_dict( app )
if shed_config_dict.get( 'tool_path' ):
@@ -355,6 +377,20 @@
tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections )
tool_panel_dict[ guid ] = tool_section_dicts
return tool_panel_dict
+def generate_tool_path( repository_clone_url, changeset_revision ):
+ """
+ Generate a tool path that guarantees repositories with the same name will always be installed
+ in different directories. The tool path will be of the form:
+ <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision>
+ http://test@bx.psu.edu:9009/repos/test/filter
+ """
+ tmp_url = suc.clean_repository_clone_url( repository_clone_url )
+ # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column
+ items = tmp_url.split( 'repos' )
+ tool_shed_url = items[ 0 ]
+ repo_path = items[ 1 ]
+ tool_shed_url = suc.clean_tool_shed_url( tool_shed_url )
+ return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
def generate_tool_section_dicts( tool_config=None, tool_sections=None ):
tool_section_dicts = []
if tool_config is None:
@@ -438,12 +474,6 @@
if converter_path and display_path:
break
return converter_path, display_path
-def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
- url = suc.url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
- response = urllib2.urlopen( url )
- ctx_rev = response.read()
- response.close()
- return ctx_rev
def get_headers( fname, sep, count=60, is_multi_byte=False ):
"""Returns a list with the first 'count' lines split by 'sep'."""
headers = []
@@ -456,6 +486,23 @@
if idx == count:
break
return headers
+def get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies ):
+ if all_tool_dependencies:
+ tool_dependencies = {}
+ missing_tool_dependencies = {}
+ for td_key, td_info_dict in all_tool_dependencies.items():
+ name = td_info_dict[ 'name' ]
+ version = td_info_dict[ 'version' ]
+ type = td_info_dict[ 'type' ]
+ tool_dependency = get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type )
+ if tool_dependency.status == trans.model.ToolDependency.installation_status.INSTALLED:
+ tool_dependencies[ td_key ] = td_info_dict
+ else:
+ missing_tool_dependencies[ td_key ] = td_info_dict
+ else:
+ tool_dependencies = None
+ missing_tool_dependencies = None
+ return tool_dependencies, missing_tool_dependencies
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -466,6 +513,54 @@
tmp_url = suc.clean_repository_clone_url( repository_clone_url )
tool_shed = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
return get_repository_owner( tmp_url )
+def get_required_repo_info_dicts( tool_shed_url, repo_info_dicts ):
+ """
+ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
+ repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
+ this methid is required to retrieve all repository dependencies.
+ """
+ if repo_info_dicts:
+ all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
+ # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
+ required_repository_tups = []
+ for repo_info_dict in repo_info_dicts:
+ for repository_name, repo_info_tup in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tup )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+ components_list = [ toolshed, name, owner, changeset_revision ]
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ for components_list in val:
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ if required_repository_tups:
+ # The value of required_repository_tups is a list of tuples, so we need to encode it.
+ encoded_required_repository_tups = []
+ for required_repository_tup in required_repository_tups:
+ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+ encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+ encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+ url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ required_repo_info_dict = from_json_string( text )
+ required_repo_info_dicts = []
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ if required_repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ return all_repo_info_dicts
def get_tool_index_sample_files( sample_files ):
"""Try to return the list of all appropriate tool data sample files included in the repository."""
tool_index_sample_files = []
@@ -477,6 +572,19 @@
def get_tool_dependency( trans, id ):
"""Get a tool_dependency from the database via id"""
return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
+def get_tool_dependency_by_name_type_repository( trans, repository, name, type ):
+ return trans.sa_session.query( trans.model.ToolDependency ) \
+ .filter( and_( trans.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
+ trans.model.ToolDependency.table.c.name == name,
+ trans.model.ToolDependency.table.c.type == type ) ) \
+ .first()
+def get_tool_dependency_by_name_version_type_repository( trans, repository, name, version, type ):
+ return trans.sa_session.query( trans.model.ToolDependency ) \
+ .filter( and_( trans.model.ToolDependency.table.c.tool_shed_repository_id == repository.id,
+ trans.model.ToolDependency.table.c.name == name,
+ trans.model.ToolDependency.table.c.version == version,
+ trans.model.ToolDependency.table.c.type == type ) ) \
+ .first()
def get_tool_dependency_ids( as_string=False, **kwd ):
tool_dependency_id = kwd.get( 'tool_dependency_id', None )
tool_dependency_ids = util.listify( kwd.get( 'tool_dependency_ids', None ) )
@@ -502,30 +610,6 @@
relative_install_dir = os.path.join( tool_path, partial_install_dir )
return tool_path, relative_install_dir
return None, None
-def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
- # This method is used only in Galaxy, not the tool shed.
- sa_session = app.model.context.current
- if tool_shed.find( '//' ) > 0:
- tool_shed = tool_shed.split( '//' )[1]
- tool_shed = tool_shed.rstrip( '/' )
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
- .first()
-def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ):
- # This method is used only in Galaxy, not the tool shed.
- sa_session = app.model.context.current
- if tool_shed.find( '//' ) > 0:
- tool_shed = tool_shed.split( '//' )[1]
- tool_shed = tool_shed.rstrip( '/' )
- return sa_session.query( app.model.ToolShedRepository ) \
- .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
- app.model.ToolShedRepository.table.c.name == name,
- app.model.ToolShedRepository.table.c.owner == owner,
- app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
- .first()
def get_tool_version( app, tool_id ):
sa_session = app.model.context.current
return sa_session.query( app.model.ToolVersion ) \
@@ -908,6 +992,35 @@
trans.sa_session.add( tool_dependency )
trans.sa_session.flush()
return removed, error_message
+def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple, clone_dir ):
+ """
+ Handle the case where the repository was previously installed using an older changeset_revsion, but later the repository was updated
+ in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one
+ that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled.
+ """
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
+ # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
+ # revision to see if it was previously installed using one of them.
+ url = suc.url_join( tool_shed_url,
+ 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
+ ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ #clone_path, clone_directory = os.path.split( clone_dir )
+ changeset_revisions = util.listify( text )
+ for previous_changeset_revision in changeset_revisions:
+ tool_shed_repository = suc.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
+ tool_shed,
+ repository_name,
+ repository_owner,
+ previous_changeset_revision )
+ if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
+ return tool_shed_repository, previous_changeset_revision
+ return None, None
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
sa_session = app.model.context.current
tool_shed_repository.status = status
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -57,26 +57,24 @@
requirements_dict[ 'install_dir' ] = install_dir
tool_dependencies[ dependency_key ] = requirements_dict
return tool_dependencies
-def build_readme_files_dict( repository_metadata ):
+def build_readme_files_dict( metadata ):
"""Return a dictionary of valid readme file name <-> readme file content pairs for all readme files contained in the received repository_metadata."""
readme_files_dict = {}
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata and 'readme_files' in metadata:
- for relative_path_to_readme_file in metadata[ 'readme_files' ]:
- readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
- full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
- try:
- f = open( full_path_to_readme_file, 'r' )
- text = f.read()
- f.close()
- readme_files_dict[ readme_file_name ] = translate_string( text, to_html=False )
- except Exception, e:
- log.debug( "Error reading README file '%s' defined in metadata for repository '%s', revision '%s': %s" % \
- ( str( relative_path_to_readme_file ), str( repository_name ), str( changeset_revision ), str( e ) ) )
+ if metadata and 'readme_files' in metadata:
+ for relative_path_to_readme_file in metadata[ 'readme_files' ]:
+ readme_file_name = os.path.split( relative_path_to_readme_file )[ 1 ]
+ full_path_to_readme_file = os.path.abspath( relative_path_to_readme_file )
+ try:
+ f = open( full_path_to_readme_file, 'r' )
+ text = f.read()
+ f.close()
+ readme_files_dict[ readme_file_name ] = translate_string( text, to_html=False )
+ except Exception, e:
+ log.debug( "Error reading README file '%s' defined in metadata: %s" % ( str( relative_path_to_readme_file ), str( e ) ) )
return readme_files_dict
-def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision,
- readme_files_dict, repository_dependencies, tool_dependencies ):
+def build_repository_containers_for_galaxy( trans, toolshed_base_url, repository_name, repository_owner, changeset_revision, repository, datatypes,
+ invalid_tools, missing_tool_dependencies, readme_files_dict, repository_dependencies, tool_dependencies,
+ valid_tools, workflows ):
"""Return a dictionary of containers for the received repository's dependencies and readme files for display during installation to Galaxy."""
containers_dict = dict( readme_files=None, repository_dependencies=None, tool_dependencies=None )
if readme_files_dict or repository_dependencies or tool_dependencies:
@@ -91,9 +89,23 @@
tool_dependencies )
try:
folder_id = 0
+ # Datatypes container.
+ if datatypes:
+ folder_id, datatypes_root_folder = container_util.build_datatypes_folder( folder_id, datatypes )
+ containers_dict[ 'datatypes' ] = datatypes_root_folder
+ # Invalid tools container.
+ if invalid_tools:
+ folder_id, invalid_tools_root_folder = container_util.build_invalid_tools_folder( folder_id,
+ invalid_tools,
+ changeset_revision,
+ repository=repository,
+ label='Invalid tools' )
+ containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
+ # Readme files container.
if readme_files_dict:
folder_id, readme_files_root_folder = build_readme_files_folder( folder_id, readme_files_dict )
containers_dict[ 'readme_files' ] = readme_files_root_folder
+ # Repository dependencies container.
if repository_dependencies:
folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
repository_name=repository_name,
@@ -102,9 +114,28 @@
folder_id=folder_id,
repository_dependencies=repository_dependencies )
containers_dict[ 'repository_dependencies' ] = repository_dependencies_root_folder
+ # Tool dependencies container.
if tool_dependencies:
folder_id, tool_dependencies_root_folder = container_util.build_tool_dependencies_folder( folder_id, tool_dependencies, for_galaxy=True )
containers_dict[ 'tool_dependencies' ] = tool_dependencies_root_folder
+ # Missing tool dependencies container.
+ if missing_tool_dependencies:
+ folder_id, missing_tool_dependencies_root_folder = \
+ container_util.build_tool_dependencies_folder( folder_id, missing_tool_dependencies, label='Missing tool dependencies', for_galaxy=True )
+ containers_dict[ 'missing_tool_dependencies' ] = missing_tool_dependencies_root_folder
+ # Valid tools container.
+ if valid_tools:
+ folder_id, valid_tools_root_folder = container_util.build_tools_folder( folder_id,
+ valid_tools,
+ repository,
+ changeset_revision,
+ label='Valid tools',
+ description='click the name to inspect the tool metadata' )
+ containers_dict[ 'valid_tools' ] = valid_tools_root_folder
+ # Workflows container.
+ if workflows:
+ folder_id, workflows_root_folder = container_util.build_workflows_folder( folder_id, workflows, repository_metadata, label='Workflows' )
+ containers_dict[ 'workflows' ] = workflows_root_folder
except Exception, e:
log.debug( "Exception in build_repository_containers_for_galaxy: %s" % str( e ) )
finally:
@@ -140,9 +171,10 @@
label='Invalid tools' )
containers_dict[ 'invalid_tools' ] = invalid_tools_root_folder
# Readme files container.
- readme_files_dict = build_readme_files_dict( repository_metadata )
- folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
- containers_dict[ 'readme_files' ] = readme_files_root_folder
+ if metadata and 'readme_files' in metadata:
+ readme_files_dict = build_readme_files_dict( metadata )
+ folder_id, readme_files_root_folder = container_util.build_readme_files_folder( folder_id, readme_files_dict )
+ containers_dict[ 'readme_files' ] = readme_files_root_folder
# Repository dependencies container.
toolshed_base_url = str( url_for( '/', qualified=True ) ).rstrip( '/' )
folder_id, repository_dependencies_root_folder = container_util.build_repository_dependencies_folder( toolshed_base_url=toolshed_base_url,
@@ -177,6 +209,64 @@
finally:
lock.release()
return containers_dict
+def build_repository_dependency_relationships( trans, repo_info_dicts, tool_shed_repositories ):
+ """
+ Build relationships between installed tool shed repositories and other installed tool shed repositories upon which they depend. These
+ relationships are defined in the repository_dependencies entry for each dictionary in the received list of repo_info_dicts. Each of
+ these dictionaries is associated with a repository in the received tool_shed_repositories list.
+ """
+ for repo_info_dict in repo_info_dicts:
+ for name, repo_info_tuple in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ get_repo_info_tuple_contents( repo_info_tuple )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ dependent_repository = None
+ dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision = container_util.get_components_from_key( key )
+ for tsr in tool_shed_repositories:
+                        # Get the tool_shed_repository defined by name, owner and changeset_revision.  This is the repository that will be
+ # dependent upon each of the tool shed repositories contained in val.
+ # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported.
+ if tsr.name == dependent_name and tsr.owner == dependent_owner and tsr.changeset_revision == dependent_changeset_revision:
+ dependent_repository = tsr
+ break
+ if dependent_repository is None:
+ # The dependent repository is not in the received list so look in the database.
+ dependent_repository = get_or_create_tool_shed_repository( trans, dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision )
+ # Process each repository_dependency defined for the current dependent repository.
+ for repository_dependency_components_list in val:
+ required_repository = None
+ rd_toolshed, rd_name, rd_owner, rd_changeset_revision = repository_dependency_components_list
+                        # Get the tool_shed_repository defined by rd_name, rd_owner and rd_changeset_revision.  This is the repository that will be
+ # required by the current dependent_repository.
+ # TODO: Check tool_shed_repository.tool_shed as well when repository dependencies across tool sheds is supported.
+ for tsr in tool_shed_repositories:
+ if tsr.name == rd_name and tsr.owner == rd_owner and tsr.changeset_revision == rd_changeset_revision:
+ required_repository = tsr
+ break
+ if required_repository is None:
+ # The required repository is not in the received list so look in the database.
+ required_repository = get_or_create_tool_shed_repository( trans, rd_toolshed, rd_name, rd_owner, rd_changeset_revision )
+ # Ensure there is a repository_dependency relationship between dependent_repository and required_repository.
+ rrda = None
+ for rd in dependent_repository.repository_dependencies:
+ if rd.repository_dependency.tool_shed_repository_id == required_repository.id:
+ rrda = rd
+ break
+ if not rrda:
+ # Make sure required_repository is in the repository_dependency table.
+ repository_dependency = get_repository_dependency_by_repository_id( trans, required_repository.id )
+ if not repository_dependency:
+ repository_dependency = trans.model.RepositoryDependency( tool_shed_repository_id=required_repository.id )
+ trans.sa_session.add( repository_dependency )
+ trans.sa_session.flush()
+ # Build the relationship between the dependent_repository and the required_repository.
+ rrda = trans.model.RepositoryRepositoryDependencyAssociation( tool_shed_repository_id=dependent_repository.id,
+ repository_dependency_id=repository_dependency.id )
+ trans.sa_session.add( rrda )
+ trans.sa_session.flush()
def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
"""Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
@@ -546,6 +636,60 @@
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
return repository_metadata
+def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
+ status, current_changeset_revision=None, owner='', dist_to_shed=False ):
+ # The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
+ # to be in the Galaxy distribution, but have been moved to the main Galaxy tool shed.
+ if current_changeset_revision is None:
+ # The current_changeset_revision is not passed if a repository is being installed for the first time. If a previously installed repository
+ # was later uninstalled, this value should be received as the value of that change set to which the repository had been updated just prior
+ # to it being uninstalled.
+ current_changeset_revision = installed_changeset_revision
+ sa_session = app.model.context.current
+ tool_shed = get_tool_shed_from_clone_url( repository_clone_url )
+ if not owner:
+ owner = get_repository_owner_from_clone_url( repository_clone_url )
+ includes_datatypes = 'datatypes' in metadata_dict
+ if status in [ app.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ deleted = True
+ uninstalled = False
+ elif status in [ app.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ deleted = True
+ uninstalled = True
+ else:
+ deleted = False
+ uninstalled = False
+ tool_shed_repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app,
+ tool_shed,
+ name,
+ owner,
+ installed_changeset_revision )
+ if tool_shed_repository:
+ tool_shed_repository.description = description
+ tool_shed_repository.changeset_revision = current_changeset_revision
+ tool_shed_repository.ctx_rev = ctx_rev
+ tool_shed_repository.metadata = metadata_dict
+ tool_shed_repository.includes_datatypes = includes_datatypes
+ tool_shed_repository.deleted = deleted
+ tool_shed_repository.uninstalled = uninstalled
+ tool_shed_repository.status = status
+ else:
+ tool_shed_repository = app.model.ToolShedRepository( tool_shed=tool_shed,
+ name=name,
+ description=description,
+ owner=owner,
+ installed_changeset_revision=installed_changeset_revision,
+ changeset_revision=current_changeset_revision,
+ ctx_rev=ctx_rev,
+ metadata=metadata_dict,
+ includes_datatypes=includes_datatypes,
+ dist_to_shed=dist_to_shed,
+ deleted=deleted,
+ uninstalled=uninstalled,
+ status=status )
+ sa_session.add( tool_shed_repository )
+ sa_session.flush()
+ return tool_shed_repository
def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
repository_metadata=None, metadata=None, repository_dependencies=None ):
"""
@@ -585,6 +729,20 @@
repository_dependencies,
metadata.get( 'tool_dependencies', None ) )
return repo_info_dict
+def ensure_required_repositories_exist_for_reinstall( trans, repository_dependencies ):
+    """
+    Inspect the received repository_dependencies dictionary and make sure tool_shed_repository objects exist in the database for each entry. These
+    tool_shed_repositories do not necessarily have to exist on disk, and if they do not, their status will be marked appropriately. They must exist
+    in the database in order for repository dependency relationships to be properly built.
+    """
+    for key, val in repository_dependencies.items():
+        # The 'root_key' and 'description' entries are bookkeeping, not repository components.
+        if key in [ 'root_key', 'description' ]:
+            continue
+        tool_shed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+        repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision )
+        # Each value is a list of [ tool_shed, name, owner, changeset_revision ] components defining a required repository.
+        for repository_components_list in val:
+            tool_shed, name, owner, changeset_revision = repository_components_list
+            repository = get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision )
def generate_clone_url_for_installed_repository( app, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( app, repository )
@@ -765,9 +923,18 @@
# See if we have a repository dependencies defined.
if name == 'repository_dependencies.xml':
path_to_repository_dependencies_config = os.path.join( root, name )
- metadata_dict, error_message = generate_repository_dependency_metadata( app, path_to_repository_dependencies_config, metadata_dict )
- if error_message:
- invalid_file_tups.append( ( name, error_message ) )
+ if app.name == 'community':
+ metadata_dict, error_message = generate_repository_dependency_metadata_for_tool_shed( app,
+ path_to_repository_dependencies_config,
+ metadata_dict )
+ if error_message:
+ invalid_file_tups.append( ( name, error_message ) )
+ elif app.name == 'galaxy':
+ metadata_dict, error_message = generate_repository_dependency_metadata_for_installed_repository( app,
+ path_to_repository_dependencies_config,
+ metadata_dict )
+ if error_message:
+ invalid_file_tups.append( ( name, error_message ) )
# See if we have one or more READ_ME files.
elif name.lower() in readme_file_names:
relative_path_to_readme = get_relative_path_to_repository_file( root,
@@ -860,8 +1027,42 @@
if requirements_dict:
tool_dependencies_dict[ dependency_key ] = requirements_dict
return tool_dependencies_dict
-def generate_repository_dependency_metadata( app, repository_dependencies_config, metadata_dict ):
- """Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config."""
+def generate_repository_dependency_metadata_for_installed_repository( app, repository_dependencies_config, metadata_dict ):
+ """
+ Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method
+ is called only from Galaxy.
+ """
+ repository_dependencies_tups = []
+ error_message = ''
+ try:
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree = util.parse_xml( repository_dependencies_config )
+ root = tree.getroot()
+ is_valid = root.tag == 'repositories'
+ except Exception, e:
+ error_message = "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) )
+ log.debug( error_message )
+ is_valid = False
+ if is_valid:
+ sa_session = app.model.context.current
+ for repository_elem in root.findall( 'repository' ):
+ toolshed = repository_elem.attrib[ 'toolshed' ]
+ name = repository_elem.attrib[ 'name' ]
+ owner = repository_elem.attrib[ 'owner']
+ changeset_revision = repository_elem.attrib[ 'changeset_revision' ]
+ repository_dependencies_tup = ( toolshed, name, owner, changeset_revision )
+ if repository_dependencies_tup not in repository_dependencies_tups:
+ repository_dependencies_tups.append( repository_dependencies_tup )
+ if repository_dependencies_tups:
+ repository_dependencies_dict = dict( description=root.get( 'description' ),
+ repository_dependencies=repository_dependencies_tups )
+ metadata_dict[ 'repository_dependencies' ] = repository_dependencies_dict
+ return metadata_dict, error_message
+def generate_repository_dependency_metadata_for_tool_shed( app, repository_dependencies_config, metadata_dict ):
+ """
+ Generate a repository dependencies dictionary based on valid information defined in the received repository_dependencies_config. This method
+ is called only from the tool shed.
+ """
repository_dependencies_tups = []
error_message = ''
try:
@@ -1111,6 +1312,12 @@
# quiet = True
_ui.setconfig( 'ui', 'quiet', True )
return _ui
+def get_ctx_rev( tool_shed_url, name, owner, changeset_revision ):
+ url = url_join( tool_shed_url, 'repository/get_ctx_rev?name=%s&owner=%s&changeset_revision=%s' % ( name, owner, changeset_revision ) )
+ response = urllib2.urlopen( url )
+ ctx_rev = response.read()
+ response.close()
+ return ctx_rev
def get_ctx_file_path_from_manifest( filename, repo, changeset_revision ):
"""Get the ctx file path for the latest revision of filename from the repository manifest up to the value of changeset_revision."""
stripped_filename = strip_path( filename )
@@ -1223,6 +1430,25 @@
# We've found the changeset in the changelog for which we need to get the next downloadable changset.
found_after_changeset_revision = True
return None
+def get_or_create_tool_shed_repository( trans, tool_shed, name, owner, changeset_revision ):
+ repository = get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
+ if not repository:
+ tool_shed_url = get_url_from_tool_shed( trans.app, tool_shed )
+ repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
+ ctx_rev = get_ctx_rev( tool_shed_url, name, owner, installed_changeset_revision )
+ print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
+ repository = create_or_update_tool_shed_repository( app=self.app,
+ name=name,
+ description=None,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=self.app.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=None,
+ owner=sowner,
+ dist_to_shed=False )
+ return repository
def get_ordered_downloadable_changeset_revisions( repository, repo ):
"""Return an ordered list of changeset_revisions defined by a repository changelog."""
changeset_tups = []
@@ -1366,6 +1592,23 @@
repository_dependency[ 1 ],
repository_dependency[ 2 ],
repository_dependency[ 3] )
+def get_repository_dependency_by_repository_id( trans, decoded_repository_id ):
+ return trans.sa_session.query( trans.model.RepositoryDependency ) \
+ .filter( trans.model.RepositoryDependency.table.c.tool_shed_repository_id == decoded_repository_id ) \
+ .first()
+def get_repository_for_dependency_relationship( app, tool_shed, name, owner, changeset_revision ):
+ repository = get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app=app,
+ tool_shed=tool_shed,
+ name=name,
+ owner=owner,
+ installed_changeset_revision=changeset_revision )
+ if not repository:
+ repository = get_tool_shed_repository_by_shed_name_owner_changeset_revision( app=app,
+ tool_shed=tool_shed,
+ name=name,
+ owner=owner,
+ changeset_revision=changeset_revision )
+ return repository
def get_repository_file_contents( file_path ):
if checkers.is_gzip( file_path ):
safe_str = to_safe_string( '\ngzip compressed file\n' )
@@ -1512,6 +1755,30 @@
tool_path = shed_config_dict[ 'tool_path' ]
relative_install_dir = partial_install_dir
return shed_tool_conf, tool_path, relative_install_dir
+def get_tool_shed_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
+ # This method is used only in Galaxy, not the tool shed.
+ sa_session = app.model.context.current
+ if tool_shed.find( '//' ) > 0:
+ tool_shed = tool_shed.split( '//' )[1]
+ tool_shed = tool_shed.rstrip( '/' )
+ return sa_session.query( app.model.ToolShedRepository ) \
+ .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.model.ToolShedRepository.table.c.name == name,
+ app.model.ToolShedRepository.table.c.owner == owner,
+ app.model.ToolShedRepository.table.c.changeset_revision == changeset_revision ) ) \
+ .first()
+def get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( app, tool_shed, name, owner, installed_changeset_revision ):
+ # This method is used only in Galaxy, not the tool shed.
+ sa_session = app.model.context.current
+ if tool_shed.find( '//' ) > 0:
+ tool_shed = tool_shed.split( '//' )[1]
+ tool_shed = tool_shed.rstrip( '/' )
+ return sa_session.query( app.model.ToolShedRepository ) \
+ .filter( and_( app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
+ app.model.ToolShedRepository.table.c.name == name,
+ app.model.ToolShedRepository.table.c.owner == owner,
+ app.model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision ) ) \
+ .first()
def get_tool_shed_from_clone_url( repository_clone_url ):
tmp_url = clean_repository_clone_url( repository_clone_url )
return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
@@ -1557,6 +1824,16 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
+def get_url_from_tool_shed( app, tool_shed ):
+ # The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
+ # http://toolshed.g2.bx.psu.edu/
+ for shed_name, shed_url in app.tool_shed_registry.tool_sheds.items():
+ if shed_url.find( tool_shed ) >= 0:
+ if shed_url.endswith( '/' ):
+ shed_url = shed_url.rstrip( '/' )
+ return shed_url
+ # The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
+ return None
def get_user_by_username( trans, username ):
"""Get a user from the database by username"""
return trans.sa_session.query( trans.model.User ) \
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -12,6 +12,7 @@
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+ self.name = "community"
# Read config file and check for errors
self.config = config.Configuration( **kwargs )
self.config.check()
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1338,7 +1338,7 @@
changeset_revision = kwd[ 'changeset_revision' ]
repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- return suc.build_readme_files_dict( repository_metadata )
+ return suc.build_readme_files_dict( repository_metadata.metadata )
@web.json
def get_repository_dependencies( self, trans, **kwd ):
"""Return an encoded dictionary of all repositories upon which the contents of the received repository depends."""
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/community/util/container_util.py
--- a/lib/galaxy/webapps/community/util/container_util.py
+++ b/lib/galaxy/webapps/community/util/container_util.py
@@ -218,7 +218,7 @@
else:
repository_dependencies_root_folder = None
return folder_id, repository_dependencies_root_folder
-def build_tools_folder( folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools' ):
+def build_tools_folder( folder_id, tool_dicts, repository, changeset_revision, valid=True, label='Valid tools', description=None ):
"""Return a folder hierarchy containing valid tools."""
if tool_dicts:
tool_id = 0
@@ -226,6 +226,8 @@
tools_root_folder = Folder( id=folder_id, key='root', label='root', parent=None )
folder_id += 1
folder = Folder( id=folder_id, key='tools', label=label, parent=tools_root_folder )
+ if description:
+ folder.description = description
tools_root_folder.folders.append( folder )
# Insert a header row.
tool_id += 1
@@ -239,6 +241,10 @@
repository_id='',
changeset_revision='' )
folder.valid_tools.append( tool )
+ if repository:
+ repository_id = repository.id
+ else:
+ repository_id = ''
for tool_dict in tool_dicts:
tool_id += 1
if 'requirements' in tool_dict:
@@ -256,7 +262,7 @@
description=tool_dict[ 'description' ],
version=tool_dict[ 'version' ],
requirements=requirements_str,
- repository_id=repository.id,
+ repository_id=repository_id,
changeset_revision=changeset_revision )
folder.valid_tools.append( tool )
else:
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/demo_sequencer/app.py
--- a/lib/galaxy/webapps/demo_sequencer/app.py
+++ b/lib/galaxy/webapps/demo_sequencer/app.py
@@ -6,6 +6,7 @@
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+ self.name = "demo_sequencer"
# Read config file and check for errors
self.config = config.Configuration( **kwargs )
self.config.check()
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/demo_sequencer/buildapp.py
--- a/lib/galaxy/webapps/demo_sequencer/buildapp.py
+++ b/lib/galaxy/webapps/demo_sequencer/buildapp.py
@@ -55,7 +55,7 @@
sys.exit( 1 )
atexit.register( app.shutdown )
# Create the universe WSGI application
- webapp = galaxy.webapps.demo_sequencer.framework.WebApplication( app, session_cookie='galaxydemo_sequencersession' )
+ webapp = galaxy.webapps.demo_sequencer.framework.WebApplication( app, session_cookie='galaxydemo_sequencersession', name="demo_sequencer" )
add_ui_controllers( webapp, app )
# These two routes handle our simple needs at the moment
webapp.add_route( '/:controller/:action', action='index' )
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -547,20 +547,6 @@
galaxy_url = url_for( '/', qualified=True )
url = suc.url_join( tool_shed_url, 'repository/find_workflows?galaxy_url=%s' % galaxy_url )
return trans.response.send_redirect( url )
- def generate_tool_path( self, repository_clone_url, changeset_revision ):
- """
- Generate a tool path that guarantees repositories with the same name will always be installed
- in different directories. The tool path will be of the form:
- <tool shed url>/repos/<repository owner>/<repository name>/<installed changeset revision>
- http://test@bx.psu.edu:9009/repos/test/filter
- """
- tmp_url = suc.clean_repository_clone_url( repository_clone_url )
- # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column
- items = tmp_url.split( 'repos' )
- tool_shed_url = items[ 0 ]
- repo_path = items[ 1 ]
- tool_shed_url = suc.clean_tool_shed_url( tool_shed_url )
- return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -585,59 +571,11 @@
raw_text = response.read()
response.close()
if len( raw_text ) > 2:
- text = json.from_json_string( encoding_util.tool_shed_decode( raw_text ) )
- log.debug( text )
+ encoded_text = from_json_string( raw_text )
+ text = encoding_util.tool_shed_decode( encoded_text )
else:
text = ''
return text
- def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
- """
- Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
- repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
- this methid is required to retrieve all repository dependencies.
- """
- if repo_info_dicts:
- all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
- # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
- required_repository_tups = []
- for repo_info_dict in repo_info_dicts:
- for repository_name, repo_info_tup in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tup )
- if repository_dependencies:
- for key, val in repository_dependencies.items():
- if key in [ 'root_key', 'description' ]:
- continue
- toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
- components_list = [ toolshed, name, owner, changeset_revision ]
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- for components_list in val:
- if components_list not in required_repository_tups:
- required_repository_tups.append( components_list )
- if required_repository_tups:
- # The value of required_repository_tups is a list of tuples, so we need to encode it.
- encoded_required_repository_tups = []
- for required_repository_tup in required_repository_tups:
- encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
- encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
- encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
- url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- required_repo_info_dict = from_json_string( text )
- required_repo_info_dicts = []
- encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
- for encoded_dict_str in encoded_dict_strings:
- decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
- required_repo_info_dicts.append( decoded_dict )
- if required_repo_info_dicts:
- for required_repo_info_dict in required_repo_info_dicts:
- if required_repo_info_dict not in all_repo_info_dicts:
- all_repo_info_dicts.append( required_repo_info_dict )
- return all_repo_info_dicts
def get_versions_of_tool( self, app, guid ):
tool_version = shed_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@@ -748,7 +686,7 @@
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- relative_clone_dir = self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ relative_clone_dir = shed_util.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision )
clone_dir = os.path.join( tool_path, relative_clone_dir )
relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
install_dir = os.path.join( tool_path, relative_install_dir )
@@ -908,13 +846,13 @@
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
if not repository:
return trans.show_error_message( 'Invalid repository specified.' )
+ tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='monitor_repository_installation',
**kwd ) )
if repository.can_install and operation == 'install':
# Send a request to the tool shed to install the repository.
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
url = suc.url_join( tool_shed_url,
'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
@@ -939,10 +877,50 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "The repository information has been updated."
+ metadata = repository.metadata
+ datatypes = metadata.get( 'datatypes', None )
+ invalid_tools = metadata.get( 'invalid_tools', None )
+ if repository.has_readme_files:
+ readme_files_dict = suc.build_readme_files_dict( repository.metadata )
+ else:
+ readme_files_dict = None
+ repository_dependencies = metadata.get( 'repository_dependencies', None )
+ repository_dependencies_dict_for_display = {}
+ if repository_dependencies:
+ # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
+ # shed repository metadata.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision )
+ rd_tups_for_display = []
+ rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ repository_dependencies_dict_for_display[ 'root_key' ] = root_key
+ repository_dependencies_dict_for_display[ root_key ] = rd_tups
+ repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
+ all_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ valid_tools = metadata.get( 'tools', None )
+ workflows = metadata.get( 'workflows', None )
+ containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
+ toolshed_base_url=tool_shed_url,
+ repository_name=repository.name,
+ repository_owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ repository=repository,
+ datatypes=datatypes,
+ invalid_tools=invalid_tools,
+ missing_tool_dependencies=missing_tool_dependencies,
+ readme_files_dict=readme_files_dict,
+ repository_dependencies=repository_dependencies_dict_for_display,
+ tool_dependencies=tool_dependencies,
+ valid_tools=valid_tools,
+ workflows=workflows )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
repo_files_dir=repo_files_dir,
+ containers_dict=containers_dict,
message=message,
status=status )
@web.expose
@@ -1153,8 +1131,8 @@
shed_tool_conf = kwd[ 'shed_tool_conf' ]
else:
install_tool_dependencies = False
- # If installing a repository that includes no tools, get the relative tool_path from the file to which the
- # migrated_tools_config setting points.
+ # If installing a repository that includes no tools, get the relative tool_path from the file to which the migrated_tools_config
+ # setting points.
shed_tool_conf = trans.app.config.migrated_tools_config
# Get the tool path by searching the list of shed_tool_confs for the dictionary that contains the information about shed_tool_conf.
for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
@@ -1167,61 +1145,20 @@
if file_name == shed_tool_conf:
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
- # Make sure all tool_shed_repository records exist.
- created_or_updated_tool_shed_repositories = []
- # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
- filtered_repo_info_dicts = []
- # Disciver all repository dependencies and retrieve information for installing them.
- repo_info_dicts = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
- for repo_info_dict in repo_info_dicts:
- for name, repo_info_tuple in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, changeset_revision ) )
- relative_install_dir = os.path.join( clone_dir, name )
- # Make sure the repository was not already installed.
- installed_tool_shed_repository, installed_changeset_revision = self.repository_was_previously_installed( trans,
- tool_shed_url,
- name,
- repo_info_tuple,
- clone_dir )
- if installed_tool_shed_repository:
- message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner )
- if installed_changeset_revision != changeset_revision:
- message += "was previously installed using changeset revision <b>%s</b>. " % installed_changeset_revision
- else:
- message += "was previously installed. "
- if installed_tool_shed_repository.uninstalled:
- message += "The repository has been uninstalled, however, so reinstall the original repository instead of installing it again. "
- elif installed_tool_shed_repository.deleted:
- message += "The repository has been deactivated, however, so activate the original repository instead of installing it again. "
- if installed_changeset_revision != changeset_revision:
- message += "You can get the latest updates for the repository using the <b>Get updates</b> option from the repository's "
- message += "<b>Repository Actions</b> pop-up menu. "
- message+= 'Click <a href="%s">here</a> to manage the repository. ' % \
- ( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) )
- status = 'error'
- if len( repo_info_dicts ) == 1:
- new_kwd = dict( message=message, status=status )
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='browse_repositories',
- **new_kwd ) )
- else:
- log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name )
- tool_shed_repository = shed_util.create_or_update_tool_shed_repository( app=trans.app,
- name=name,
- description=description,
- installed_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=trans.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=changeset_revision,
- owner=repository_owner,
- dist_to_shed=False )
- created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
+ created_or_updated_tool_shed_repositories, repo_info_dicts, filtered_repo_info_dicts, message = \
+ shed_util.create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False )
+ if message and len( repo_info_dicts ) == 1:
+ message+= 'Click <a href="%s">here</a> to manage the repository. ' % \
+ ( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) )
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repositories',
+ message=message,
+ status='error' ) )
if created_or_updated_tool_shed_repositories:
+ if install_repository_dependencies:
+ # Build repository dependency relationships.
+ suc.build_repository_dependency_relationships( trans, repo_info_dicts, created_or_updated_tool_shed_repositories )
+ # Handle contained tools.
if includes_tools and ( new_tool_panel_section or tool_panel_section ):
if new_tool_panel_section:
section_id = new_tool_panel_section.lower().replace( ' ', '_' )
@@ -1286,11 +1223,8 @@
repo_info_dict = repo_info_dicts[ 0 ]
name = repo_info_dict.keys()[ 0 ]
repo_info_tuple = repo_info_dict[ name ]
- if len( repo_info_tuple ) == 6:
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- repository_dependencies = None
- elif len( repo_info_tuple ) == 7:
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
url = suc.url_join( tool_shed_url,
'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
( name, repository_owner, changeset_revision ) )
@@ -1303,9 +1237,15 @@
repository_name=name,
repository_owner=repository_owner,
changeset_revision=changeset_revision,
+ repository=None,
+ datatypes=None,
+ invalid_tools=None,
+ missing_tool_dependencies=None,
readme_files_dict=readme_files_dict,
repository_dependencies=repository_dependencies,
- tool_dependencies=tool_dependencies )
+ tool_dependencies=tool_dependencies,
+ valid_tools=None,
+ workflows=None )
else:
containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None )
# Handle tool dependencies chack box.
@@ -1338,18 +1278,23 @@
@web.expose
@web.require_admin
def reinstall_repository( self, trans, **kwd ):
+ """
+ Reinstall a tool shed repository that has been previously uninstalled, making sure to handle all repository and tool dependencies of the
+ repository.
+ """
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
+ install_repository_dependencies = CheckboxField.is_checked( kwd.get( 'install_repository_dependencies', '' ) )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
- clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
+ clone_dir = os.path.join( tool_path, shed_util.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
tool_section = None
@@ -1406,28 +1351,34 @@
else:
tool_section = None
# The repository's status must be updated from 'Uninstall' to 'New' when initiating reinstall so the repository_installation_updater will function.
- tool_shed_repository = shed_util.create_or_update_tool_shed_repository( trans.app,
- tool_shed_repository.name,
- tool_shed_repository.description,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.ctx_rev,
- repository_clone_url,
- tool_shed_repository.metadata,
- trans.model.ToolShedRepository.installation_status.NEW,
- tool_shed_repository.installed_changeset_revision,
- tool_shed_repository.owner,
- tool_shed_repository.dist_to_shed )
- ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ tool_shed_repository = suc.create_or_update_tool_shed_repository( trans.app,
+ tool_shed_repository.name,
+ tool_shed_repository.description,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.ctx_rev,
+ repository_clone_url,
+ tool_shed_repository.metadata,
+ trans.model.ToolShedRepository.installation_status.NEW,
+ tool_shed_repository.installed_changeset_revision,
+ tool_shed_repository.owner,
+ tool_shed_repository.dist_to_shed )
+ ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ repo_info_dicts = []
repo_info_dict = kwd.get( 'repo_info_dict', None )
- # The repo_info_dict should be encoded.
- if not repo_info_dict:
+ if repo_info_dict:
+ # The repo_info_dict should be encoded.
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ else:
# Entering this if block used to happen only if the tool_shed_repository does not include any valid tools. After repository dependencies
- # were introduced, it may never happen, but will keep the block just in case.
- repository_dependencies = self.get_repository_dependencies( trans=trans,
- repository_id=repository_id,
- repository_name=tool_shed_repository.name,
- repository_owner=tool_shed_repository.owner,
- changeset_revision=tool_shed_repository.installed_changeset_revision )
+ # were introduced, it may never happen, but we'll keep the block just in case.
+ if install_repository_dependencies:
+ repository_dependencies = self.get_repository_dependencies( trans=trans,
+ repository_id=repository_id,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision )
+ else:
+ repository_dependencies = None
repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( tool_shed_repository.repo_path( trans.app ) ) )
repo_info_dict = suc.create_repo_info_dict( trans=trans,
repository_clone_url=repository_clone_url,
@@ -1440,6 +1391,16 @@
metadata=metadata,
repository_dependencies=repository_dependencies )
repo_info_dict = encoding_util.tool_shed_encode( repo_info_dict )
+ repo_info_dicts.append( repo_info_dict )
+ # Make sure all tool_shed_repository records exist.
+ created_or_updated_tool_shed_repositories = [ tool_shed_repository ]
+ if install_repository_dependencies:
+ created_or_updated_tool_shed_repositories, repo_info_dicts, filtered_repo_info_dicts = \
+ shed_util.create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=True )
+ if len( created_or_updated_tool_shed_repositories ) > 1:
+ # Build repository dependency relationships.
+ suc.build_repository_dependency_relationships( trans, filtered_repo_info_dicts, created_or_updated_tool_shed_repositories )
+ encoded_repository_ids = [ trans.security.encode_id( r.id ) for r in created_or_updated_tool_shed_repositories ]
new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
install_tool_dependencies=install_tool_dependencies,
@@ -1451,12 +1412,12 @@
tool_panel_section=tool_panel_section,
tool_path=tool_path,
tool_panel_section_key=tool_panel_section_key,
- tool_shed_repository_ids=[ repository_id ],
+ tool_shed_repository_ids=encoded_repository_ids,
tool_shed_url=tool_shed_url )
encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='initiate_repository_installation',
- shed_repository_ids=repository_id,
+ shed_repository_ids=encoded_repository_ids,
encoded_kwd=encoded_kwd,
reinstalling=True ) )
@web.json
@@ -1481,48 +1442,23 @@
return rval
@web.expose
@web.require_admin
- def repository_was_previously_installed( self, trans, tool_shed_url, repository_name, repo_info_tuple, clone_dir ):
- # Handle case where the repository was previously installed using an older changeset_revsion, but later the repository was updated
- # in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one
- # that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled.
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
- # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
- # revision to see if it was previously installed using one of them.
- url = suc.url_join( tool_shed_url,
- 'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
- ( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- #clone_path, clone_directory = os.path.split( clone_dir )
- changeset_revisions = util.listify( text )
- for previous_changeset_revision in changeset_revisions:
- tool_shed_repository = shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( trans.app,
- tool_shed,
- repository_name,
- repository_owner,
- previous_changeset_revision )
- if tool_shed_repository and tool_shed_repository.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
- return tool_shed_repository, previous_changeset_revision
- return None, None
- @web.expose
- @web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
message = ''
repository_id = kwd[ 'id' ]
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
- ctx_rev = shed_util.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
+ ctx_rev = suc.get_ctx_rev( tool_shed_url, tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans.app, tool_shed_repository )
repository_dependencies = self.get_repository_dependencies( trans=trans,
repository_id=repository_id,
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.installed_changeset_revision )
+ if repository_dependencies:
+ includes_repository_dependencies = True
+ else:
+ includes_repository_dependencies = False
repo_info_dict = suc.create_repo_info_dict( trans=trans,
repository_clone_url=repository_clone_url,
changeset_revision=tool_shed_repository.installed_changeset_revision,
@@ -1561,24 +1497,51 @@
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
- if metadata and 'readme_files' in metadata:
- url = suc.url_join( tool_shed_url,
- 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
- response = urllib2.urlopen( url )
- raw_text = response.read()
- response.close()
- readme_files_dict = from_json_string( raw_text )
- tool_dependencies = metadata.get( 'tool_dependencies', None )
+ if metadata:
+ datatypes = metadata.get( 'datatypes', None )
+ invalid_tools = metadata.get( 'invalid_tools', None )
+ if tool_shed_repository.has_readme_files:
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_readme_files?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.installed_changeset_revision ) )
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ readme_files_dict = from_json_string( raw_text )
+ else:
+ readme_files_dict = None
repository_dependencies = metadata.get( 'repository_dependencies', None )
+ repository_dependencies_dict_for_display = {}
+ if repository_dependencies:
+ # We need to add a root_key entry to the repository_dependencies dictionary since it will not be included in the installed tool
+ # shed repository metadata.
+ root_key = container_util.generate_repository_dependencies_key_for_repository( repository.tool_shed,
+ repository.name,
+ repository.owner,
+ repository.installed_changeset_revision )
+ rd_tups_for_display = []
+ rd_tups = repository_dependencies[ 'repository_dependencies' ]
+ repository_dependencies_dict_for_display[ 'root_key' ] = root_key
+ repository_dependencies_dict_for_display[ root_key ] = rd_tups
+ repository_dependencies_dict_for_display[ 'description' ] = repository_dependencies[ 'description' ]
+ all_tool_dependencies = metadata.get( 'tool_dependencies', None )
+ tool_dependencies, missing_tool_dependencies = shed_util.get_installed_and_missing_tool_dependencies( trans, repository, all_tool_dependencies )
+ valid_tools = metadata.get( 'tools', None )
+ workflows = metadata.get( 'workflows', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
repository_name=tool_shed_repository.name,
repository_owner=tool_shed_repository.owner,
changeset_revision=tool_shed_repository.installed_changeset_revision,
+ repository=tool_shed_repository,
+ datatypes=datatypes,
+ invalid_tools=invalid_tools,
+ missing_tool_dependencies=missing_tool_dependencies,
readme_files_dict=readme_files_dict,
repository_dependencies=repository_dependencies,
- tool_dependencies=tool_dependencies )
+ tool_dependencies=tool_dependencies,
+ valid_tools=valid_tools,
+ workflows=workflows )
else:
containers_dict = dict( readme_files_dict=None, repository_dependencies=None, tool_dependencies=None )
# Handle repository dependencies check box.
@@ -1597,6 +1560,8 @@
repository=tool_shed_repository,
no_changes_check_box=no_changes_check_box,
original_section_name=original_section_name,
+ includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
+ includes_repository_dependencies=includes_repository_dependencies,
install_repository_dependencies_check_box=install_repository_dependencies_check_box,
install_tool_dependencies_check_box=install_tool_dependencies_check_box,
containers_dict=containers_dict,
@@ -1797,7 +1762,7 @@
changeset_revision = params.get( 'changeset_revision', None )
latest_changeset_revision = params.get( 'latest_changeset_revision', None )
latest_ctx_rev = params.get( 'latest_ctx_rev', None )
- repository = shed_util.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
+ repository = suc.get_tool_shed_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 lib/galaxy/webapps/reports/app.py
--- a/lib/galaxy/webapps/reports/app.py
+++ b/lib/galaxy/webapps/reports/app.py
@@ -6,6 +6,7 @@
"""Encapsulates the state of a Universe application"""
def __init__( self, **kwargs ):
print >> sys.stderr, "python path is: " + ", ".join( sys.path )
+ self.name = "reports"
# Read config file and check for errors
self.config = config.Configuration( **kwargs )
self.config.check()
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/admin/tool_shed_repository/common.mako
--- a/templates/admin/tool_shed_repository/common.mako
+++ b/templates/admin/tool_shed_repository/common.mako
@@ -103,14 +103,16 @@
</div></div>
%if repository_dependencies_root_folder:
- <div class="form-row">
- <label>Handle repository dependencies?</label>
- ${install_repository_dependencies_check_box.get_html()}
- <div class="toolParamHelp" style="clear: both;">
- Un-check to skip automatic installation of these additional repositories required by this repository.
+ %if install_repository_dependencies_check_box is not None:
+ <div class="form-row">
+ <label>Handle repository dependencies?</label>
+ ${install_repository_dependencies_check_box.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ Un-check to skip automatic installation of these additional repositories required by this repository.
+ </div></div>
- </div>
- <div style="clear: both"></div>
+ <div style="clear: both"></div>
+ %endif
<div class="form-row">
%if repository_dependencies_root_folder:
<p/>
@@ -123,19 +125,21 @@
</div>
%endif
%if tool_dependencies_root_folder:
- <div class="form-row">
- <label>Handle tool dependencies?</label>
- <% disabled = trans.app.config.tool_dependency_dir is None %>
- ${install_tool_dependencies_check_box.get_html( disabled=disabled )}
- <div class="toolParamHelp" style="clear: both;">
- %if disabled:
- Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies.
- %else:
- Un-check to skip automatic handling of these tool dependencies.
- %endif
+ %if install_tool_dependencies_check_box is not None:
+ <div class="form-row">
+ <label>Handle tool dependencies?</label>
+ <% disabled = trans.app.config.tool_dependency_dir is None %>
+ ${install_tool_dependencies_check_box.get_html( disabled=disabled )}
+ <div class="toolParamHelp" style="clear: both;">
+ %if disabled:
+ Set the tool_dependency_dir configuration value in your Galaxy config to automatically handle tool dependencies.
+ %else:
+ Un-check to skip automatic handling of these tool dependencies.
+ %endif
+ </div></div>
- </div>
- <div style="clear: both"></div>
+ <div style="clear: both"></div>
+ %endif
<div class="form-row">
%if tool_dependencies_root_folder:
<p/>
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -1,7 +1,19 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/webapps/community/repository/common.mako" import="*" /><%namespace file="/admin/tool_shed_repository/common.mako" import="*" />
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+ ${container_javascripts()}
+</%def>
+
<br/><br/><ul class="manage-table-actions"><li><a class="action-button" id="repository-${repository.id}-popup" class="menubutton">Repository Actions</a></li>
@@ -91,218 +103,5 @@
</div><p/>
%if not in_error_state:
- <div class="toolForm">
- <div class="toolFormTitle">${repository.name}</div>
- <div class="toolFormBody">
- <%
- metadata = repository.metadata or {}
- missing_tool_dependencies = repository.missing_tool_dependencies
- installed_tool_dependencies = repository.installed_tool_dependencies
- %>
- %if missing_tool_dependencies:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Missing tool dependencies</i></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>version</b></td>
- <td><b>type</b></td>
- <td><b>status</b></td>
- </tr>
- %for tool_dependency in missing_tool_dependencies:
- <tr>
- <td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='manage_tool_dependencies', id=trans.security.encode_id( tool_dependency.id ) )}">
- ${tool_dependency.name}
- </a>
- </td>
- <td>${tool_dependency.version}</td>
- <td>${tool_dependency.type}</td>
- <td>${tool_dependency.status}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if installed_tool_dependencies:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Installed tool dependencies<i> - click the name to browse the dependency installation directory</i></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>version</b></td>
- <td><b>type</b></td>
- </tr>
- %for installed_tool_dependency in installed_tool_dependencies:
- <tr>
- <td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='browse_tool_dependency', id=trans.security.encode_id( installed_tool_dependency.id ), repository_id=trans.security.encode_id( repository.id ) )}">
- ${installed_tool_dependency.name}
- </a>
- </td>
- <td>${installed_tool_dependency.version}</td>
- <td>${installed_tool_dependency.type}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'tools' in metadata:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Tools</b><i> - click the name to view information about the tool</i></td>
- </tr>
- </table>
- </div>
- <div class="form-row">
- <% tool_dicts = metadata[ 'tools' ] %>
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>description</b></td>
- <td><b>version</b></td>
- <td><b>requirements</b></td>
- </tr>
- %for tool_dict in tool_dicts:
- <tr>
- <td>
- <a class="view-info" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=tool_dict[ 'id' ] )}">
- ${tool_dict[ 'name' ]}
- </a>
- </td>
- <td>${tool_dict[ 'description' ]}</td>
- <td>${tool_dict[ 'version' ]}</td>
- <td>
- <%
- if 'requirements' in tool_dict:
- requirements = tool_dict[ 'requirements' ]
- else:
- requirements = None
- %>
- %if requirements:
- <%
- requirements_str = ''
- for requirement_dict in tool_dict[ 'requirements' ]:
- requirements_str += '%s (%s), ' % ( requirement_dict[ 'name' ], requirement_dict[ 'type' ] )
- requirements_str = requirements_str.rstrip( ', ' )
- %>
- ${requirements_str}
- %else:
- none
- %endif
- </td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'workflows' in metadata:
- ## metadata[ 'workflows' ] is a list of tuples where each contained tuple is
- ## [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Workflows</b><i> - click the name to import</i></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <% workflow_tups = metadata[ 'workflows' ] %>
- <table class="grid">
- <tr>
- <td><b>name</b></td>
- <td><b>steps</b></td>
- <td><b>format-version</b></td>
- <td><b>annotation</b></td>
- </tr>
- <% index = 0 %>
- %for workflow_tup in workflow_tups:
- <%
- import os.path
- relative_path = workflow_tup[ 0 ]
- full_path = os.path.abspath( relative_path )
- workflow_dict = workflow_tup[ 1 ]
- workflow_name = workflow_dict[ 'name' ]
- ## Initially steps were not stored in the metadata record.
- steps = workflow_dict.get( 'steps', [] )
- format_version = workflow_dict[ 'format-version' ]
- annotation = workflow_dict[ 'annotation' ]
- %>
- <tr>
- <td>
- <div class="menubutton" style="float: left;" id="workflow-${index}-popup">
- ${workflow_name}
- <div popupmenu="workflow-${index}-popup">
- <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow', installed_repository_file=full_path, repository_id=trans.security.encode_id( repository.id ) )}">Import to Galaxy</a>
- </div>
- </div>
- </td>
- <td>
- %if steps:
- ${len( steps )}
- %else:
- unknown
- %endif
- </td>
- <td>${format_version}</td>
- <td>${annotation}</td>
- </tr>
- <% index += 1 %>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- %if 'datatypes' in metadata:
- <div class="form-row">
- <table width="100%">
- <tr bgcolor="#D8D8D8" width="100%">
- <td><b>Data types</b></td>
- </tr>
- </table>
- </div>
- <div style="clear: both"></div>
- <div class="form-row">
- <% datatypes_dicts = metadata[ 'datatypes' ] %>
- <table class="grid">
- <tr>
- <td><b>extension</b></td>
- <td><b>type</b></td>
- <td><b>mimetype</b></td>
- <td><b>subclass</b></td>
- </tr>
- %for datatypes_dict in datatypes_dicts:
- <tr>
- <td>${datatypes_dict.get( 'extension', ' ' )}</td>
- <td>${datatypes_dict.get( 'dtype', ' ' )}</td>
- <td>${datatypes_dict.get( 'mimetype', ' ' )}</td>
- <td>${datatypes_dict.get( 'subclass', ' ' )}</td>
- </tr>
- %endfor
- </table>
- </div>
- <div style="clear: both"></div>
- %endif
- </div>
- </div>
- <p/>
+ ${render_repository_items( repository.metadata, containers_dict, can_set_metadata=False )}
%endif
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
--- a/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
+++ b/templates/admin/tool_shed_repository/reselect_tool_panel_section.mako
@@ -2,6 +2,18 @@
<%namespace file="/message.mako" import="render_msg" /><%namespace file="/admin/tool_shed_repository/common.mako" import="render_dependencies_section" /><%namespace file="/admin/tool_shed_repository/common.mako" import="render_readme_section" />
+<%namespace file="/webapps/community/repository/common.mako" import="*" />
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "library" )}
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("libs/jquery/jquery.rating", "libs/jquery/jstorage" )}
+ ${container_javascripts()}
+</%def>
%if message:
${render_msg( message, status )}
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -208,14 +208,17 @@
folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
else:
folder_label = "%s<i> - this repository requires installation of these additional repositories</i>" % folder_label
+ elif folder.label == 'Valid tools':
+ col_span_str = 'colspan="3"'
+ if folder.description:
+ folder_label = "%s<i> - %s</i>" % ( folder_label, folder.description )
+ else:
+ folder_label = "%s<i> - click the name to preview the tool and use the pop-up menu to inspect all metadata</i>" % folder_label
elif folder.invalid_tools:
folder_label = "%s<i> - click the tool config file name to see why the tool is invalid</i>" % folder_label
elif folder.tool_dependencies:
folder_label = "%s<i> - this repository's tools require handling of these dependencies</i>" % folder_label
col_span_str = 'colspan="3"'
- elif folder.valid_tools:
- folder_label = "%s<i> - click the name to preview the tool and use the pop-up menu to inspect all metadata</i>" % folder_label
- col_span_str = 'colspan="3"'
elif folder.workflows:
col_span_str = 'colspan="4"'
%>
@@ -302,7 +305,7 @@
%endif
id="libraryItem-${encoded_id}"><td style="padding-left: ${pad+20}px;">
- %if invalid_tool.repository_id and invalid_tool.tool_config and invalid_tool.changeset_revision:
+ %if trans.webapp.name == 'community' and invalid_tool.repository_id and invalid_tool.tool_config and invalid_tool.changeset_revision:
<a class="view-info" href="${h.url_for( controller='repository', action='load_invalid_tool', repository_id=trans.security.encode_id( invalid_tool.repository_id ), tool_config=invalid_tool.tool_config, changeset_revision=invalid_tool.changeset_revision )}">
${invalid_tool.tool_config | h}
</a>
@@ -376,12 +379,20 @@
<th style="padding-left: ${pad+20}px;">${tool.name | h}</th>
%else:
<td style="padding-left: ${pad+20}px;">
- <div style="float:left;" class="menubutton split popup" id="tool-${encoded_id}-popup">
- <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( tool.repository_id ), tool_config=tool.tool_config, changeset_revision=tool.changeset_revision )}">${tool.name | h}</a>
- </div>
- <div popupmenu="tool-${encoded_id}-popup">
- <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a>
- </div>
+ %if tool.repository_id:
+ %if trans.webapp.name == 'community':
+ <div style="float:left;" class="menubutton split popup" id="tool-${encoded_id}-popup">
+ <a class="view-info" href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( tool.repository_id ), tool_config=tool.tool_config, changeset_revision=tool.changeset_revision )}">${tool.name | h}</a>
+ </div>
+ <div popupmenu="tool-${encoded_id}-popup">
+ <a class="action-button" href="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a>
+ </div>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='view_tool_metadata', repository_id=trans.security.encode_id( tool.repository_id ), changeset_revision=tool.changeset_revision, tool_id=tool.tool_id )}">View tool metadata</a>
+ %endif
+ %else:
+ ${tool.name | h}
+ %endif
</td>
%endif
<${cell_type}>${tool.description | h}</${cell_type}>
@@ -459,7 +470,7 @@
%></%def>
-<%def name="render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=False )">
+<%def name="render_repository_items( metadata, containers_dict, can_set_metadata=False )"><%
from galaxy.tool_shed.encoding_util import tool_shed_encode
@@ -472,6 +483,7 @@
readme_files_root_folder = containers_dict.get( 'readme_files', None )
repository_dependencies_root_folder = containers_dict.get( 'repository_dependencies', None )
tool_dependencies_root_folder = containers_dict.get( 'tool_dependencies', None )
+ missing_tool_dependencies_root_folder = containers_dict.get( 'missing_tool_dependencies', None )
valid_tools_root_folder = containers_dict.get( 'valid_tools', none )
workflows_root_folder = containers_dict.get( 'workflows', None )
@@ -515,6 +527,13 @@
${render_folder( tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
</table>
%endif
+ %if missing_tool_dependencies_root_folder:
+ <p/>
+ <% row_counter = RowCounter() %>
+ <table cellspacing="2" cellpadding="2" border="0" width="100%" class="tables container-table" id="missing_tool_dependencies">
+ ${render_folder( missing_tool_dependencies_root_folder, 0, parent=None, row_counter=row_counter, is_root_folder=True )}
+ </table>
+ %endif
</div></div>
%endif
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -210,7 +210,7 @@
</form></div></div>
-${render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=True )}
+${render_repository_items( metadata, containers_dict, can_set_metadata=True )}
<p/><div class="toolForm"><div class="toolFormTitle">Manage categories</div>
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/preview_tools_in_changeset.mako
--- a/templates/webapps/community/repository/preview_tools_in_changeset.mako
+++ b/templates/webapps/community/repository/preview_tools_in_changeset.mako
@@ -81,4 +81,4 @@
</div></div><p/>
-${render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=False )}
+${render_repository_items( metadata, containers_dict, can_set_metadata=False )}
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -188,7 +188,7 @@
%endif
</div></div>
-${render_repository_items( repository_metadata_id, changeset_revision, metadata, containers_dict, can_set_metadata=False )}
+${render_repository_items( metadata, containers_dict, can_set_metadata=False )}
%if repository.categories:
<p/><div class="toolForm">
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -67,10 +67,13 @@
for dependency in installed_repository.metadata[ 'tool_dependencies' ]:
tool_dependency = installed_repository.metadata[ 'tool_dependencies' ][ dependency ]
strings_displayed.extend( [ tool_dependency[ 'name' ], tool_dependency[ 'version' ], tool_dependency[ 'type' ] ] )
+ """
+ TODO: Uncomment these when Greg enhances the tool dependencies and missing tool dependencies containers to display the status.
if dependencies_installed:
strings_displayed.append( 'Installed' )
else:
strings_displayed.append( 'Never installed' )
+ """
url = '/admin_toolshed/manage_repository?id=%s' % self.security.encode_id( installed_repository.id )
self.visit_galaxy_url( url )
self.check_for_strings( strings_displayed, strings_not_displayed )
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1000_install_basic_repository.py
--- a/test/tool_shed/functional/test_1000_install_basic_repository.py
+++ b/test/tool_shed/functional/test_1000_install_basic_repository.py
@@ -55,7 +55,7 @@
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
self.verify_installed_repository_on_browse_page( installed_repository )
self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Tools', 'Filter1' ] )
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'Filter1' ] )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0030_verify_installed_repository_metadata( self ):
'''Verify that resetting the metadata on an installed repository does not change the metadata.'''
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -74,7 +74,7 @@
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'freebayes_0010', common.test_user_1_name )
self.verify_installed_repository_on_browse_page( installed_repository )
self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Tools', 'FreeBayes' ] )
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'FreeBayes' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0020_verify_installed_repository_metadata( self ):
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -69,7 +69,7 @@
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0020', common.test_user_1_name )
self.verify_installed_repository_on_browse_page( installed_repository )
self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] )
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0020_verify_installed_repository_metadata( self ):
diff -r 075ff805a33a830244c4647fc9cefec663eff2b4 -r 759d96f950b8825bcce157cdf5efbabaa0221b11 test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -111,7 +111,7 @@
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'emboss_0030', common.test_user_1_name )
self.verify_installed_repository_on_browse_page( installed_repository )
self.display_installed_repository_manage_page( installed_repository,
- strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] )
+ strings_displayed=[ 'Installed tool shed repository', 'Valid tools', 'antigenic' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9785184717ac/
changeset: 9785184717ac
user: carlfeberhard
date: 2012-12-19 21:46:55
summary: history panel: fix delete function to use show_deleted
affected #: 4 files
diff -r 1fe77194bce0c78294f2c1f10fc231faf2006a2c -r 9785184717ac1fcc589c0183f2873d40ed993c57 static/scripts/mvc/dataset/hda-base.js
--- a/static/scripts/mvc/dataset/hda-base.js
+++ b/static/scripts/mvc/dataset/hda-base.js
@@ -44,7 +44,8 @@
this.expanded = attributes.expanded || false;
// re-render the entire view on any model change
- this.model.bind( 'change', this.render, this );
+ this.model.bind( 'change', this.render , this );
+
//this.bind( 'all', function( event ){
// this.log( event );
//}, this );
diff -r 1fe77194bce0c78294f2c1f10fc231faf2006a2c -r 9785184717ac1fcc589c0183f2873d40ed993c57 static/scripts/mvc/dataset/hda-edit.js
--- a/static/scripts/mvc/dataset/hda-edit.js
+++ b/static/scripts/mvc/dataset/hda-edit.js
@@ -118,9 +118,6 @@
id : 'historyItemDeleter-' + this.model.get( 'id' ),
icon_class : 'delete',
on_click : function() {
- // Provide feedback by hiding from view immediately.
- self.$el.slideUp();
-
// Delete the dataset on the server and update HDA + view depending on success/failure.
// FIXME: when HDA-delete is implemented in the API, can call set(), then save directly
// on the model.
@@ -134,13 +131,13 @@
},
success: function() {
// FIXME: setting model attribute causes re-rendering, which is unnecessary.
- self.model.set( 'deleted', true );
- self.$el.remove();
+ //self.$el.remove();
+ self.model.set({ deleted: true });
}
});
// Return false so that anchor action (page reload) does not happen.
- return false;
+ //return false;
}
};
if( this.model.get( 'deleted' ) || this.model.get( 'purged' ) ){
diff -r 1fe77194bce0c78294f2c1f10fc231faf2006a2c -r 9785184717ac1fcc589c0183f2873d40ed993c57 static/scripts/mvc/history/history-panel.js
--- a/static/scripts/mvc/history/history-panel.js
+++ b/static/scripts/mvc/history/history-panel.js
@@ -163,6 +163,11 @@
this.model.hdas.bind( 'add', this.add, this );
this.model.hdas.bind( 'reset', this.addAll, this );
+ // when a hda model is (un)deleted or (un)hidden, re-render entirely
+ //TODO??: purged
+ //TODO??: could be more selective here
+ this.model.hdas.bind( 'change:deleted change:hidden', this.render, this );
+
// if an a hidden hda is created (gen. by a workflow), moves thru the updater to the ready state,
// then: remove it from the collection if the panel is set to NOT show hidden datasets
this.model.hdas.bind( 'change:state',
@@ -292,6 +297,7 @@
// fade out existing, swap with the new, fade in, set up behaviours
$( historyView ).queue( setUpQueueName, function( next ){
historyView.$el.fadeOut( 'fast', function(){ next(); });
+ //historyView.$el.show( function(){ next(); });
});
$( historyView ).queue( setUpQueueName, function( next ){
// swap over from temp div newRender
@@ -299,6 +305,7 @@
historyView.$el.append( newRender.children() );
historyView.$el.fadeIn( 'fast', function(){ next(); });
+ //historyView.$el.show( function(){ next(); });
});
$( historyView ).queue( setUpQueueName, function( next ){
this.log( historyView + ' rendered:', historyView.$el );
diff -r 1fe77194bce0c78294f2c1f10fc231faf2006a2c -r 9785184717ac1fcc589c0183f2873d40ed993c57 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -144,8 +144,7 @@
#TODO: via api
#TODO: show deleted handled by history
- 'delete' : h.url_for( controller='dataset', action='delete',
- dataset_id=encoded_id_template, show_deleted_on_refresh=show_deleted ),
+ 'delete' : h.url_for( controller='dataset', action='delete_async', dataset_id=encoded_id_template ),
# ................................................................ download links (and associated meta files),
'download' : h.url_for( controller='/dataset', action='display',
https://bitbucket.org/galaxy/galaxy-central/changeset/075ff805a33a/
changeset: 075ff805a33a
user: carlfeberhard
date: 2012-12-19 21:49:03
summary: pack scripts
affected #: 2 files
diff -r 9785184717ac1fcc589c0183f2873d40ed993c57 -r 075ff805a33a830244c4647fc9cefec663eff2b4 static/scripts/packed/mvc/dataset/hda-edit.js
--- a/static/scripts/packed/mvc/dataset/hda-edit.js
+++ b/static/scripts/packed/mvc/dataset/hda-edit.js
@@ -1,1 +1,1 @@
-var HDAEditView=HDABaseView.extend(LoggableMixin).extend({initialize:function(a){HDABaseView.prototype.initialize.call(this,a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton]},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());a.append(this._render_editButton());a.append(this._render_deleteButton());return a},_render_editButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.UPLOAD)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.editButton=null;return null}var c=this.model.get("purged"),a=this.model.get("deleted"),b={title:_l("Edit Attributes"),href:this.urls.edit,target:"galaxy_main",icon_class:"edit"};if(a||c){b.enabled=false;if(c){b.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(a){b.title=_l("Undelete dataset to edit attributes")}}}this.editButton=new IconButtonView({model:new IconButton(b)});return this.editButton.render().$el},_render_deleteButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.deleteButton=null;return null}var a=this,b=a.urls["delete"],c={title:_l("Delete"),href:b,id:"historyItemDeleter-"+this.model.get("id"),icon_class:"delete",on_click:function(){a.$el.slideUp();$.ajax({url:b,type:"POST",error:function(){a.$el.show()},success:function(){a.model.set("deleted",true);a.$el.remove()}});return false}};if(this.model.get("deleted")||this.model.get("purged")){c={title:_l("Dataset is already deleted"),icon_class:"delete",enabled:false}}this.deleteButton=new IconButtonView({model:new IconButton(c)});return this.deleteButton.render().$el},_render_hdaSummary:function(){var 
a=_.extend(this.model.toJSON(),{urls:this.urls});if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){_.extend(a,{dbkey_unknown_and_editable:true})}return HDABaseView.templates.hdaSummary(a)},_render_errButton:function(){if(this.model.get("state")!==HistoryDatasetAssociation.STATES.ERROR){this.errButton=null;return null}this.errButton=new IconButtonView({model:new IconButton({title:_l("View or report this error"),href:this.urls.report_error,target:"galaxy_main",icon_class:"bug"})});return this.errButton.render().$el},_render_rerunButton:function(){this.rerunButton=new IconButtonView({model:new IconButton({title:_l("Run this job again"),href:this.urls.rerun,target:"galaxy_main",icon_class:"arrow-circle"})});return this.rerunButton.render().$el},_render_visualizationsButton:function(){var c=this.model.get("dbkey"),a=this.model.get("visualizations"),f=this.urls.visualization,d={},g={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(c){g.dbkey=c}if(!(this.model.hasData())||!(a&&a.length)||!(f)){this.visualizationsButton=null;return null}this.visualizationsButton=new IconButtonView({model:new IconButton({title:_l("Visualize"),href:f,icon_class:"chart_curve"})});var b=this.visualizationsButton.render().$el;b.addClass("visualize-icon");function e(h){switch(h){case"trackster":return create_trackster_action_fn(f,g,c);case"scatterplot":return create_scatterplot_action_fn(f,g);default:return function(){window.parent.location=f+"/"+h+"?"+$.param(g)}}}if(a.length===1){b.attr("title",a[0]);b.click(e(a[0]))}else{_.each(a,function(i){var h=i.charAt(0).toUpperCase()+i.slice(1);d[_l(h)]=e(i)});make_popupmenu(b,d)}return b},_render_secondaryActionButtons:function(b){var c=$("<div/>"),a=this;c.attr("style","float: right;").attr("id","secondary-actions-"+this.model.get("id"));_.each(b,function(d){c.append(d.call(a))});return c},_render_tagButton:function(){if(!(this.model.hasData())||(!this.urls.tags.get)){this.tagButton=null;return null}this.tagButton=new 
IconButtonView({model:new IconButton({title:_l("Edit dataset tags"),target:"galaxy_main",href:this.urls.tags.get,icon_class:"tags"})});return this.tagButton.render().$el},_render_annotateButton:function(){if(!(this.model.hasData())||(!this.urls.annotation.get)){this.annotateButton=null;return null}this.annotateButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset annotation"),target:"galaxy_main",icon_class:"annotate"})});return this.annotateButton.render().$el},_render_tagArea:function(){if(!this.urls.tags.set){return null}return $(HDAEditView.templates.tagArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_annotationArea:function(){if(!this.urls.annotation.get){return null}return $(HDAEditView.templates.annotationArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_body_error:function(a){HDABaseView.prototype._render_body_error.call(this,a);var b=a.find("#primary-actions-"+this.model.get("id"));b.prepend(this._render_errButton())},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton,this._render_visualizationsButton]));a.append(this._render_secondaryActionButtons([this._render_tagButton,this._render_annotateButton]));a.append('<div class="clear"/>');a.append(this._render_tagArea());a.append(this._render_annotationArea());a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility","click a.icon-button.tags":"loadAndDisplayTags","click a.icon-button.annotate":"loadAndDisplayAnnotation"},loadAndDisplayTags:function(b){this.log(this+".loadAndDisplayTags",b);var 
c=this.$el.find(".tag-area"),a=c.find(".tag-elt");if(c.is(":hidden")){if(!jQuery.trim(a.html())){$.ajax({url:this.urls.tags.get,error:function(){alert(_l("Tagging failed"))},success:function(d){a.html(d);a.find(".tooltip").tooltip();c.slideDown("fast")}})}else{c.slideDown("fast")}}else{c.slideUp("fast")}return false},loadAndDisplayAnnotation:function(b){this.log(this+".loadAndDisplayAnnotation",b);var d=this.$el.find(".annotation-area"),c=d.find(".annotation-elt"),a=this.urls.annotation.set;if(d.is(":hidden")){if(!jQuery.trim(c.html())){$.ajax({url:this.urls.annotation.get,error:function(){alert(_l("Annotations failed"))},success:function(e){if(e===""){e="<em>"+_l("Describe or add notes to dataset")+"</em>"}c.html(e);d.find(".tooltip").tooltip();async_save_text(c.attr("id"),c.attr("id"),a,"new_annotation",18,true,4);d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDAView("+a+")"}});HDAEditView.templates={tagArea:Handlebars.templates["template-hda-tagArea"],annotationArea:Handlebars.templates["template-hda-annotationArea"]};function create_scatterplot_action_fn(a,b){action=function(){var d=$(window.parent.document).find("iframe#galaxy_main"),c=a+"/scatterplot?"+$.param(b);d.attr("src",c);$("div.popmenu-wrapper").remove();return false};return action}function create_trackster_action_fn(a,c,b){return function(){var d={};if(b){d["f-dbkey"]=b}$.ajax({url:a+"/list_tracks?"+$.param(d),dataType:"html",error:function(){alert(_l("Could not add this dataset to browser")+".")},success:function(e){var f=window.parent;f.show_modal(_l("View Data in a New or Saved Visualization"),"",{Cancel:function(){f.hide_modal()},"View in saved visualization":function(){f.show_modal(_l("Add Data to Saved Visualization"),e,{Cancel:function(){f.hide_modal()},"Add to visualization":function(){$(f.document).find("input[name=id]:checked").each(function(){var 
g=$(this).val();c.id=g;f.location=a+"/trackster?"+$.param(c)})}})},"View in new visualization":function(){f.location=a+"/trackster?"+$.param(c)}})}});return false}};
\ No newline at end of file
+var HDAEditView=HDABaseView.extend(LoggableMixin).extend({initialize:function(a){HDABaseView.prototype.initialize.call(this,a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton]},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());a.append(this._render_editButton());a.append(this._render_deleteButton());return a},_render_editButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.UPLOAD)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.editButton=null;return null}var c=this.model.get("purged"),a=this.model.get("deleted"),b={title:_l("Edit Attributes"),href:this.urls.edit,target:"galaxy_main",icon_class:"edit"};if(a||c){b.enabled=false;if(c){b.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(a){b.title=_l("Undelete dataset to edit attributes")}}}this.editButton=new IconButtonView({model:new IconButton(b)});return this.editButton.render().$el},_render_deleteButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.deleteButton=null;return null}var a=this,b=a.urls["delete"],c={title:_l("Delete"),href:b,id:"historyItemDeleter-"+this.model.get("id"),icon_class:"delete",on_click:function(){$.ajax({url:b,type:"POST",error:function(){a.$el.show()},success:function(){a.model.set({deleted:true})}})}};if(this.model.get("deleted")||this.model.get("purged")){c={title:_l("Dataset is already deleted"),icon_class:"delete",enabled:false}}this.deleteButton=new IconButtonView({model:new IconButton(c)});return this.deleteButton.render().$el},_render_hdaSummary:function(){var 
a=_.extend(this.model.toJSON(),{urls:this.urls});if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){_.extend(a,{dbkey_unknown_and_editable:true})}return HDABaseView.templates.hdaSummary(a)},_render_errButton:function(){if(this.model.get("state")!==HistoryDatasetAssociation.STATES.ERROR){this.errButton=null;return null}this.errButton=new IconButtonView({model:new IconButton({title:_l("View or report this error"),href:this.urls.report_error,target:"galaxy_main",icon_class:"bug"})});return this.errButton.render().$el},_render_rerunButton:function(){this.rerunButton=new IconButtonView({model:new IconButton({title:_l("Run this job again"),href:this.urls.rerun,target:"galaxy_main",icon_class:"arrow-circle"})});return this.rerunButton.render().$el},_render_visualizationsButton:function(){var c=this.model.get("dbkey"),a=this.model.get("visualizations"),f=this.urls.visualization,d={},g={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(c){g.dbkey=c}if(!(this.model.hasData())||!(a&&a.length)||!(f)){this.visualizationsButton=null;return null}this.visualizationsButton=new IconButtonView({model:new IconButton({title:_l("Visualize"),href:f,icon_class:"chart_curve"})});var b=this.visualizationsButton.render().$el;b.addClass("visualize-icon");function e(h){switch(h){case"trackster":return create_trackster_action_fn(f,g,c);case"scatterplot":return create_scatterplot_action_fn(f,g);default:return function(){window.parent.location=f+"/"+h+"?"+$.param(g)}}}if(a.length===1){b.attr("title",a[0]);b.click(e(a[0]))}else{_.each(a,function(i){var h=i.charAt(0).toUpperCase()+i.slice(1);d[_l(h)]=e(i)});make_popupmenu(b,d)}return b},_render_secondaryActionButtons:function(b){var c=$("<div/>"),a=this;c.attr("style","float: right;").attr("id","secondary-actions-"+this.model.get("id"));_.each(b,function(d){c.append(d.call(a))});return c},_render_tagButton:function(){if(!(this.model.hasData())||(!this.urls.tags.get)){this.tagButton=null;return null}this.tagButton=new 
IconButtonView({model:new IconButton({title:_l("Edit dataset tags"),target:"galaxy_main",href:this.urls.tags.get,icon_class:"tags"})});return this.tagButton.render().$el},_render_annotateButton:function(){if(!(this.model.hasData())||(!this.urls.annotation.get)){this.annotateButton=null;return null}this.annotateButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset annotation"),target:"galaxy_main",icon_class:"annotate"})});return this.annotateButton.render().$el},_render_tagArea:function(){if(!this.urls.tags.set){return null}return $(HDAEditView.templates.tagArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_annotationArea:function(){if(!this.urls.annotation.get){return null}return $(HDAEditView.templates.annotationArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_body_error:function(a){HDABaseView.prototype._render_body_error.call(this,a);var b=a.find("#primary-actions-"+this.model.get("id"));b.prepend(this._render_errButton())},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton,this._render_visualizationsButton]));a.append(this._render_secondaryActionButtons([this._render_tagButton,this._render_annotateButton]));a.append('<div class="clear"/>');a.append(this._render_tagArea());a.append(this._render_annotationArea());a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility","click a.icon-button.tags":"loadAndDisplayTags","click a.icon-button.annotate":"loadAndDisplayAnnotation"},loadAndDisplayTags:function(b){this.log(this+".loadAndDisplayTags",b);var 
c=this.$el.find(".tag-area"),a=c.find(".tag-elt");if(c.is(":hidden")){if(!jQuery.trim(a.html())){$.ajax({url:this.urls.tags.get,error:function(){alert(_l("Tagging failed"))},success:function(d){a.html(d);a.find(".tooltip").tooltip();c.slideDown("fast")}})}else{c.slideDown("fast")}}else{c.slideUp("fast")}return false},loadAndDisplayAnnotation:function(b){this.log(this+".loadAndDisplayAnnotation",b);var d=this.$el.find(".annotation-area"),c=d.find(".annotation-elt"),a=this.urls.annotation.set;if(d.is(":hidden")){if(!jQuery.trim(c.html())){$.ajax({url:this.urls.annotation.get,error:function(){alert(_l("Annotations failed"))},success:function(e){if(e===""){e="<em>"+_l("Describe or add notes to dataset")+"</em>"}c.html(e);d.find(".tooltip").tooltip();async_save_text(c.attr("id"),c.attr("id"),a,"new_annotation",18,true,4);d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDAView("+a+")"}});HDAEditView.templates={tagArea:Handlebars.templates["template-hda-tagArea"],annotationArea:Handlebars.templates["template-hda-annotationArea"]};function create_scatterplot_action_fn(a,b){action=function(){var d=$(window.parent.document).find("iframe#galaxy_main"),c=a+"/scatterplot?"+$.param(b);d.attr("src",c);$("div.popmenu-wrapper").remove();return false};return action}function create_trackster_action_fn(a,c,b){return function(){var d={};if(b){d["f-dbkey"]=b}$.ajax({url:a+"/list_tracks?"+$.param(d),dataType:"html",error:function(){alert(_l("Could not add this dataset to browser")+".")},success:function(e){var f=window.parent;f.show_modal(_l("View Data in a New or Saved Visualization"),"",{Cancel:function(){f.hide_modal()},"View in saved visualization":function(){f.show_modal(_l("Add Data to Saved Visualization"),e,{Cancel:function(){f.hide_modal()},"Add to visualization":function(){$(f.document).find("input[name=id]:checked").each(function(){var 
g=$(this).val();c.id=g;f.location=a+"/trackster?"+$.param(c)})}})},"View in new visualization":function(){f.location=a+"/trackster?"+$.param(c)}})}});return false}};
\ No newline at end of file
diff -r 9785184717ac1fcc589c0183f2873d40ed993c57 -r 075ff805a33a830244c4647fc9cefec663eff2b4 static/scripts/packed/mvc/history/history-panel.js
--- a/static/scripts/packed/mvc/history/history-panel.js
+++ b/static/scripts/packed/mvc/history/history-panel.js
@@ -1,1 +1,1 @@
-var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-tag":"loadAndDisplayTags"},initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.model.hdas.bind("change:state",function(c,d,b){if((c.inReadyState())&&(!c.get("visible"))&&(!this.storage.get("show_hidden"))){this.removeHda(c)}},this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log(this+" (prev) storage:",JSON.stringify(this.storage.get(),null,2));if(b){this.storage.set("exandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log(this+" (init'd) storage:",this.storage.get())},add:function(a){this.render()},addAll:function(){this.render()},removeHda:function(a,c){var b=this.hdaViews[a.get("id")];b.$el.fadeOut("fast",function(){b.$el.remove();if(c){c()}});this.model.hdas.remove(a)},render:function(){var 
b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates,logger:a.logger});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return 
false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render();return this.storage.get("show_deleted")},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render();return this.storage.get("show_hidden")},collapseAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
+var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-tag":"loadAndDisplayTags"},initialize:function(a){if(a.logger){this.logger=this.model.logger=a.logger}this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.model.hdas.bind("change:deleted change:hidden",this.render,this);this.model.hdas.bind("change:state",function(c,d,b){if((c.inReadyState())&&(!c.get("visible"))&&(!this.storage.get("show_hidden"))){this.removeHda(c)}},this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log(this+" (prev) storage:",JSON.stringify(this.storage.get(),null,2));if(b){this.storage.set("exandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log(this+" (init'd) storage:",this.storage.get())},add:function(a){this.render()},addAll:function(){this.render()},removeHda:function(a,c){var b=this.hdaViews[a.get("id")];b.$el.fadeOut("fast",function(){b.$el.remove();if(c){c()}});this.model.hdas.remove(a)},render:function(){var 
b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates,logger:a.logger});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return 
false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render();return this.storage.get("show_deleted")},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render();return this.storage.get("show_hidden")},collapseAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you, the recipient, have the commit-notification
service enabled for this repository.
1
0
commit/galaxy-central: carlfeberhard: .hgignore: ignore test/selenium related jar and html
by Bitbucket 19 Dec '12
by Bitbucket 19 Dec '12
19 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1fe77194bce0/
changeset: 1fe77194bce0
user: carlfeberhard
date: 2012-12-19 18:01:57
summary: .hgignore: ignore test/selenium related jar and html
affected #: 1 file
diff -r ba540beb1cedb0a5040789036bd67359fc0fa64b -r 1fe77194bce0c78294f2c1f10fc231faf2006a2c .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -74,6 +74,10 @@
*/variables.less
static/june_2007_style/blue/base_sprites.less
+# Testing
+selenium-server.jar
+selenium_results.html
+
# Documentation build files.
doc/build
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you, the recipient, have the commit-notification
service enabled for this repository.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/36ad8aa7b922/
changeset: 36ad8aa7b922
user: inithello
date: 2012-12-19 17:08:26
summary: Fix for reinstalling tool shed repository with readme files.
affected #: 1 file
diff -r cda19f585a15241a1ee91a795a995a8f1dcca188 -r 36ad8aa7b922a0ea32d27c1b2506c9d715ad53bb lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1569,11 +1569,13 @@
raw_text = response.read()
response.close()
readme_files_dict = from_json_string( raw_text )
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ repository_dependencies = metadata.get( 'repository_dependencies', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
- repository_name=name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
readme_files_dict=readme_files_dict,
repository_dependencies=repository_dependencies,
tool_dependencies=tool_dependencies )
https://bitbucket.org/galaxy/galaxy-central/changeset/ba540beb1ced/
changeset: ba540beb1ced
user: inithello
date: 2012-12-19 17:14:44
summary: Merged changes from galaxy-dist-central.
affected #: 1 file
diff -r 715a7fe08b4c415e422c179250984d727d11673f -r ba540beb1cedb0a5040789036bd67359fc0fa64b lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1569,11 +1569,13 @@
raw_text = response.read()
response.close()
readme_files_dict = from_json_string( raw_text )
+ tool_dependencies = metadata.get( 'tool_dependencies', None )
+ repository_dependencies = metadata.get( 'repository_dependencies', None )
containers_dict = suc.build_repository_containers_for_galaxy( trans=trans,
toolshed_base_url=tool_shed_url,
- repository_name=name,
- repository_owner=repository_owner,
- changeset_revision=changeset_revision,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
readme_files_dict=readme_files_dict,
repository_dependencies=repository_dependencies,
tool_dependencies=tool_dependencies )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/715a7fe08b4c/
changeset: 715a7fe08b4c
user: jgoecks
date: 2012-12-19 14:43:57
summary: Qualifier fix.
affected #: 1 file
diff -r eae248415389203907b5b951f139a200024ae069 -r 715a7fe08b4c415e422c179250984d727d11673f lib/galaxy/visualization/data_providers/registry.py
--- a/lib/galaxy/visualization/data_providers/registry.py
+++ b/lib/galaxy/visualization/data_providers/registry.py
@@ -50,7 +50,7 @@
elif isinstance( original_dataset.datatype, Vcf ):
data_provider_class = genome.RawVcfDataProvider
elif isinstance( original_dataset.datatype, Tabular ):
- data_provider_class = genome.ColumnDataProvider
+ data_provider_class = ColumnDataProvider
elif isinstance( original_dataset.datatype, ( Nexus, Newick, Phyloxml ) ):
data_provider_class = genome.PhylovizDataProvider
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
18 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/eae248415389/
changeset: eae248415389
user: clements
date: 2012-12-18 22:27:20
summary: First iteration of updating Sphinx RST files. These describe classes and modules (and ...) and are used to generate the Sphinx code documentation. However, they are not updated automatically.
Add the patch.py to automate some of the RST update process, but it is still pretty brutal. Look for cleanup in the next few releases.
affected #: 9 files
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/Makefile
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -6,6 +6,9 @@
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
+UPDATEWORKDIR = /tmp/galaxySphinxUpdate
+UPDATEWORKSOURCELIB = $(UPDATEWORKDIR)/source/lib
+SPHINXAPIDOC = sphinx-apidoc
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
@@ -21,7 +24,7 @@
$(TOOLDATASHAREDDIR)/ncbi/builds.txt \
$(TOOLDATASHAREDDIR)/ucsc/publicbuilds.txt
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext updaterst
# Sphinx wants the build files to be there; Copy the sample files into
@@ -51,9 +54,13 @@
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " updaterst to update sphinx rst to reflect code structure changes"
+
+# might also want to do
+# cd source/lib; hg revert; rm *.rst.orig; or not.
clean:
- -rm -rf $(BUILDDIR)/*
+ -rm -rf $(BUILDDIR)/* $(UPDATEWORKDIR)
html: $(TOOLDATABUILDFILES)
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@@ -165,3 +172,20 @@
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
+
+# Targets for updating the structure of the Sphinx RST doc for lib/
+
+$(UPDATEWORKSOURCELIB):
+ mkdir -p $(UPDATEWORKSOURCELIB)
+
+# Create a fresh version of the RST files for the lib, and then create a
+# unified patch file (ignore all emacs leftovers).
+# Feed that to our custom version of patch.py, which applies patches that
+# are only adds, and reports everything else to the user to deal with manually
+#
+# Note: this is still a very rough process. the run of patch.py gets some
+# errors that don't mean anything to us. And the manual process is not fun.
+updaterst: $(UPDATEWORKSOURCELIB)
+ $(SPHINXAPIDOC) -o $(UPDATEWORKSOURCELIB) ../lib
+ -diff -x '*.rst~' -ru source/lib $(UPDATEWORKSOURCELIB) > $(UPDATEWORKDIR)/alldifs.patch
+ ./patch.py $(UPDATEWORKDIR)/alldifs.patch
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/patch.py
--- /dev/null
+++ b/doc/patch.py
@@ -0,0 +1,1085 @@
+#!/usr/bin/env python
+""" Patch utility to apply unified diffs
+
+ Brute-force line-by-line non-recursive parsing
+
+ Copyright (c) 2008-2012 anatoly techtonik
+ Available under the terms of MIT license
+
+ Project home: http://code.google.com/p/python-patch/
+
+
+ $Id: patch.py 181 2012-11-23 16:03:05Z techtonik $
+ $HeadURL: https://python-patch.googlecode.com/svn/trunk/patch.py $
+
+ This program needs further tweaking for how we use it at Galaxy.
+"""
+
+__author__ = "anatoly techtonik <techtonik(a)gmail.com>"
+__version__ = "1.12.11"
+
+import copy
+import logging
+import re
+# cStringIO doesn't support unicode in 2.5
+from StringIO import StringIO
+import urllib2
+
+from os.path import exists, isfile, abspath
+import os
+import shutil
+
+#------------------------------------------------
+# Logging is controlled by logger named after the
+# module name (e.g. 'patch' for patch.py module)
+
+debugmode = False
+
+logger = logging.getLogger(__name__)
+
+debug = logger.debug
+info = logger.info
+warning = logger.warning
+
+class NullHandler(logging.Handler):
+ """ Copied from Python 2.7 to avoid getting
+ `No handlers could be found for logger "patch"`
+ http://bugs.python.org/issue16539
+ """
+ def handle(self, record):
+ pass
+ def emit(self, record):
+ pass
+ def createLock(self):
+ self.lock = None
+
+logger.addHandler(NullHandler())
+
+#------------------------------------------------
+# Constants for Patch/PatchSet types
+
+DIFF = PLAIN = "plain"
+GIT = "git"
+HG = MERCURIAL = "mercurial"
+SVN = SUBVERSION = "svn"
+# mixed type is only actual when PatchSet contains
+# Patches of different type
+MIXED = MIXED = "mixed"
+
+
+#------------------------------------------------
+# Helpers (these could come with Python stdlib)
+
+# x...() function are used to work with paths in
+# cross-platform manner - all paths use forward
+# slashes even on Windows.
+
+def xisabs(filename):
+ """ Cross-platform version of `os.path.isabs()`
+ Returns True if `filename` is absolute on
+ Linux, OS X or Windows.
+ """
+ if filename.startswith('/'): # Linux/Unix
+ return True
+ elif filename.startswith('\\'): # Windows
+ return True
+ elif re.match(r'\w:[\\/]', filename): # Windows
+ return True
+ return False
+
+def xnormpath(path):
+ """ Cross-platform version of os.path.normpath """
+ return os.path.normpath(path).replace(os.sep, '/')
+
+def xstrip(filename):
+ """ Make relative path out of absolute by stripping
+ prefixes used on Linux, OS X and Windows.
+
+ This function is critical for security.
+ """
+ while xisabs(filename):
+ # strip windows drive with all slashes
+ if re.match(r'\w:[\\/]', filename):
+ filename = re.sub(r'^\w+:[\\/]+', '', filename)
+ # strip all slashes
+ elif re.match(r'[\\/]', filename):
+ filename = re.sub(r'^[\\/]+', '', filename)
+ return filename
+
+#-----------------------------------------------
+# Main API functions
+
+def fromfile(filename):
+ """ Parse patch file. If successful, returns
+ PatchSet() object. Otherwise returns False.
+ """
+ patchset = PatchSet()
+ debug("reading %s" % filename)
+ fp = open(filename, "rb")
+ res = patchset.parse(fp)
+ fp.close()
+ if res == True:
+ return patchset
+ return False
+
+
+def fromstring(s):
+ """ Parse text string and return PatchSet()
+ object (or False if parsing fails)
+ """
+ ps = PatchSet( StringIO(s) )
+ if ps.errors == 0:
+ return ps
+ return False
+
+
+def fromurl(url):
+ """ Parse patch from an URL, return False
+        if an error occurred. Note that this also
+ can throw urlopen() exceptions.
+ """
+ ps = PatchSet( urllib2.urlopen(url) )
+ if ps.errors == 0:
+ return ps
+ return False
+
+
+# --- Utility functions ---
+# [ ] reuse more universal pathsplit()
+def pathstrip(path, n):
+ """ Strip n leading components from the given path """
+ pathlist = [path]
+ while os.path.dirname(pathlist[0]) != '':
+ pathlist[0:1] = os.path.split(pathlist[0])
+ return '/'.join(pathlist[n:])
+# --- /Utility function ---
+
+
+class Hunk(object):
+ """ Parsed hunk data container (hunk starts with @@ -R +R @@) """
+
+ def __init__(self):
+ self.startsrc=None #: line count starts with 1
+ self.linessrc=None
+ self.starttgt=None
+ self.linestgt=None
+ self.invalid=False
+ self.hasplus=False # True if any "+" lines in hunk
+ self.hasminus=False # True if any "-" lines in hunk
+ self.text=[]
+
+ def originalText(self):
+
+ return("@@ -" + str(self.startsrc) +
+ "," + str(self.linessrc) +
+ " +" + str(self.starttgt) +
+ "," + str(self.linestgt) +
+ "\n" +
+ self.printableText())
+
+ def printableText(self):
+ """Reformat text into printable text"""
+
+ # yeah, there must be a better way to do this.
+ printable = ""
+ for line in self.text:
+ printable += line
+
+ return printable
+
+
+
+# def apply(self, estream):
+# """ write hunk data into enumerable stream
+# return strings one by one until hunk is
+# over
+#
+# enumerable stream are tuples (lineno, line)
+# where lineno starts with 0
+# """
+# pass
+
+
+class Patch(object):
+ """ Patch for a single file """
+ def __init__(self):
+ self.source = None
+ self.target = None
+ self.hunks = []
+ self.hunkends = []
+ self.header = []
+
+ self.type = None
+
+
+class PatchSet(object):
+
+ def __init__(self, stream=None):
+ # --- API accessible fields ---
+
+ # name of the PatchSet (filename or ...)
+ self.name = None
+ # patch set type - one of constants
+ self.type = None
+
+ # list of Patch objects
+ self.items = []
+
+ self.errors = 0 # fatal parsing errors
+ self.warnings = 0 # non-critical warnings
+ # --- /API ---
+
+ if stream:
+ self.parse(stream)
+
+ def __len__(self):
+ return len(self.items)
+
+ def parse(self, stream):
+ """ parse unified diff
+ return True on success
+ """
+ lineends = dict(lf=0, crlf=0, cr=0)
+ nexthunkno = 0 #: even if index starts with 0 user messages number hunks from 1
+
+ p = None
+ hunk = None
+ # hunkactual variable is used to calculate hunk lines for comparison
+ hunkactual = dict(linessrc=None, linestgt=None)
+
+
+ class wrapumerate(enumerate):
+ """Enumerate wrapper that uses boolean end of stream status instead of
+ StopIteration exception, and properties to access line information.
+ """
+
+ def __init__(self, *args, **kwargs):
+ # we don't call parent, it is magically created by __new__ method
+
+ self._exhausted = False
+ self._lineno = False # after end of stream equal to the num of lines
+ self._line = False # will be reset to False after end of stream
+
+ def next(self):
+ """Try to read the next line and return True if it is available,
+ False if end of stream is reached."""
+ if self._exhausted:
+ return False
+
+ try:
+ self._lineno, self._line = super(wrapumerate, self).next()
+ except StopIteration:
+ self._exhausted = True
+ self._line = False
+ return False
+ return True
+
+ @property
+ def is_empty(self):
+ return self._exhausted
+
+ @property
+ def line(self):
+ return self._line
+
+ @property
+ def lineno(self):
+ return self._lineno
+
+ # define states (possible file regions) that direct parse flow
+ headscan = True # start with scanning header
+ filenames = False # lines starting with --- and +++
+
+ hunkhead = False # @@ -R +R @@ sequence
+ hunkbody = False #
+ hunkskip = False # skipping invalid hunk mode
+
+ hunkparsed = False # state after successfully parsed hunk
+
+ # regexp to match start of hunk, used groups - 1,3,4,6
+ re_hunk_start = re.compile("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?")
+
+ self.errors = 0
+ # temp buffers for header and filenames info
+ header = []
+ srcname = None
+ tgtname = None
+
+ # start of main cycle
+ # each parsing block already has line available in fe.line
+ fe = wrapumerate(stream)
+ while fe.next():
+
+ # -- deciders: these only switch state to decide who should process
+ # -- line fetched at the start of this cycle
+ if hunkparsed:
+ hunkparsed = False
+ if re_hunk_start.match(fe.line):
+ hunkhead = True
+ elif fe.line.startswith("--- "):
+ filenames = True
+ else:
+ headscan = True
+ # -- ------------------------------------
+
+ # read out header
+ if headscan:
+ while not fe.is_empty and not fe.line.startswith("--- "):
+ header.append(fe.line)
+ fe.next()
+ if fe.is_empty:
+ if p == None:
+ debug("no patch data found") # error is shown later
+ self.errors += 1
+ else:
+ info("%d unparsed bytes left at the end of stream" % len(''.join(header)))
+ self.warnings += 1
+ # TODO check for \No new line at the end..
+ # TODO test for unparsed bytes
+ # otherwise error += 1
+ # this is actually a loop exit
+ continue
+
+ headscan = False
+ # switch to filenames state
+ filenames = True
+
+ line = fe.line
+ lineno = fe.lineno
+
+
+ # hunkskip and hunkbody code skipped until definition of hunkhead is parsed
+ if hunkbody:
+ # [x] treat empty lines inside hunks as containing single space
+ # (this happens when diff is saved by copy/pasting to editor
+ # that strips trailing whitespace)
+ if line.strip("\r\n") == "":
+ debug("expanding empty line in a middle of hunk body")
+ self.warnings += 1
+ line = ' ' + line
+
+ # process line first
+ if re.match(r"^[- \+\\]", line):
+ # gather stats about line endings
+ if line.endswith("\r\n"):
+ p.hunkends["crlf"] += 1
+ elif line.endswith("\n"):
+ p.hunkends["lf"] += 1
+ elif line.endswith("\r"):
+ p.hunkends["cr"] += 1
+
+ if line.startswith("-"):
+ hunkactual["linessrc"] += 1
+ hunk.hasminus = True
+ elif line.startswith("+"):
+ hunkactual["linestgt"] += 1
+ hunk.hasplus = True
+ elif not line.startswith("\\"):
+ hunkactual["linessrc"] += 1
+ hunkactual["linestgt"] += 1
+ hunk.text.append(line)
+ # todo: handle \ No newline cases
+ else:
+ warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, p.target))
+ # add hunk status node
+ hunk.invalid = True
+ p.hunks.append(hunk)
+ self.errors += 1
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+
+ # check exit conditions
+ if hunkactual["linessrc"] > hunk.linessrc or hunkactual["linestgt"] > hunk.linestgt:
+ warning("extra lines for hunk no.%d at %d for target %s" % (nexthunkno, lineno+1, p.target))
+ # add hunk status node
+ hunk.invalid = True
+ p.hunks.append(hunk)
+ self.errors += 1
+ # switch to hunkskip state
+ hunkbody = False
+ hunkskip = True
+ elif hunk.linessrc == hunkactual["linessrc"] and hunk.linestgt == hunkactual["linestgt"]:
+ # hunk parsed successfully
+ p.hunks.append(hunk)
+ # switch to hunkparsed state
+ hunkbody = False
+ hunkparsed = True
+
+ # detect mixed window/unix line ends
+ ends = p.hunkends
+ if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
+ warning("inconsistent line ends in patch hunks for %s" % p.source)
+ self.warnings += 1
+ if debugmode:
+ debuglines = dict(ends)
+ debuglines.update(file=p.target, hunk=nexthunkno)
+ debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)
+ # fetch next line
+ continue
+
+ if hunkskip:
+ if re_hunk_start.match(line):
+ # switch to hunkhead state
+ hunkskip = False
+ hunkhead = True
+ elif line.startswith("--- "):
+ # switch to filenames state
+ hunkskip = False
+ filenames = True
+ if debugmode and len(self.items) > 0:
+ debug("- %2d hunks for %s" % (len(p.hunks), p.source))
+
+ if filenames:
+ if line.startswith("--- "):
+ if srcname != None:
+ # XXX testcase
+ warning("skipping false patch for %s" % srcname)
+ srcname = None
+ # XXX header += srcname
+ # double source filename line is encountered
+ # attempt to restart from this second line
+ re_filename = "^--- ([^\t]+)"
+ match = re.match(re_filename, line)
+ # todo: support spaces in filenames
+ if match:
+ srcname = match.group(1).strip()
+ else:
+ warning("skipping invalid filename at line %d" % lineno)
+ self.errors += 1
+ # XXX p.header += line
+ # switch back to headscan state
+ filenames = False
+ headscan = True
+ elif not line.startswith("+++ "):
+ if srcname != None:
+ warning("skipping invalid patch with no target for %s" % srcname)
+ self.errors += 1
+ srcname = None
+ # XXX header += srcname
+ # XXX header += line
+ else:
+ # this should be unreachable
+ warning("skipping invalid target patch")
+ filenames = False
+ headscan = True
+ else:
+ if tgtname != None:
+ # XXX seems to be a dead branch
+ warning("skipping invalid patch - double target at line %d" % lineno)
+ self.errors += 1
+ srcname = None
+ tgtname = None
+ # XXX header += srcname
+ # XXX header += tgtname
+ # XXX header += line
+ # double target filename line is encountered
+ # switch back to headscan state
+ filenames = False
+ headscan = True
+ else:
+ re_filename = "^\+\+\+ ([^\t]+)"
+ match = re.match(re_filename, line)
+ if not match:
+ warning("skipping invalid patch - no target filename at line %d" % lineno)
+ self.errors += 1
+ srcname = None
+ # switch back to headscan state
+ filenames = False
+ headscan = True
+ else:
+ if p: # for the first run p is None
+ self.items.append(p)
+ p = Patch()
+ p.source = srcname
+ srcname = None
+ p.target = match.group(1).strip()
+ p.header = header
+ header = []
+ # switch to hunkhead state
+ filenames = False
+ hunkhead = True
+ nexthunkno = 0
+ p.hunkends = lineends.copy()
+ continue
+
+ if hunkhead:
+ match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
+ if not match:
+ if not p.hunks:
+ warning("skipping invalid patch with no hunks for file %s" % p.source)
+ self.errors += 1
+ # XXX review switch
+ # switch to headscan state
+ hunkhead = False
+ headscan = True
+ continue
+ else:
+ # TODO review condition case
+ # switch to headscan state
+ hunkhead = False
+ headscan = True
+ else:
+ hunk = Hunk()
+ hunk.startsrc = int(match.group(1))
+ hunk.linessrc = 1
+ if match.group(3): hunk.linessrc = int(match.group(3))
+ hunk.starttgt = int(match.group(4))
+ hunk.linestgt = 1
+ if match.group(6): hunk.linestgt = int(match.group(6))
+ hunk.invalid = False
+ hunk.text = []
+
+ hunkactual["linessrc"] = hunkactual["linestgt"] = 0
+
+ # switch to hunkbody state
+ hunkhead = False
+ hunkbody = True
+ nexthunkno += 1
+ continue
+
+ # /while fe.next()
+
+ if p:
+ self.items.append(p)
+
+ if not hunkparsed:
+ if hunkskip:
+ warning("warning: finished with errors, some hunks may be invalid")
+ elif headscan:
+ if len(self.items) == 0:
+ warning("error: no patch data found!")
+ return False
+ else: # extra data at the end of file
+ pass
+ else:
+ warning("error: patch stream is incomplete!")
+ self.errors += 1
+ if len(self.items) == 0:
+ return False
+
+ if debugmode and len(self.items) > 0:
+ debug("- %2d hunks for %s" % (len(p.hunks), p.source))
+
+ # XXX fix total hunks calculation
+ debug("total files: %d total hunks: %d" % (len(self.items),
+ sum(len(p.hunks) for p in self.items)))
+
+ # ---- detect patch and patchset types ----
+ for idx, p in enumerate(self.items):
+ self.items[idx].type = self._detect_type(p)
+
+ types = set([p.type for p in self.items])
+ if len(types) > 1:
+ self.type = MIXED
+ else:
+ self.type = types.pop()
+ # --------
+
+ self._normalize_filenames()
+
+ return (self.errors == 0)
+
+ def _detect_type(self, p):
+ """ detect and return type for the specified Patch object
+ analyzes header and filenames info
+
+ NOTE: must be run before filenames are normalized
+ """
+
+ # check for SVN
+ # - header starts with Index:
+ # - next line is ===... delimiter
+ # - filename is followed by revision number
+ # TODO add SVN revision
+ if (len(p.header) > 1 and p.header[-2].startswith("Index: ")
+ and p.header[-1].startswith("="*67)):
+ return SVN
+
+ # common checks for both HG and GIT
+ DVCS = ((p.source.startswith('a/') or p.source == '/dev/null')
+ and (p.target.startswith('b/') or p.target == '/dev/null'))
+
+ # GIT type check
+ # - header[-2] is like "diff --git a/oldname b/newname"
+ # - header[-1] is like "index <hash>..<hash><mode>"
+ # TODO add git rename diffs and add/remove diffs
+ # add git diff with spaced filename
+ # TODO http://www.kernel.org/pub/software/scm/git/docs/git-diff.html
+
+ # detect the start of diff header - there might be some comments before
+ if len(p.header) > 1:
+ for idx in reversed(range(len(p.header))):
+ if p.header[idx].startswith("diff --git"):
+ break
+ if re.match(r'diff --git a/[\w/.]+ b/[\w/.]+', p.header[idx]):
+ if (idx+1 < len(p.header)
+ and re.match(r'index \w{7}..\w{7} \d{6}', p.header[idx+1])):
+ if DVCS:
+ return GIT
+
+ # HG check
+ #
+ # - for plain HG format header is like "diff -r b2d9961ff1f5 filename"
+ # - for Git-style HG patches it is "diff --git a/oldname b/newname"
+ # - filename starts with a/, b/ or is equal to /dev/null
+ # - exported changesets also contain the header
+ # # HG changeset patch
+ # # User name(a)example.com
+ # ...
+ # TODO add MQ
+ # TODO add revision info
+ if len(p.header) > 0:
+ if DVCS and re.match(r'diff -r \w{12} .*', p.header[-1]):
+ return HG
+ if DVCS and p.header[-1].startswith('diff --git a/'):
+ if len(p.header) == 1: # native Git patch header len is 2
+ return HG
+ elif p.header[0].startswith('# HG changeset patch'):
+ return HG
+
+ return PLAIN
+
+
+ def _normalize_filenames(self):
+ """ sanitize filenames, normalizing paths, i.e.:
+ 1. strip a/ and b/ prefixes from GIT and HG style patches
+ 2. remove all references to parent directories (with warning)
+ 3. translate any absolute paths to relative (with warning)
+
+ [x] always use forward slashes to be crossplatform
+ (diff/patch were born as a unix utility after all)
+
+ return None
+ """
+ for i,p in enumerate(self.items):
+ if p.type in (HG, GIT):
+ # TODO: figure out how to deal with /dev/null entries
+ debug("stripping a/ and b/ prefixes")
+ if p.source != '/dev/null':
+ if not p.source.startswith("a/"):
+ warning("invalid source filename")
+ else:
+ p.source = p.source[2:]
+ if p.target != '/dev/null':
+ if not p.target.startswith("b/"):
+ warning("invalid target filename")
+ else:
+ p.target = p.target[2:]
+
+ p.source = xnormpath(p.source)
+ p.target = xnormpath(p.target)
+
+ sep = '/' # sep value can be hardcoded, but it looks nice this way
+
+ # references to parent are not allowed
+ if p.source.startswith(".." + sep):
+ warning("error: stripping parent path for source file patch no.%d" % (i+1))
+ self.warnings += 1
+ while p.source.startswith(".." + sep):
+ p.source = p.source.partition(sep)[2]
+ if p.target.startswith(".." + sep):
+ warning("error: stripping parent path for target file patch no.%d" % (i+1))
+ self.warnings += 1
+ while p.target.startswith(".." + sep):
+ p.target = p.target.partition(sep)[2]
+ # absolute paths are not allowed
+ if xisabs(p.source) or xisabs(p.target):
+ warning("error: absolute paths are not allowed - file no.%d" % (i+1))
+ self.warnings += 1
+ if xisabs(p.source):
+ warning("stripping absolute path from source name '%s'" % p.source)
+ p.source = xstrip(p.source)
+ if xisabs(p.target):
+ warning("stripping absolute path from target name '%s'" % p.target)
+ p.target = xstrip(p.target)
+
+ self.items[i].source = p.source
+ self.items[i].target = p.target
+
+
+ def diffstat(self):
+ """ calculate diffstat and return as a string
+ Notes:
+        - original diffstat outputs target filename
+ - single + or - shouldn't escape histogram
+ """
+ names = []
+ insert = []
+ delete = []
+ namelen = 0
+ maxdiff = 0 # max number of changes for single file
+ # (for histogram width calculation)
+ for patch in self.items:
+ i,d = 0,0
+ for hunk in patch.hunks:
+ for line in hunk.text:
+ if line.startswith('+'):
+ i += 1
+ elif line.startswith('-'):
+ d += 1
+ names.append(patch.target)
+ insert.append(i)
+ delete.append(d)
+ namelen = max(namelen, len(patch.target))
+ maxdiff = max(maxdiff, i+d)
+ output = ''
+ statlen = len(str(maxdiff)) # stats column width
+ for i,n in enumerate(names):
+ # %-19s | %-4d %s
+ format = " %-" + str(namelen) + "s | %" + str(statlen) + "s %s\n"
+
+ hist = ''
+ # -- calculating histogram --
+ width = len(format % ('', '', ''))
+ histwidth = max(2, 80 - width)
+ if maxdiff < histwidth:
+ hist = "+"*insert[i] + "-"*delete[i]
+ else:
+ iratio = (float(insert[i]) / maxdiff) * histwidth
+ dratio = (float(delete[i]) / maxdiff) * histwidth
+
+ # make sure every entry gets at least one + or -
+ iwidth = 1 if 0 < iratio < 1 else int(iratio)
+ dwidth = 1 if 0 < dratio < 1 else int(dratio)
+ #print iratio, dratio, iwidth, dwidth, histwidth
+ hist = "+"*int(iwidth) + "-"*int(dwidth)
+ # -- /calculating +- histogram --
+ output += (format % (names[i], insert[i] + delete[i], hist))
+
+ output += (" %d files changed, %d insertions(+), %d deletions(-)"
+ % (len(names), sum(insert), sum(delete)))
+ return output
+
+
+ def apply(self, strip=0):
+ """ apply parsed patch
+ return True on success
+ """
+
+ total = len(self.items)
+ errors = 0
+ if strip:
+ # [ ] test strip level exceeds nesting level
+ # [ ] test the same only for selected files
+ # [ ] test if files end up being on the same level
+ try:
+ strip = int(strip)
+ except ValueError:
+ errors += 1
+ warning("error: strip parameter '%s' must be an integer" % strip)
+ strip = 0
+
+ #for fileno, filename in enumerate(self.source):
+ for i,p in enumerate(self.items):
+ f2patch = p.source
+ if strip:
+ debug("stripping %s leading component from '%s'" % (strip, f2patch))
+ f2patch = pathstrip(f2patch, strip)
+ if not exists(f2patch):
+ f2patch = p.target
+ if strip:
+ debug("stripping %s leading component from '%s'" % (strip, f2patch))
+ f2patch = pathstrip(f2patch, strip)
+ if not exists(f2patch):
+ warning("source/target file does not exist\n--- %s\n+++ %s" % (p.source, f2patch))
+ errors += 1
+ continue
+ if not isfile(f2patch):
+ warning("not a file - %s" % f2patch)
+ errors += 1
+ continue
+ filename = f2patch
+
+ debug("processing %d/%d:\t %s" % (i+1, total, filename))
+
+ # validate before patching
+ f2fp = open(filename)
+ hunkno = 0
+ hunk = p.hunks[hunkno]
+ hunkfind = []
+ hunkreplace = []
+ validhunks = 0
+ canpatch = False
+ for lineno, line in enumerate(f2fp):
+ if lineno+1 < hunk.startsrc:
+ continue
+ elif lineno+1 == hunk.startsrc:
+ hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
+ hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
+ #pprint(hunkreplace)
+ hunklineno = 0
+
+ # todo \ No newline at end of file
+
+ # check hunks in source file
+ if lineno+1 < hunk.startsrc+len(hunkfind)-1:
+ if line.rstrip("\r\n") == hunkfind[hunklineno]:
+ hunklineno+=1
+ else:
+ info("file %d/%d:\t %s" % (i+1, total, filename))
+ info(" hunk no.%d doesn't match source file at line %d" % (hunkno+1, lineno))
+ info(" expected: %s" % hunkfind[hunklineno])
+ info(" actual : %s" % line.rstrip("\r\n"))
+ # not counting this as error, because file may already be patched.
+ # check if file is already patched is done after the number of
+ # invalid hunks if found
+ # TODO: check hunks against source/target file in one pass
+ # API - check(stream, srchunks, tgthunks)
+ # return tuple (srcerrs, tgterrs)
+
+ # continue to check other hunks for completeness
+ hunkno += 1
+ if hunkno < len(p.hunks):
+ hunk = p.hunks[hunkno]
+ continue
+ else:
+ break
+
+ # check if processed line is the last line
+ if lineno+1 == hunk.startsrc+len(hunkfind)-1:
+ debug(" hunk no.%d for file %s -- is ready to be patched" % (hunkno+1, filename))
+ hunkno+=1
+ validhunks+=1
+ if hunkno < len(p.hunks):
+ hunk = p.hunks[hunkno]
+ else:
+ if validhunks == len(p.hunks):
+ # patch file
+ canpatch = True
+ break
+ else:
+ if hunkno < len(p.hunks):
+ warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
+ errors += 1
+
+ f2fp.close()
+
+ if validhunks < len(p.hunks):
+ if self._match_file_hunks(filename, p.hunks):
+ warning("already patched %s" % filename)
+ else:
+ warning("source file is different - %s" % filename)
+ errors += 1
+ if canpatch:
+ backupname = filename+".orig"
+ if exists(backupname):
+ warning("can't backup original file to %s - aborting" % backupname)
+ else:
+ import shutil
+ shutil.move(filename, backupname)
+ if self.write_hunks(backupname, filename, p.hunks):
+ info("successfully patched %d/%d:\t %s" % (i+1, total, filename))
+ os.unlink(backupname)
+ else:
+ errors += 1
+ warning("error patching file %s" % filename)
+ shutil.copy(filename, filename+".invalid")
+ warning("invalid version is saved to %s" % filename+".invalid")
+ # todo: proper rejects
+ shutil.move(backupname, filename)
+
+ # todo: check for premature eof
+ return (errors == 0)
+
+
+ def can_patch(self, filename):
+ """ Check if specified filename can be patched. Returns None if file can
+ not be found among source filenames. False if patch can not be applied
+ clearly. True otherwise.
+
+ :returns: True, False or None
+ """
+ filename = abspath(filename)
+ for p in self.items:
+ if filename == abspath(p.source):
+ return self._match_file_hunks(filename, p.hunks)
+ return None
+
+
+ def _match_file_hunks(self, filepath, hunks):
+ matched = True
+ fp = open(abspath(filepath))
+
+ class NoMatch(Exception):
+ pass
+
+ lineno = 1
+ line = fp.readline()
+ hno = None
+ try:
+ for hno, h in enumerate(hunks):
+ # skip to first line of the hunk
+ while lineno < h.starttgt:
+ if not len(line): # eof
+ debug("check failed - premature eof before hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+ for hline in h.text:
+ if hline.startswith("-"):
+ continue
+ if not len(line):
+ debug("check failed - premature eof on hunk: %d" % (hno+1))
+ # todo: \ No newline at the end of file
+ raise NoMatch
+ if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
+ debug("file is not patched - failed hunk: %d" % (hno+1))
+ raise NoMatch
+ line = fp.readline()
+ lineno += 1
+
+ except NoMatch:
+ matched = False
+ # todo: display failed hunk, i.e. expected/found
+
+ fp.close()
+ return matched
+
+
+ def patch_stream(self, instream, hunks):
+ """ Generator that yields stream patched with hunks iterable
+
+ Converts lineends in hunk lines to the best suitable format
+ autodetected from input
+ """
+
+ # todo: At the moment substituted lineends may not be the same
+ # at the start and at the end of patching. Also issue a
+ # warning/throw about mixed lineends (is it really needed?)
+
+ hunks = iter(hunks)
+
+ srclineno = 1
+
+ lineends = {'\n':0, '\r\n':0, '\r':0}
+ def get_line():
+ """
+ local utility function - return line from source stream
+ collecting line end statistics on the way
+ """
+ line = instream.readline()
+ # 'U' mode works only with text files
+ if line.endswith("\r\n"):
+ lineends["\r\n"] += 1
+ elif line.endswith("\n"):
+ lineends["\n"] += 1
+ elif line.endswith("\r"):
+ lineends["\r"] += 1
+ return line
+
+ for hno, h in enumerate(hunks):
+ debug("hunk %d" % (hno+1))
+ if h.hasminus:
+ warning("Change removes/replaces some text; INVESTIGATE AND APPLY (OR NOT) MANUALLY")
+ warning("Change:")
+ changeText = h.originalText()
+ if len(changeText) > 1000:
+ changeText = changeText[0:999] + "...\n"
+ warning(changeText)
+ else:
+ # skip to line just before hunk starts
+ while srclineno < h.startsrc:
+ yield get_line()
+ srclineno += 1
+
+ for hline in h.text:
+ # todo: check \ No newline at the end of file
+ if hline.startswith("-") or hline.startswith("\\"):
+ get_line()
+ srclineno += 1
+ continue
+ else:
+ if not hline.startswith("+"):
+ get_line()
+ srclineno += 1
+ line2write = hline[1:]
+ # detect if line ends are consistent in source file
+ if sum([bool(lineends[x]) for x in lineends]) == 1:
+ newline = [x for x in lineends if lineends[x] != 0][0]
+ yield line2write.rstrip("\r\n")+newline
+ else: # newlines are mixed
+ yield line2write
+
+ for line in instream:
+ yield line
+
+
+ def write_hunks(self, srcname, tgtname, hunks):
+ src = open(srcname, "rb")
+ tgt = open(tgtname, "wb")
+
+ debug("processing target file %s" % tgtname)
+
+ tgt.writelines(self.patch_stream(src, hunks))
+
+ tgt.close()
+ src.close()
+ # [ ] TODO: add test for permission copy
+ shutil.copymode(srcname, tgtname)
+ return True
+
+
+
+if __name__ == "__main__":
+ from optparse import OptionParser
+ from os.path import exists
+ import sys
+
+ opt = OptionParser(usage="1. %prog [options] unified.diff\n"
+ " 2. %prog [options] http://host/patch\n"
+ " 3. %prog [options] -- < unified.diff",
+ version="python-patch %s" % __version__)
+ opt.add_option("-q", "--quiet", action="store_const", dest="verbosity",
+ const=0, help="print only warnings and errors", default=1)
+ opt.add_option("-v", "--verbose", action="store_const", dest="verbosity",
+ const=2, help="be verbose")
+ opt.add_option("--debug", action="store_true", dest="debugmode", help="debug mode")
+ opt.add_option("--diffstat", action="store_true", dest="diffstat",
+ help="print diffstat and exit")
+ opt.add_option("-p", "--strip", type="int", metavar='N', default=0,
+ help="strip N path components from filenames")
+ (options, args) = opt.parse_args()
+
+ if not args and sys.argv[-1:] != ['--']:
+ opt.print_version()
+ opt.print_help()
+ sys.exit()
+ readstdin = (sys.argv[-1:] == ['--'] and not args)
+
+ debugmode = options.debugmode
+
+ verbosity_levels = {0:logging.WARNING, 1:logging.INFO, 2:logging.DEBUG}
+ loglevel = verbosity_levels[options.verbosity]
+ logformat = "%(message)s"
+ if debugmode:
+ loglevel = logging.DEBUG
+ logformat = "%(levelname)8s %(message)s"
+ logger.setLevel(loglevel)
+ loghandler = logging.StreamHandler()
+ loghandler.setFormatter(logging.Formatter(logformat))
+ logger.addHandler(loghandler)
+
+
+ if readstdin:
+ patch = PatchSet(sys.stdin)
+ else:
+ patchfile = args[0]
+ urltest = patchfile.split(':')[0]
+ if (':' in patchfile and urltest.isalpha()
+ and len(urltest) > 1): # one char before : is a windows drive letter
+ patch = fromurl(patchfile)
+ else:
+ if not exists(patchfile) or not isfile(patchfile):
+ sys.exit("patch file does not exist - %s" % patchfile)
+ patch = fromfile(patchfile)
+
+ if options.diffstat:
+ print patch.diffstat()
+ sys.exit(0)
+
+ #pprint(patch)
+ patch.apply(options.strip) or sys.exit(-1)
+
+ # todo: document and test line ends handling logic - patch.py detects proper line-endings
+# for inserted hunks and issues a warning if patched file has inconsistent line ends
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.jobs.rst
--- a/doc/source/lib/galaxy.jobs.rst
+++ b/doc/source/lib/galaxy.jobs.rst
@@ -48,6 +48,7 @@
galaxy.jobs.actions
galaxy.jobs.deferred
+ galaxy.jobs.rules
galaxy.jobs.runners
galaxy.jobs.splitters
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.jobs.runners.rst
--- a/doc/source/lib/galaxy.jobs.runners.rst
+++ b/doc/source/lib/galaxy.jobs.runners.rst
@@ -57,14 +57,6 @@
:undoc-members:
:show-inheritance:
-:mod:`sge` Module
------------------
-
-.. automodule:: galaxy.jobs.runners.sge
- :members:
- :undoc-members:
- :show-inheritance:
-
:mod:`tasks` Module
-------------------
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.tool_shed.rst
--- a/doc/source/lib/galaxy.tool_shed.rst
+++ b/doc/source/lib/galaxy.tool_shed.rst
@@ -9,6 +9,14 @@
:undoc-members:
:show-inheritance:
+:mod:`common_util` Module
+-------------------------
+
+.. automodule:: galaxy.tool_shed.common_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`encoding_util` Module
---------------------------
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.util.rst
--- a/doc/source/lib/galaxy.util.rst
+++ b/doc/source/lib/galaxy.util.rst
@@ -25,6 +25,14 @@
:undoc-members:
:show-inheritance:
+:mod:`debugging` Module
+-----------------------
+
+.. automodule:: galaxy.util.debugging
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`expressions` Module
-------------------------
@@ -113,6 +121,14 @@
:undoc-members:
:show-inheritance:
+:mod:`shed_util_common` Module
+------------------------------
+
+.. automodule:: galaxy.util.shed_util_common
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`streamball` Module
------------------------
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.webapps.community.rst
--- a/doc/source/lib/galaxy.webapps.community.rst
+++ b/doc/source/lib/galaxy.webapps.community.rst
@@ -42,4 +42,5 @@
galaxy.webapps.community.framework
galaxy.webapps.community.model
galaxy.webapps.community.security
+ galaxy.webapps.community.util
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.webapps.community.util.rst
--- /dev/null
+++ b/doc/source/lib/galaxy.webapps.community.util.rst
@@ -0,0 +1,27 @@
+util Package
+============
+
+:mod:`container_util` Module
+----------------------------
+
+.. automodule:: galaxy.webapps.community.util.container_util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`hgweb_config` Module
+--------------------------
+
+.. automodule:: galaxy.webapps.community.util.hgweb_config
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+:mod:`shed_statistics` Module
+-----------------------------
+
+.. automodule:: galaxy.webapps.community.util.shed_statistics
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff -r de8bc5bde938c937759ced066ef28ce1f6202641 -r eae248415389203907b5b951f139a200024ae069 doc/source/lib/galaxy.webapps.galaxy.api.rst
--- a/doc/source/lib/galaxy.webapps.galaxy.api.rst
+++ b/doc/source/lib/galaxy.webapps.galaxy.api.rst
@@ -293,6 +293,14 @@
:undoc-members:
:show-inheritance:
+:mod:`item_tags` Module
+-----------------------
+
+.. automodule:: galaxy.webapps.galaxy.api.item_tags
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`libraries` Module
-----------------------
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/da270e660db5/
changeset: da270e660db5
user: natefoo
date: 2012-12-18 21:41:10
summary: Backed out changeset 0d6653d29222, this needs to go to -dist so it will be committed on the current dist parent and then merged back to -central.
affected #: 1 file
diff -r 0d6653d29222880e3ade27bd055569ccebc7234e -r da270e660db5594d14205ac7a2dfd8f3b0fcc649 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1511,7 +1511,6 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
- message = ''
repository_id = kwd[ 'id' ]
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
@@ -1552,14 +1551,14 @@
tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
- message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
+ message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
% ( tool_shed_repository.name, original_section_name )
message += "Uncheck the <b>No changes</b> check box and select a different tool panel section to load the tools in a "
- message += "different section in the tool panel. "
+ message += "different section in the tool panel."
status = 'warning'
else:
- message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
- message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
+ message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
+ message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
status = 'warning'
if metadata and 'readme_files' in metadata:
url = suc.url_join( tool_shed_url,
@@ -1583,9 +1582,9 @@
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
- if tool_shed_repository.includes_tool_dependencies:
- message += "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
- message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository. "
+ if includes_tool_dependencies:
+ message = "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
+ message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository."
status = "warning"
install_tool_dependencies_check_box_checked = False
else:
https://bitbucket.org/galaxy/galaxy-central/changeset/cda19f585a15/
changeset: cda19f585a15
user: natefoo
date: 2012-12-18 21:41:41
summary: From Dave B.: Fix for reinstalling repository with tool dependencies when tool_dependency_dir is not set.
affected #: 1 file
diff -r 140b1d164818a3ecba581b9e313a1b846f120164 -r cda19f585a15241a1ee91a795a995a8f1dcca188 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1511,6 +1511,7 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
+ message = ''
repository_id = kwd[ 'id' ]
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
@@ -1551,14 +1552,14 @@
tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
- message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
+ message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
% ( tool_shed_repository.name, original_section_name )
message += "Uncheck the <b>No changes</b> check box and select a different tool panel section to load the tools in a "
- message += "different section in the tool panel."
+ message += "different section in the tool panel. "
status = 'warning'
else:
- message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
- message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
+ message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
+ message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
if metadata and 'readme_files' in metadata:
url = suc.url_join( tool_shed_url,
@@ -1582,9 +1583,9 @@
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
- if includes_tool_dependencies:
- message = "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
- message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository."
+ if tool_shed_repository.includes_tool_dependencies:
+ message += "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
+ message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository. "
status = "warning"
install_tool_dependencies_check_box_checked = False
else:
https://bitbucket.org/galaxy/galaxy-central/changeset/de8bc5bde938/
changeset: de8bc5bde938
user: natefoo
date: 2012-12-18 21:42:24
summary: Merged changes from galaxy-dist-staging.
affected #: 1 file
diff -r da270e660db5594d14205ac7a2dfd8f3b0fcc649 -r de8bc5bde938c937759ced066ef28ce1f6202641 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1511,6 +1511,7 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
+ message = ''
repository_id = kwd[ 'id' ]
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, repository_id )
metadata = tool_shed_repository.metadata
@@ -1551,14 +1552,14 @@
tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
- message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
+ message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
% ( tool_shed_repository.name, original_section_name )
message += "Uncheck the <b>No changes</b> check box and select a different tool panel section to load the tools in a "
- message += "different section in the tool panel."
+ message += "different section in the tool panel. "
status = 'warning'
else:
- message = "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
- message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section."
+ message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel outside of any sections. " % tool_shed_repository.name
+ message += "Uncheck the <b>No changes</b> check box and select a tool panel section to load the tools into that section. "
status = 'warning'
if metadata and 'readme_files' in metadata:
url = suc.url_join( tool_shed_url,
@@ -1582,9 +1583,9 @@
install_repository_dependencies_check_box = CheckboxField( 'install_repository_dependencies', checked=True )
# Handle tool dependencies check box.
if trans.app.config.tool_dependency_dir is None:
- if includes_tool_dependencies:
- message = "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
- message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository."
+ if tool_shed_repository.includes_tool_dependencies:
+ message += "Tool dependencies defined in this repository can be automatically installed if you set the value of your <b>tool_dependency_dir</b> "
+ message += "setting in your Galaxy config file (universe_wsgi.ini) and restart your Galaxy server before installing the repository. "
status = "warning"
install_tool_dependencies_check_box_checked = False
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0