commit/galaxy-central: 27 new changesets
27 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/ae321a0084b1/
Changeset:   ae321a0084b1
Branch:      release_15.03
User:        dannon
Date:        2015-03-10 21:14:30+00:00
Summary:     Allow admin to create users via API when config.allow_user_creation is False
Affected #:  1 file

diff -r 070468c42fbfed6b28c389e7df4a38dae9bf921b -r ae321a0084b1e88b7cf0cebbe8ba465329bce565 lib/galaxy/webapps/galaxy/api/users.py
--- a/lib/galaxy/webapps/galaxy/api/users.py
+++ b/lib/galaxy/webapps/galaxy/api/users.py
@@ -105,7 +105,7 @@
         POST /api/users
         Creates a new Galaxy user.
         """
-        if not trans.app.config.allow_user_creation:
+        if not trans.app.config.allow_user_creation and not trans.user_is_admin():
             raise exceptions.ConfigDoesNotAllowException( 'User creation is not allowed in this Galaxy instance' )
         if trans.app.config.use_remote_user and trans.user_is_admin():
             user = trans.get_or_create_remote_user( remote_user_email=payload['remote_user_email'] )

https://bitbucket.org/galaxy/galaxy-central/commits/d3d63bcfa52c/
Changeset:   d3d63bcfa52c
Branch:      release_15.03
User:        dannon
Date:        2015-03-11 15:12:41+00:00
Summary:     No longer mark job as deleted when purging hdas. I can't find a reason we would want to do this anyway, and doing so cascades deletion of copied hdas, which is bad.
Affected #:  1 file

diff -r ae321a0084b1e88b7cf0cebbe8ba465329bce565 -r d3d63bcfa52ce1778046f1017074e5c6959dda2d lib/galaxy/managers/hdas.py
--- a/lib/galaxy/managers/hdas.py
+++ b/lib/galaxy/managers/hdas.py
@@ -159,7 +159,6 @@
         # signal to stop the creating job?
         if hda.creating_job_associations:
             job = hda.creating_job_associations[0].job
-            job.mark_deleted( self.app.config.track_jobs_in_database )
             self.app.job_manager.job_stop_queue.put( job.id )

         # more importantly, purge dataset as well

https://bitbucket.org/galaxy/galaxy-central/commits/bc2a337d8f30/
Changeset:   bc2a337d8f30
Branch:      release_15.03
User:        dannon
Date:        2015-03-10 19:56:55+00:00
Summary:     Cherry-pick over non-terminal-only job stop fix.
Affected #:  1 file

diff -r d3d63bcfa52ce1778046f1017074e5c6959dda2d -r bc2a337d8f30bdbca4e31e9ad56f03963c97cfff lib/galaxy/managers/hdas.py
--- a/lib/galaxy/managers/hdas.py
+++ b/lib/galaxy/managers/hdas.py
@@ -156,10 +156,11 @@
         self.dataset_manager.error_unless_dataset_purge_allowed( trans, hda )
         super( HDAManager, self ).purge( trans, hda, flush=flush )

-        # signal to stop the creating job?
         if hda.creating_job_associations:
             job = hda.creating_job_associations[0].job
-            self.app.job_manager.job_stop_queue.put( job.id )
+            if not job.finished:
+                # signal to stop the creating job
+                self.app.job_manager.job_stop_queue.put( job.id )

         # more importantly, purge dataset as well
         if hda.dataset.user_can_purge:

https://bitbucket.org/galaxy/galaxy-central/commits/5ac767b3b060/
Changeset:   5ac767b3b060
Branch:      release_15.03
User:        carlfeberhard
Date:        2015-03-11 17:51:54+00:00
Summary:     UI, history: (in the client) when calling purge on the history model, mark the history as deleted as well
Affected #:  3 files

diff -r bc2a337d8f30bdbca4e31e9ad56f03963c97cfff -r 5ac767b3b060356c22c100936d804a77ebee9d59 client/galaxy/scripts/mvc/history/history-model.js
--- a/client/galaxy/scripts/mvc/history/history-model.js
+++ b/client/galaxy/scripts/mvc/history/history-model.js
@@ -208,7 +208,7 @@
     /** purge this history, _Mark_ing it as purged and removing all dataset data from the server */
     purge : function( options ){
         if( this.get( 'purged' ) ){ return jQuery.when(); }
-        return this.save( { purged: true }, options );
+        return this.save( { deleted: true, purged: true }, options );
     },
     /** save this history, _Mark_ing it as undeleted */
     undelete : function( options ){

The same one-line change is repeated in static/scripts/mvc/history/history-model.js and in the minified copy at static/scripts/packed/mvc/history/history-model.js.

https://bitbucket.org/galaxy/galaxy-central/commits/58de410169f1/
Changeset:   58de410169f1
Branch:      release_15.03
User:        guerler
Date:        2015-03-11 21:53:22+00:00
Summary:     Map imported datasets and collections to current history without hash_util
Affected #:  1 file

diff -r 5ac767b3b060356c22c100936d804a77ebee9d59 -r 58de410169f168df15f7081478fc7c8d7c8f663b lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2587,11 +2587,9 @@
         hda_source_dict = {} # Mapping from HDA in history to source HDAs.
         for hda in history.datasets:
             source_hda = hda.copied_from_history_dataset_association
-            while source_hda:#should this check library datasets as well?
-                #FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
-                #but is still less than perfect when eg individual datasets are copied between histories
-                if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
-                    hda_source_dict[ source_hda ] = hda
+            while source_hda:
+                if source_hda.dataset.id not in hda_source_dict or source_hda.hid == hda.hid:
+                    hda_source_dict[ source_hda.dataset.id ] = hda
                 source_hda = source_hda.copied_from_history_dataset_association

         # Ditto for dataset collections.
@@ -2599,8 +2597,8 @@ for hdca in history.dataset_collections: source_hdca = hdca.copied_from_history_dataset_collection_association while source_hdca: - if source_hdca not in hdca_source_dict or source_hdca.hid == hdca.hid: - hdca_source_dict[ source_hdca ] = hdca + if source_hdca.collection.id not in hdca_source_dict or source_hdca.hid == hdca.hid: + hdca_source_dict[ source_hdca.collection.id ] = hdca source_hdca = source_hdca.copied_from_history_dataset_collection_association # Unpack unvalidated values to strings, they'll be validated when the @@ -2619,17 +2617,21 @@ if isinstance(value,list): values = [] for val in value: - if is_hashable( val ): - if val in history.datasets: + if isinstance(val, trans.app.model.HistoryDatasetAssociation): + if val.dataset.id in hda_source_dict: + values.append( hda_source_dict[ val.dataset.id ] ) + else: values.append( val ) - elif val in hda_source_dict: - values.append( hda_source_dict[ val ]) return values - if is_hashable( value ) and value not in history.datasets and value in hda_source_dict: - return hda_source_dict[ value ] + if isinstance(value, trans.app.model.HistoryDatasetAssociation): + if value.dataset.id in hda_source_dict: + return hda_source_dict[ value.dataset.id ] + if isinstance(value, trans.app.model.HistoryDatasetCollectionAssociation): + if value.collection.id in hdca_source_dict: + return hdca_source_dict[ value.collection.id ] elif isinstance( input, DataCollectionToolParameter ): - if is_hashable( value ) and value not in history.dataset_collections and value in hdca_source_dict: - return hdca_source_dict[ value ] + if value.collection.id in hdca_source_dict: + return hdca_source_dict[ value.collection.id ] visit_input_values( tool_inputs, params, rerun_callback ) def _compare_tool_version( self, trans, job ): https://bitbucket.org/galaxy/galaxy-central/commits/1b71c96a9008/ Changeset: 1b71c96a9008 Branch: release_15.03 User: dannon Date: 2015-03-12 15:34:01+00:00 Summary: Sanely size the workflow parameter input boxes (at runtime), tweak formatting a bit. Affected #: 1 file diff -r 58de410169f168df15f7081478fc7c8d7c8f663b -r 1b71c96a9008e1e920affc4b65838ff50d13d537 templates/webapps/galaxy/workflow/run.mako --- a/templates/webapps/galaxy/workflow/run.mako +++ b/templates/webapps/galaxy/workflow/run.mako @@ -513,7 +513,7 @@ <div class="metadataFormTitle">Workflow Parameters</div><div class="metadataFormBody"> %for parm in wf_parms: - <div class='form-row'><label style='width:100px;'>${parm}<input style="border:2px solid ${wf_parms[parm]};border-left-width:8px;" type="text" class='wf_parm_input ptag_${parm}' name="wf_parm|${parm}" value=""/></label></div> + <div class='form-row'><label>${parm}<br/><input size=40 style="border:2px solid ${wf_parms[parm]};border-left-width:8px;" type="text" class='wf_parm_input ptag_${parm}' name="wf_parm|${parm}" value=""/></label></div> %endfor </div></div> https://bitbucket.org/galaxy/galaxy-central/commits/3d809a9312d0/ Changeset: 3d809a9312d0 Branch: release_15.03 User: natefoo Date: 2015-03-12 16:16:35+00:00 Summary: Prevent the deletion of jobs that are already terminal, and prevent their outputs from being discarded. 
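The guard described in this summary shows up in both files touched below: the job stop queue skips jobs that are already terminal, and Job.mark_deleted() becomes a no-op for them. A minimal, self-contained sketch of that pattern, using invented state names rather than Galaxy's actual Job model:

# Hypothetical stand-in for the real Job model; only the guard logic matters here.
class Job(object):
    TERMINAL_STATES = frozenset(['ok', 'error', 'deleted'])

    def __init__(self, state):
        self.state = state
        self.outputs_discarded = False

    @property
    def finished(self):
        # terminal jobs must not have their state or outputs modified
        return self.state in self.TERMINAL_STATES

    def mark_deleted(self):
        if self.finished:
            # do not modify the state/outputs of jobs that are already terminal
            return
        self.state = 'deleted'
        self.outputs_discarded = True

running, done = Job('running'), Job('ok')
running.mark_deleted()   # becomes 'deleted', outputs discarded
done.mark_deleted()      # no-op: the finished job keeps its state and outputs
assert done.state == 'ok' and not done.outputs_discarded

A later changeset in this batch (5bf564a741ca) refines the handler-side check, because DELETED_NEW is itself counted by the real Job.finished property and jobs newly flagged for deletion still need to pass through the stop queue.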
Affected #: 2 files diff -r 1b71c96a9008e1e920affc4b65838ff50d13d537 -r 3d809a9312d0a37908aef77c62fbccfbf0e41486 lib/galaxy/jobs/handler.py --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -680,6 +680,9 @@ except Empty: pass for job, error_msg in jobs_to_check: + if job.finished: + log.debug('Job %s already finished, not deleting or stopping', job.id) + continue final_state = job.states.DELETED if error_msg is not None: final_state = job.states.ERROR diff -r 1b71c96a9008e1e920affc4b65838ff50d13d537 -r 3d809a9312d0a37908aef77c62fbccfbf0e41486 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -517,6 +517,9 @@ """ Mark this job as deleted, and mark any output datasets as discarded. """ + if self.finished: + # Do not modify the state/outputs of jobs that are already terminal + return if track_jobs_in_database: self.state = Job.states.DELETED_NEW else: https://bitbucket.org/galaxy/galaxy-central/commits/ac807cb6c89b/ Changeset: ac807cb6c89b Branch: release_15.03 User: natefoo Date: 2015-03-12 16:22:59+00:00 Summary: Restore job deletion when purging a history and tracking jobs in the database. Affected #: 1 file diff -r 3d809a9312d0a37908aef77c62fbccfbf0e41486 -r ac807cb6c89bdd7507648cd43f3aac67aaf2f6fc lib/galaxy/managers/hdas.py --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -160,6 +160,7 @@ job = hda.creating_job_associations[0].job if not job.finished: # signal to stop the creating job + job.mark_deleted( self.app.config.track_jobs_in_database ) self.app.job_manager.job_stop_queue.put( job.id ) # more importantly, purge dataset as well https://bitbucket.org/galaxy/galaxy-central/commits/83f798b67265/ Changeset: 83f798b67265 Branch: release_15.03 User: natefoo Date: 2015-03-12 16:38:58+00:00 Summary: Make history purge behavior consistent across all UI paths (always attempt to stop jobs if the creating job is in the purged history), previously this was not implemented in the grid history view. Affected #: 1 file diff -r ac807cb6c89bdd7507648cd43f3aac67aaf2f6fc -r 83f798b67265729eb0f7dc9902bfcbf2219fd295 lib/galaxy/webapps/galaxy/controllers/history.py --- a/lib/galaxy/webapps/galaxy/controllers/history.py +++ b/lib/galaxy/webapps/galaxy/controllers/history.py @@ -359,6 +359,15 @@ history.purged = True self.sa_session.add( history ) self.sa_session.flush() + for hda in history.datasets: + # Not all datasets have jobs associated with them (e.g., datasets imported from libraries). + if hda.creating_job_associations: + # HDA has associated job, so try marking it deleted. + job = hda.creating_job_associations[0].job + if job.history_id == history.id and not job.finished: + # No need to check other outputs since the job's parent history is this history + job.mark_deleted( trans.app.config.track_jobs_in_database ) + trans.app.job_manager.job_stop_queue.put( job.id ) trans.sa_session.flush() if n_deleted: part = "Deleted %d %s" % ( n_deleted, iff( n_deleted != 1, "histories", "history" ) ) @@ -1125,7 +1134,7 @@ if hda.creating_job_associations: # HDA has associated job, so try marking it deleted. 
job = hda.creating_job_associations[0].job - if job.history_id == history.id and job.state in [ trans.app.model.Job.states.QUEUED, trans.app.model.Job.states.RUNNING, trans.app.model.Job.states.NEW ]: + if job.history_id == history.id and not job.finished: # No need to check other outputs since the job's parent history is this history job.mark_deleted( trans.app.config.track_jobs_in_database ) trans.app.job_manager.job_stop_queue.put( job.id ) https://bitbucket.org/galaxy/galaxy-central/commits/868e124734d4/ Changeset: 868e124734d4 Branch: release_15.03 User: dannon Date: 2015-03-12 17:36:43+00:00 Summary: Process *all* replacements in Rename PJA, instead of just the first one Affected #: 1 file diff -r 83f798b67265729eb0f7dc9902bfcbf2219fd295 -r 868e124734d4577d67290f0a8468dfc53b8e4f12 lib/galaxy/jobs/actions/post.py --- a/lib/galaxy/jobs/actions/post.py +++ b/lib/galaxy/jobs/actions/post.py @@ -156,7 +156,7 @@ # "replace" option so you can replace a portion of the name, # support multiple #{name} in one rename action... - if new_name.find("#{") > -1: + while new_name.find("#{") > -1: to_be_replaced = "" # This assumes a single instance of #{variable} will exist start_pos = new_name.find("#{") + 2 https://bitbucket.org/galaxy/galaxy-central/commits/733ec31a4cae/ Changeset: 733ec31a4cae Branch: release_15.03 User: natefoo Date: 2015-03-02 19:58:49+00:00 Summary: Relocate the external set_metadata script to the galaxy_utils package. A temporary script in the job working directory will be created to import and call it (trusting that `$PYTHONPATH` in a job script is always set to `galaxy/lib`). This is so the auto-detect button can defer command line generation until job preparation time for the case that handlers and web processes run from different paths. Related Trello card: https://trello.com/c/v2eCOYZi (cherry picked from commit b043d43a355e7679f9aad76ab6271f34d12a90ff) Move set_metadata script from galaxy_utils to galaxy, remove unused imports. (cherry picked from commit e093f58d636fab68707d1abfff4383dd0cd086d1) Restore old set_metadata files to prevent failure of jobs running at upgrade time. (cherry picked from commit 4443b64eaed14f36753198c50c7013cbfcb788c8) Fix unit test broken by set_metadata changes. (cherry picked from commit d2422861871e0714fd3e448c68025e12a96bd9e8) Use the job working directory for creating MetadataFiles in external set_metadata, rather than new_files_path. 
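The mechanism described above, a throwaway script in the job working directory that imports the real entry point resolved through $PYTHONPATH at run time, is what lets command-line generation wait until job preparation. A hedged, stand-alone sketch of that pattern; write_driver_script and its arguments are illustrative, not Galaxy's API, and correspond roughly to what setup_external_metadata() in the diff below does when include_command is true:

# Illustrative helper: emit a one-line Python driver so the persisted job
# command line contains no absolute path into the Galaxy installation.
import os
import tempfile

def write_driver_script(tmp_dir, module, func):
    """Write 'from <module> import <func>; <func>()' and return the command prefix."""
    fd, path = tempfile.mkstemp(suffix='.py', dir=tmp_dir, prefix='set_metadata_')
    with os.fdopen(fd, 'w') as fh:
        fh.write('from %s import %s; %s()' % (module, func, func))
    # the import only succeeds on the compute node because the job script
    # puts galaxy/lib on $PYTHONPATH
    return 'python %s' % os.path.abspath(path)

cmd_prefix = write_driver_script(tempfile.gettempdir(),
                                 'galaxy.metadata.set_metadata', 'set_metadata')
# cmd_prefix is then followed by the datatypes config, the job metadata file
# and the per-dataset filename tuples, as in the diff below.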
(cherry picked from commit 8116d2c9173a0ce4fc69caf9312d36df55062da7) Affected #: 8 files diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -667,8 +667,12 @@ # need to make different keys for them, since ids can overlap return "%s_%d" % ( dataset.__class__.__name__, dataset.id ) - def setup_external_metadata( self, datasets, sa_session, exec_dir=None, tmp_dir=None, dataset_files_path=None, - output_fnames=None, config_root=None, config_file=None, datatypes_config=None, job_metadata=None, compute_tmp_dir=None, kwds=None ): + def setup_external_metadata( self, datasets, sa_session, exec_dir=None, + tmp_dir=None, dataset_files_path=None, + output_fnames=None, config_root=None, + config_file=None, datatypes_config=None, + job_metadata=None, compute_tmp_dir=None, + include_command=True, kwds=None ): kwds = kwds or {} if tmp_dir is None: tmp_dir = MetadataTempFile.tmp_dir @@ -761,8 +765,22 @@ sa_session.add( metadata_files ) sa_session.flush() metadata_files_list.append( metadata_files ) - #return command required to build - return "%s %s %s %s %s %s %s %s" % ( os.path.join( exec_dir, 'set_metadata.sh' ), dataset_files_path, compute_tmp_dir or tmp_dir, config_root, config_file, datatypes_config, job_metadata, " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) + args = "%s %s %s %s %s %s %s" % ( dataset_files_path, + compute_tmp_dir or tmp_dir, + config_root, + config_file, + datatypes_config, + job_metadata, + " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) + if include_command: + #return command required to build + fd, fp = tempfile.mkstemp( suffix='.py', dir = tmp_dir, prefix = "set_metadata_" ) + metadata_script_file = abspath( fp ) + os.fdopen( fd, 'w' ).write( 'from galaxy.metadata.set_metadata import set_metadata; set_metadata()' ) + return "python %s %s" % ( metadata_script_file, args ) + else: + # return args to galaxy.metadata.set_metadata required to build + return args def external_metadata_set_successfully( self, dataset, sa_session ): metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session ) diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/datatypes/registry.py --- a/lib/galaxy/datatypes/registry.py +++ b/lib/galaxy/datatypes/registry.py @@ -653,12 +653,15 @@ <requirement type="package">samtools</requirement></requirements><action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/> - <command>$__SET_EXTERNAL_METADATA_COMMAND_LINE__</command> + <command>python $set_metadata $__SET_EXTERNAL_METADATA_COMMAND_LINE__</command><inputs><param format="data" name="input1" type="data" label="File to set metadata on."/><param name="__ORIGINAL_DATASET_STATE__" type="hidden" value=""/><param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value=""/></inputs> + <configfiles> + <configfile name="set_metadata">from galaxy.metadata.set_metadata import set_metadata; set_metadata()</configfile> + </configfiles></tool> """ tmp_name = tempfile.NamedTemporaryFile() diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/jobs/__init__.py --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -831,8 +831,7 @@ self.command_line, self.extra_filenames = tool_evaluator.build() # FIXME: for now, tools get Galaxy's lib dir in their path - if 
self.command_line and self.command_line.startswith( 'python' ): - self.galaxy_lib_dir = os.path.abspath( "lib" ) # cwd = galaxy root + self.galaxy_lib_dir = os.path.abspath( "lib" ) # cwd = galaxy root # Shell fragment to inject dependencies self.dependency_shell_commands = self.tool.build_dependency_shell_commands() # We need command_line persisted to the db in order for Galaxy to re-queue the job diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/jobs/command_factory.py --- a/lib/galaxy/jobs/command_factory.py +++ b/lib/galaxy/jobs/command_factory.py @@ -145,7 +145,7 @@ metadata_command = metadata_command.strip() if metadata_command: commands_builder.capture_return_code() - commands_builder.append_command("cd %s; %s" % (exec_dir, metadata_command)) + commands_builder.append_command(metadata_command) def __copy_if_exists_command(work_dir_output): diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/metadata/__init__.py --- /dev/null +++ b/lib/galaxy/metadata/__init__.py @@ -0,0 +1,2 @@ +""" Work with Galaxy metadata +""" diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/metadata/set_metadata.py --- /dev/null +++ b/lib/galaxy/metadata/set_metadata.py @@ -0,0 +1,122 @@ +""" +Execute an external process to set_meta() on a provided list of pickled datasets. + +This was formerly scripts/set_metadata.py and expects the same arguments as +that script. +""" + +import logging +logging.basicConfig() +log = logging.getLogger( __name__ ) + +import cPickle +import json +import os +import sys + +# ensure supported version +assert sys.version_info[:2] >= ( 2, 6 ) and sys.version_info[:2] <= ( 2, 7 ), 'Python version must be 2.6 or 2.7, this is: %s' % sys.version + +new_path = [ os.path.join( os.getcwd(), "lib" ) ] +new_path.extend( sys.path[ 1: ] ) # remove scripts/ from the path +sys.path = new_path + +from galaxy import eggs +import pkg_resources +import galaxy.model.mapping # need to load this before we unpickle, in order to setup properties assigned by the mappers +galaxy.model.Job() # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here +from galaxy.util import stringify_dictionary_keys +from sqlalchemy.orm import clear_mappers + + +def set_meta_with_tool_provided( dataset_instance, file_dict, set_meta_kwds ): + # This method is somewhat odd, in that we set the metadata attributes from tool, + # then call set_meta, then set metadata attributes from tool again. + # This is intentional due to interplay of overwrite kwd, the fact that some metadata + # parameters may rely on the values of others, and that we are accepting the + # values provided by the tool as Truth. 
+ for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems(): + setattr( dataset_instance.metadata, metadata_name, metadata_value ) + dataset_instance.datatype.set_meta( dataset_instance, **set_meta_kwds ) + for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems(): + setattr( dataset_instance.metadata, metadata_name, metadata_value ) + +def set_metadata(): + # locate galaxy_root for loading datatypes + galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) + galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tool_job_working_directory = os.path.abspath(os.getcwd()) + + # Set up datatypes registry + datatypes_config = sys.argv.pop( 1 ) + datatypes_registry = galaxy.datatypes.registry.Registry() + datatypes_registry.load_datatypes( root_dir=config_root, config=datatypes_config ) + galaxy.model.set_datatypes_registry( datatypes_registry ) + + job_metadata = sys.argv.pop( 1 ) + existing_job_metadata_dict = {} + new_job_metadata_dict = {} + if job_metadata != "None" and os.path.exists( job_metadata ): + for line in open( job_metadata, 'r' ): + try: + line = stringify_dictionary_keys( json.loads( line ) ) + if line['type'] == 'dataset': + existing_job_metadata_dict[ line['dataset_id'] ] = line + elif line['type'] == 'new_primary_dataset': + new_job_metadata_dict[ line[ 'filename' ] ] = line + except: + continue + + for filenames in sys.argv[1:]: + fields = filenames.split( ',' ) + filename_in = fields.pop( 0 ) + filename_kwds = fields.pop( 0 ) + filename_out = fields.pop( 0 ) + filename_results_code = fields.pop( 0 ) + dataset_filename_override = fields.pop( 0 ) + # Need to be careful with the way that these parameters are populated from the filename splitting, + # because if a job is running when the server is updated, any existing external metadata command-lines + #will not have info about the newly added override_metadata file + if fields: + override_metadata = fields.pop( 0 ) + else: + override_metadata = None + set_meta_kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode + try: + dataset = cPickle.load( open( filename_in ) ) # load DatasetInstance + if dataset_filename_override: + dataset.dataset.external_filename = dataset_filename_override + files_path = os.path.abspath(os.path.join( tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id) )) + dataset.dataset.external_extra_files_path = files_path + if dataset.dataset.id in existing_job_metadata_dict: + dataset.extension = existing_job_metadata_dict[ dataset.dataset.id ].get( 'ext', dataset.extension ) + # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles + if override_metadata: + override_metadata = json.load( open( override_metadata ) ) + for metadata_name, metadata_file_override in override_metadata: + if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ): + metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override ) + setattr( dataset.metadata, metadata_name, metadata_file_override ) + file_dict = existing_job_metadata_dict.get( dataset.dataset.id, {} ) + set_meta_with_tool_provided( dataset, file_dict, set_meta_kwds ) + dataset.metadata.to_JSON_dict( filename_out ) # write out results of set_meta + json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 
'wb+' ) ) # setting metadata has succeeded + except Exception, e: + json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow + + for i, ( filename, file_dict ) in enumerate( new_job_metadata_dict.iteritems(), start=1 ): + new_dataset = galaxy.model.Dataset( id=-i, external_filename=os.path.join( tool_job_working_directory, file_dict[ 'filename' ] ) ) + extra_files = file_dict.get( 'extra_files', None ) + if extra_files is not None: + new_dataset._extra_files_path = os.path.join( tool_job_working_directory, extra_files ) + new_dataset.state = new_dataset.states.OK + new_dataset_instance = galaxy.model.HistoryDatasetAssociation( id=-i, dataset=new_dataset, extension=file_dict.get( 'ext', 'data' ) ) + set_meta_with_tool_provided( new_dataset_instance, file_dict, set_meta_kwds ) + file_dict[ 'metadata' ] = json.loads( new_dataset_instance.metadata.to_JSON_dict() ) #storing metadata in external form, need to turn back into dict, then later jsonify + if existing_job_metadata_dict or new_job_metadata_dict: + with open( job_metadata, 'wb' ) as job_metadata_fh: + for value in existing_job_metadata_dict.values() + new_job_metadata_dict.values(): + job_metadata_fh.write( "%s\n" % ( json.dumps( value ) ) ) + + clear_mappers() + # Shut down any additional threads that might have been created via the ObjectStore + object_store.shutdown() diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -76,6 +76,7 @@ config_file = app.config.config_file, datatypes_config = app.datatypes_registry.integrated_datatypes_configs, job_metadata = None, + include_command = False, kwds = { 'overwrite' : overwrite } ) incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line for name, value in tool.params_to_strings( incoming, app ).iteritems(): diff -r 868e124734d4577d67290f0a8468dfc53b8e4f12 -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 test/unit/jobs/test_command_factory.py --- a/test/unit/jobs/test_command_factory.py +++ b/test/unit/jobs/test_command_factory.py @@ -72,7 +72,7 @@ self.include_metadata = True self.include_work_dir_outputs = False self.job_wrapper.metadata_line = TEST_METADATA_LINE - expected_command = '%s; return_code=$?; cd %s; %s; sh -c "exit $return_code"' % (MOCK_COMMAND_LINE, getcwd(), TEST_METADATA_LINE) + expected_command = '%s; return_code=$?; %s; sh -c "exit $return_code"' % (MOCK_COMMAND_LINE, TEST_METADATA_LINE) self.__assert_command_is( expected_command ) def test_empty_metadata(self): https://bitbucket.org/galaxy/galaxy-central/commits/56ea9cfb4394/ Changeset: 56ea9cfb4394 Branch: release_15.03 User: natefoo Date: 2015-03-04 18:44:17+00:00 Summary: Run history export jobs in the same manner as metadata auto-detection (so the absolute path of the web galaxy dir is not part of the command line and can be computed at runtime). 
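The export tool applies the same idea through a tool configfile: export_history.py gains an importable main() entry point and the <configfile> holds the one-line driver, so the stored command no longer needs an os.getcwd()-derived absolute path. A minimal sketch of that entry-point refactor (package name and archive logic here are placeholders):

# A script refactored so it can be driven either directly or via a configfile
# containing 'from mypackage.export_history import main; main()'.
import optparse

def main():
    parser = optparse.OptionParser()
    parser.add_option('-G', '--gzip', dest='gzip', action='store_true',
                      help='Compress archive using gzip.')
    options, args = parser.parse_args()
    # ... create the archive from the attribute files passed in args ...
    print('creating history archive (gzip=%s)' % bool(options.gzip))

if __name__ == '__main__':
    # still usable as a plain script; jobs invoke main() through the configfile
    main()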
(cherry picked from commit de5d6d11503f931dd58f7f09094c45c39578e494) Affected #: 2 files diff -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 -r 56ea9cfb4394efbd3e56dd0a0965602be085b6a7 lib/galaxy/tools/imp_exp/__init__.py --- a/lib/galaxy/tools/imp_exp/__init__.py +++ b/lib/galaxy/tools/imp_exp/__init__.py @@ -17,12 +17,15 @@ <tool id="__EXPORT_HISTORY__" name="Export History" version="0.1" tool_type="export_history"><type class="ExportHistoryTool" module="galaxy.tools"/><action module="galaxy.tools.actions.history_imp_exp" class="ExportHistoryToolAction"/> - <command>$__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__ $output_file</command> + <command>python $export_history $__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__ $output_file</command><inputs><param name="__HISTORY_TO_EXPORT__" type="hidden"/><param name="compress" type="boolean"/><param name="__EXPORT_HISTORY_COMMAND_INPUTS_OPTIONS__" type="hidden"/></inputs> + <configfiles> + <configfile name="export_history">from galaxy.tools.imp_exp.export_history import main; main()</configfile> + </configfiles><outputs><data format="gzip" name="output_file"/></outputs> @@ -530,11 +533,9 @@ options = "" if jeha.compressed: options = "-G" - return "python %s %s %s %s %s" % ( os.path.join( os.path.abspath( os.getcwd() ), - "lib/galaxy/tools/imp_exp/export_history.py" ), - options, history_attrs_filename, - datasets_attrs_filename, - jobs_attrs_filename ) + return "%s %s %s %s" % ( options, history_attrs_filename, + datasets_attrs_filename, + jobs_attrs_filename ) def cleanup_after_job( self, db_session ): """ Remove temporary directory and attribute files generated during setup for this job. """ diff -r 733ec31a4cae0797f045a3f37f89402593c2bdc7 -r 56ea9cfb4394efbd3e56dd0a0965602be085b6a7 lib/galaxy/tools/imp_exp/export_history.py --- a/lib/galaxy/tools/imp_exp/export_history.py +++ b/lib/galaxy/tools/imp_exp/export_history.py @@ -70,7 +70,7 @@ except Exception, e: return 'Error creating history archive: %s' % str( e ), sys.stderr -if __name__ == "__main__": +def main(): # Parse command line. parser = optparse.OptionParser() parser.add_option( '-G', '--gzip', dest='gzip', action="store_true", help='Compress archive using gzip.' ) @@ -81,3 +81,6 @@ # Create archive. status = create_archive( history_attrs, dataset_attrs, job_attrs, out_file, gzip ) print status + +if __name__ == "__main__": + main() https://bitbucket.org/galaxy/galaxy-central/commits/ace41a078a44/ Changeset: ace41a078a44 Branch: release_15.03 User: natefoo Date: 2015-03-13 02:11:39+00:00 Summary: Add minor and combined major+minor version to version.py. Affected #: 1 file diff -r 56ea9cfb4394efbd3e56dd0a0965602be085b6a7 -r ace41a078a44cb3fe32c6bcdc9e051ba72278d3f lib/galaxy/version.py --- a/lib/galaxy/version.py +++ b/lib/galaxy/version.py @@ -1,1 +1,3 @@ VERSION_MAJOR = "15.03" +VERSION_MINOR = None +VERSION = VERSION_MAJOR + ('.' + VERSION_MINOR if VERSION_MINOR else '') https://bitbucket.org/galaxy/galaxy-central/commits/b194d6db2f76/ Changeset: b194d6db2f76 Branch: release_15.03 User: natefoo Date: 2015-03-13 02:31:30+00:00 Summary: Fix things that I somehow missed in the rebase for PR #22. 
Affected #: 2 files diff -r ace41a078a44cb3fe32c6bcdc9e051ba72278d3f -r b194d6db2f765daabe5bfab74b9f3bc58df267b7 lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -765,13 +765,9 @@ sa_session.add( metadata_files ) sa_session.flush() metadata_files_list.append( metadata_files ) - args = "%s %s %s %s %s %s %s" % ( dataset_files_path, - compute_tmp_dir or tmp_dir, - config_root, - config_file, - datatypes_config, - job_metadata, - " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) + args = "%s %s %s" % ( datatypes_config, + job_metadata, + " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) if include_command: #return command required to build fd, fp = tempfile.mkstemp( suffix='.py', dir = tmp_dir, prefix = "set_metadata_" ) diff -r ace41a078a44cb3fe32c6bcdc9e051ba72278d3f -r b194d6db2f765daabe5bfab74b9f3bc58df267b7 lib/galaxy/metadata/set_metadata.py --- a/lib/galaxy/metadata/set_metadata.py +++ b/lib/galaxy/metadata/set_metadata.py @@ -1,8 +1,14 @@ """ Execute an external process to set_meta() on a provided list of pickled datasets. -This was formerly scripts/set_metadata.py and expects the same arguments as -that script. +This was formerly scripts/set_metadata.py and expects these arguments: + + %prog datatypes_conf.xml job_metadata_file metadata_in,metadata_kwds,metadata_out,metadata_results_code,output_filename_override,metadata_override... + +Galaxy should be importable on sys.path and output_filename_override should be +set to the path of the dataset on which metadata is being set +(output_filename_override could previously be left empty and the path would be +constructed automatically). """ import logging @@ -49,7 +55,7 @@ # Set up datatypes registry datatypes_config = sys.argv.pop( 1 ) datatypes_registry = galaxy.datatypes.registry.Registry() - datatypes_registry.load_datatypes( root_dir=config_root, config=datatypes_config ) + datatypes_registry.load_datatypes( root_dir=galaxy_root, config=datatypes_config ) galaxy.model.set_datatypes_registry( datatypes_registry ) job_metadata = sys.argv.pop( 1 ) @@ -83,8 +89,7 @@ set_meta_kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode try: dataset = cPickle.load( open( filename_in ) ) # load DatasetInstance - if dataset_filename_override: - dataset.dataset.external_filename = dataset_filename_override + dataset.dataset.external_filename = dataset_filename_override files_path = os.path.abspath(os.path.join( tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id) )) dataset.dataset.external_extra_files_path = files_path if dataset.dataset.id in existing_job_metadata_dict: @@ -118,5 +123,3 @@ job_metadata_fh.write( "%s\n" % ( json.dumps( value ) ) ) clear_mappers() - # Shut down any additional threads that might have been created via the ObjectStore - object_store.shutdown() https://bitbucket.org/galaxy/galaxy-central/commits/3d1aa30fbe57/ Changeset: 3d1aa30fbe57 Branch: release_15.03 User: natefoo Date: 2015-03-13 03:45:05+00:00 Summary: Another change missed in the rebase/commit. 
Affected #: 1 file diff -r b194d6db2f765daabe5bfab74b9f3bc58df267b7 -r 3d1aa30fbe57f94e65963f1a45c472f9762d34de lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -691,8 +691,8 @@ def __get_filename_override(): if output_fnames: for dataset_path in output_fnames: - if dataset_path.false_path and dataset_path.real_path == metadata_files.dataset.file_name: - return dataset_path.false_path + if dataset_path.real_path == metadata_files.dataset.file_name: + return dataset_path.false_path or dataset_path.real_path return "" line = "%s,%s,%s,%s,%s,%s" % ( metadata_path_on_compute(metadata_files.filename_in), https://bitbucket.org/galaxy/galaxy-central/commits/5bf564a741ca/ Changeset: 5bf564a741ca Branch: release_15.03 User: natefoo Date: 2015-03-13 03:56:21+00:00 Summary: DELETED_NEW is part of the Job.finished property... Affected #: 1 file diff -r 3d1aa30fbe57f94e65963f1a45c472f9762d34de -r 5bf564a741ca8b8d87f27776e3de99a7287f7ed9 lib/galaxy/jobs/handler.py --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -680,7 +680,7 @@ except Empty: pass for job, error_msg in jobs_to_check: - if job.finished: + if job.state != job.states.DELETED_NEW and job.finished: log.debug('Job %s already finished, not deleting or stopping', job.id) continue final_state = job.states.DELETED https://bitbucket.org/galaxy/galaxy-central/commits/4efc589120a6/ Changeset: 4efc589120a6 Branch: release_15.03 User: natefoo Date: 2015-03-13 04:36:25+00:00 Summary: One more change missed in my rebase. Affected #: 1 file diff -r 5bf564a741ca8b8d87f27776e3de99a7287f7ed9 -r 4efc589120a6f1273382ea1bd31fbd6b6486dcbd lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -2,6 +2,7 @@ from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper from galaxy.util.odict import odict from galaxy.util.json import dumps +from galaxy.jobs.datasets import DatasetPath import logging log = logging.getLogger( __name__ ) @@ -65,13 +66,14 @@ #add parameters to job_parameter table # Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)? incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state + input_paths = [DatasetPath( dataset.id, real_path=dataset.file_name, mutable=False )] external_metadata_wrapper = JobExternalOutputMetadataWrapper( job ) cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, sa_session, exec_dir = None, tmp_dir = app.config.new_file_path, dataset_files_path = app.model.Dataset.file_path, - output_fnames = None, + output_fnames = input_paths, config_root = app.config.root, config_file = app.config.config_file, datatypes_config = app.datatypes_registry.integrated_datatypes_configs, https://bitbucket.org/galaxy/galaxy-central/commits/84e6ea54c10c/ Changeset: 84e6ea54c10c Branch: release_15.03 User: natefoo Date: 2015-03-13 16:30:57+00:00 Summary: Use job working directory for MetadataFiles created with auto-detect. 
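As the diff below shows, the auto-detect action now derives tmp_dir from the job's working directory and pushes it into MetadataTempFile, creating the directory if needed. A small stand-alone sketch of that tmp_dir handling, with placeholder class and helper names:

# Placeholder mirror of the tmp_dir logic: adopt the caller's directory as the
# new default for temp metadata files, and make sure it exists before use.
import os

class MetadataTempFileStub(object):
    tmp_dir = 'database/tmp'   # stand-in for the previous global default

def resolve_tmp_dir(tmp_dir=None):
    if tmp_dir is None:
        tmp_dir = MetadataTempFileStub.tmp_dir
    else:
        # later temp files created during this job follow the job working dir
        MetadataTempFileStub.tmp_dir = tmp_dir
    if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
    return tmp_dir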
Affected #: 2 files diff -r 4efc589120a6f1273382ea1bd31fbd6b6486dcbd -r 84e6ea54c10c58ade9ce36c824571f43024dfcb5 lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -676,6 +676,11 @@ kwds = kwds or {} if tmp_dir is None: tmp_dir = MetadataTempFile.tmp_dir + else: + MetadataTempFile.tmp_dir = tmp_dir + + if not os.path.exists(tmp_dir): + os.makedirs(tmp_dir) # path is calculated for Galaxy, may be different on compute - rewrite # for the compute server. diff -r 4efc589120a6f1273382ea1bd31fbd6b6486dcbd -r 84e6ea54c10c58ade9ce36c824571f43024dfcb5 lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -1,12 +1,15 @@ +import logging + from __init__ import ToolAction from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper from galaxy.util.odict import odict from galaxy.util.json import dumps from galaxy.jobs.datasets import DatasetPath -import logging + log = logging.getLogger( __name__ ) + class SetMetadataToolAction( ToolAction ): """Tool action used for setting external metadata on an existing dataset""" @@ -67,11 +70,12 @@ # Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)? incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state input_paths = [DatasetPath( dataset.id, real_path=dataset.file_name, mutable=False )] + job_working_dir = app.object_store.get_filename(job, base_dir='job_work', dir_only=True, extra_dir=str(job.id)) external_metadata_wrapper = JobExternalOutputMetadataWrapper( job ) cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, sa_session, exec_dir = None, - tmp_dir = app.config.new_file_path, + tmp_dir = job_working_dir, dataset_files_path = app.model.Dataset.file_path, output_fnames = input_paths, config_root = app.config.root, https://bitbucket.org/galaxy/galaxy-central/commits/bb2f159e899d/ Changeset: bb2f159e899d Branch: release_15.03 User: natefoo Date: 2015-03-13 17:17:07+00:00 Summary: Use the correct path to the set_metadata temp script on remote compute resources. Affected #: 1 file diff -r 84e6ea54c10c58ade9ce36c824571f43024dfcb5 -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -778,7 +778,7 @@ fd, fp = tempfile.mkstemp( suffix='.py', dir = tmp_dir, prefix = "set_metadata_" ) metadata_script_file = abspath( fp ) os.fdopen( fd, 'w' ).write( 'from galaxy.metadata.set_metadata import set_metadata; set_metadata()' ) - return "python %s %s" % ( metadata_script_file, args ) + return "python %s %s" % ( metadata_path_on_compute(metadata_script_file), args ) else: # return args to galaxy.metadata.set_metadata required to build return args https://bitbucket.org/galaxy/galaxy-central/commits/03f569f9eb69/ Changeset: 03f569f9eb69 Branch: release_15.03 User: natefoo Date: 2015-03-13 18:31:58+00:00 Summary: Move set_metadata (once again), to galaxy_ext so that the `galaxy` namespace does not conflict with others (e.g. the galaxy-ops package in the toolshed). 
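The rename is about import resolution: if another distribution on the job's sys.path also provides a top-level galaxy package (as the summary notes galaxy-ops in the toolshed can), whichever copy appears first wins and the driver script may import the wrong one; a unique top-level name sidesteps that. A toy, self-contained demonstration of the shadowing, with no Galaxy code involved:

# Build two fake distributions that both provide a top-level package named
# 'galaxy' and show that only the first one on sys.path is importable.
import os
import sys
import tempfile

root = tempfile.mkdtemp()
for dist, marker in [('dist_a', 'ops'), ('dist_b', 'core')]:
    pkg_dir = os.path.join(root, dist, 'galaxy')
    os.makedirs(pkg_dir)
    with open(os.path.join(pkg_dir, '__init__.py'), 'w') as fh:
        fh.write('WHICH = %r\n' % marker)

sys.path[:0] = [os.path.join(root, 'dist_a'), os.path.join(root, 'dist_b')]
import galaxy
print(galaxy.WHICH)   # 'ops' -- dist_b's package is shadowed by dist_a's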
Affected #: 7 files diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -777,10 +777,10 @@ #return command required to build fd, fp = tempfile.mkstemp( suffix='.py', dir = tmp_dir, prefix = "set_metadata_" ) metadata_script_file = abspath( fp ) - os.fdopen( fd, 'w' ).write( 'from galaxy.metadata.set_metadata import set_metadata; set_metadata()' ) + os.fdopen( fd, 'w' ).write( 'from galaxy_ext.metadata.set_metadata import set_metadata; set_metadata()' ) return "python %s %s" % ( metadata_path_on_compute(metadata_script_file), args ) else: - # return args to galaxy.metadata.set_metadata required to build + # return args to galaxy_ext.metadata.set_metadata required to build return args def external_metadata_set_successfully( self, dataset, sa_session ): diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy/datatypes/registry.py --- a/lib/galaxy/datatypes/registry.py +++ b/lib/galaxy/datatypes/registry.py @@ -660,7 +660,7 @@ <param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value=""/></inputs><configfiles> - <configfile name="set_metadata">from galaxy.metadata.set_metadata import set_metadata; set_metadata()</configfile> + <configfile name="set_metadata">from galaxy_ext.metadata.set_metadata import set_metadata; set_metadata()</configfile></configfiles></tool> """ diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy/metadata/__init__.py --- a/lib/galaxy/metadata/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -""" Work with Galaxy metadata -""" diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy/metadata/set_metadata.py --- a/lib/galaxy/metadata/set_metadata.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -Execute an external process to set_meta() on a provided list of pickled datasets. - -This was formerly scripts/set_metadata.py and expects these arguments: - - %prog datatypes_conf.xml job_metadata_file metadata_in,metadata_kwds,metadata_out,metadata_results_code,output_filename_override,metadata_override... - -Galaxy should be importable on sys.path and output_filename_override should be -set to the path of the dataset on which metadata is being set -(output_filename_override could previously be left empty and the path would be -constructed automatically). 
-""" - -import logging -logging.basicConfig() -log = logging.getLogger( __name__ ) - -import cPickle -import json -import os -import sys - -# ensure supported version -assert sys.version_info[:2] >= ( 2, 6 ) and sys.version_info[:2] <= ( 2, 7 ), 'Python version must be 2.6 or 2.7, this is: %s' % sys.version - -new_path = [ os.path.join( os.getcwd(), "lib" ) ] -new_path.extend( sys.path[ 1: ] ) # remove scripts/ from the path -sys.path = new_path - -from galaxy import eggs -import pkg_resources -import galaxy.model.mapping # need to load this before we unpickle, in order to setup properties assigned by the mappers -galaxy.model.Job() # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here -from galaxy.util import stringify_dictionary_keys -from sqlalchemy.orm import clear_mappers - - -def set_meta_with_tool_provided( dataset_instance, file_dict, set_meta_kwds ): - # This method is somewhat odd, in that we set the metadata attributes from tool, - # then call set_meta, then set metadata attributes from tool again. - # This is intentional due to interplay of overwrite kwd, the fact that some metadata - # parameters may rely on the values of others, and that we are accepting the - # values provided by the tool as Truth. - for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems(): - setattr( dataset_instance.metadata, metadata_name, metadata_value ) - dataset_instance.datatype.set_meta( dataset_instance, **set_meta_kwds ) - for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems(): - setattr( dataset_instance.metadata, metadata_name, metadata_value ) - -def set_metadata(): - # locate galaxy_root for loading datatypes - galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) - galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tool_job_working_directory = os.path.abspath(os.getcwd()) - - # Set up datatypes registry - datatypes_config = sys.argv.pop( 1 ) - datatypes_registry = galaxy.datatypes.registry.Registry() - datatypes_registry.load_datatypes( root_dir=galaxy_root, config=datatypes_config ) - galaxy.model.set_datatypes_registry( datatypes_registry ) - - job_metadata = sys.argv.pop( 1 ) - existing_job_metadata_dict = {} - new_job_metadata_dict = {} - if job_metadata != "None" and os.path.exists( job_metadata ): - for line in open( job_metadata, 'r' ): - try: - line = stringify_dictionary_keys( json.loads( line ) ) - if line['type'] == 'dataset': - existing_job_metadata_dict[ line['dataset_id'] ] = line - elif line['type'] == 'new_primary_dataset': - new_job_metadata_dict[ line[ 'filename' ] ] = line - except: - continue - - for filenames in sys.argv[1:]: - fields = filenames.split( ',' ) - filename_in = fields.pop( 0 ) - filename_kwds = fields.pop( 0 ) - filename_out = fields.pop( 0 ) - filename_results_code = fields.pop( 0 ) - dataset_filename_override = fields.pop( 0 ) - # Need to be careful with the way that these parameters are populated from the filename splitting, - # because if a job is running when the server is updated, any existing external metadata command-lines - #will not have info about the newly added override_metadata file - if fields: - override_metadata = fields.pop( 0 ) - else: - override_metadata = None - set_meta_kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode - 
try: - dataset = cPickle.load( open( filename_in ) ) # load DatasetInstance - dataset.dataset.external_filename = dataset_filename_override - files_path = os.path.abspath(os.path.join( tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id) )) - dataset.dataset.external_extra_files_path = files_path - if dataset.dataset.id in existing_job_metadata_dict: - dataset.extension = existing_job_metadata_dict[ dataset.dataset.id ].get( 'ext', dataset.extension ) - # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles - if override_metadata: - override_metadata = json.load( open( override_metadata ) ) - for metadata_name, metadata_file_override in override_metadata: - if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ): - metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override ) - setattr( dataset.metadata, metadata_name, metadata_file_override ) - file_dict = existing_job_metadata_dict.get( dataset.dataset.id, {} ) - set_meta_with_tool_provided( dataset, file_dict, set_meta_kwds ) - dataset.metadata.to_JSON_dict( filename_out ) # write out results of set_meta - json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) # setting metadata has succeeded - except Exception, e: - json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow - - for i, ( filename, file_dict ) in enumerate( new_job_metadata_dict.iteritems(), start=1 ): - new_dataset = galaxy.model.Dataset( id=-i, external_filename=os.path.join( tool_job_working_directory, file_dict[ 'filename' ] ) ) - extra_files = file_dict.get( 'extra_files', None ) - if extra_files is not None: - new_dataset._extra_files_path = os.path.join( tool_job_working_directory, extra_files ) - new_dataset.state = new_dataset.states.OK - new_dataset_instance = galaxy.model.HistoryDatasetAssociation( id=-i, dataset=new_dataset, extension=file_dict.get( 'ext', 'data' ) ) - set_meta_with_tool_provided( new_dataset_instance, file_dict, set_meta_kwds ) - file_dict[ 'metadata' ] = json.loads( new_dataset_instance.metadata.to_JSON_dict() ) #storing metadata in external form, need to turn back into dict, then later jsonify - if existing_job_metadata_dict or new_job_metadata_dict: - with open( job_metadata, 'wb' ) as job_metadata_fh: - for value in existing_job_metadata_dict.values() + new_job_metadata_dict.values(): - job_metadata_fh.write( "%s\n" % ( json.dumps( value ) ) ) - - clear_mappers() diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy_ext/__init__.py --- /dev/null +++ b/lib/galaxy_ext/__init__.py @@ -0,0 +1,4 @@ +""" Package for things which are loaded from outside Galaxy and which we can be +sure will not conflict with the `galaxy` namespace, which may be provided by +other packages (e.g. galaxy_ops in the toolshed). 
+""" diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy_ext/metadata/__init__.py --- /dev/null +++ b/lib/galaxy_ext/metadata/__init__.py @@ -0,0 +1,2 @@ +""" Work with Galaxy metadata +""" diff -r bb2f159e899d88cdcc5eaec45c9550203ac6d0d7 -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c lib/galaxy_ext/metadata/set_metadata.py --- /dev/null +++ b/lib/galaxy_ext/metadata/set_metadata.py @@ -0,0 +1,125 @@ +""" +Execute an external process to set_meta() on a provided list of pickled datasets. + +This was formerly scripts/set_metadata.py and expects these arguments: + + %prog datatypes_conf.xml job_metadata_file metadata_in,metadata_kwds,metadata_out,metadata_results_code,output_filename_override,metadata_override... + +Galaxy should be importable on sys.path and output_filename_override should be +set to the path of the dataset on which metadata is being set +(output_filename_override could previously be left empty and the path would be +constructed automatically). +""" + +import logging +logging.basicConfig() +log = logging.getLogger( __name__ ) + +import cPickle +import json +import os +import sys + +# ensure supported version +assert sys.version_info[:2] >= ( 2, 6 ) and sys.version_info[:2] <= ( 2, 7 ), 'Python version must be 2.6 or 2.7, this is: %s' % sys.version + +new_path = [ os.path.join( os.getcwd(), "lib" ) ] +new_path.extend( sys.path[ 1: ] ) # remove scripts/ from the path +sys.path = new_path + +from galaxy import eggs +import pkg_resources +import galaxy.model.mapping # need to load this before we unpickle, in order to setup properties assigned by the mappers +galaxy.model.Job() # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here +from galaxy.util import stringify_dictionary_keys +from sqlalchemy.orm import clear_mappers + + +def set_meta_with_tool_provided( dataset_instance, file_dict, set_meta_kwds ): + # This method is somewhat odd, in that we set the metadata attributes from tool, + # then call set_meta, then set metadata attributes from tool again. + # This is intentional due to interplay of overwrite kwd, the fact that some metadata + # parameters may rely on the values of others, and that we are accepting the + # values provided by the tool as Truth. 
+ for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems(): + setattr( dataset_instance.metadata, metadata_name, metadata_value ) + dataset_instance.datatype.set_meta( dataset_instance, **set_meta_kwds ) + for metadata_name, metadata_value in file_dict.get( 'metadata', {} ).iteritems(): + setattr( dataset_instance.metadata, metadata_name, metadata_value ) + +def set_metadata(): + # locate galaxy_root for loading datatypes + galaxy_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir)) + galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tool_job_working_directory = os.path.abspath(os.getcwd()) + + # Set up datatypes registry + datatypes_config = sys.argv.pop( 1 ) + datatypes_registry = galaxy.datatypes.registry.Registry() + datatypes_registry.load_datatypes( root_dir=galaxy_root, config=datatypes_config ) + galaxy.model.set_datatypes_registry( datatypes_registry ) + + job_metadata = sys.argv.pop( 1 ) + existing_job_metadata_dict = {} + new_job_metadata_dict = {} + if job_metadata != "None" and os.path.exists( job_metadata ): + for line in open( job_metadata, 'r' ): + try: + line = stringify_dictionary_keys( json.loads( line ) ) + if line['type'] == 'dataset': + existing_job_metadata_dict[ line['dataset_id'] ] = line + elif line['type'] == 'new_primary_dataset': + new_job_metadata_dict[ line[ 'filename' ] ] = line + except: + continue + + for filenames in sys.argv[1:]: + fields = filenames.split( ',' ) + filename_in = fields.pop( 0 ) + filename_kwds = fields.pop( 0 ) + filename_out = fields.pop( 0 ) + filename_results_code = fields.pop( 0 ) + dataset_filename_override = fields.pop( 0 ) + # Need to be careful with the way that these parameters are populated from the filename splitting, + # because if a job is running when the server is updated, any existing external metadata command-lines + #will not have info about the newly added override_metadata file + if fields: + override_metadata = fields.pop( 0 ) + else: + override_metadata = None + set_meta_kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode + try: + dataset = cPickle.load( open( filename_in ) ) # load DatasetInstance + dataset.dataset.external_filename = dataset_filename_override + files_path = os.path.abspath(os.path.join( tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id) )) + dataset.dataset.external_extra_files_path = files_path + if dataset.dataset.id in existing_job_metadata_dict: + dataset.extension = existing_job_metadata_dict[ dataset.dataset.id ].get( 'ext', dataset.extension ) + # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles + if override_metadata: + override_metadata = json.load( open( override_metadata ) ) + for metadata_name, metadata_file_override in override_metadata: + if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ): + metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override ) + setattr( dataset.metadata, metadata_name, metadata_file_override ) + file_dict = existing_job_metadata_dict.get( dataset.dataset.id, {} ) + set_meta_with_tool_provided( dataset, file_dict, set_meta_kwds ) + dataset.metadata.to_JSON_dict( filename_out ) # write out results of set_meta + json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) # setting metadata has 
succeeded + except Exception, e: + json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow + + for i, ( filename, file_dict ) in enumerate( new_job_metadata_dict.iteritems(), start=1 ): + new_dataset = galaxy.model.Dataset( id=-i, external_filename=os.path.join( tool_job_working_directory, file_dict[ 'filename' ] ) ) + extra_files = file_dict.get( 'extra_files', None ) + if extra_files is not None: + new_dataset._extra_files_path = os.path.join( tool_job_working_directory, extra_files ) + new_dataset.state = new_dataset.states.OK + new_dataset_instance = galaxy.model.HistoryDatasetAssociation( id=-i, dataset=new_dataset, extension=file_dict.get( 'ext', 'data' ) ) + set_meta_with_tool_provided( new_dataset_instance, file_dict, set_meta_kwds ) + file_dict[ 'metadata' ] = json.loads( new_dataset_instance.metadata.to_JSON_dict() ) #storing metadata in external form, need to turn back into dict, then later jsonify + if existing_job_metadata_dict or new_job_metadata_dict: + with open( job_metadata, 'wb' ) as job_metadata_fh: + for value in existing_job_metadata_dict.values() + new_job_metadata_dict.values(): + job_metadata_fh.write( "%s\n" % ( json.dumps( value ) ) ) + + clear_mappers() https://bitbucket.org/galaxy/galaxy-central/commits/439b5c72d17f/ Changeset: 439b5c72d17f Branch: release_15.03 User: natefoo Date: 2015-03-13 18:32:54+00:00 Summary: Ensure that set_metadata always loads the `galaxy` module from the same version of Galaxy it is in. Affected #: 1 file diff -r 03f569f9eb693b41c14e8e6e5b0fa30576905c7c -r 439b5c72d17f6572629ca87f6b199120cb2f74ca lib/galaxy_ext/metadata/set_metadata.py --- a/lib/galaxy_ext/metadata/set_metadata.py +++ b/lib/galaxy_ext/metadata/set_metadata.py @@ -23,9 +23,9 @@ # ensure supported version assert sys.version_info[:2] >= ( 2, 6 ) and sys.version_info[:2] <= ( 2, 7 ), 'Python version must be 2.6 or 2.7, this is: %s' % sys.version -new_path = [ os.path.join( os.getcwd(), "lib" ) ] -new_path.extend( sys.path[ 1: ] ) # remove scripts/ from the path -sys.path = new_path +# insert *this* galaxy before all others on sys.path +new_path = os.path.abspath( os.path.join( os.path.dirname( __file__ ), os.pardir, os.pardir ) ) +sys.path.insert( 0, new_path ) from galaxy import eggs import pkg_resources https://bitbucket.org/galaxy/galaxy-central/commits/3341160d0919/ Changeset: 3341160d0919 Branch: release_15.03 User: natefoo Date: 2015-03-13 20:56:10+00:00 Summary: galaxy.metadata symlink so running jobs do not fail. Affected #: 1 file diff -r 439b5c72d17f6572629ca87f6b199120cb2f74ca -r 3341160d09195487382b2613b7e309eb5aaf08b7 lib/galaxy/metadata --- /dev/null +++ b/lib/galaxy/metadata @@ -0,0 +1,1 @@ +../galaxy_ext/metadata \ No newline at end of file https://bitbucket.org/galaxy/galaxy-central/commits/728bc8fe323b/ Changeset: 728bc8fe323b Branch: release_15.03 User: natefoo Date: 2015-03-16 15:05:37+00:00 Summary: In the metadata tool action, ensure job_working_directory exists before trying to get its path. Otherwise, hierarchical/distributed object stores will be broken. Affected #: 1 file diff -r 3341160d09195487382b2613b7e309eb5aaf08b7 -r 728bc8fe323b879c542ea87e313b2cb9a263b79c lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -70,6 +70,7 @@ # Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)? 
incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state input_paths = [DatasetPath( dataset.id, real_path=dataset.file_name, mutable=False )] + app.object_store.create(job, base_dir='job_work', dir_only=True, extra_dir=str(job.id)) job_working_dir = app.object_store.get_filename(job, base_dir='job_work', dir_only=True, extra_dir=str(job.id)) external_metadata_wrapper = JobExternalOutputMetadataWrapper( job ) cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, https://bitbucket.org/galaxy/galaxy-central/commits/4733f50fccca/ Changeset: 4733f50fccca Branch: release_15.03 User: natefoo Date: 2015-03-16 19:18:53+00:00 Summary: Because __call_method was private, DistributedObjectStore's __call_method was never being called, only HierarchicalObjectStore's. . Make it pseudo-protected by renaming to _call_method. Affected #: 1 file diff -r 728bc8fe323b879c542ea87e313b2cb9a263b79c -r 4733f50fccca4328eb544f44721ce3ec6315ec09 lib/galaxy/objectstore/__init__.py --- a/lib/galaxy/objectstore/__init__.py +++ b/lib/galaxy/objectstore/__init__.py @@ -392,39 +392,39 @@ super(NestedObjectStore, self).shutdown() def exists(self, obj, **kwargs): - return self.__call_method('exists', obj, False, False, **kwargs) + return self._call_method('exists', obj, False, False, **kwargs) def file_ready(self, obj, **kwargs): - return self.__call_method('file_ready', obj, False, False, **kwargs) + return self._call_method('file_ready', obj, False, False, **kwargs) def create(self, obj, **kwargs): random.choice(self.backends.values()).create(obj, **kwargs) def empty(self, obj, **kwargs): - return self.__call_method('empty', obj, True, False, **kwargs) + return self._call_method('empty', obj, True, False, **kwargs) def size(self, obj, **kwargs): - return self.__call_method('size', obj, 0, False, **kwargs) + return self._call_method('size', obj, 0, False, **kwargs) def delete(self, obj, **kwargs): - return self.__call_method('delete', obj, False, False, **kwargs) + return self._call_method('delete', obj, False, False, **kwargs) def get_data(self, obj, **kwargs): - return self.__call_method('get_data', obj, ObjectNotFound, True, **kwargs) + return self._call_method('get_data', obj, ObjectNotFound, True, **kwargs) def get_filename(self, obj, **kwargs): - return self.__call_method('get_filename', obj, ObjectNotFound, True, **kwargs) + return self._call_method('get_filename', obj, ObjectNotFound, True, **kwargs) def update_from_file(self, obj, **kwargs): if kwargs.get('create', False): self.create(obj, **kwargs) kwargs['create'] = False - return self.__call_method('update_from_file', obj, ObjectNotFound, True, **kwargs) + return self._call_method('update_from_file', obj, ObjectNotFound, True, **kwargs) def get_object_url(self, obj, **kwargs): - return self.__call_method('get_object_url', obj, None, False, **kwargs) + return self._call_method('get_object_url', obj, None, False, **kwargs) - def __call_method(self, method, obj, default, default_is_exception, **kwargs): + def _call_method(self, method, obj, default, default_is_exception, **kwargs): """ Check all children object stores for the first one with the dataset """ @@ -432,7 +432,7 @@ if store.exists(obj, **kwargs): return store.__getattribute__(method)(obj, **kwargs) if default_is_exception: - raise default( 'objectstore, __call_method failed: %s on %s, kwargs: %s' + raise default( 'objectstore, _call_method failed: %s on %s, kwargs: %s' % ( method, str( obj ), str( kwargs ) ) ) else: return default @@ -535,12 +535,12 @@ log.debug("Using preferred backend 
'%s' for creation of %s %s" % (obj.object_store_id, obj.__class__.__name__, obj.id)) self.backends[obj.object_store_id].create(obj, **kwargs) - def __call_method(self, method, obj, default, default_is_exception, **kwargs): + def _call_method(self, method, obj, default, default_is_exception, **kwargs): object_store_id = self.__get_store_id_for(obj, **kwargs) if object_store_id is not None: return self.backends[object_store_id].__getattribute__(method)(obj, **kwargs) if default_is_exception: - raise default( 'objectstore, __call_method failed: %s on %s, kwargs: %s' + raise default( 'objectstore, _call_method failed: %s on %s, kwargs: %s' % ( method, str( obj ), str( kwargs ) ) ) else: return default https://bitbucket.org/galaxy/galaxy-central/commits/589af3c3c437/ Changeset: 589af3c3c437 Branch: release_15.03 User: dannon Date: 2015-03-16 20:28:20+00:00 Summary: Don't log exception to errormsg on workflow import failure. Affected #: 1 file diff -r 4733f50fccca4328eb544f44721ce3ec6315ec09 -r 589af3c3c43703a0c6b3f3d69542658c3b958b10 lib/galaxy/webapps/galaxy/controllers/workflow.py --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -1,6 +1,7 @@ import base64 import httplib import json +import logging import os import sgmllib import urllib2 @@ -41,6 +42,8 @@ from galaxy.workflow.render import WorkflowCanvas, MARGIN, LINE_SPACING from markupsafe import escape +log = logging.getLogger( __name__ ) + class StoredWorkflowListGrid( grids.Grid ): @@ -895,8 +898,9 @@ data = json.loads( workflow_data ) except Exception, e: data = None - message = "The data content does not appear to be a Galaxy workflow.<br/>Exception: %s" % str( e ) + message = "The data content does not appear to be a Galaxy workflow." status = 'error' + log.exception("Error importing workflow.") if data: # Create workflow if possible. If a required tool is not available in the local # Galaxy instance, the tool information will be available in the step_dict. https://bitbucket.org/galaxy/galaxy-central/commits/274bc3b9763c/ Changeset: 274bc3b9763c Branch: release_15.03 User: dannon Date: 2015-03-16 22:21:08+00:00 Summary: Add information about making history accessible to initial export archive message. Affected #: 1 file diff -r 589af3c3c43703a0c6b3f3d69542658c3b958b10 -r 274bc3b9763cb3956af678cf5043561641e27200 lib/galaxy/webapps/galaxy/controllers/history.py --- a/lib/galaxy/webapps/galaxy/controllers/history.py +++ b/lib/galaxy/webapps/galaxy/controllers/history.py @@ -1233,33 +1233,28 @@ # Use current history. history = trans.history id = trans.security.encode_id( history.id ) - if not history: return trans.show_error_message( "This history does not exist or you cannot export this history." ) - - # # If history has already been exported and it has not changed since export, stream it. - # jeha = history.latest_export if jeha and jeha.up_to_date: if jeha.ready: if preview: url = url_for( controller='history', action="export_archive", id=id, qualified=True ) - return trans.show_message( "History Ready: '%(n)s'. Use this link to download \ - the archive or import it to another Galaxy server: \ - <a href='%(u)s'>%(u)s</a>" % ( { 'n' : history.name, 'u' : url } ) ) + return trans.show_message( "History Ready: '%(n)s'. 
Use this link to download " + "the archive or import it to another Galaxy server: " + "<a href='%(u)s'>%(u)s</a>" % ( { 'n': history.name, 'u': url } ) ) else: return self.serve_ready_history_export( trans, jeha ) elif jeha.preparing: - return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" \ - % ( { 'n' : history.name, 's' : url_for( controller='history', action="export_archive", id=id, qualified=True ) } ) ) - + return trans.show_message( "Still exporting history %(n)s; please check back soon. Link: <a href='%(s)s'>%(s)s</a>" + % ( { 'n': history.name, 's': url_for( controller='history', action="export_archive", id=id, qualified=True ) } ) ) self.queue_history_export( trans, history, gzip=gzip, include_hidden=include_hidden, include_deleted=include_deleted ) url = url_for( controller='history', action="export_archive", id=id, qualified=True ) - return trans.show_message( "Exporting History '%(n)s'. Use this link to download \ - the archive or import it to another Galaxy server: \ - <a href='%(u)s'>%(u)s</a>" % ( { 'n' : history.name, 'u' : url } ) ) - #TODO: used in this file and index.mako + return trans.show_message( "Exporting History '%(n)s'. You will need to <a href='%(share)s'>make this history 'accessible'</a> in order to import this to another galaxy sever. <br/>" + "Use this link to download the archive or import it to another Galaxy server: " + "<a href='%(u)s'>%(u)s</a>" % ( { 'share': url_for(controller='history', action='sharing'), 'n': history.name, 'u': url } ) ) + # TODO: used in this file and index.mako @web.expose @web.json https://bitbucket.org/galaxy/galaxy-central/commits/01412241542a/ Changeset: 01412241542a Branch: release_15.03 User: natefoo Date: 2015-03-17 03:24:47+00:00 Summary: Bump version to 15.03.1 Affected #: 1 file diff -r 274bc3b9763cb3956af678cf5043561641e27200 -r 01412241542ab4f3b0fa67139394b30277d5be49 lib/galaxy/version.py --- a/lib/galaxy/version.py +++ b/lib/galaxy/version.py @@ -1,3 +1,3 @@ VERSION_MAJOR = "15.03" -VERSION_MINOR = None +VERSION_MINOR = "1" VERSION = VERSION_MAJOR + ('.' + VERSION_MINOR if VERSION_MINOR else '') Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
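A few of the changesets above are easier to follow with a short illustration.

First, the sys.path change in 439b5c72d17f. The sketch below is a minimal restatement of the idiom, assuming (as in that changeset) the module lives at lib/galaxy_ext/metadata/ inside a Galaxy checkout: the old code anchored the path on os.getcwd(), which for an external metadata job is the job working directory and need not belong to the Galaxy that shipped the script, while the new code anchors on __file__, so `import galaxy` always resolves to the same checkout the script came from.

import os
import sys

# old behaviour: whatever ./lib is relative to the current working directory
cwd_lib = os.path.join(os.getcwd(), 'lib')

# the fix: lib/ is two directories above this module's own location
own_lib = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))

# putting it first on sys.path makes `import galaxy` resolve inside own_lib
sys.path.insert(0, own_lib)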
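Second, the object store change in 728bc8fe323b. The sketch below is a toy, not the Galaxy ObjectStore API (ToyDistributedStore and its backends dict are invented for illustration): it only shows why get_filename() on a distributed store must be preceded by create() -- the store locates an object by asking each backend whether it already holds it, so a path lookup for a job working directory that was never created has nothing to find and raises ObjectNotFound.

class ObjectNotFound(Exception):
    pass

class ToyDistributedStore(object):
    def __init__(self, backends):
        # backend name -> set of object ids that backend holds
        self.backends = backends

    def create(self, obj):
        # place the new object on some backend (first one, for simplicity)
        backend = sorted(self.backends)[0]
        self.backends[backend].add(obj)

    def get_filename(self, obj):
        # find the backend that holds obj; fail if none does
        for name, contents in self.backends.items():
            if obj in contents:
                return '/%s/%s' % (name, obj)
        raise ObjectNotFound(obj)

store = ToyDistributedStore({'disk1': set(), 'disk2': set()})
try:
    store.get_filename('job_42_working_dir')     # no create() yet -> ObjectNotFound
except ObjectNotFound:
    pass
store.create('job_42_working_dir')               # the fix: create first ...
print(store.get_filename('job_42_working_dir'))  # ... then the path lookup succeeds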
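Finally, the __call_method rename in 4733f50fccca comes down to Python name mangling: inside a class body, self.__call_method is rewritten to self._ClassName__call_method, so a base class that calls its own double-underscore method never dispatches to a subclass's version, whereas a single leading underscore is overridden normally. A self-contained sketch, using invented stand-in names rather than the real NestedObjectStore/DistributedObjectStore classes:

class NestedStore(object):
    def exists(self, obj):
        # mangled to self._NestedStore__call_method at compile time
        return self.__call_method('exists', obj)

    def __call_method(self, method, obj):
        return 'NestedStore.__call_method'

class DistributedStore(NestedStore):
    def __call_method(self, method, obj):
        # mangled to _DistributedStore__call_method -- never reached via exists()
        return 'DistributedStore.__call_method'

class FixedNestedStore(object):
    def exists(self, obj):
        return self._call_method('exists', obj)

    def _call_method(self, method, obj):
        return 'FixedNestedStore._call_method'

class FixedDistributedStore(FixedNestedStore):
    def _call_method(self, method, obj):
        return 'FixedDistributedStore._call_method'

print(DistributedStore().exists(None))       # -> NestedStore.__call_method (the bug)
print(FixedDistributedStore().exists(None))  # -> FixedDistributedStore._call_method (the fix)

Running the sketch prints NestedStore.__call_method for the first pair and FixedDistributedStore._call_method for the second, which is exactly the behaviour difference the rename buys the distributed and hierarchical stores.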