1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0741ac889c4b/
changeset: 0741ac889c4b
user: dannon
date: 2012-11-19 23:05:34
summary: Update README to show a compatible Python version as the output of python -V.
affected #: 1 file
diff -r 8155a8ff8dd4f8aaabe4cee31ca0415c39302ed3 -r 0741ac889c4b85286a9baadcf6110f3e052a79f0 README.txt
--- a/README.txt
+++ b/README.txt
@@ -10,7 +10,7 @@
Galaxy requires Python 2.5, 2.6 or 2.7. To check your python version, run:
% python -V
-Python 2.4.4
+Python 2.7.3
Start Galaxy:
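For reference, the same bound can be checked programmatically; a minimal sketch (not part of this commit; the 2.5-2.7 range comes from the README line quoted above):

    import sys

    # Galaxy's README allows Python 2.5, 2.6 or 2.7 -- fail fast otherwise.
    if not ( (2, 5) <= sys.version_info[:2] <= (2, 7) ):
        raise SystemExit( "Galaxy requires Python 2.5, 2.6 or 2.7; found %s"
                          % ".".join( map( str, sys.version_info[:3] ) ) )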
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8155a8ff8dd4/
changeset: 8155a8ff8dd4
user: dannon
date: 2012-11-19 19:02:12
summary: Require pexpect -- previously not explicitly required
affected #: 1 file
diff -r 886d8ada4a2215aa4b69244908cb4859352a2f03 -r 8155a8ff8dd4f8aaabe4cee31ca0415c39302ed3 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -5,9 +5,10 @@
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import pkg_resources, os, errno, codecs, operator, socket, pexpect, logging, time
-pkg_resources.require( "simplejson" )
-import simplejson
+import pkg_resources
+pkg_resources.require("simplejson")
+pkg_resources.require("pexpect")
+import simplejson, os, errno, codecs, operator, socket, pexpect, logging, time
from sqlalchemy.orm import object_session
from sqlalchemy.sql.expression import func
import galaxy.datatypes
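The ordering is the point of this change: pkg_resources.require() has to run before the import so setuptools can resolve Galaxy's bundled egg first. A minimal sketch of the pattern, using only the standard setuptools API:

    import pkg_resources

    # require() locates the named distribution (and its dependencies),
    # adds the matching egg to sys.path, and raises DistributionNotFound
    # if no suitable version exists -- all before the import executes.
    pkg_resources.require( "pexpect" )
    import pexpect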
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/886d8ada4a22/
changeset: 886d8ada4a22
user: dannon
date: 2012-11-19 18:54:46
summary: Refactor imports to remove 'import *' usage, remove unused variables and dead code, and strip trailing whitespace.
affected #: 1 file
diff -r 950544a9b858d86bd1f3ff51c7433a90d2e89bfb -r 886d8ada4a2215aa4b69244908cb4859352a2f03 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -4,24 +4,22 @@
Naming: try to use class names that have a distinct plural form so that
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import pkg_resources
+
+import pkg_resources, os, errno, codecs, operator, socket, pexpect, logging, time
pkg_resources.require( "simplejson" )
import simplejson
+from sqlalchemy.orm import object_session
+from sqlalchemy.sql.expression import func
import galaxy.datatypes
-from galaxy.util.bunch import Bunch
-from galaxy import util
import galaxy.datatypes.registry
from galaxy.datatypes.metadata import MetadataCollection
-from galaxy.security import RBACAgent, get_permitted_actions
-from galaxy.util.hash_util import *
-from galaxy.web.form_builder import *
+from galaxy.security import get_permitted_actions
+from galaxy import util
+from galaxy.util.bunch import Bunch
+from galaxy.util.hash_util import new_secure_hash
+from galaxy.web.form_builder import (AddressField, CheckboxField, PasswordField, SelectField, TextArea, TextField,
+ WorkflowField, WorkflowMappingField, HistoryField)
from galaxy.model.item_attrs import UsesAnnotations, APIItem
-from sqlalchemy.orm import object_session
-from sqlalchemy.sql.expression import func
-import os.path, os, errno, codecs, operator, socket, pexpect, logging, time, shutil
-
-if sys.version_info[:2] < ( 2, 5 ):
- from sets import Set as set
log = logging.getLogger( __name__ )
@@ -138,13 +136,13 @@
self.exit_code = None
# TODO: Add accessors for members defined in SQL Alchemy for the Job table and
- # for the mapper defined to the Job table.
+ # for the mapper defined to the Job table.
def get_external_output_metadata( self ):
"""
- The external_output_metadata is currently a reference from Job to
+ The external_output_metadata is currently a reference from Job to
JobExternalOutputMetadata. It exists for a job but not a task.
"""
- return self.external_output_metadata
+ return self.external_output_metadata
def get_session_id( self ):
return self.session_id
def get_user_id( self ):
@@ -177,7 +175,7 @@
# runner_name is not the same thing.
return self.job_runner_name
def get_job_runner_external_id( self ):
- # This is different from the Task just in the member accessed:
+ # This is different from the Task just in the member accessed:
return self.job_runner_external_id
def get_post_job_actions( self ):
return self.post_job_actions
@@ -197,10 +195,10 @@
# The tasks member is pert of a reference in the SQL Alchemy schema:
return self.tasks
def get_id_tag( self ):
- """
- Return a tag that can be useful in identifying a Job.
+ """
+ Return a tag that can be useful in identifying a Job.
This returns the Job's get_id
- """
+ """
return "%s" % self.id;
def set_session_id( self, session_id ):
@@ -324,8 +322,8 @@
self.task_runner_name = None
self.task_runner_external_id = None
self.job = job
- self.stdout = ""
- self.stderr = ""
+ self.stdout = ""
+ self.stderr = ""
self.exit_code = None
self.prepare_input_files_cmd = prepare_files_cmd
@@ -340,8 +338,8 @@
return param_dict
def get_id( self ):
- # This is defined in the SQL Alchemy schema:
- return self.id
+ # This is defined in the SQL Alchemy schema:
+ return self.id
def get_id_tag( self ):
"""
Return an id tag suitable for identifying the task.
@@ -378,7 +376,7 @@
# metdata). These can be filled in as needed.
def get_external_output_metadata( self ):
"""
- The external_output_metadata is currently a backref to
+ The external_output_metadata is currently a backref to
JobExternalOutputMetadata. It exists for a job but not a task,
and when a task is cancelled its corresponding parent Job will
be cancelled. So None is returned now, but that could be changed
@@ -395,13 +393,13 @@
"""
Runners will use the same methods to get information about the Task
class as they will about the Job class, so this method just returns
- the task's external id.
+ the task's external id.
"""
# TODO: Merge into get_runner_external_id.
return self.task_runner_external_id
def get_session_id( self ):
# The Job's galaxy session is equal to the Job's session, so the
- # Job's session is the same as the Task's session.
+ # Job's session is the same as the Task's session.
return self.get_job().get_session_id()
def set_id( self, id ):
@@ -424,7 +422,7 @@
# This method is available for runners that do not want/need to
# differentiate between the kinds of Runnable things (Jobs and Tasks)
# that they're using.
- log.debug( "Task %d: Set external id to %s"
+ log.debug( "Task %d: Set external id to %s"
% ( self.id, task_runner_external_id ) )
self.task_runner_external_id = task_runner_external_id
def set_task_runner_external_id( self, task_runner_external_id ):
@@ -954,7 +952,7 @@
return False
try:
return util.is_multi_byte( codecs.open( self.file_name, 'r', 'utf-8' ).read( 100 ) )
- except UnicodeDecodeError, e:
+ except UnicodeDecodeError:
return False
# FIXME: sqlalchemy will replace this
def _delete(self):
@@ -1136,7 +1134,6 @@
"""
Returns dict of { "dependency" => HDA }
"""
- converted_dataset = self.get_converted_files_by_type( target_ext )
# List of string of dependencies
try:
depends_list = trans.app.datatypes_registry.converter_deps[self.extension][target_ext]
@@ -1307,7 +1304,7 @@
"""
Returns datasources for dataset; if datasources are not available
due to indexing, indexing is started. Return value is a dictionary
- with entries of type
+ with entries of type
(<datasource_type> : {<datasource_name>, <indexing_message>}).
"""
track_type, data_sources = self.datatype.get_track_type()
@@ -1320,17 +1317,17 @@
else:
# Convert.
msg = self.convert_dataset( trans, data_source )
-
+
# Store msg.
data_sources_dict[ source_type ] = { "name" : data_source, "message": msg }
-
+
return data_sources_dict
def convert_dataset( self, trans, target_type ):
"""
- Converts a dataset to the target_type and returns a message indicating
+ Converts a dataset to the target_type and returns a message indicating
status of the conversion. None is returned to indicate that dataset
- was converted successfully.
+ was converted successfully.
"""
# FIXME: copied from controller.py
@@ -1402,7 +1399,7 @@
hda.metadata = self.metadata
if copy_children:
for child in self.children:
- child_copy = child.copy( copy_children = copy_children, parent_id = hda.id )
+ child.copy( copy_children = copy_children, parent_id = hda.id )
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
hda.set_peek()
@@ -1454,11 +1451,11 @@
object_session( self ).add( library_dataset )
object_session( self ).flush()
for child in self.children:
- child_copy = child.to_library_dataset_dataset_association( trans,
- target_folder=target_folder,
- replace_dataset=replace_dataset,
- parent_id=ldda.id,
- user=ldda.user )
+ child.to_library_dataset_dataset_association( trans,
+ target_folder=target_folder,
+ replace_dataset=replace_dataset,
+ parent_id=ldda.id,
+ user=ldda.user )
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
@@ -1808,7 +1805,7 @@
if add_to_history and target_history:
target_history.add_dataset( hda )
for child in self.children:
- child_copy = child.to_history_dataset_association( target_history = target_history, parent_id = hda.id, add_to_history = False )
+ child.to_history_dataset_association( target_history = target_history, parent_id = hda.id, add_to_history = False )
if not self.datatype.copy_safe_peek:
hda.set_peek() #in some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
object_session( self ).flush()
@@ -1833,7 +1830,7 @@
ldda.metadata = self.metadata
if copy_children:
for child in self.children:
- child_copy = child.copy( copy_children = copy_children, parent_id = ldda.id )
+ child.copy( copy_children = copy_children, parent_id = ldda.id )
if not self.datatype.copy_safe_peek:
# In some instances peek relies on dataset_id, i.e. gmaj.zip for viewing MAFs
ldda.set_peek()
@@ -2640,7 +2637,7 @@
events={ '.ssword:*': scp_configs['password']+'\r\n',
pexpect.TIMEOUT:print_ticks},
timeout=10 )
- except Exception, e:
+ except Exception:
return error_msg
# cleanup the output to get just the file size
return output.replace( filepath, '' )\
@@ -3280,7 +3277,6 @@
.first()
return None
def get_versions( self, app ):
- sa_session = app.model.context.current
tool_versions = []
# Prepend ancestors.
def __ancestors( app, tool_version ):
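Two of the patterns removed in this changeset generalize beyond this file; a small standalone sketch (the module and names below are chosen for illustration, not taken from Galaxy):

    # Wildcard imports like "from galaxy.web.form_builder import *" hide
    # where names come from; prefer importing exactly what is used:
    from os.path import exists, join

    # When the bound exception value is unused, drop the binding entirely;
    # the old "except ValueError, e:" spelling is also a syntax error on
    # Python 3, while the bare form works everywhere.
    try:
        int( "not a number" )
    except ValueError:
        pass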
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/950544a9b858/
changeset: 950544a9b858
user: carlfeberhard
date: 2012-11-19 18:18:10
summary: history panel: no-op on ajax errors when fetching history updates is interrupted by an iframe refresh.
affected #: 2 files
diff -r 9b27168c94423a91e0dc0249bb9453adc9532610 -r 950544a9b858d86bd1f3ff51c7433a90d2e89bfb static/scripts/mvc/history/history-model.js
--- a/static/scripts/mvc/history/history-model.js
+++ b/static/scripts/mvc/history/history-model.js
@@ -184,10 +184,14 @@
}
}).error( function( xhr, status, error ){
- if( console && console.warn ){
- console.warn( 'Error getting history updates from the server:', xhr, status, error );
+ // if not interruption by iframe reload
+ //TODO: remove when iframes are removed
+ if( !( ( xhr.readyState === 0 ) && ( xhr.status === 0 ) ) ){
+ if( console && console.warn ){
+ console.warn( 'Error getting history updates from the server:', xhr, status, error );
+ }
+ alert( _l( 'Error getting history updates from the server.' ) + '\n' + error );
}
- alert( _l( 'Error getting history updates from the server.' ) + '\n' + error );
});
},
diff -r 9b27168c94423a91e0dc0249bb9453adc9532610 -r 950544a9b858d86bd1f3ff51c7433a90d2e89bfb static/scripts/packed/mvc/history/history-model.js
--- a/static/scripts/packed/mvc/history/history-model.js
+++ b/static/scripts/packed/mvc/history/history-model.js
@@ -1,1 +1,1 @@
-var History=BaseModel.extend(LoggableMixin).extend({defaults:{id:"",name:"",state:"",diskSize:0,deleted:false,annotation:null,message:null},urlRoot:"api/histories/",url:function(){return"api/histories/"+this.get("id")},initialize:function(a,b){this.log(this+".initialize:",a,b);this.hdas=new HDACollection();if(b&&b.length){this.hdas.reset(b);this.checkForUpdates()}},loadFromApi:function(a,c){var b=this;b.attributes.id=a;jQuery.when(jQuery.ajax("api/users/current"),b.fetch()).then(function(e,d){b.attributes.user=e[0];b.trigger("loaded:user",e[0]);b.trigger("loaded",d[0])}).then(function(){jQuery.ajax(b.url()+"/contents?"+jQuery.param({ids:b.hdaIdsFromStateIds().join(",")})).success(function(d){b.hdas.reset(d);b.checkForUpdates();b.trigger("loaded:hdas",d);if(c){callback(b)}})})},hdaIdsFromStateIds:function(){return _.reduce(_.values(this.get("state_ids")),function(b,a){return b.concat(a)})},checkForUpdates:function(a){if(this.hdas.running().length){this.stateUpdater()}else{this.trigger("ready")}return this},stateUpdater:function(){var c=this,a=this.get("state"),b=this.get("state_ids");jQuery.ajax("api/histories/"+this.get("id")).success(function(d){c.set(d);c.log("current history state:",c.get("state"),"(was)",a,"new size:",c.get("nice_size"));var e=[];_.each(_.keys(d.state_ids),function(g){var f=_.difference(d.state_ids[g],b[g]);e=e.concat(f)});if(e.length){c.hdas.update(e)}if((c.get("state")===HistoryDatasetAssociation.STATES.RUNNING)||(c.get("state")===HistoryDatasetAssociation.STATES.QUEUED)){setTimeout(function(){c.stateUpdater()},4000)}else{c.trigger("ready")}}).error(function(f,d,e){if(console&&console.warn){console.warn("Error getting history updates from the server:",f,d,e)}alert(_l("Error getting history updates from the server.")+"\n"+e)})},toString:function(){var a=(this.get("name"))?(","+this.get("name")):("");return"History("+this.get("id")+a+")"}});var HistoryCollection=Backbone.Collection.extend(LoggableMixin).extend({model:History,urlRoot:"api/histories"});
\ No newline at end of file
+var History=BaseModel.extend(LoggableMixin).extend({defaults:{id:"",name:"",state:"",diskSize:0,deleted:false,annotation:null,message:null},urlRoot:"api/histories/",url:function(){return"api/histories/"+this.get("id")},initialize:function(a,b){this.log(this+".initialize:",a,b);this.hdas=new HDACollection();if(b&&b.length){this.hdas.reset(b);this.checkForUpdates()}},loadFromApi:function(a,c){var b=this;b.attributes.id=a;jQuery.when(jQuery.ajax("api/users/current"),b.fetch()).then(function(e,d){b.attributes.user=e[0];b.trigger("loaded:user",e[0]);b.trigger("loaded",d[0])}).then(function(){jQuery.ajax(b.url()+"/contents?"+jQuery.param({ids:b.hdaIdsFromStateIds().join(",")})).success(function(d){b.hdas.reset(d);b.checkForUpdates();b.trigger("loaded:hdas",d);if(c){callback(b)}})})},hdaIdsFromStateIds:function(){return _.reduce(_.values(this.get("state_ids")),function(b,a){return b.concat(a)})},checkForUpdates:function(a){if(this.hdas.running().length){this.stateUpdater()}else{this.trigger("ready")}return this},stateUpdater:function(){var c=this,a=this.get("state"),b=this.get("state_ids");jQuery.ajax("api/histories/"+this.get("id")).success(function(d){c.set(d);c.log("current history state:",c.get("state"),"(was)",a,"new size:",c.get("nice_size"));var e=[];_.each(_.keys(d.state_ids),function(g){var f=_.difference(d.state_ids[g],b[g]);e=e.concat(f)});if(e.length){c.hdas.update(e)}if((c.get("state")===HistoryDatasetAssociation.STATES.RUNNING)||(c.get("state")===HistoryDatasetAssociation.STATES.QUEUED)){setTimeout(function(){c.stateUpdater()},4000)}else{c.trigger("ready")}}).error(function(f,d,e){if(!((f.readyState===0)&&(f.status===0))){if(console&&console.warn){console.warn("Error getting history updates from the server:",f,d,e)}alert(_l("Error getting history updates from the server.")+"\n"+e)}})},toString:function(){var a=(this.get("name"))?(","+this.get("name")):("");return"History("+this.get("id")+a+")"}});var HistoryCollection=Backbone.Collection.extend(LoggableMixin).extend({model:History,urlRoot:"api/histories"});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9b27168c9442/
changeset: 9b27168c9442
user: carlfeberhard
date: 2012-11-19 17:30:35
summary: sphinx-autodoc: better formatting for metadata_spec
affected #: 3 files
diff -r 19cbbaf566216cb46ecc6a6d17e0f1e0ab52978e -r 9b27168c94423a91e0dc0249bb9453adc9532610 lib/galaxy/datatypes/assembly.py
--- a/lib/galaxy/datatypes/assembly.py
+++ b/lib/galaxy/datatypes/assembly.py
@@ -225,4 +225,3 @@
if __name__ == '__main__':
import doctest, sys
doctest.testmod(sys.modules[__name__])
-
diff -r 19cbbaf566216cb46ecc6a6d17e0f1e0ab52978e -r 9b27168c94423a91e0dc0249bb9453adc9532610 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -69,6 +69,9 @@
<class 'galaxy.datatypes.metadata.MetadataParameter'>
"""
+ #: dictionary of metadata fields for this datatype::
+ metadata_spec = None
+
__metaclass__ = DataMeta
# Add metadata elements
MetadataElement( name="dbkey", desc="Database/Build", default="?", param=metadata.DBKeyParameter, multiple=False, no_value="?" )
@@ -849,4 +852,3 @@
except UnicodeDecodeError:
text = "binary/unknown file"
return text
-
diff -r 19cbbaf566216cb46ecc6a6d17e0f1e0ab52978e -r 9b27168c94423a91e0dc0249bb9453adc9532610 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -123,6 +123,7 @@
def __getstate__( self ):
return None #cannot pickle a weakref item (self._parent), when data._metadata_collection is None, it will be recreated on demand
+
class MetadataSpecCollection( odict ):
"""
A simple extension of dict which allows cleaner access to items
@@ -132,13 +133,21 @@
"""
def __init__( self, dict = None ):
odict.__init__( self, dict = None )
+
def append( self, item ):
self[item.name] = item
+
def iter( self ):
return self.itervalues()
+
def __getattr__( self, name ):
return self.get( name )
+ def __repr__( self ):
+ # force elements to draw with __str__ for sphinx-apidoc
+ return ', '.join([ item.__str__() for item in self.iter() ])
+
+
class MetadataParameter( object ):
def __init__( self, spec ):
self.spec = spec
@@ -185,7 +194,6 @@
"""
pass
-
def unwrap( self, form_value ):
"""
Turns a value into its storable form.
@@ -205,19 +213,22 @@
Turns a value read from an external dict into its value to be pushed directly into the metadata dict.
"""
return value
+
def to_external_value( self, value ):
"""
Turns a value read from a metadata into its value to be pushed directly into the external dict.
"""
return value
+
class MetadataElementSpec( object ):
"""
Defines a metadata element and adds it to the metadata_spec (which
is a MetadataSpecCollection) of datatype.
"""
-
- def __init__( self, datatype, name=None, desc=None, param=MetadataParameter, default=None, no_value = None, visible=True, set_in_upload = False, **kwargs ):
+ def __init__( self, datatype,
+ name=None, desc=None, param=MetadataParameter, default=None, no_value = None,
+ visible=True, set_in_upload = False, **kwargs ):
self.name = name
self.desc = desc or name
self.default = default
@@ -226,24 +237,37 @@
self.set_in_upload = set_in_upload
# Catch-all, allows for extra attributes to be set
self.__dict__.update(kwargs)
- #set up param last, as it uses values set above
+ # set up param last, as it uses values set above
self.param = param( self )
- datatype.metadata_spec.append( self ) #add spec element to the spec
+ # add spec element to the spec
+ datatype.metadata_spec.append( self )
+
def get( self, name, default=None ):
return self.__dict__.get(name, default)
+
def wrap( self, value ):
"""
Turns a stored value into its usable form.
"""
return self.param.wrap( value )
+
def unwrap( self, value ):
"""
Turns an incoming value into its storable form.
"""
return self.param.unwrap( value )
+ def __str__( self ):
+ #TODO??: assuming param is the class of this MetadataElementSpec - add the plain class name for that
+ spec_dict = dict( param_class=self.param.__class__.__name__ )
+ spec_dict.update( self.__dict__ )
+ return ( "{name} ({param_class}): {desc}, defaults to '{default}'".format( **spec_dict ) )
+
+# create a statement class that, when called,
+# will add a new MetadataElementSpec to a class's metadata_spec
MetadataElement = Statement( MetadataElementSpec )
+
"""
MetadataParameter sub-classes.
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/19cbbaf56621/
changeset: 19cbbaf56621
user: dannon
date: 2012-11-16 21:49:10
summary: Set resumed jobs back to NEW, not QUEUED, so they immediately resume running.
affected #: 1 file
diff -r 243ff2bfbf28fb92f94eca408d063fb5257a6481 -r 19cbbaf566216cb46ecc6a6d17e0f1e0ab52978e lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -702,7 +702,7 @@
for dataset in self.datasets:
job = dataset.creating_job
if job is not None and job.state == Job.states.PAUSED:
- job.set_state(Job.states.QUEUED)
+ job.set_state(Job.states.NEW)
def get_disk_size( self, nice_size=False ):
# unique datasets only
db_session = object_session( self )
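With the None-guard from changeset 7f7171c (below) already in place, the resume logic now reads as follows; the code is from the diffs, with comments added here for explanation:

    def resume_paused_jobs( self ):
        for dataset in self.datasets:
            job = dataset.creating_job
            # Datasets imported from a library were not produced by a job,
            # so creating_job can be None (changeset 7f7171c).
            if job is not None and job.state == Job.states.PAUSED:
                # Per this commit's summary, paused jobs resume running
                # immediately only from NEW; QUEUED left them waiting.
                job.set_state( Job.states.NEW )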
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/243ff2bfbf28/
changeset: 243ff2bfbf28
user: inithello
date: 2012-11-16 21:39:50
summary: Fix for TwillTestCase.wait() with the new client-side histories.
affected #: 1 file
diff -r 8bd9f729fb6a92077b9652690b11877f203902ec -r 243ff2bfbf28fb92f94eca408d063fb5257a6481 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -1199,15 +1199,25 @@
tc.fv( 2, "hgta_doGalaxyQuery", "Send query to Galaxy" )
self.submit_form( button="Send query to Galaxy" )#, **output_params ) #AssertionError: Attempting to set field 'fbQual' to value '['whole']' in form 'None' threw exception: no matching forms! control: <RadioControl(fbQual=[whole, upstreamAll, endAll])>
+ def get_running_datasets( self ):
+ self.visit_url( '/api/histories' )
+ history_id = from_json_string( self.last_page() )[0][ 'id' ]
+ self.visit_url( '/api/histories/%s/contents' % history_id )
+ jsondata = from_json_string( self.last_page() )
+ for history_item in jsondata:
+ self.visit_url( history_item[ 'url' ] )
+ item_json = from_json_string( self.last_page() )
+ if item_json[ 'state' ] in [ 'queued', 'running', 'paused' ]:
+ return True
+ return False
+
def wait( self, maxseconds=120 ):
"""Waits for the tools to finish"""
sleep_amount = 0.1
slept = 0
self.home()
while slept <= maxseconds:
- self.visit_page( "history" )
- page = tc.browser.get_html()
- if page.find( '<!-- running: do not change this comment, used by TwillTestCase.wait -->' ) > -1:
+ if self.get_running_datasets():
time.sleep( sleep_amount )
slept += sleep_amount
sleep_amount *= 2
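The rewritten wait() polls the histories API and backs off exponentially instead of scraping an HTML comment out of the rendered history page. The general pattern, sketched standalone (the probe callable stands in for get_running_datasets above; wait_until_idle is a name invented for this sketch):

    import time

    def wait_until_idle( probe, maxseconds=120 ):
        # probe() returns True while any dataset is queued/running/paused.
        sleep_amount = 0.1
        slept = 0
        while slept <= maxseconds:
            if not probe():
                return
            # Exponential backoff: quick jobs return quickly, long jobs
            # do not hammer the server with requests.
            time.sleep( sleep_amount )
            slept += sleep_amount
            sleep_amount *= 2
        raise AssertionError( "tools still running after %s seconds" % maxseconds )

    wait_until_idle( lambda: False )  # returns at once when nothing is running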
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7f7171c52b1d/
changeset: 7f7171c52b1d
user: natefoo
date: 2012-11-16 21:14:11
summary: Fix unpausing jobs when the history contains datasets imported from a library.
affected #: 1 file
diff -r c7bc628577af7f57b5511b2cc8e1423d5faf31df -r 7f7171c52b1d922fb6471c8ce373fa3e50901206 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -701,7 +701,7 @@
def resume_paused_jobs( self ):
for dataset in self.datasets:
job = dataset.creating_job
- if job.state == Job.states.PAUSED:
+ if job is not None and job.state == Job.states.PAUSED:
job.set_state(Job.states.QUEUED)
def get_disk_size( self, nice_size=False ):
# unique datasets only
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c7bc628577af/
changeset: c7bc628577af
user: greg
date: 2012-11-16 19:36:36
summary: Yet more shed_util refactoring.
affected #: 2 files
diff -r 9e1d821b5c12f56bbfe1923ea1092608054c9d08 -r c7bc628577af7f57b5511b2cc8e1423d5faf31df lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -784,24 +784,6 @@
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( app )
return repository_tools_tups
-def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ):
- """
- This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an
- existing tool dependency was changed (e.g., the version of the dependency was changed) but the tool version for which it is a dependency
- was not changed. In this case, we only want to determine if any of the dependency information defined in original_dependency_dict was
- changed in new_dependency_dict. We don't care if new dependencies were added in new_dependency_dict since they will just be treated as
- missing dependencies for the tool.
- """
- updated_tool_dependency_names = []
- deleted_tool_dependency_names = []
- for original_dependency_key, original_dependency_val_dict in original_dependency_dict.items():
- if original_dependency_key not in new_dependency_dict:
- updated_tool_dependency = update_existing_tool_dependency( app, repository, original_dependency_val_dict, new_dependency_dict )
- if updated_tool_dependency:
- updated_tool_dependency_names.append( updated_tool_dependency.name )
- else:
- deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] )
- return updated_tool_dependency_names, deleted_tool_dependency_names
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ):
"""
Inspect each tool to see if it has any input parameters that are dynamically generated select lists that depend on a .loc file.
@@ -1136,21 +1118,6 @@
trans.sa_session.add( tool_dependency )
trans.sa_session.flush()
return removed, error_message
-def remove_tool_dependency_installation_directory( dependency_install_dir ):
- if os.path.exists( dependency_install_dir ):
- try:
- shutil.rmtree( dependency_install_dir )
- removed = True
- error_message = ''
- log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) )
- except Exception, e:
- removed = False
- error_message = "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) )
- log.debug( error_message )
- else:
- removed = True
- error_message = ''
- return removed, error_message
def to_html_str( text ):
"""Translates the characters in text to an html string"""
translated = []
@@ -1183,65 +1150,6 @@
else:
translated_string = ''
return translated_string
-def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ):
- """
- Update an exsiting tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates
- to an installed tool shed repository. The original_dependency_dict is a single tool dependency definition, an example of which is::
-
- {"name": "bwa",
- "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
- "type": "package",
- "version": "0.6.2"}
-
- The new_dependencies_dict is the dictionary generated by the generate_tool_dependency_metadata method.
- """
- new_tool_dependency = None
- original_name = original_dependency_dict[ 'name' ]
- original_type = original_dependency_dict[ 'type' ]
- original_version = original_dependency_dict[ 'version' ]
- # Locate the appropriate tool_dependency associated with the repository.
- tool_dependency = None
- for tool_dependency in repository.tool_dependencies:
- if tool_dependency.name == original_name and tool_dependency.type == original_type and tool_dependency.version == original_version:
- break
- if tool_dependency and tool_dependency.can_update:
- dependency_install_dir = tool_dependency.installation_directory( app )
- removed_from_disk, error_message = remove_tool_dependency_installation_directory( dependency_install_dir )
- if removed_from_disk:
- sa_session = app.model.context.current
- new_dependency_name = None
- new_dependency_type = None
- new_dependency_version = None
- for new_dependency_key, new_dependency_val_dict in new_dependencies_dict.items():
- # Match on name only, hopefully this will be enough!
- if original_name == new_dependency_val_dict[ 'name' ]:
- new_dependency_name = new_dependency_val_dict[ 'name' ]
- new_dependency_type = new_dependency_val_dict[ 'type' ]
- new_dependency_version = new_dependency_val_dict[ 'version' ]
- break
- if new_dependency_name and new_dependency_type and new_dependency_version:
- # Update all attributes of the tool_dependency record in the database.
- log.debug( "Updating tool dependency '%s' with type '%s' and version '%s' to have new type '%s' and version '%s'." % \
- ( str( tool_dependency.name ),
- str( tool_dependency.type ),
- str( tool_dependency.version ),
- str( new_dependency_type ),
- str( new_dependency_version ) ) )
- tool_dependency.type = new_dependency_type
- tool_dependency.version = new_dependency_version
- tool_dependency.status = app.model.ToolDependency.installation_status.UNINSTALLED
- tool_dependency.error_message = None
- sa_session.add( tool_dependency )
- sa_session.flush()
- new_tool_dependency = tool_dependency
- else:
- # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record
- # from the database.
- log.debug( "Deleting tool dependency with name '%s', type '%s' and version '%s' from the database since it is no longer defined." % \
- ( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ) ) )
- sa_session.delete( tool_dependency )
- sa_session.flush()
- return new_tool_dependency
def update_in_shed_tool_config( app, repository ):
# A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list.
diff -r 9e1d821b5c12f56bbfe1923ea1092608054c9d08 -r c7bc628577af7f57b5511b2cc8e1423d5faf31df lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -884,6 +884,24 @@
relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
return relative_path_to_file
+def handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_dependency_dict, new_dependency_dict ):
+ """
+ This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an
+ existing tool dependency was changed (e.g., the version of the dependency was changed) but the tool version for which it is a dependency
+ was not changed. In this case, we only want to determine if any of the dependency information defined in original_dependency_dict was
+ changed in new_dependency_dict. We don't care if new dependencies were added in new_dependency_dict since they will just be treated as
+ missing dependencies for the tool.
+ """
+ updated_tool_dependency_names = []
+ deleted_tool_dependency_names = []
+ for original_dependency_key, original_dependency_val_dict in original_dependency_dict.items():
+ if original_dependency_key not in new_dependency_dict:
+ updated_tool_dependency = update_existing_tool_dependency( app, repository, original_dependency_val_dict, new_dependency_dict )
+ if updated_tool_dependency:
+ updated_tool_dependency_names.append( updated_tool_dependency.name )
+ else:
+ deleted_tool_dependency_names.append( original_dependency_val_dict[ 'name' ] )
+ return updated_tool_dependency_names, deleted_tool_dependency_names
def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
# Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
message = ''
@@ -978,6 +996,21 @@
shutil.rmtree( dir )
except:
pass
+def remove_tool_dependency_installation_directory( dependency_install_dir ):
+ if os.path.exists( dependency_install_dir ):
+ try:
+ shutil.rmtree( dependency_install_dir )
+ removed = True
+ error_message = ''
+ log.debug( "Removed tool dependency installation directory: %s" % str( dependency_install_dir ) )
+ except Exception, e:
+ removed = False
+ error_message = "Error removing tool dependency installation directory %s: %s" % ( str( dependency_install_dir ), str( e ) )
+ log.debug( error_message )
+ else:
+ removed = True
+ error_message = ''
+ return removed, error_message
def reset_all_metadata_on_installed_repository( trans, id ):
"""Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
repository = get_installed_tool_shed_repository( trans, id )
@@ -1198,6 +1231,65 @@
except:
file_name = fpath
return file_name
+def update_existing_tool_dependency( app, repository, original_dependency_dict, new_dependencies_dict ):
+ """
+ Update an exsiting tool dependency whose definition was updated in a change set pulled by a Galaxy administrator when getting updates
+ to an installed tool shed repository. The original_dependency_dict is a single tool dependency definition, an example of which is::
+
+ {"name": "bwa",
+ "readme": "\\nCompiling BWA requires zlib and libpthread to be present on your system.\\n ",
+ "type": "package",
+ "version": "0.6.2"}
+
+ The new_dependencies_dict is the dictionary generated by the generate_tool_dependency_metadata method.
+ """
+ new_tool_dependency = None
+ original_name = original_dependency_dict[ 'name' ]
+ original_type = original_dependency_dict[ 'type' ]
+ original_version = original_dependency_dict[ 'version' ]
+ # Locate the appropriate tool_dependency associated with the repository.
+ tool_dependency = None
+ for tool_dependency in repository.tool_dependencies:
+ if tool_dependency.name == original_name and tool_dependency.type == original_type and tool_dependency.version == original_version:
+ break
+ if tool_dependency and tool_dependency.can_update:
+ dependency_install_dir = tool_dependency.installation_directory( app )
+ removed_from_disk, error_message = remove_tool_dependency_installation_directory( dependency_install_dir )
+ if removed_from_disk:
+ sa_session = app.model.context.current
+ new_dependency_name = None
+ new_dependency_type = None
+ new_dependency_version = None
+ for new_dependency_key, new_dependency_val_dict in new_dependencies_dict.items():
+ # Match on name only, hopefully this will be enough!
+ if original_name == new_dependency_val_dict[ 'name' ]:
+ new_dependency_name = new_dependency_val_dict[ 'name' ]
+ new_dependency_type = new_dependency_val_dict[ 'type' ]
+ new_dependency_version = new_dependency_val_dict[ 'version' ]
+ break
+ if new_dependency_name and new_dependency_type and new_dependency_version:
+ # Update all attributes of the tool_dependency record in the database.
+ log.debug( "Updating tool dependency '%s' with type '%s' and version '%s' to have new type '%s' and version '%s'." % \
+ ( str( tool_dependency.name ),
+ str( tool_dependency.type ),
+ str( tool_dependency.version ),
+ str( new_dependency_type ),
+ str( new_dependency_version ) ) )
+ tool_dependency.type = new_dependency_type
+ tool_dependency.version = new_dependency_version
+ tool_dependency.status = app.model.ToolDependency.installation_status.UNINSTALLED
+ tool_dependency.error_message = None
+ sa_session.add( tool_dependency )
+ sa_session.flush()
+ new_tool_dependency = tool_dependency
+ else:
+ # We have no new tool dependency definition based on a matching dependency name, so remove the existing tool dependency record
+ # from the database.
+ log.debug( "Deleting tool dependency with name '%s', type '%s' and version '%s' from the database since it is no longer defined." % \
+ ( str( tool_dependency.name ), str( tool_dependency.type ), str( tool_dependency.version ) ) )
+ sa_session.delete( tool_dependency )
+ sa_session.flush()
+ return new_tool_dependency
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
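The docstring above fixes the shape of original_dependency_dict; the heart of update_existing_tool_dependency is a match-by-name pass over the regenerated metadata. A reduced sketch of just that step (the bwa dicts follow the docstring's example; everything else is illustrative):

    original = { "name": "bwa", "type": "package", "version": "0.6.2" }
    new_dependencies = { "bwa/0.7.5": { "name": "bwa", "type": "package", "version": "0.7.5" } }

    # Match on name only, as the code above does; when no name matches,
    # the existing tool_dependency record is deleted instead.
    match = None
    for val_dict in new_dependencies.values():
        if val_dict[ "name" ] == original[ "name" ]:
            match = val_dict
            break
    if match:
        print "update %s: %s -> %s" % ( original[ "name" ], original[ "version" ], match[ "version" ] )
    else:
        print "delete %s" % original[ "name" ]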
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.