details:   http://www.bx.psu.edu/hg/galaxy/rev/1d113c5386da
changeset: 1589:1d113c5386da
user:      Greg Von Kuster <greg@bx.psu.edu>
date:      Thu Oct 30 16:17:46 2008 -0400
description:
Migrate central repo to alchemy 4.
18 file(s) affected in this change:
eggs.ini
lib/galaxy/app.py
lib/galaxy/jobs/__init__.py
lib/galaxy/model/custom_types.py
lib/galaxy/model/mapping.py
lib/galaxy/model/mapping_tests.py
lib/galaxy/model/orm/__init__.py
lib/galaxy/model/orm/ext/__init__.py
lib/galaxy/model/orm/ext/assignmapper.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/controllers/user.py
lib/galaxy/web/controllers/workflow.py
lib/galaxy/web/framework/__init__.py
lib/galaxy/webapps/reports/controllers/jobs.py
lib/galaxy/webapps/reports/controllers/system.py
lib/galaxy/webapps/reports/controllers/users.py
scripts/cleanup_datasets/cleanup_datasets.py
tools/stats/grouping.py
diffs (623 lines):
diff -r ea92290c4e10 -r 1d113c5386da eggs.ini
--- a/eggs.ini Thu Oct 30 16:11:44 2008 -0400
+++ b/eggs.ini Thu Oct 30 16:17:46 2008 -0400
@@ -40,7 +40,7 @@
 PasteScript = 1.3.6
 Routes = 1.6.3
 simplejson = 1.5
-SQLAlchemy = 0.3.11
+SQLAlchemy = 0.4.7p1
 Tempita = 0.1
 twill = 0.9
 WebError = 0.8a
@@ -85,7 +85,7 @@
 PasteScript = http://cheeseshop.python.org/packages/source/P/PasteScript/PasteScript-1.3.6...
 Routes = http://pypi.python.org/packages/source/R/Routes/Routes-1.6.3.tar.gz
 simplejson = http://cheeseshop.python.org/packages/source/s/simplejson/simplejson-1.5.tar...
-SQLAlchemy = http://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.3.11.tar.gz
+SQLAlchemy = http://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-0.4.7p1.tar.g...
 Tempita = http://pypi.python.org/packages/source/T/Tempita/Tempita-0.1.tar.gz
 twill = http://darcs.idyll.org/~t/projects/twill-0.9.tar.gz
 WebError = http://pypi.python.org/packages/source/W/WebError/WebError-0.8a.tar.gz
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/app.py
--- a/lib/galaxy/app.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/app.py Thu Oct 30 16:17:46 2008 -0400
@@ -20,7 +20,7 @@
         if self.config.database_connection:
             db_url = self.config.database_connection
         else:
-            db_url = "sqlite://%s?isolation_level=IMMEDIATE" % self.config.database
+            db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
         # Setup the database engine and ORM
         self.model = galaxy.model.mapping.init( self.config.file_path, db_url,
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Oct 30 16:17:46 2008 -0400
@@ -97,11 +97,10 @@
         model = self.app.model
         # Jobs in the NEW state won't be requeued unless we're tracking in the database
         if not self.track_jobs_in_database:
-            for job in model.Job.select( model.Job.c.state == model.Job.states.NEW ):
+            for job in model.Job.filter( model.Job.c.state==model.Job.states.NEW ).all():
                 log.debug( "no runner: %s is still in new state, adding to the jobs queue" %job.id )
                 self.queue.put( ( job.id, job.tool_id ) )
-        for job in model.Job.select( (model.Job.c.state == model.Job.states.RUNNING)
-                        | (model.Job.c.state == model.Job.states.QUEUED) ):
+        for job in model.Job.filter( (model.Job.c.state == model.Job.states.RUNNING) | (model.Job.c.state == model.Job.states.QUEUED) ).all():
             if job.job_runner_name is not None:
                 # why are we passing the queue to the wrapper?
                 job_wrapper = JobWrapper( job.id, self.app.toolbox.tools_by_id[ job.tool_id ], self )
@@ -136,7 +135,7 @@
         new_jobs = []
         if self.track_jobs_in_database:
             model = self.app.model
-            for j in model.Job.select( model.Job.c.state == model.Job.states.NEW ):
+            for j in model.Job.filter( model.Job.c.state==model.Job.states.NEW ).all():
                 job = JobWrapper( j.id, self.app.toolbox.tools_by_id[ j.tool_id ], self )
                 new_jobs.append( job )
         else:
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/model/custom_types.py
--- a/lib/galaxy/model/custom_types.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/model/custom_types.py Thu Oct 30 16:17:46 2008 -0400
@@ -18,16 +18,15 @@
         self.mutable = mutable
         super( JSONType, self).__init__()
-    def convert_result_value( self, value, dialect ):
+    def process_bind_param( self, value, dialect ):
         if value is None:
             return None
-        buf = self.impl.convert_result_value( value, dialect )
-        return self.jsonifyer.loads( str(buf) )
-
-    def convert_bind_param( self, value, dialect ):
+        return self.jsonifyer.dumps( value )
+
+    def process_result_value( self, value, dialect ):
         if value is None:
             return None
-        return self.impl.convert_bind_param( self.jsonifyer.dumps(value), dialect )
+        return self.jsonifyer.loads( str( value ) )
     def copy_value( self, value ):
         if self.mutable:
@@ -60,10 +59,10 @@
         self.mutable = mutable
         super( MetadataType, self).__init__()
-    def convert_result_value( self, value, dialect ):
+    def process_result_value( self, value, dialect ):
         if value is None:
             return None
-        buf = self.impl.convert_result_value( value, dialect )
+        buf = value
         ret = None
         try:
             ret = self.pickler.loads( str(buf) )
@@ -77,7 +76,7 @@
 class TrimmedString( TypeDecorator ):
     impl = String
-    def convert_bind_param( self, value, dialect ):
+    def process_bind_param( self, value, dialect ):
         """Automatically truncate string values"""
         if self.impl.length and value is not None:
             value = value[0:self.impl.length]
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/model/mapping.py Thu Oct 30 16:17:46 2008 -0400
@@ -5,23 +5,20 @@
 import logging
 log = logging.getLogger( __name__ )
-import pkg_resources
-pkg_resources.require( "sqlalchemy>=0.3" )
-
 import sys
 import datetime
-from sqlalchemy.ext.sessioncontext import SessionContext
-from sqlalchemy.ext.assignmapper import assign_mapper
-from sqlalchemy.ext.orderinglist import ordering_list
-
-from sqlalchemy import *
 from galaxy.model import *
+from galaxy.model.orm import *
+from galaxy.model.orm.ext.assignmapper import *
 from galaxy.model.custom_types import *
 from galaxy.util.bunch import Bunch
-metadata = DynamicMetaData( threadlocal=False )
-context = SessionContext( create_session )
+metadata = MetaData()
+context = Session = scoped_session( sessionmaker( autoflush=False, transactional=False ) )
+
+# For backward compatibility with "context.current"
+context.current = Session
 dialect_to_egg = {
     "sqlite" : "pysqlite>=2",
@@ -120,15 +117,15 @@
     Column( "update_time", DateTime, default=now, onupdate=now ),
     Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
     Column( "tool_id", String( 255 ) ),
-    Column( "tool_version", String, default="1.0.0" ),
+    Column( "tool_version", TEXT, default="1.0.0" ),
     Column( "state", String( 64 ) ),
     Column( "info", TrimmedString( 255 ) ),
-    Column( "command_line", String() ),
+    Column( "command_line", TEXT ),
     Column( "param_filename", String( 1024 ) ),
     Column( "runner_name", String( 255 ) ),
-    Column( "stdout", String() ),
-    Column( "stderr", String() ),
-    Column( "traceback", String() ),
+    Column( "stdout", TEXT ),
+    Column( "stderr", TEXT ),
+    Column( "traceback", TEXT ),
     Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True, nullable=True ),
     Column( "job_runner_name", String( 255 ) ),
     Column( "job_runner_external_id", String( 255 ) ) )
@@ -188,7 +185,7 @@
     Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
     Column( "latest_workflow_id", Integer, ForeignKey( "workflow.id", use_alter=True, name='stored_workflow_latest_workflow_id_fk' ), index=True ),
-    Column( "name", String ),
+    Column( "name", TEXT ),
     Column( "deleted", Boolean, default=False ),
     )
@@ -197,7 +194,7 @@
     Column( "create_time", DateTime, default=now ),
     Column( "update_time", DateTime, default=now, onupdate=now ),
     Column( "stored_workflow_id", Integer, ForeignKey( "stored_workflow.id" ), index=True, nullable=False ),
-    Column( "name", String ),
+    Column( "name", TEXT ),
     Column( "has_cycles", Boolean ),
     Column( "has_errors", Boolean )
     )
@@ -208,8 +205,8 @@
     Column( "update_time", DateTime, default=now, onupdate=now ),
     Column( "workflow_id", Integer, ForeignKey( "workflow.id" ), index=True, nullable=False ),
     Column( "type", String(64) ),
-    Column( "tool_id", String ),
-    Column( "tool_version", String ), # Reserved for future
+    Column( "tool_id", TEXT ),
+    Column( "tool_version", TEXT ), # Reserved for future
     Column( "tool_inputs", JSONType ),
     Column( "tool_errors", JSONType ),
     Column( "position", JSONType ),
@@ -222,8 +219,8 @@
     Column( "id", Integer, primary_key=True ),
     Column( "output_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
     Column( "input_step_id", Integer, ForeignKey( "workflow_step.id" ), index=True ),
-    Column( "output_name", String ),
-    Column( "input_name", String)
+    Column( "output_name", TEXT ),
+    Column( "input_name", TEXT)
     )

 StoredWorkflowUserShareAssociation.table = Table( "stored_workflow_user_share_connection", metadata,
@@ -240,7 +237,7 @@
 MetadataFile.table = Table( "metadata_file", metadata,
     Column( "id", Integer, primary_key=True ),
-    Column( "name", String ),
+    Column( "name", TEXT ),
     Column( "hda_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True, nullable=True ),
     Column( "create_time", DateTime, default=now ),
     Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
@@ -257,9 +254,7 @@
     dataset=relation(
         Dataset,
         primaryjoin=( Dataset.table.c.id == HistoryDatasetAssociation.table.c.dataset_id ), lazy=False ),
-    history=relation(
-        History,
-        primaryjoin=( History.table.c.id == HistoryDatasetAssociation.table.c.history_id ) ),
+    # .history defined in History mapper
     copied_to_history_dataset_associations=relation(
         HistoryDatasetAssociation,
         primaryjoin=( HistoryDatasetAssociation.table.c.copied_from_history_dataset_association_id == HistoryDatasetAssociation.table.c.id ),
@@ -380,11 +375,12 @@
         Override __next_hid to generate from the database in a concurrency safe way.
         """
-        conn = self.table.engine.contextual_connect()
+        conn = object_session( self ).connection()
+        table = self.table
         trans = conn.begin()
         try:
-            next_hid = select( [self.c.hid_counter], self.c.id == self.id, for_update=True ).scalar()
-            self.table.update( self.c.id == self.id ).execute( hid_counter = ( next_hid + 1 ) )
+            next_hid = select( [table.c.hid_counter], table.c.id == self.id, for_update=True ).scalar()
+            table.update( table.c.id == self.id ).execute( hid_counter = ( next_hid + 1 ) )
             trans.commit()
             return next_hid
         except:
@@ -413,17 +409,21 @@
     # Create the database engine
     engine = create_engine( url, **engine_options )
     # Connect the metadata to the database.
-    metadata.connect( engine )
-    ## metadata.engine.echo = True
+    metadata.bind = engine
+    # Clear any existing contextual sessions and reconfigure
+    Session.remove()
+    Session.configure( bind=engine )
     # Create tables if needed
     if create_tables:
         metadata.create_all()
         # metadata.engine.commit()
     # Pack everything into a bunch
     result = Bunch( **globals() )
-    result.engine = metadata.engine
-    result.flush = lambda *args, **kwargs: context.current.flush( *args, **kwargs )
-    result.context = context
+    result.engine = engine
+    result.flush = lambda *args, **kwargs: Session.flush( *args, **kwargs )
+    result.session = Session
+    # For backward compatibility with "model.context.current"
+    result.context = Session
     result.create_tables = create_tables
     return result
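For readers new to the 0.4 API, the mapping.py changes above boil down to the setup pattern sketched below. This is a minimal, self-contained illustration rather than Galaxy code: the ExampleUser class and example_user table are made-up names, and it assumes SQLAlchemy 0.4.x (where sessionmaker() still accepts transactional=False) and Python 2, matching the tree above.

    # Sketch of the SQLAlchemy 0.4 setup idiom adopted in mapping.py.
    # Names (ExampleUser, example_user) are illustrative, not Galaxy's.
    from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine
    from sqlalchemy.orm import scoped_session, sessionmaker

    metadata = MetaData()
    # A contextual (thread-local) session replaces 0.3's SessionContext.
    Session = scoped_session( sessionmaker( autoflush=False, transactional=False ) )

    example_user_table = Table( "example_user", metadata,
        Column( "id", Integer, primary_key=True ),
        Column( "email", String( 255 ) ) )

    class ExampleUser( object ):
        pass

    # Session.mapper attaches a contextual mapper, so new instances are added
    # to the current session automatically; this is the behavior the new
    # galaxy.model.orm.ext.assignmapper module builds on.
    Session.mapper( ExampleUser, example_user_table )

    if __name__ == "__main__":
        engine = create_engine( "sqlite:///:memory:" )
        metadata.bind = engine            # replaces 0.3's metadata.connect( engine )
        Session.configure( bind=engine )
        metadata.create_all()
        u = ExampleUser()
        u.email = "someone@example.org"
        Session.flush()                   # explicit flush, since autoflush is off
        print Session.query( ExampleUser ).filter_by( email="someone@example.org" ).first()

Because Session is a ScopedSession, assign_mapper() (introduced below) can then expose filter(), filter_by(), flush() and friends directly on the mapped class, which is why the 0.3-style call sites in the controllers survive with only small edits such as User.filter_by( email=email ).first().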
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/model/mapping_tests.py
--- a/lib/galaxy/model/mapping_tests.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/model/mapping_tests.py Thu Oct 30 16:17:46 2008 -0400
@@ -22,13 +22,13 @@
         model.context.current.flush()
         model.context.current.clear()
         # Check
-        users = model.User.select()
+        users = model.User.query().all()
         assert len( users ) == 1
         assert users[0].email == "james@foo.bar.baz"
         assert users[0].password == "password"
         assert len( users[0].histories ) == 1
         assert users[0].histories[0].name == "History 1"
-        hists = model.History.select()
+        hists = model.History.query().all()
         assert hists[0].name == "History 1"
         assert hists[1].name == ( "H" * 255 )
         assert hists[0].user == users[0]
@@ -40,7 +40,7 @@
         hists[1].name = "History 2b"
         model.context.current.flush()
         model.context.current.clear()
-        hists = model.History.select()
+        hists = model.History.query().all()
         assert hists[0].name == "History 1"
         assert hists[1].name == "History 2b"
         # gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like.
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/model/orm/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/orm/__init__.py Thu Oct 30 16:17:46 2008 -0400
@@ -0,0 +1,7 @@
+import pkg_resources
+pkg_resources.require( "SQLAlchemy >= 0.4" )
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+
+from sqlalchemy.ext.orderinglist import ordering_list
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/model/orm/ext/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/orm/ext/__init__.py Thu Oct 30 16:17:46 2008 -0400
@@ -0,0 +1,3 @@
+"""
+Galaxy specific SQLAlchemy extensions.
+"""
\ No newline at end of file
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/model/orm/ext/assignmapper.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/orm/ext/assignmapper.py Thu Oct 30 16:17:46 2008 -0400
@@ -0,0 +1,62 @@
+"""
+This is similar to the assignmapper extensions in SQLAclhemy 0.3 and 0.4 but
+with some compatibility fixes. It assumes that the session is a ScopedSession,
+and thus has the "mapper" method to attach contextual mappers to a class. It
+adds additional query and session methods to the class to support the
+SQLAlchemy 0.3 style of access. The following methods which would normally be
+accessed through "Object.query().method()" are available directly through the
+object:
+
+    'get', 'filter', 'filter_by', 'select', 'select_by',
+    'selectfirst', 'selectfirst_by', 'selectone', 'selectone_by',
+    'get_by', 'join_to', 'join_via', 'count', 'count_by',
+    'options', 'instances'
+
+Additionally, the following Session methods, which normally accept an instance
+or list of instances, are available directly through the objects, e.g.
+"Session.flush( [instance] )" can be performed as "instance.flush()":
+
+    'refresh', 'expire', 'delete', 'expunge', 'update'
+"""
+
+__all__ = [ 'assign_mapper' ]
+
+from sqlalchemy import util, exceptions
+import types
+from sqlalchemy.orm import mapper, Query
+
+def _monkeypatch_query_method( name, session, class_ ):
+    def do(self, *args, **kwargs):
+        ## util.warn_deprecated('Query methods on the class are deprecated; use %s.query.%s instead' % (class_.__name__, name))
+        return getattr( class_.query, name)(*args, **kwargs)
+    try:
+        do.__name__ = name
+    except:
+        pass
+    if not hasattr(class_, name):
+        setattr(class_, name, classmethod(do))
+
+def _monkeypatch_session_method(name, session, class_, make_list=False):
+    def do(self, *args, **kwargs):
+        if make_list:
+            self = [ self ]
+        return getattr(session, name)( self, *args, **kwargs )
+    try:
+        do.__name__ = name
+    except:
+        pass
+    if not hasattr(class_, name):
+        setattr(class_, name, do)
+
+def assign_mapper( session, class_, *args, **kwargs ):
+    m = class_.mapper = session.mapper( class_, *args, **kwargs )
+    for name in ('get', 'filter', 'filter_by', 'select', 'select_by',
+                 'selectfirst', 'selectfirst_by', 'selectone', 'selectone_by',
+                 'get_by', 'join_to', 'join_via', 'count', 'count_by',
+                 'options', 'instances'):
+        _monkeypatch_query_method(name, session, class_)
+    for name in ('refresh', 'expire', 'delete', 'expunge', 'update'):
+        _monkeypatch_session_method(name, session, class_)
+    for name in ( 'flush', ):
+        _monkeypatch_session_method( name, session, class_, make_list=True )
+    return m
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Oct 30 16:17:46 2008 -0400
@@ -429,7 +429,7 @@
         if not email:
             return trans.fill_template("/history/share.mako", histories=histories, email=email, send_to_err=send_to_err)
         user = trans.get_user()
-        send_to_user = trans.app.model.User.get_by( email = email )
+        send_to_user = trans.app.model.User.filter_by( email=email ).first()
         if not send_to_user:
             send_to_err = "No such user"
         elif user.email == email:
@@ -488,7 +488,7 @@
             new_history.user_id = user.id
         galaxy_session = trans.get_galaxy_session()
         try:
-            association = trans.app.model.GalaxySessionToHistoryAssociation.selectone_by( session_id=galaxy_session.id, history_id=new_history.id )
+            association = trans.app.model.GalaxySessionToHistoryAssociation.filter_by( session_id=galaxy_session.id, history_id=new_history.id ).first()
         except:
             association = None
         new_history.add_galaxy_session( galaxy_session, association=association )
@@ -505,7 +505,7 @@
             new_history.user_id = None
         galaxy_session = trans.get_galaxy_session()
         try:
-            association = trans.app.model.GalaxySessionToHistoryAssociation.selectone_by( session_id=galaxy_session.id, history_id=new_history.id )
+            association = trans.app.model.GalaxySessionToHistoryAssociation.filter_by( session_id=galaxy_session.id, history_id=new_history.id ).first()
         except:
             association = None
         new_history.add_galaxy_session( galaxy_session, association=association )
@@ -530,7 +530,7 @@
         if new_history:
             galaxy_session = trans.get_galaxy_session()
             try:
-                association = trans.app.model.GalaxySessionToHistoryAssociation.selectone_by( session_id=galaxy_session.id, history_id=new_history.id )
+                association = trans.app.model.GalaxySessionToHistoryAssociation.filter_by( session_id=galaxy_session.id, history_id=new_history.id ).first()
            except:
                association = None
            new_history.add_galaxy_session( galaxy_session, association=association )
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/web/controllers/user.py Thu Oct 30 16:17:46 2008 -0400
@@ -53,7 +53,7 @@
             email_err = "Please enter a real email address"
         elif len( email) > 255:
             email_err = "Email address exceeds maximum allowable length"
-        elif len( trans.app.model.User.select_by( email=email ) ) > 0:
+        elif trans.app.model.User.filter_by( email=email ).first():
             email_err = "User with that email already exists"
         elif email != conf_email:
             conf_email_err = "Email addresses do not match."
@@ -73,7 +73,7 @@
         email_error = password_error = None
         # Attempt login
         if email or password:
-            user = trans.app.model.User.get_by( email = email )
+            user = trans.app.model.User.filter_by( email=email ).first()
            if not user:
                 email_error = "No such user"
             elif user.external:
@@ -108,7 +108,7 @@
                 email_error = "Please enter a real email address"
             elif len( email) > 255:
                 email_error = "Email address exceeds maximum allowable length"
-            elif len( trans.app.model.User.select_by( email=email ) ) > 0:
+            elif trans.app.model.User.filter_by( email=email ).first():
                 email_error = "User with that email already exists"
             elif len( password ) < 6:
                 password_error = "Please use a password of at least 6 characters"
@@ -143,7 +143,7 @@
     @web.expose
     def reset_password(self, trans, email=None, **kwd):
         error = ''
-        reset_user = trans.app.model.User.get_by( email = email )
+        reset_user = trans.app.model.User.filter_by( email=email ).first()
         user = trans.get_user()
         if reset_user:
             if user and user.id != reset_user.id:
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/web/controllers/workflow.py Thu Oct 30 16:17:46 2008 -0400
@@ -44,7 +44,7 @@
         # Load workflow from database
         stored = get_stored_workflow( trans, id )
         if email:
-            other = model.User.get_by( email=email )
+            other = model.User.filter_by( email=email ).first()
             if not other:
                 mtype = "error"
                 msg = ( "User '%s' does not exist" % email )
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Oct 30 16:17:46 2008 -0400
@@ -24,7 +24,7 @@
 import mako.template
 import mako.lookup
-pkg_resources.require( "sqlalchemy>=0.3" )
+pkg_resources.require( "SQLAlchemy >= 0.4" )
 from sqlalchemy import desc
 import logging
@@ -172,7 +172,7 @@
         if secure_id:
             session_key = self.security.decode_session_key( secure_id )
             try:
-                galaxy_session = self.app.model.GalaxySession.selectone_by( session_key=session_key )
+                galaxy_session = self.app.model.GalaxySession.filter_by( session_key=session_key ).first()
                 if galaxy_session and galaxy_session.is_valid and galaxy_session.current_history_id:
                     history = self.app.model.History.get( galaxy_session.current_history_id )
                     if history and not history.deleted:
@@ -216,7 +216,7 @@
             galaxy_session.user_id = self.user.id
         try:
             # See if we have already associated the history with the session
-            association = self.app.model.GalaxySessionToHistoryAssociation.select_by( session_id=galaxy_session.id, history_id=history.id )[0]
+            association = self.app.model.GalaxySessionToHistoryAssociation.filter_by( session_id=galaxy_session.id, history_id=history.id ).first()
         except:
             association = None
         history.add_galaxy_session( galaxy_session, association=association )
@@ -265,7 +265,7 @@
         """Return the user in $HTTP_REMOTE_USER and create if necessary"""
         # remote_user middleware ensures HTTP_REMOTE_USER exists
         try:
-            user = self.app.model.User.selectone_by( email=self.environ[ 'HTTP_REMOTE_USER' ] )
+            user = self.app.model.User.filter_by( email=self.environ[ 'HTTP_REMOTE_USER' ] ).first()
         except:
             user = self.app.model.User( email=self.environ[ 'HTTP_REMOTE_USER' ] )
             user.set_password_cleartext( 'external' )
@@ -281,7 +281,7 @@
         if secure_id:
             session_key = self.security.decode_session_key( secure_id )
             try:
-                galaxy_session = self.app.model.GalaxySession.selectone_by( session_key=session_key )
+                galaxy_session = self.app.model.GalaxySession.filter_by( session_key=session_key ).first()
                 if galaxy_session and galaxy_session.is_valid and galaxy_session.user_id:
                     user = self.app.model.User.get( galaxy_session.user_id )
                     if user:
@@ -321,7 +321,7 @@
             session_key = self.security.decode_session_key( secure_id )
             try:
                 # Retrive the galaxy_session id via the unique session_key
-                galaxy_session = self.app.model.GalaxySession.selectone_by( session_key=session_key )
+                galaxy_session = self.app.model.GalaxySession.filter_by( session_key=session_key ).first()
                 if galaxy_session and galaxy_session.is_valid:
                     self.__galaxy_session = galaxy_session
             except:
@@ -382,7 +382,7 @@
         if self.history is not None:
             # See if we have already associated the session with the history
             try:
-                association = self.app.model.GalaxySessionToHistoryAssociation.select_by( session_id=galaxy_session.id, history_id=self.history.id )[0]
+                association = self.app.model.GalaxySessionToHistoryAssociation.filter_by( session_id=galaxy_session.id, history_id=self.history.id ).first()
             except:
                 association = None
             galaxy_session.add_history( self.history, association=association )
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/webapps/reports/controllers/jobs.py
--- a/lib/galaxy/webapps/reports/controllers/jobs.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/webapps/reports/controllers/jobs.py Thu Oct 30 16:17:46 2008 -0400
@@ -5,7 +5,7 @@
 from galaxy.webapps.reports.base.controller import *
 import galaxy.model
 import pkg_resources
-pkg_resources.require( "sqlalchemy>=0.3" )
+pkg_resources.require( "SQLAlchemy >= 0.4" )
 import sqlalchemy as sa
 import logging
 log = logging.getLogger( __name__ )
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/webapps/reports/controllers/system.py
--- a/lib/galaxy/webapps/reports/controllers/system.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/webapps/reports/controllers/system.py Thu Oct 30 16:17:46 2008 -0400
@@ -2,8 +2,9 @@
 from datetime import datetime, timedelta
 from galaxy.webapps.reports.base.controller import *
 import pkg_resources
-pkg_resources.require( "sqlalchemy>=0.3" )
-from sqlalchemy import eagerload, desc
+pkg_resources.require( "SQLAlchemy >= 0.4" )
+from sqlalchemy.orm import eagerload
+from sqlalchemy import desc
 import logging
 log = logging.getLogger( __name__ )
diff -r ea92290c4e10 -r 1d113c5386da lib/galaxy/webapps/reports/controllers/users.py
--- a/lib/galaxy/webapps/reports/controllers/users.py Thu Oct 30 16:11:44 2008 -0400
+++ b/lib/galaxy/webapps/reports/controllers/users.py Thu Oct 30 16:17:46 2008 -0400
@@ -3,7 +3,7 @@
 from galaxy.webapps.reports.base.controller import *
 import galaxy.model
 import pkg_resources
-pkg_resources.require( "sqlalchemy>=0.3" )
+pkg_resources.require( "SQLAlchemy >= 0.4" )
 import sqlalchemy as sa
 import logging
 log = logging.getLogger( __name__ )
diff -r ea92290c4e10 -r 1d113c5386da scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Thu Oct 30 16:11:44 2008 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Thu Oct 30 16:17:46 2008 -0400
@@ -13,8 +13,8 @@
 import galaxy.model.mapping
 import pkg_resources
-pkg_resources.require( "sqlalchemy>=0.3" )
-from sqlalchemy import eagerload
+pkg_resources.require( "SQLAlchemy >= 0.4" )
+from sqlalchemy.orm import eagerload

 assert sys.version_info[:2] >= ( 2, 4 )

@@ -191,8 +191,6 @@
                 if errmsg:
                     errors = True
                     print errmsg
-                else:
-                    print "%s" % dataset.file_name
             else:
                 dataset.purged = True
                 dataset.flush()
@@ -258,7 +256,6 @@
                     print errmsg
                 else:
                     dataset_count += 1
-                    print "%s" % dataset.file_name
             else:
                 dataset.purged = True
                 dataset.file_size = 0
@@ -302,6 +299,7 @@
         else:
             # Remove dataset file from disk
             os.unlink( dataset.file_name )
+            print "%s" % dataset.file_name
             # Mark all associated MetadataFiles as deleted and purged and remove them from disk
             print "The following metadata files associated with dataset '%s' have been purged" % dataset.file_name
             for hda in dataset.history_associations:
diff -r ea92290c4e10 -r 1d113c5386da tools/stats/grouping.py
--- a/tools/stats/grouping.py Thu Oct 30 16:11:44 2008 -0400
+++ b/tools/stats/grouping.py Thu Oct 30 16:17:46 2008 -0400
@@ -90,7 +90,7 @@
     for ii, line in enumerate( file( tmpfile.name )):
         if line and not line.startswith( '#' ):
-            line = line.strip()
+            line = line.rstrip( '\r\n' )
             try:
                 fields = line.split("\t")
                 item = fields[group_col]
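The custom_types.py hunks near the top of the diff follow the other recurring pattern in this changeset: SQLAlchemy 0.4's TypeDecorator asks for process_bind_param()/process_result_value() on the plain Python value, where 0.3 used convert_bind_param()/convert_result_value() routed through self.impl. Below is a minimal, self-contained sketch of that idiom; the JSONColumn name is illustrative rather than Galaxy's JSONType, and the import fallback assumes either the stdlib json module (Python 2.6+) or the simplejson egg is available.

    # Sketch of a SQLAlchemy 0.4-style TypeDecorator, mirroring the JSONType
    # rewrite in lib/galaxy/model/custom_types.py. Names are illustrative.
    from sqlalchemy.types import TypeDecorator, TEXT
    try:
        import json                   # Python 2.6+
    except ImportError:
        import simplejson as json     # the egg Galaxy ships

    class JSONColumn( TypeDecorator ):
        impl = TEXT

        # 0.3's convert_bind_param( value, dialect ) went through self.impl;
        # 0.4 hands the plain Python value to process_bind_param() directly.
        def process_bind_param( self, value, dialect ):
            if value is None:
                return None
            return json.dumps( value )

        # Likewise, 0.3's convert_result_value() becomes process_result_value(),
        # called with the raw string coming back from the database.
        def process_result_value( self, value, dialect ):
            if value is None:
                return None
            return json.loads( str( value ) )

The JSONType, MetadataType and TrimmedString changes in the diff are instances of this same renaming.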