details: http://www.bx.psu.edu/hg/galaxy/rev/2c4ed83f76ef
changeset: 2909:2c4ed83f76ef
user: Greg Von Kuster <greg@bx.psu.edu>
date: Thu Oct 22 23:02:28 2009 -0400
description:
Make sqlalchemy queries use sa_session.query( object ) rather than object.query(). This eliminates the need for the _monkeypatch_query_method() in assignmapper.py. It also eliminates the need for the _monkeypatch_session_method() for everything except object.flush() - I just ran out of time and will handle flush() asap. I also eliminated the .all() call on several queries where it was not necessary, which should improve performance. I fixed several bugs I found as well. (Short sketches of the new sa_session idioms appear after the diff below.)

62 file(s) affected in this change:

lib/galaxy/datatypes/metadata.py
lib/galaxy/jobs/__init__.py
lib/galaxy/model/__init__.py
lib/galaxy/model/mapping_tests.py
lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
lib/galaxy/model/orm/ext/assignmapper.py
lib/galaxy/security/__init__.py
lib/galaxy/tags/tag_handler.py
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/__init__.py
lib/galaxy/tools/actions/metadata.py
lib/galaxy/tools/actions/upload_common.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/web/controllers/admin.py
lib/galaxy/web/controllers/async.py
lib/galaxy/web/controllers/dataset.py
lib/galaxy/web/controllers/forms.py
lib/galaxy/web/controllers/genetrack.py
lib/galaxy/web/controllers/history.py
lib/galaxy/web/controllers/library.py
lib/galaxy/web/controllers/library_admin.py
lib/galaxy/web/controllers/library_common.py
lib/galaxy/web/controllers/mobile.py
lib/galaxy/web/controllers/page.py
lib/galaxy/web/controllers/requests.py
lib/galaxy/web/controllers/requests_admin.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/controllers/tag.py
lib/galaxy/web/controllers/tool_runner.py
lib/galaxy/web/controllers/tracks.py
lib/galaxy/web/controllers/user.py
lib/galaxy/web/controllers/workflow.py
lib/galaxy/web/framework/__init__.py
lib/galaxy/webapps/reports/controllers/users.py
scripts/cleanup_datasets/cleanup_datasets.py
templates/admin/dataset_security/deleted_groups.mako
templates/admin/dataset_security/deleted_roles.mako
templates/admin/dataset_security/groups.mako
templates/admin/dataset_security/roles.mako
templates/admin/library/folder_permissions.mako
templates/admin/library/ldda_info.mako
templates/admin/library/ldda_permissions.mako
templates/admin/library/library_dataset_permissions.mako
templates/admin/library/library_info.mako
templates/admin/library/library_permissions.mako
templates/admin/requests/grid.mako
templates/admin/requests/show_request.mako
templates/library/ldda_permissions.mako
templates/library/library_dataset_permissions.mako
templates/library/library_permissions.mako
templates/mobile/manage_library.mako
templates/requests/show_request.mako
templates/user/address.mako
test/base/twilltestcase.py
test/functional/test_DNAse_flanked_genes.py
test/functional/test_forms_and_requests.py
test/functional/test_get_data.py
test/functional/test_history_functions.py
test/functional/test_metadata_editing.py
test/functional/test_security_and_libraries.py
test/functional/test_sniffing_and_metadata_settings.py
test/functional/test_toolbox.py

diffs (truncated from 6168 to 3000 lines):

diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py Wed Oct 21 23:15:22 2009 -0400
+++ b/lib/galaxy/datatypes/metadata.py Thu Oct 22 23:02:28 2009 -0400
@@ -489,15 +489,22 @@
 #We will use JSON as the medium of exchange of information, except for the DatasetInstance object which will use
pickle (in the future this could be JSONified as well) def __init__( self, job ): self.job_id = job.id - def get_output_filenames_by_dataset( self, dataset ): + def get_output_filenames_by_dataset( self, dataset, sa_session ): if isinstance( dataset, galaxy.model.HistoryDatasetAssociation ): - return galaxy.model.JobExternalOutputMetadata.filter_by( job_id = self.job_id, history_dataset_association_id = dataset.id ).first() #there should only be one or None + return sa_session.query( galaxy.model.JobExternalOutputMetadata ) \ + .filter_by( job_id = self.job_id, history_dataset_association_id = dataset.id ) \ + .first() #there should only be one or None elif isinstance( dataset, galaxy.model.LibraryDatasetDatasetAssociation ): - return galaxy.model.JobExternalOutputMetadata.filter_by( job_id = self.job_id, library_dataset_dataset_association_id = dataset.id ).first() #there should only be one or None + return sa_session.query( galaxy.model.JobExternalOutputMetadata ) \ + .filter_by( job_id = self.job_id, library_dataset_dataset_association_id = dataset.id ) \ + .first() #there should only be one or None return None def get_dataset_metadata_key( self, dataset ): - return "%s_%d" % ( dataset.__class__.__name__, dataset.id ) #set meta can be called on library items and history items, need to make different keys for them, since ids can overlap - def setup_external_metadata( self, datasets, exec_dir = None, tmp_dir = None, dataset_files_path = None, output_fnames = None, config_root = None, datatypes_config = None, kwds = {} ): + # Set meta can be called on library items and history items, + # need to make different keys for them, since ids can overlap + return "%s_%d" % ( dataset.__class__.__name__, dataset.id ) + def setup_external_metadata( self, datasets, sa_session, exec_dir=None, tmp_dir=None, dataset_files_path=None, + output_fnames=None, config_root=None, datatypes_config=None, kwds={} ): #fill in metadata_files_dict and return the command with args required to set metadata def __metadata_files_list_to_cmd_line( metadata_files ): def __get_filename_override(): @@ -527,7 +534,7 @@ #when setting metadata externally, via 'auto-detect' button in edit attributes, etc., #we don't want to overwrite (losing the ability to cleanup) our existing dataset keys and files, #so we will only populate the dictionary once - metadata_files = self.get_output_filenames_by_dataset( dataset ) + metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session ) if not metadata_files: metadata_files = galaxy.model.JobExternalOutputMetadata( dataset = dataset) metadata_files.job_id = self.job_id @@ -553,8 +560,8 @@ #return command required to build return "%s %s %s %s %s %s" % ( os.path.join( exec_dir, 'set_metadata.sh' ), dataset_files_path, tmp_dir, config_root, datatypes_config, " ".join( map( __metadata_files_list_to_cmd_line, metadata_files_list ) ) ) - def external_metadata_set_successfully( self, dataset ): - metadata_files = self.get_output_filenames_by_dataset( dataset ) + def external_metadata_set_successfully( self, dataset, sa_session ): + metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session ) if not metadata_files: return False # this file doesn't exist rval, rstring = simplejson.load( open( metadata_files.filename_results_code ) ) @@ -578,4 +585,3 @@ for metadata_files in galaxy.model.Job.get( self.job_id ).external_output_metadata: metadata_files.job_runner_external_pid = pid metadata_files.flush() - diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/jobs/__init__.py 
--- a/lib/galaxy/jobs/__init__.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/jobs/__init__.py Thu Oct 22 23:02:28 2009 -0400 @@ -1,7 +1,6 @@ import logging, threading, sys, os, time, subprocess, string, tempfile, re, traceback, shutil from galaxy import util, model -from galaxy.model import mapping from galaxy.model.orm import lazyload from galaxy.datatypes.tabular import * from galaxy.datatypes.interval import * @@ -73,6 +72,7 @@ def __init__( self, app, dispatcher ): """Start the job manager""" self.app = app + self.sa_session = app.model.context # Should we read jobs form the database, or use an in memory queue self.track_jobs_in_database = app.config.get_bool( 'track_jobs_in_database', False ) # Check if any special scheduling policy should be used. If not, default is FIFO. @@ -126,14 +126,14 @@ job manager starts. """ model = self.app.model - for job in model.Job.filter( model.Job.c.state==model.Job.states.NEW ).all(): + for job in self.sa_session.query( model.Job ).filter( model.Job.state == model.Job.states.NEW ): if job.tool_id not in self.app.toolbox.tools_by_id: log.warning( "Tool '%s' removed from tool config, unable to recover job: %s" % ( job.tool_id, job.id ) ) JobWrapper( job, None, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator, or' ) else: log.debug( "no runner: %s is still in new state, adding to the jobs queue" %job.id ) self.queue.put( ( job.id, job.tool_id ) ) - for job in model.Job.filter( (model.Job.c.state == model.Job.states.RUNNING) | (model.Job.c.state == model.Job.states.QUEUED) ).all(): + for job in self.sa_session.query( model.Job ).filter( ( model.Job.state == model.Job.states.RUNNING ) | ( model.Job.state == model.Job.states.QUEUED ) ): if job.tool_id not in self.app.toolbox.tools_by_id: log.warning( "Tool '%s' removed from tool config, unable to recover job: %s" % ( job.tool_id, job.id ) ) JobWrapper( job, None, self ).fail( 'This tool was disabled before the job completed. Please contact your Galaxy administrator, or' ) @@ -169,12 +169,12 @@ it is marked as having errors and removed from the queue. Otherwise, the job is dispatched. 
""" - # Get an orm session - session = mapping.Session() # Pull all new jobs from the queue at once new_jobs = [] if self.track_jobs_in_database: - for j in session.query( model.Job ).options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ).filter( model.Job.c.state == model.Job.states.NEW ).all(): + for j in session.query( model.Job ) \ + .options( lazyload( "external_output_metadata" ), lazyload( "parameters" ) ) \ + .filter( model.Job.c.state == model.Job.states.NEW ): job = JobWrapper( j, self.app.toolbox.tools_by_id[ j.tool_id ], self ) new_jobs.append( job ) else: @@ -186,7 +186,7 @@ # Unpack the message job_id, tool_id = message # Create a job wrapper from it - job_entity = session.query( model.Job ).get( job_id ) + job_entity = self.sa_session.query( model.Job ).get( job_id ) job = JobWrapper( job_entity, self.app.toolbox.tools_by_id[ tool_id ], self ) # Append to watch queue new_jobs.append( job ) @@ -199,11 +199,11 @@ try: # Clear the session for each job so we get fresh states for # job and all datasets - session.clear() + self.sa_session.clear() # Get the real job entity corresponding to the wrapper (if we # are tracking in the database this is probably cached in # the session from the origianl query above) - job_entity = session.query( model.Job ).get( job.job_id ) + job_entity = self.sa_session.query( model.Job ).get( job.job_id ) # Check the job's dependencies, requeue if they're not done job_state = self.__check_if_ready_to_run( job, job_entity ) if job_state == JOB_WAIT: @@ -254,7 +254,7 @@ job.fail( "failure running job %d: %s" % ( sjob.job_id, str( e ) ) ) log.exception( "failure running job %d" % sjob.job_id ) # Done with the session - mapping.Session.remove() + self.sa_session.remove() def __check_if_ready_to_run( self, job_wrapper, job ): """ @@ -319,6 +319,7 @@ self.tool = tool self.queue = queue self.app = queue.app + self.sa_session = self.app.model.context self.extra_filenames = [] self.command_line = None self.galaxy_lib_dir = None @@ -335,7 +336,7 @@ """ Restore the dictionary of parameters from the database. """ - job = model.Job.get( self.job_id ) + job = self.sa_session.query( model.Job ).get( self.job_id ) param_dict = dict( [ ( p.name, p.value ) for p in job.parameters ] ) param_dict = self.tool.params_from_strings( param_dict, self.app ) return param_dict @@ -345,11 +346,11 @@ Prepare the job to run by creating the working directory and the config files. 
""" - mapping.context.current.clear() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner + self.sa_session.clear() #this prevents the metadata reverting that has been seen in conjunction with the PBS job runner if not os.path.exists( self.working_directory ): os.mkdir( self.working_directory ) # Restore parameters from the database - job = model.Job.get( self.job_id ) + job = self.sa_session.query( model.Job ).get( self.job_id ) incoming = dict( [ ( p.name, p.value ) for p in job.parameters ] ) incoming = self.tool.params_from_strings( incoming, self.app ) # Do any validation that could not be done at job creation @@ -376,7 +377,7 @@ # Run the before queue ("exec_before_job") hook self.tool.call_hook( 'exec_before_job', self.queue.app, inp_data=inp_data, out_data=out_data, tool=self.tool, param_dict=incoming) - mapping.context.current.flush() + self.sa_session.flush() # Build any required config files config_filenames = self.tool.build_config_files( param_dict, self.working_directory ) # FIXME: Build the param file (might return None, DEPRECATED) @@ -403,8 +404,8 @@ Indicate job failure by setting state and message on all output datasets. """ - job = model.Job.get( self.job_id ) - job.refresh() + job = self.sa_session.query( model.Job ).get( self.job_id ) + self.sa_session.refresh( job ) # if the job was deleted, don't fail it if not job.state == model.Job.states.DELETED: # Check if the failure is due to an exception @@ -427,7 +428,7 @@ log.error( "fail(): Missing output file in working directory: %s" % e ) for dataset_assoc in job.output_datasets + job.output_library_datasets: dataset = dataset_assoc.dataset - dataset.refresh() + self.sa_session.refresh( dataset ) dataset.state = dataset.states.ERROR dataset.blurb = 'tool error' dataset.info = message @@ -443,11 +444,11 @@ self.cleanup() def change_state( self, state, info = False ): - job = model.Job.get( self.job_id ) - job.refresh() + job = self.sa_session.query( model.Job ).get( self.job_id ) + self.sa_session.refresh( job ) for dataset_assoc in job.output_datasets + job.output_library_datasets: dataset = dataset_assoc.dataset - dataset.refresh() + self.sa_session.refresh( dataset ) dataset.state = state if info: dataset.info = info @@ -458,13 +459,13 @@ job.flush() def get_state( self ): - job = model.Job.get( self.job_id ) - job.refresh() + job = self.sa_session.query( model.Job ).get( self.job_id ) + self.sa_session.refresh( job ) return job.state def set_runner( self, runner_url, external_id ): - job = model.Job.get( self.job_id ) - job.refresh() + job = self.sa_session.query( model.Job ).get( self.job_id ) + self.sa_session.refresh( job ) job.job_runner_name = runner_url job.job_runner_external_id = external_id job.flush() @@ -476,8 +477,8 @@ the contents of the output files. 
""" # default post job setup - mapping.context.current.clear() - job = model.Job.get( self.job_id ) + self.sa_session.clear() + job = self.sa_session.query( model.Job ).get( self.job_id ) # if the job was deleted, don't finish it if job.state == job.states.DELETED: self.cleanup() @@ -523,7 +524,7 @@ #either use the metadata from originating output dataset, or call set_meta on the copies #it would be quicker to just copy the metadata from the originating output dataset, #but somewhat trickier (need to recurse up the copied_from tree), for now we'll call set_meta() - if not self.external_output_metadata.external_metadata_set_successfully( dataset ): + if not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ): # Only set metadata values if they are missing... dataset.set_meta( overwrite = False ) else: @@ -563,7 +564,7 @@ # ERROR. The user will never see that the datasets are in error if # they were flushed as OK here, since upon doing so, the history # panel stops checking for updates. So allow the - # mapping.context.current.flush() at the bottom of this method set + # self.sa_session.flush() at the bottom of this method set # the state instead. #dataset_assoc.dataset.dataset.flush() @@ -596,7 +597,7 @@ # TODO # validate output datasets job.command_line = self.command_line - mapping.context.current.flush() + self.sa_session.flush() log.debug( 'job %d ended' % self.job_id ) self.cleanup() @@ -619,7 +620,7 @@ return self.session_id def get_input_fnames( self ): - job = model.Job.get( self.job_id ) + job = self.sa_session.query( model.Job ).get( self.job_id ) filenames = [] for da in job.input_datasets: #da is JobToInputDatasetAssociation object if da.dataset: @@ -646,7 +647,7 @@ else: return self.false_path - job = model.Job.get( self.job_id ) + job = self.sa_session.query( model.Job ).get( self.job_id ) if self.app.config.outputs_to_working_directory: self.output_paths = [] for name, data in [ ( da.name, da.dataset.dataset ) for da in job.output_datasets + job.output_library_datasets ]: @@ -709,12 +710,12 @@ return sizes def setup_external_metadata( self, exec_dir = None, tmp_dir = None, dataset_files_path = None, config_root = None, datatypes_config = None, **kwds ): # extension could still be 'auto' if this is the upload tool. 
- job = model.Job.get( self.job_id ) + job = self.sa_session.query( model.Job ).get( self.job_id ) for output_dataset_assoc in job.output_datasets: if output_dataset_assoc.dataset.ext == 'auto': context = self.get_dataset_finish_context( dict(), output_dataset_assoc.dataset.dataset ) output_dataset_assoc.dataset.extension = context.get( 'ext', 'data' ) - mapping.context.current.flush() + self.sa_session.flush() if tmp_dir is None: #this dir should should relative to the exec_dir tmp_dir = self.app.config.new_file_path @@ -724,7 +725,14 @@ config_root = self.app.config.root if datatypes_config is None: datatypes_config = self.app.config.datatypes_config - return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], exec_dir = exec_dir, tmp_dir = tmp_dir, dataset_files_path = dataset_files_path, config_root = config_root, datatypes_config = datatypes_config, **kwds ) + return self.external_output_metadata.setup_external_metadata( [ output_dataset_assoc.dataset for output_dataset_assoc in job.output_datasets ], + self.sa_session, + exec_dir = exec_dir, + tmp_dir = tmp_dir, + dataset_files_path = dataset_files_path, + config_root = config_root, + datatypes_config = datatypes_config, + **kwds ) class DefaultJobDispatcher( object ): def __init__( self, app ): @@ -772,6 +780,7 @@ STOP_SIGNAL = object() def __init__( self, app, dispatcher ): self.app = app + self.sa_session = app.model.context self.dispatcher = dispatcher # Keep track of the pid that started the job manager, only it @@ -821,8 +830,8 @@ pass for job_id, error_msg in jobs: - job = model.Job.get( job_id ) - job.refresh() + job = self.sa_session.query( model.Job ).get( job_id ) + self.sa_session.refresh( job ) # if desired, error the job so we can inform the user. if error_msg is not None: job.state = job.states.ERROR diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/model/__init__.py Thu Oct 22 23:02:28 2009 -0400 @@ -1292,8 +1292,7 @@ if self.phone: html = html + '<br/>' + 'Phone: ' + self.phone return html - - + class Page( object ): def __init__( self ): self.id = None @@ -1330,8 +1329,7 @@ def __str__ ( self ): return "%s(item_id=%s, item_tag=%s, user_tname=%s, value=%s, user_value=%s)" % (self.__class__.__name__, self.item_id, self.tag_id, self.user_tname, self.value. 
self.user_value ) - - + class HistoryTagAssociation ( ItemTagAssociation ): pass diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/model/mapping_tests.py --- a/lib/galaxy/model/mapping_tests.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/model/mapping_tests.py Thu Oct 22 23:02:28 2009 -0400 @@ -22,13 +22,13 @@ model.context.current.flush() model.context.current.clear() # Check - users = model.User.query().all() + users = model.context.current.query( model.User ).all() assert len( users ) == 1 assert users[0].email == "james@foo.bar.baz" assert users[0].password == "password" assert len( users[0].histories ) == 1 assert users[0].histories[0].name == "History 1" - hists = model.History.query().all() + hists = model.context.current.query( model.History ).all() assert hists[0].name == "History 1" assert hists[1].name == ( "H" * 255 ) assert hists[0].user == users[0] @@ -40,7 +40,7 @@ hists[1].name = "History 2b" model.context.current.flush() model.context.current.clear() - hists = model.History.query().all() + hists = model.context.current.query( model.History ).all() assert hists[0].name == "History 1" assert hists[1].name == "History 2b" # gvk TODO need to ad test for GalaxySessions, but not yet sure what they should look like. diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py --- a/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py Thu Oct 22 23:02:28 2009 -0400 @@ -662,7 +662,7 @@ log.debug( "Fixing a discrepancy concerning deleted shared history items." ) affected_items = 0 start_time = time.time() - for dataset in Dataset.filter( and_( Dataset.c.deleted == True, Dataset.c.purged == False ) ).all(): + for dataset in context.query( Dataset ).filter( and_( Dataset.c.deleted == True, Dataset.c.purged == False ) ): for dataset_instance in dataset.history_associations + dataset.library_associations: if not dataset_instance.deleted: dataset.deleted = False @@ -679,7 +679,7 @@ dataset_by_filename = {} changed_associations = 0 start_time = time.time() - for dataset in Dataset.filter( Dataset.external_filename.like( '%dataset_%.dat' ) ).all(): + for dataset in context.query( Dataset ).filter( Dataset.external_filename.like( '%dataset_%.dat' ) ): if dataset.file_name in dataset_by_filename: guessed_dataset = dataset_by_filename[ dataset.file_name ] else: diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/model/orm/ext/assignmapper.py --- a/lib/galaxy/model/orm/ext/assignmapper.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/model/orm/ext/assignmapper.py Thu Oct 22 23:02:28 2009 -0400 @@ -3,20 +3,12 @@ with some compatibility fixes. It assumes that the session is a ScopedSession, and thus has the "mapper" method to attach contextual mappers to a class. It adds additional query and session methods to the class to support the -SQLAlchemy 0.3 style of access. The following methods which would normally be -accessed through "Object.query().method()" are available directly through the -object: +SQLAlchemy 0.3 style of access. - 'get', 'filter', 'filter_by', 'select', 'select_by', - 'selectfirst', 'selectfirst_by', 'selectone', 'selectone_by', - 'get_by', 'join_to', 'join_via', 'count', 'count_by', - 'options', 'instances' - -Additionally, the following Session methods, which normally accept an instance +The following Session methods, which normally accept an instance or list of instances, are available directly through the objects, e.g. 
"Session.flush( [instance] )" can be performed as "instance.flush()": - 'refresh', 'expire', 'delete', 'expunge', 'update' """ __all__ = [ 'assign_mapper' ] @@ -24,17 +16,6 @@ from sqlalchemy import util, exceptions import types from sqlalchemy.orm import mapper, Query - -def _monkeypatch_query_method( name, session, class_ ): - def do(self, *args, **kwargs): - ## util.warn_deprecated('Query methods on the class are deprecated; use %s.query.%s instead' % (class_.__name__, name)) - return getattr( class_.query, name)(*args, **kwargs) - try: - do.__name__ = name - except: - pass - if not hasattr(class_, name): - setattr(class_, name, classmethod(do)) def _monkeypatch_session_method(name, session, class_, make_list=False): def do(self, *args, **kwargs): @@ -50,13 +31,6 @@ def assign_mapper( session, class_, *args, **kwargs ): m = class_.mapper = session.mapper( class_, *args, **kwargs ) - for name in ('get', 'filter', 'filter_by', 'select', 'select_by', - 'selectfirst', 'selectfirst_by', 'selectone', 'selectone_by', - 'get_by', 'join_to', 'join_via', 'count', 'count_by', - 'options', 'instances'): - _monkeypatch_query_method(name, session, class_) - for name in ('refresh', 'expire', 'delete', 'expunge', 'update'): - _monkeypatch_session_method(name, session, class_) for name in ( 'flush', ): _monkeypatch_session_method( name, session, class_, make_list=True ) return m diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/security/__init__.py --- a/lib/galaxy/security/__init__.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/security/__init__.py Thu Oct 22 23:02:28 2009 -0400 @@ -205,8 +205,10 @@ self.associate_components( role=role, user=user ) return role def get_private_user_role( self, user, auto_create=False ): - role = self.model.Role.filter( and_( self.model.Role.table.c.name == user.email, - self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ).first() + role = self.sa_session.query( self.model.Role ) \ + .filter( and_( self.model.Role.table.c.name == user.email, + self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \ + .first() if not role: if auto_create: return self.create_private_user_role( user ) @@ -225,7 +227,7 @@ permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ] # Delete all of the current default permissions for the user for dup in user.default_permissions: - dup.delete() + self.sa_session.delete( dup ) dup.flush() # Add the new default permissions for the user for action, roles in permissions.items(): @@ -255,7 +257,7 @@ permissions = self.user_get_default_permissions( user ) # Delete all of the current default permission for the history for dhp in history.default_permissions: - dhp.delete() + self.sa_session.delete( dhp ) dhp.flush() # Add the new default permissions for the history for action, roles in permissions.items(): @@ -293,7 +295,7 @@ # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset, # or the dataset is inaccessible. 
See admin/library_dataset_dataset_association() for dp in dataset.actions: - dp.delete() + self.sa_session.delete( dp ) dp.flush() # Add the new permissions on the dataset for action, roles in permissions.items(): @@ -314,7 +316,7 @@ # Delete the current specific permission on the dataset if one exists for dp in dataset.actions: if dp.action == action: - dp.delete() + self.sa_session.delete( dp ) dp.flush() # Add the new specific permission on the dataset for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]: @@ -328,7 +330,7 @@ # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant. for dp in dataset.actions: if dp.action == self.permitted_actions.DATASET_ACCESS.action: - dp.delete() + self.sa_session.delete( dp ) dp.flush() def get_dataset_permissions( self, dataset ): """ @@ -379,7 +381,7 @@ def set_all_library_permissions( self, library_item, permissions={} ): # Set new permissions on library_item, eliminating all current permissions for role_assoc in library_item.actions: - role_assoc.delete() + self.sa_session.delete( role_assoc ) role_assoc.flush() # Add the new permissions on library_item for item_class, permission_class in self.library_item_assocs: @@ -472,9 +474,9 @@ for user in users: if delete_existing_assocs: for a in user.non_private_roles + user.groups: - a.delete() + self.sa_session.delete( a ) a.flush() - user.refresh() + self.sa_session.refresh( user ) for role in roles: # Make sure we are not creating an additional association with a PRIVATE role if role not in user.roles: @@ -485,7 +487,7 @@ for group in groups: if delete_existing_assocs: for a in group.roles + group.users: - a.delete() + self.sa_session.delete( a ) a.flush() for role in roles: self.associate_components( group=group, role=role ) @@ -495,7 +497,7 @@ for role in roles: if delete_existing_assocs: for a in role.users + role.groups: - a.delete() + self.sa_session.delete( a ) a.flush() for user in users: self.associate_components( user=user, role=role ) @@ -505,15 +507,15 @@ assert len( kwd ) == 2, 'You must specify exactly 2 Galaxy security components to check for associations.' 
if 'dataset' in kwd: if 'action' in kwd: - return self.model.DatasetPermissions.filter_by( action = kwd['action'].action, dataset_id = kwd['dataset'].id ).first() + return self.sa_session.query( self.model.DatasetPermissions ).filter_by( action = kwd['action'].action, dataset_id = kwd['dataset'].id ).first() elif 'user' in kwd: if 'group' in kwd: - return self.model.UserGroupAssociation.filter_by( group_id = kwd['group'].id, user_id = kwd['user'].id ).first() + return self.sa_session.query( self.model.UserGroupAssociation ).filter_by( group_id = kwd['group'].id, user_id = kwd['user'].id ).first() elif 'role' in kwd: - return self.model.UserRoleAssociation.filter_by( role_id = kwd['role'].id, user_id = kwd['user'].id ).first() + return self.sa_session.query( self.model.UserRoleAssociation ).filter_by( role_id = kwd['role'].id, user_id = kwd['user'].id ).first() elif 'group' in kwd: if 'role' in kwd: - return self.model.GroupRoleAssociation.filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first() + return self.sa_session.query( self.model.GroupRoleAssociation ).filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first() raise 'No valid method of associating provided components: %s' % kwd def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ): """ @@ -526,11 +528,11 @@ """ action = self.permitted_actions.DATASET_ACCESS lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \ - .join( "library_dataset" ) \ - .filter( self.model.LibraryDataset.folder == folder ) \ - .join( "dataset" ) \ - .options( eagerload_all( "dataset.actions" ) ) \ - .all() + .join( "library_dataset" ) \ + .filter( self.model.LibraryDataset.folder == folder ) \ + .join( "dataset" ) \ + .options( eagerload_all( "dataset.actions" ) ) \ + .all() for ldda in lddas: ldda_access_permissions = self.get_item_actions( action, ldda.dataset ) if not ldda_access_permissions: @@ -573,7 +575,8 @@ if action == self.permitted_actions.DATASET_ACCESS and action.action not in [ dp.action for dp in hda.dataset.actions ]: log.debug( 'Allowing access to public dataset with hda: %i.' % hda.id ) return True # dataset has no roles associated with the access permission, thus is already public - hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization.filter_by( history_dataset_association_id = hda.id ).first() + hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \ + .filter_by( history_dataset_association_id = hda.id ).first() if not hdadaa: log.debug( 'Denying access to private dataset with hda: %i. No hdadaa record for this dataset.' % hda.id ) return False # no auth @@ -602,7 +605,8 @@ else: raise 'The dataset access permission is the only valid permission in the host security agent.' 
def set_dataset_permissions( self, hda, user, site ): - hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization.filter_by( history_dataset_association_id = hda.id ).first() + hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \ + .filter_by( history_dataset_association_id = hda.id ).first() if hdadaa: hdadaa.update_time = datetime.utcnow() else: diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/tags/tag_handler.py --- a/lib/galaxy/tags/tag_handler.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/tags/tag_handler.py Thu Oct 22 23:02:28 2009 -0400 @@ -21,7 +21,7 @@ def get_tag_assoc_class(self, entity_class): return self.tag_assoc_classes[entity_class] - def remove_item_tag(self, item, tag_name): + def remove_item_tag( self, trans, item, tag_name ): """Remove a tag from an item.""" # Get item tag association. item_tag_assoc = self._get_item_tag_assoc(item, tag_name) @@ -29,17 +29,17 @@ # Remove association. if item_tag_assoc: # Delete association. - item_tag_assoc.delete() + trans.sa_session.delete( item_tag_assoc ) item.tags.remove(item_tag_assoc) return True return False - def delete_item_tags(self, item): + def delete_item_tags( self, trans, item ): """Delete tags from an item.""" # Delete item-tag associations. for tag in item.tags: - tag.delete() + trans.sa_ession.delete( tag ) # Delete tags from item. del item.tags[:] diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/tools/__init__.py Thu Oct 22 23:02:28 2009 -0400 @@ -167,7 +167,7 @@ which is encoded in the tool panel. """ id = self.app.security.decode_id( workflow_id ) - stored = self.app.model.StoredWorkflow.get( id ) + stored = self.app.model.context.query( self.app.model.StoredWorkflow ).get( id ) return stored.latest_workflow class ToolSection( object ): @@ -863,7 +863,7 @@ if 'async_datasets' in inputs and inputs['async_datasets'] not in [ 'None', '', None ]: for id in inputs['async_datasets'].split(','): try: - data = trans.model.HistoryDatasetAssociation.get( int( id ) ) + data = trans.sa_session.query( trans.model.HistoryDatasetAssociation ).get( int( id ) ) except: log.exception( 'Unable to load precreated dataset (%s) sent in upload form' % id ) continue @@ -1567,7 +1567,7 @@ def exec_after_process( self, app, inp_data, out_data, param_dict, job = None ): for name, dataset in inp_data.iteritems(): external_metadata = galaxy.datatypes.metadata.JobExternalOutputMetadataWrapper( job ) - if external_metadata.external_metadata_set_successfully( dataset ): + if external_metadata.external_metadata_set_successfully( dataset, app.model.context ): dataset.metadata.from_JSON_dict( external_metadata.get_output_filenames_by_dataset( dataset ).filename_out ) # If setting external metadata has failed, how can we inform the user? # For now, we'll leave the default metadata and set the state back to its original. diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/tools/actions/__init__.py --- a/lib/galaxy/tools/actions/__init__.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/tools/actions/__init__.py Thu Oct 22 23:02:28 2009 -0400 @@ -32,7 +32,8 @@ def visitor( prefix, input, value, parent = None ): def process_dataset( data ): if data and not isinstance( data.datatype, input.formats ): - data.refresh() #need to refresh in case this conversion just took place, i.e. input above in tool performed the same conversion + # Need to refresh in case this conversion just took place, i.e. 
input above in tool performed the same conversion + trans.sa_session.refresh( data ) target_ext, converted_dataset = data.find_conversion_destination( input.formats, converter_safe = input.converter_safe( param_values, trans ) ) if target_ext: if converted_dataset: @@ -172,7 +173,7 @@ # this happens i.e. as a result of the async controller if name in incoming: dataid = incoming[name] - data = trans.app.model.HistoryDatasetAssociation.get( dataid ) + data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataid ) assert data != None out_data[name] = data else: diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/tools/actions/metadata.py Thu Oct 22 23:02:28 2009 -0400 @@ -29,9 +29,18 @@ job.flush() #ensure job.id is available #add parameters to job_parameter table - incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state #store original dataset state, so we can restore it. A seperate table might be better (no chance of 'loosing' the original state)? + # Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)? + incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state external_metadata_wrapper = JobExternalOutputMetadataWrapper( job ) - cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, exec_dir = None, tmp_dir = trans.app.config.new_file_path, dataset_files_path = trans.app.model.Dataset.file_path, output_fnames = None, config_root = None, datatypes_config = None, kwds = { 'overwrite' : True } ) + cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, + trans.sa_session, + exec_dir = None, + tmp_dir = trans.app.config.new_file_path, + dataset_files_path = trans.app.model.Dataset.file_path, + output_fnames = None, + config_root = None, + datatypes_config = None, + kwds = { 'overwrite' : True } ) incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line for name, value in tool.params_to_strings( incoming, trans.app ).iteritems(): job.add_parameter( name, value ) diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/tools/actions/upload_common.py --- a/lib/galaxy/tools/actions/upload_common.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/tools/actions/upload_common.py Thu Oct 22 23:02:28 2009 -0400 @@ -41,12 +41,12 @@ # See if we have any template field contents library_bunch.template_field_contents = [] template_id = params.get( 'template_id', None ) - library_bunch.folder = trans.app.model.LibraryFolder.get( folder_id ) + library_bunch.folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder_id ) # We are inheriting the folder's info_association, so we did not # receive any inherited contents, but we may have redirected here # after the user entered template contents ( due to errors ). 
if template_id not in [ None, 'None' ]: - library_bunch.template = trans.app.model.FormDefinition.get( template_id ) + library_bunch.template = trans.sa_session.query( trans.app.model.FormDefinition ).get( template_id ) for field_index in range( len( library_bunch.template.fields ) ): field_name = 'field_%i' % field_index if params.get( field_name, False ): @@ -56,7 +56,8 @@ library_bunch.template = None library_bunch.roles = [] for role_id in util.listify( params.get( 'roles', [] ) ): - library_bunch.roles.append( trans.app.model.Role.get( role_id ) ) + role = trans.sa_session.query( trans.app.model.Role ).get( role_id ) + library_bunch.roles.append( role ) return library_bunch def get_precreated_datasets( trans, params, data_obj, controller='root' ): @@ -132,7 +133,7 @@ if uploaded_dataset.get( 'in_folder', False ): # Create subfolders if desired for name in uploaded_dataset.in_folder.split( os.path.sep ): - folder.refresh() + trans.sa_session.refresh( folder ) matches = filter( lambda x: x.name == name, active_folders( trans, folder ) ) if matches: folder = matches[0] diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/tools/parameters/basic.py --- a/lib/galaxy/tools/parameters/basic.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/tools/parameters/basic.py Thu Oct 22 23:02:28 2009 -0400 @@ -1239,7 +1239,7 @@ elif isinstance( value, trans.app.model.HistoryDatasetAssociation ): return value else: - return trans.app.model.HistoryDatasetAssociation.get( value ) + return trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( value ) def to_string( self, value, app ): if value is None or isinstance( value, str ): @@ -1253,7 +1253,7 @@ # indicates that the dataset is optional, while '' indicates that it is not. if value is None or value == '' or value == 'None': return value - return app.model.HistoryDatasetAssociation.get( int( value ) ) + return app.model.context.query( app.model.HistoryDatasetAssociation ).get( int( value ) ) def to_param_dict_string( self, value, other_values={} ): if value is None: return "None" diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/admin.py --- a/lib/galaxy/web/controllers/admin.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/admin.py Thu Oct 22 23:02:28 2009 -0400 @@ -119,9 +119,9 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - roles = trans.app.model.Role.filter( and_( trans.app.model.Role.table.c.deleted==False, - trans.app.model.Role.table.c.type != trans.app.model.Role.types.PRIVATE ) ) \ - .order_by( trans.app.model.Role.table.c.name ).all() + roles = trans.sa_session.query( trans.app.model.Role ).filter( and_( trans.app.model.Role.table.c.deleted==False, + trans.app.model.Role.table.c.type != trans.app.model.Role.types.PRIVATE ) ) \ + .order_by( trans.app.model.Role.table.c.name ) return trans.fill_template( '/admin/dataset_security/roles.mako', roles=roles, msg=msg, @@ -140,18 +140,18 @@ create_group_for_role = params.get( 'create_group_for_role', 'no' ) if not name or not description: msg = "Enter a valid name and a description" - elif trans.app.model.Role.filter( trans.app.model.Role.table.c.name==name ).first(): + elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name==name ).first(): msg = "A role with that name already exists" else: # Create the role role = trans.app.model.Role( name=name, description=description, type=trans.app.model.Role.types.ADMIN ) role.flush() # Create 
the UserRoleAssociations - for user in [ trans.app.model.User.get( x ) for x in in_users ]: + for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]: ura = trans.app.model.UserRoleAssociation( user, role ) ura.flush() # Create the GroupRoleAssociations - for group in [ trans.app.model.Group.get( x ) for x in in_groups ]: + for group in [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in in_groups ]: gra = trans.app.model.GroupRoleAssociation( group, role ) gra.flush() if create_group_for_role == 'yes': @@ -165,10 +165,14 @@ trans.response.send_redirect( web.url_for( controller='admin', action='roles', msg=util.sanitize_text( msg ), messagetype='done' ) ) trans.response.send_redirect( web.url_for( controller='admin', action='create_role', msg=util.sanitize_text( msg ), messagetype='error' ) ) out_users = [] - for user in trans.app.model.User.filter( trans.app.model.User.table.c.deleted==False ).order_by( trans.app.model.User.table.c.email ).all(): + for user in trans.sa_session.query( trans.app.model.User ) \ + .filter( trans.app.model.User.table.c.deleted==False ) \ + .order_by( trans.app.model.User.table.c.email ): out_users.append( ( user.id, user.email ) ) out_groups = [] - for group in trans.app.model.Group.filter( trans.app.model.Group.table.c.deleted==False ).order_by( trans.app.model.Group.table.c.name ).all(): + for group in trans.sa_session.query( trans.app.model.Group ) \ + .filter( trans.app.model.Group.table.c.deleted==False ) \ + .order_by( trans.app.model.Group.table.c.name ): out_groups.append( ( group.id, group.name ) ) return trans.fill_template( '/admin/dataset_security/role_create.mako', in_users=[], @@ -183,26 +187,26 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - role = trans.app.model.Role.get( int( params.role_id ) ) + role = trans.sa_session.query( trans.app.model.Role ).get( int( params.role_id ) ) if params.get( 'role_members_edit_button', False ): - in_users = [ trans.app.model.User.get( x ) for x in util.listify( params.in_users ) ] + in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ] for ura in role.users: - user = trans.app.model.User.get( ura.user_id ) + user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id ) if user not in in_users: # Delete DefaultUserPermissions for previously associated users that have been removed from the role for dup in user.default_permissions: if role == dup.role: - dup.delete() + trans.sa_session.delete( dup ) dup.flush() # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role for history in user.histories: for dhp in history.default_permissions: if role == dhp.role: - dhp.delete() + trans.sa_session.delete( dhp ) dhp.flush() - in_groups = [ trans.app.model.Group.get( x ) for x in util.listify( params.in_groups ) ] + in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ] trans.app.security_agent.set_entity_role_associations( roles=[ role ], users=in_users, groups=in_groups ) - role.refresh() + trans.sa_session.refresh( role ) msg = "Role '%s' has been updated with %d associated users and %d associated groups" % ( role.name, len( in_users ), len( in_groups ) ) trans.response.send_redirect( web.url_for( action='roles', msg=util.sanitize_text( msg ), messagetype=messagetype ) ) elif params.get( 'rename', False ): @@ 
-213,7 +217,7 @@ if not new_name: msg = 'Enter a valid name' return trans.fill_template( '/admin/dataset_security/role_rename.mako', role=role, msg=msg, messagetype='error' ) - elif trans.app.model.Role.filter( trans.app.model.Role.table.c.name==new_name ).first(): + elif trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.name==new_name ).first(): msg = 'A role with that name already exists' return trans.fill_template( '/admin/dataset_security/role_rename.mako', role=role, msg=msg, messagetype='error' ) else: @@ -227,12 +231,16 @@ out_users = [] in_groups = [] out_groups = [] - for user in trans.app.model.User.filter( trans.app.model.User.table.c.deleted==False ).order_by( trans.app.model.User.table.c.email ).all(): + for user in trans.sa_session.query( trans.app.model.User ) \ + .filter( trans.app.model.User.table.c.deleted==False ) \ + .order_by( trans.app.model.User.table.c.email ): if user in [ x.user for x in role.users ]: in_users.append( ( user.id, user.email ) ) else: out_users.append( ( user.id, user.email ) ) - for group in trans.app.model.Group.filter( trans.app.model.Group.table.c.deleted==False ).order_by( trans.app.model.Group.table.c.name ).all(): + for group in trans.sa_session.query( trans.app.model.Group ) \ + .filter( trans.app.model.Group.table.c.deleted==False ) \ + .order_by( trans.app.model.Group.table.c.name ): if group in [ x.group for x in role.groups ]: in_groups.append( ( group.id, group.name ) ) else: @@ -242,9 +250,8 @@ # [ ( LibraryDatasetDatasetAssociation [ action, action ] ) ] library_dataset_actions = {} for dp in role.dataset_actions: - for ldda in trans.app.model.LibraryDatasetDatasetAssociation \ - .filter( trans.app.model.LibraryDatasetDatasetAssociation.dataset_id==dp.dataset_id ) \ - .all(): + for ldda in trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ) \ + .filter( trans.app.model.LibraryDatasetDatasetAssociation.dataset_id==dp.dataset_id ): root_found = False folder_path = '' folder = ldda.library_dataset.folder @@ -255,7 +262,9 @@ else: folder = folder.parent folder_path = '%s %s' % ( folder_path, ldda.name ) - library = trans.app.model.Library.filter( trans.app.model.Library.table.c.root_folder_id == folder.id ).first() + library = trans.sa_session.query( trans.app.model.Library ) \ + .filter( trans.app.model.Library.table.c.root_folder_id == folder.id ) \ + .first() if library not in library_dataset_actions: library_dataset_actions[ library ] = {} try: @@ -275,7 +284,7 @@ @web.require_admin def mark_role_deleted( self, trans, **kwd ): params = util.Params( kwd ) - role = trans.app.model.Role.get( int( params.role_id ) ) + role = trans.sa_session.query( trans.app.model.Role ).get( int( params.role_id ) ) role.deleted = True role.flush() msg = "Role '%s' has been marked as deleted." 
% role.name @@ -286,10 +295,9 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - roles = trans.app.model.Role.query() \ - .filter( trans.app.model.Role.table.c.deleted==True ) \ - .order_by( trans.app.model.Role.table.c.name ) \ - .all() + roles = trans.sa_session.query( trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==True ) \ + .order_by( trans.app.model.Role.table.c.name ) return trans.fill_template( '/admin/dataset_security/deleted_roles.mako', roles=roles, msg=msg, @@ -298,7 +306,7 @@ @web.require_admin def undelete_role( self, trans, **kwd ): params = util.Params( kwd ) - role = trans.app.model.Role.get( int( params.role_id ) ) + role = trans.sa_session.query( trans.app.model.Role ).get( int( params.role_id ) ) role.deleted = False role.flush() msg = "Role '%s' has been marked as not deleted." % role.name @@ -314,34 +322,34 @@ # - GroupRoleAssociations where role_id == Role.id # - DatasetPermissionss where role_id == Role.id params = util.Params( kwd ) - role = trans.app.model.Role.get( int( params.role_id ) ) + role = trans.sa_session.query( trans.app.model.Role ).get( int( params.role_id ) ) if not role.deleted: # We should never reach here, but just in case there is a bug somewhere... msg = "Role '%s' has not been deleted, so it cannot be purged." % role.name trans.response.send_redirect( web.url_for( action='roles', msg=util.sanitize_text( msg ), messagetype='error' ) ) # Delete UserRoleAssociations for ura in role.users: - user = trans.app.model.User.get( ura.user_id ) + user = trans.sa_session.query( trans.app.model.User ).get( ura.user_id ) # Delete DefaultUserPermissions for associated users for dup in user.default_permissions: if role == dup.role: - dup.delete() + trans.sa_session.delete( dup ) dup.flush() # Delete DefaultHistoryPermissions for associated users for history in user.histories: for dhp in history.default_permissions: if role == dhp.role: - dhp.delete() + trans.sa_session.delete( dhp ) dhp.flush() - ura.delete() + trans.sa_session.delete( ura ) ura.flush() # Delete GroupRoleAssociations for gra in role.groups: - gra.delete() + trans.sa_session.delete( gra ) gra.flush() # Delete DatasetPermissionss for dp in role.dataset_actions: - dp.delete() + trans.sa_session.delete( dp ) dp.flush() msg = "The following have been purged from the database for role '%s': " % role.name msg += "DefaultUserPermissions, DefaultHistoryPermissions, UserRoleAssociations, GroupRoleAssociations, DatasetPermissionss." 
@@ -354,10 +362,9 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - groups = trans.app.model.Group.query() \ - .filter( trans.app.model.Group.table.c.deleted==False ) \ - .order_by( trans.app.model.Group.table.c.name ) \ - .all() + groups = trans.sa_session.query( trans.app.model.Group ) \ + .filter( trans.app.model.Group.table.c.deleted==False ) \ + .order_by( trans.app.model.Group.table.c.name ) return trans.fill_template( '/admin/dataset_security/groups.mako', groups=groups, msg=msg, @@ -368,12 +375,12 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - group = trans.app.model.Group.get( int( params.group_id ) ) + group = trans.sa_session.query( trans.app.model.Group ).get( int( params.group_id ) ) if params.get( 'group_roles_users_edit_button', False ): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.in_roles ) ] - in_users = [ trans.app.model.User.get( x ) for x in util.listify( params.in_users ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.in_roles ) ] + in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ] trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=in_roles, users=in_users ) - group.refresh() + trans.sa_session.refresh( group ) msg += "Group '%s' has been updated with %d associated roles and %d associated users" % ( group.name, len( in_roles ), len( in_users ) ) trans.response.send_redirect( web.url_for( action='groups', msg=util.sanitize_text( msg ), messagetype=messagetype ) ) if params.get( 'rename', False ): @@ -383,7 +390,7 @@ if not new_name: msg = 'Enter a valid name' return trans.fill_template( '/admin/dataset_security/group_rename.mako', group=group, msg=msg, messagetype='error' ) - elif trans.app.model.Group.filter( trans.app.model.Group.table.c.name==new_name ).first(): + elif trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==new_name ).first(): msg = 'A group with that name already exists' return trans.fill_template( '/admin/dataset_security/group_rename.mako', group=group, msg=msg, messagetype='error' ) else: @@ -396,12 +403,16 @@ out_roles = [] in_users = [] out_users = [] - for role in trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.table.c.name ).all(): + for role in trans.sa_session.query(trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ): if role in [ x.role for x in group.roles ]: in_roles.append( ( role.id, role.name ) ) else: out_roles.append( ( role.id, role.name ) ) - for user in trans.app.model.User.filter( trans.app.model.User.table.c.deleted==False ).order_by( trans.app.model.User.table.c.email ).all(): + for user in trans.sa_session.query( trans.app.model.User ) \ + .filter( trans.app.model.User.table.c.deleted==False ) \ + .order_by( trans.app.model.User.table.c.email ): if user in [ x.user for x in group.users ]: in_users.append( ( user.id, user.email ) ) else: @@ -427,28 +438,32 @@ in_roles = util.listify( params.get( 'in_roles', [] ) ) if not name: msg = "Enter a valid name" - elif trans.app.model.Group.filter( trans.app.model.Group.table.c.name==name ).first(): + elif trans.sa_session.query( trans.app.model.Group ).filter( 
trans.app.model.Group.table.c.name==name ).first(): msg = "A group with that name already exists" else: # Create the group group = trans.app.model.Group( name=name ) group.flush() # Create the UserRoleAssociations - for user in [ trans.app.model.User.get( x ) for x in in_users ]: + for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]: uga = trans.app.model.UserGroupAssociation( user, group ) uga.flush() # Create the GroupRoleAssociations - for role in [ trans.app.model.Role.get( x ) for x in in_roles ]: + for role in [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]: gra = trans.app.model.GroupRoleAssociation( group, role ) gra.flush() msg = "Group '%s' has been created with %d associated users and %d associated roles" % ( name, len( in_users ), len( in_roles ) ) trans.response.send_redirect( web.url_for( controller='admin', action='groups', msg=util.sanitize_text( msg ), messagetype='done' ) ) trans.response.send_redirect( web.url_for( controller='admin', action='create_group', msg=util.sanitize_text( msg ), messagetype='error' ) ) out_users = [] - for user in trans.app.model.User.filter( trans.app.model.User.table.c.deleted==False ).order_by( trans.app.model.User.table.c.email ).all(): + for user in trans.sa_session.query( trans.app.model.User ) \ + .filter( trans.app.model.User.table.c.deleted==False ) \ + .order_by( trans.app.model.User.table.c.email ): out_users.append( ( user.id, user.email ) ) out_roles = [] - for role in trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.table.c.name ).all(): + for role in trans.sa_session.query( trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ): out_roles.append( ( role.id, role.name ) ) return trans.fill_template( '/admin/dataset_security/group_create.mako', in_users=[], @@ -461,7 +476,7 @@ @web.require_admin def mark_group_deleted( self, trans, **kwd ): params = util.Params( kwd ) - group = trans.app.model.Group.get( int( params.group_id ) ) + group = trans.sa_session.query( trans.app.model.Group ).get( int( params.group_id ) ) group.deleted = True group.flush() msg = "Group '%s' has been marked as deleted." % group.name @@ -472,10 +487,9 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - groups = trans.app.model.Group.query() \ - .filter( trans.app.model.Group.table.c.deleted==True ) \ - .order_by( trans.app.model.Group.table.c.name ) \ - .all() + groups = trans.sa_session.query( trans.app.model.Group ) \ + .filter( trans.app.model.Group.table.c.deleted==True ) \ + .order_by( trans.app.model.Group.table.c.name ) return trans.fill_template( '/admin/dataset_security/deleted_groups.mako', groups=groups, msg=msg, @@ -484,7 +498,7 @@ @web.require_admin def undelete_group( self, trans, **kwd ): params = util.Params( kwd ) - group = trans.app.model.Group.get( int( params.group_id ) ) + group = trans.sa_session.query( trans.app.model.Group ).get( int( params.group_id ) ) group.deleted = False group.flush() msg = "Group '%s' has been marked as not deleted." % group.name @@ -495,18 +509,18 @@ # This method should only be called for a Group that has previously been deleted. # Purging a deleted Group simply deletes all UserGroupAssociations and GroupRoleAssociations. 
params = util.Params( kwd ) - group = trans.app.model.Group.get( int( params.group_id ) ) + group = trans.sa_session.query( trans.app.model.Group ).get( int( params.group_id ) ) if not group.deleted: # We should never reach here, but just in case there is a bug somewhere... msg = "Group '%s' has not been deleted, so it cannot be purged." % group.name trans.response.send_redirect( web.url_for( action='groups', msg=util.sanitize_text( msg ), messagetype='error' ) ) # Delete UserGroupAssociations for uga in group.users: - uga.delete() + trans.sa_session.delete( uga ) uga.flush() # Delete GroupRoleAssociations for gra in group.roles: - gra.delete() + trans.sa_session.delete( gra ) gra.flush() # Delete the Group msg = "The following have been purged from the database for group '%s': UserGroupAssociations, GroupRoleAssociations." % group.name @@ -538,7 +552,7 @@ message = 'Enter a real email address' elif len( email) > 255: message = 'Email address exceeds maximum allowable length' - elif trans.app.model.User.filter_by( email=email ).all(): + elif trans.sa_session.query( trans.app.model.User ).filter_by( email=email ).first(): message = 'User with that email already exists' elif len( password ) < 6: message = 'Use a password of at least 6 characters' @@ -683,10 +697,10 @@ private_role = trans.app.security_agent.get_private_user_role( user ) # Delete History for h in user.active_histories: - h.refresh() + trans.sa_session.refresh( h ) for hda in h.active_datasets: # Delete HistoryDatasetAssociation - d = trans.app.model.Dataset.get( hda.dataset_id ) + d = trans.sa_session.query( trans.app.model.Dataset ).get( hda.dataset_id ) # Delete Dataset if not d.deleted: d.deleted = True @@ -697,12 +711,12 @@ h.flush() # Delete UserGroupAssociations for uga in user.groups: - uga.delete() + trans.sa_session.delete( uga ) uga.flush() # Delete UserRoleAssociations EXCEPT FOR THE PRIVATE ROLE for ura in user.roles: if ura.role_id != private_role.id: - ura.delete() + trans.sa_session.delete( ura ) ura.flush() # Purge the user user.purged = True @@ -749,22 +763,22 @@ # Make sure the user is not dis-associating himself from his private role out_roles = kwd.get( 'out_roles', [] ) if out_roles: - out_roles = [ trans.app.model.Role.get( x ) for x in util.listify( out_roles ) ] + out_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( out_roles ) ] if private_role in out_roles: message += "You cannot eliminate a user's private role association. 
" status = 'error' in_roles = kwd.get( 'in_roles', [] ) if in_roles: - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( in_roles ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( in_roles ) ] out_groups = kwd.get( 'out_groups', [] ) if out_groups: - out_groups = [ trans.app.model.Group.get( x ) for x in util.listify( out_groups ) ] + out_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( out_groups ) ] in_groups = kwd.get( 'in_groups', [] ) if in_groups: - in_groups = [ trans.app.model.Group.get( x ) for x in util.listify( in_groups ) ] + in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( in_groups ) ] if in_roles: trans.app.security_agent.set_entity_user_associations( users=[ user ], roles=in_roles, groups=in_groups ) - user.refresh() + trans.sa_session.refresh( user ) message += "User '%s' has been updated with %d associated roles and %d associated groups (private roles are not displayed)" % \ ( user.email, len( in_roles ), len( in_groups ) ) trans.response.send_redirect( web.url_for( action='users', @@ -774,8 +788,8 @@ out_roles = [] in_groups = [] out_groups = [] - for role in trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ) \ - .order_by( trans.app.model.Role.table.c.name ).all(): + for role in trans.sa_session.query( trans.app.model.Role ).filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ): if role in [ x.role for x in user.roles ]: in_roles.append( ( role.id, role.name ) ) elif role.type != trans.app.model.Role.types.PRIVATE: @@ -784,8 +798,8 @@ # role, which should always be in in_roles. The check above is added as an additional # precaution, since for a period of time we were including private roles in the form fields. 
out_roles.append( ( role.id, role.name ) ) - for group in trans.app.model.Group.filter( trans.app.model.Group.table.c.deleted==False ) \ - .order_by( trans.app.model.Group.table.c.name ).all(): + for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted==False ) \ + .order_by( trans.app.model.Group.table.c.name ): if group in [ x.group for x in user.groups ]: in_groups.append( ( group.id, group.name ) ) else: @@ -867,15 +881,13 @@ msg += ', '.join( deleted ) messagetype = 'done' cutoff_time = datetime.utcnow() - timedelta( seconds=int( cutoff ) ) - jobs = trans.app.model.Job.filter( - and_( trans.app.model.Job.table.c.update_time < cutoff_time, - or_( trans.app.model.Job.c.state == trans.app.model.Job.states.NEW, - trans.app.model.Job.c.state == trans.app.model.Job.states.QUEUED, - trans.app.model.Job.c.state == trans.app.model.Job.states.RUNNING, - trans.app.model.Job.c.state == trans.app.model.Job.states.UPLOAD, - ) - ) - ).order_by(trans.app.model.Job.c.update_time.desc()).all() + jobs = trans.sa_session.query( trans.app.model.Job ) \ + .filter( and_( trans.app.model.Job.table.c.update_time < cutoff_time, + or_( trans.app.model.Job.state == trans.app.model.Job.states.NEW, + trans.app.model.Job.state == trans.app.model.Job.states.QUEUED, + trans.app.model.Job.state == trans.app.model.Job.states.RUNNING, + trans.app.model.Job.state == trans.app.model.Job.states.UPLOAD ) ) ) \ + .order_by( trans.app.model.Job.table.c.update_time.desc() ) last_updated = {} for job in jobs: delta = datetime.utcnow() - job.update_time diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/async.py --- a/lib/galaxy/web/controllers/async.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/async.py Thu Oct 22 23:02:28 2009 -0400 @@ -52,7 +52,7 @@ if data_id: if not URL: return "No URL parameter was submitted for data %s" % data_id - data = trans.model.HistoryDatasetAssociation.get( data_id ) + data = trans.sa_session.query( trans.model.HistoryDatasetAssociation ).get( data_id ) if not data: return "Data %s does not exist or has already been deleted" % data_id diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/dataset.py --- a/lib/galaxy/web/controllers/dataset.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/dataset.py Thu Oct 22 23:02:28 2009 -0400 @@ -138,12 +138,12 @@ @web.expose def errors( self, trans, id ): - dataset = model.HistoryDatasetAssociation.get( id ) + dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) return trans.fill_template( "dataset/errors.mako", dataset=dataset ) @web.expose def stderr( self, trans, id ): - dataset = model.HistoryDatasetAssociation.get( id ) + dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) job = dataset.creating_job_associations[0].job trans.response.set_content_type( 'text/plain' ) return job.stderr @@ -157,7 +157,7 @@ if to_address is None: return trans.show_error_message( "Sorry, error reporting has been disabled for this galaxy instance" ) # Get the dataset and associated job - dataset = model.HistoryDatasetAssociation.get( id ) + dataset = trans.sa_session.query( model.HistoryDatasetAssociation ).get( id ) job = dataset.creating_job_associations[0].job # Get the name of the server hosting the Galaxy instance from which this report originated host = trans.request.host @@ -207,7 +207,7 @@ dataset_id = int( dataset_id ) except ValueError: dataset_id = trans.security.decode_id( dataset_id ) - data = 
trans.app.model.HistoryDatasetAssociation.get( dataset_id ) + data = data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id ) if not data: raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) ) user, roles = trans.get_user_and_roles() @@ -305,7 +305,7 @@ def display_at( self, trans, dataset_id, filename=None, **kwd ): """Sets up a dataset permissions so it is viewable at an external site""" site = filename - data = trans.app.model.HistoryDatasetAssociation.get( dataset_id ) + data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id ) if not data: raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) ) if 'display_url' not in kwd or 'redirect_url' not in kwd: @@ -326,7 +326,7 @@ except ValueError, e: return False history = trans.get_history() - data = self.app.model.HistoryDatasetAssociation.get( id ) + data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) if data and data.undeletable: # Walk up parent datasets to find the containing history topmost_parent = data @@ -390,12 +390,12 @@ new_history.flush() target_history_ids.append( new_history.id ) if user: - target_histories = [ hist for hist in map( trans.app.model.History.get, target_history_ids ) if ( hist is not None and hist.user == user )] + target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )] else: target_histories = [ history ] if len( target_histories ) != len( target_history_ids ): error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) ) - for data in map( trans.app.model.HistoryDatasetAssociation.get, source_dataset_ids ): + for data in map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, source_dataset_ids ): if data is None: error_msg = error_msg + "You tried to copy a dataset that does not exist. " invalid_datasets += 1 @@ -409,7 +409,7 @@ refresh_frames = ['history'] trans.app.model.flush() done_msg = "%i datasets copied to %i histories." 
% ( len( source_dataset_ids ) - invalid_datasets, len( target_histories ) ) - history.refresh() + trans.sa_session.refresh( history ) elif create_new_history: target_history_ids.append( "create_new_history" ) source_datasets = history.active_datasets diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/forms.py --- a/lib/galaxy/web/controllers/forms.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/forms.py Thu Oct 22 23:02:28 2009 -0400 @@ -39,7 +39,7 @@ show_filter = params.get( 'show_filter', 'Active' ) return self._show_forms_list(trans, msg, messagetype, show_filter) def _show_forms_list(self, trans, msg, messagetype, show_filter='Active'): - all_forms = trans.app.model.FormDefinitionCurrent.query().all() + all_forms = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ) if show_filter == 'All': forms_list = all_forms elif show_filter == 'Deleted': @@ -109,7 +109,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - fd = trans.app.model.FormDefinition.get(int(util.restore_text( params.form_id ))) + fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( util.restore_text( params.form_id ) ) ) fd.form_definition_current.deleted = True fd.form_definition_current.flush() return self._show_forms_list(trans, @@ -121,7 +121,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - fd = trans.app.model.FormDefinition.get(int(util.restore_text( params.form_id ))) + fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( util.restore_text( params.form_id ) ) ) fd.form_definition_current.deleted = False fd.form_definition_current.flush() return self._show_forms_list(trans, @@ -138,7 +138,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - fd = trans.app.model.FormDefinition.get( int( params.get( 'form_id', None ) ) ) + fd = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( params.get( 'form_id', None ) ) ) except: return trans.response.send_redirect( web.url_for( controller='forms', action='manage', @@ -444,7 +444,7 @@ if fdc_id: # save changes to the existing form # change the pointer in the form_definition_current table to point # to this new record - fdc = trans.app.model.FormDefinitionCurrent.get(fdc_id) + fdc = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).get( fdc_id ) else: # create a new form fdc = trans.app.model.FormDefinitionCurrent() # create corresponding row in the form_definition_current table @@ -578,11 +578,11 @@ of all the forms from the form_definition table. 
''' if all_versions: - return trans.app.model.FormDefinition.query().all() + return trans.sa_session.query( trans.app.model.FormDefinition ) if filter: - fdc_list = trans.app.model.FormDefinitionCurrent.query().filter_by(**filter) + fdc_list = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ).filter_by( **filter ) else: - fdc_list = trans.app.model.FormDefinitionCurrent.query().all() + fdc_list = trans.sa_session.query( trans.app.model.FormDefinitionCurrent ) if form_type == 'All': return [ fdc.latest_form for fdc in fdc_list ] else: diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/genetrack.py --- a/lib/galaxy/web/controllers/genetrack.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/genetrack.py Thu Oct 22 23:02:28 2009 -0400 @@ -154,7 +154,7 @@ """ Default search page """ - data = trans.app.model.HistoryDatasetAssociation.get( dataset_id ) + data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id ) if not data: raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) ) # the main configuration file @@ -202,7 +202,7 @@ Main request handler """ color = cycle( [LIGHT, WHITE] ) - data = trans.app.model.HistoryDatasetAssociation.get( dataset_id ) + data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( dataset_id ) if not data: raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) ) # the main configuration file diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/history.py --- a/lib/galaxy/web/controllers/history.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/history.py Thu Oct 22 23:02:28 2009 -0400 @@ -233,9 +233,9 @@ status, message = self._list_undelete( trans, histories ) elif operation == "unshare": for history in histories: - husas = trans.app.model.HistoryUserShareAssociation.filter_by( history=history ).all() - for husa in husas: - husa.delete() + for husa in trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ + .filter_by( history=history ): + trans.sa_session.delete( husa ) elif operation == "enable import via link": for history in histories: if not history.importable: @@ -306,8 +306,9 @@ new_history = histories[0] galaxy_session = trans.get_galaxy_session() try: - association = trans.app.model.GalaxySessionToHistoryAssociation \ - .filter_by( session_id=galaxy_session.id, history_id=trans.security.decode_id( new_history.id ) ).first() + association = trans.sa_session.query( trans.app.model.GalaxySessionToHistoryAssociation ) \ + .filter_by( session_id=galaxy_session.id, history_id=trans.security.decode_id( new_history.id ) ) \ + .first() except: association = None new_history.add_galaxy_session( galaxy_session, association=association ) @@ -338,8 +339,8 @@ histories = [ get_history( trans, history_id ) for history_id in ids ] for history in histories: # Current user is the user with which the histories were shared - association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one() - association.delete() + association = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=trans.user, history=history ).one() + trans.sa_session.delete( association ) association.flush() message = "Unshared %d shared histories" % len( ids ) status = 'done' @@ -360,7 +361,7 @@ return trans.show_ok_message( "History deleted, a new history is active", 
refresh_frames=['history'] ) @web.expose def rename_async( self, trans, id=None, new_name=None ): - history = model.History.get( id ) + history = trans.sa_session.query( model.History ).get( id ) # Check that the history exists, and is either owned by the current # user (if logged in) or the current history assert history is not None @@ -393,8 +394,9 @@ new_history.user_id = user.id galaxy_session = trans.get_galaxy_session() try: - association = trans.app.model.GalaxySessionToHistoryAssociation \ - .filter_by( session_id=galaxy_session.id, history_id=new_history.id ).first() + association = trans.sa_session.query( trans.app.model.GalaxySessionToHistoryAssociation ) \ + .filter_by( session_id=galaxy_session.id, history_id=new_history.id ) \ + .first() except: association = None new_history.add_galaxy_session( galaxy_session, association=association ) @@ -410,8 +412,9 @@ new_history.user_id = None galaxy_session = trans.get_galaxy_session() try: - association = trans.app.model.GalaxySessionToHistoryAssociation \ - .filter_by( session_id=galaxy_session.id, history_id=new_history.id ).first() + association = trans.sa_session.query( trans.app.model.GalaxySessionToHistoryAssociation ) \ + .filter_by( session_id=galaxy_session.id, history_id=new_history.id ) \ + .first() except: association = None new_history.add_galaxy_session( galaxy_session, association=association ) @@ -443,10 +446,10 @@ # View history. query = trans.sa_session.query( model.HistoryDatasetAssociation ) \ - .filter( model.HistoryDatasetAssociation.history == history_to_view ) \ - .options( eagerload( "children" ) ) \ - .join( "dataset" ).filter( model.Dataset.purged == False ) \ - .options( eagerload_all( "dataset.actions" ) ) + .filter( model.HistoryDatasetAssociation.history == history_to_view ) \ + .options( eagerload( "children" ) ) \ + .join( "dataset" ).filter( model.Dataset.purged == False ) \ + .options( eagerload_all( "dataset.actions" ) ) # Do not show deleted datasets. query = query.filter( model.HistoryDatasetAssociation.deleted == False ) user_owns_history = ( trans.get_user() == history_to_view.user ) @@ -547,10 +550,10 @@ for send_to_user, history_dict in can_change.items(): for history in history_dict: # Make sure the current history has not already been shared with the current send_to_user - if trans.app.model.HistoryUserShareAssociation \ - .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, - trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ - .count() > 0: + if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ + .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, + trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ + .count() > 0: send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email ) else: # Only deal with datasets that have not been purged @@ -590,8 +593,10 @@ if email_address == user.email: send_to_err += "You cannot send histories to yourself. 
" else: - send_to_user = trans.app.model.User.filter( and_( trans.app.model.User.table.c.email==email_address, - trans.app.model.User.table.c.deleted==False ) ).first() + send_to_user = trans.sa_session.query( trans.app.model.User ) \ + .filter( and_( trans.app.model.User.table.c.email==email_address, + trans.app.model.User.table.c.deleted==False ) ) \ + .first() if send_to_user: send_to_users.append( send_to_user ) else: @@ -608,10 +613,10 @@ for send_to_user, history_dict in other.items(): for history in history_dict: # Make sure the current history has not already been shared with the current send_to_user - if trans.app.model.HistoryUserShareAssociation \ - .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, - trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ - .count() > 0: + if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ + .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, + trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ + .count() > 0: send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email ) else: # Build the dict that will be used for sharing @@ -640,10 +645,10 @@ for history in histories: for send_to_user in send_to_users: # Make sure the current history has not already been shared with the current send_to_user - if trans.app.model.HistoryUserShareAssociation \ - .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, - trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ - .count() > 0: + if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ + .filter( and_( trans.app.model.HistoryUserShareAssociation.table.c.user_id == send_to_user.id, + trans.app.model.HistoryUserShareAssociation.table.c.history_id == history.id ) ) \ + .count() > 0: send_to_err += "History (%s) already shared with user (%s)" % ( history.name, send_to_user.email ) else: # Only deal with datasets that have not been purged @@ -748,14 +753,14 @@ history.importable = False history.flush() elif params.get( 'unshare_user', False ): - user = trans.app.model.User.get( trans.security.decode_id( kwd[ 'unshare_user' ] ) ) + user = trans.sa_session.query( trans.app.model.User ).get( trans.security.decode_id( kwd[ 'unshare_user' ] ) ) if not user: msg = 'History (%s) does not seem to be shared with user (%s)' % ( history.name, user.email ) return trans.fill_template( 'history/sharing.mako', histories=histories, msg=msg, messagetype='error' ) - husas = trans.app.model.HistoryUserShareAssociation.filter_by( user=user, history=history ).all() + husas = trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ).filter_by( user=user, history=history ).all() if husas: for husa in husas: - husa.delete() + trans.sa_session.delete( husa ) husa.flush() histories = [] # Get all histories that have been shared with others @@ -763,8 +768,7 @@ .join( "history" ) \ .filter( and_( trans.app.model.History.user == trans.user, trans.app.model.History.deleted == False ) ) \ - .order_by( trans.app.model.History.table.c.name ) \ - .all() + .order_by( trans.app.model.History.table.c.name ) for husa in husas: history = husa.history if history not in histories: @@ -772,8 +776,7 @@ # Get all histories that are importable importables = trans.sa_session.query( trans.app.model.History ) \ .filter_by( user=trans.user, 
importable=True, deleted=False ) \ - .order_by( trans.app.model.History.table.c.name ) \ - .all() + .order_by( trans.app.model.History.table.c.name ) for importable in importables: if importable not in histories: histories.append( importable ) @@ -843,7 +846,8 @@ owner = True else: if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \ - .filter_by( user=user, history=history ).count() == 0: + .filter_by( user=user, history=history ) \ + .count() == 0: return trans.show_error_message( "The history you are attempting to clone is not owned by you or shared with you. " ) owner = False name = "Clone of '%s'" % history.name diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/library.py --- a/lib/galaxy/web/controllers/library.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/library.py Thu Oct 22 23:02:28 2009 -0400 @@ -63,8 +63,9 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) user, roles = trans.get_user_and_roles() - all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted==False ) \ - .order_by( trans.app.model.Library.name ).all() + all_libraries = trans.sa_session.query( trans.app.model.Library ) \ + .filter( trans.app.model.Library.table.c.deleted==False ) \ + .order_by( trans.app.model.Library.name ) library_actions = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE ] @@ -102,7 +103,7 @@ default_action=params.get( 'default_action', None ), msg=util.sanitize_text( msg ), messagetype='error' ) ) - library = library=trans.app.model.Library.get( library_id ) + library = trans.sa_session.query( trans.app.model.Library ).get( library_id ) if not library: # To handle bots msg = "Invalid library id ( %s )." % str( library_id ) @@ -144,7 +145,7 @@ action='browse_libraries', msg=util.sanitize_text( msg ), messagetype='error' ) ) - library = trans.app.model.Library.get( int( library_id ) ) + library = trans.sa_session.query( trans.app.model.Library ).get( int( library_id ) ) if not library: msg = "Invalid library id ( %s ) specified." 
% str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library', @@ -190,10 +191,10 @@ # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( library, permissions ) - library.refresh() + trans.sa_session.refresh( library ) # Copy the permissions to the root folder trans.app.security_agent.copy_library_permissions( library, library.root_folder ) msg = "Permissions updated for library '%s'" % library.name @@ -221,7 +222,7 @@ else: # 'information' will be the default action = 'information' - folder = trans.app.model.LibraryFolder.get( int( obj_id ) ) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( obj_id ) ) if not folder: msg = "Invalid folder specified, id: %s" % str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library', @@ -301,10 +302,10 @@ if trans.app.security_agent.can_manage_library_item( user, roles, folder ): permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( int( x ) ) for x in util.listify( params.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( int( x ) ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( folder, permissions ) - folder.refresh() + trans.sa_session.refresh( folder ) msg = 'Permissions updated for folder %s' % folder.name return trans.response.send_redirect( web.url_for( controller='library', action='folder', @@ -336,7 +337,7 @@ action = 'permissions' else: action = 'information' - library_dataset = trans.app.model.LibraryDataset.get( obj_id ) + library_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( obj_id ) if not library_dataset: msg = "Invalid library dataset specified, id: %s" %str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library', @@ -375,15 +376,15 @@ # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( library_dataset, permissions ) - library_dataset.refresh() + trans.sa_session.refresh( library_dataset ) # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( library_dataset.library_dataset_dataset_association, permissions ) - library_dataset.library_dataset_dataset_association.refresh() + trans.sa_session.refresh( library_dataset.library_dataset_dataset_association ) msg = 'Permissions and 
roles have been updated for library dataset %s' % library_dataset.name messagetype = 'done' else: @@ -399,7 +400,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if not ldda: msg = "Invalid LibraryDatasetDatasetAssociation specified, obj_id: %s" % str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library', @@ -535,7 +536,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if not ldda: msg = "Invalid LibraryDatasetDatasetAssociation specified, id: %s" % str( obj_id ) return trans.response.send_redirect( web.url_for( controller='admin', @@ -560,7 +561,7 @@ # Display permission form, permissions will be updated for all lddas simultaneously. lddas = [] for obj_id in [ int( obj_id ) for obj_id in obj_ids ]: - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if ldda is None: msg = 'You specified an invalid LibraryDatasetDatasetAssociation id: %s' %str( obj_id ) trans.response.send_redirect( web.url_for( controller='library', @@ -574,24 +575,24 @@ trans.app.security_agent.can_manage_dataset( roles, ldda.dataset ): permissions = {} for k, v in trans.app.model.Dataset.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles for ldda in lddas: # Set the DATASET permissions on the Dataset trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, permissions ) - ldda.dataset.refresh() + trans.sa_session.refresh( ldda.dataset ) permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles for ldda in lddas: # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( ldda.library_dataset, permissions ) - ldda.library_dataset.refresh() + trans.sa_session.refresh( ldda.library_dataset ) # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( ldda, permissions ) - ldda.refresh() + trans.sa_session.refresh( ldda ) msg = 'Permissions and roles have been updated on %d datasets' % len( lddas ) messagetype = 'done' else: @@ -645,12 +646,12 @@ last_used_build = dbkey[0] else: last_used_build = dbkey - folder = trans.app.model.LibraryFolder.get( folder_id ) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder_id ) if folder and last_used_build in [ 'None', 
None, '?' ]: last_used_build = folder.genome_build replace_id = params.get( 'replace_id', None ) if replace_id not in [ None, 'None' ]: - replace_dataset = trans.app.model.LibraryDataset.get( params.get( 'replace_id', None ) ) + replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( replace_id ) if not last_used_build: last_used_build = replace_dataset.library_dataset_dataset_association.dbkey # Don't allow multiple datasets to be uploaded when replacing a dataset with a new version @@ -691,7 +692,7 @@ msg = "Added %d datasets to the folder '%s' ( each is selected ). " % ( total_added, folder.name ) # Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need # to check one of them to see if the current user can manage permissions on them. - check_ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id_list[0] ) + check_ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id_list[0] ) if trans.app.security_agent.can_manage_library_item( user, roles, check_ldda ): if replace_dataset: default_action = '' @@ -728,10 +729,12 @@ yield build_name, dbkey, ( dbkey==last_used_build ) dbkeys = get_dbkey_options( last_used_build ) # Send list of roles to the form so the dataset can be associated with 1 or more of them. - roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.c.name ).all() + roles = trans.sa_session.query( trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ) # Send the current history to the form to enable importing datasets from history to library history = trans.get_history() - history.refresh() + trans.sa_session.refresh( history ) # If we're using nginx upload, override the form action action = web.url_for( controller='library', action='upload_library_dataset' ) if upload_option == 'upload_file' and trans.app.config.nginx_upload_path: @@ -756,7 +759,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - folder = trans.app.model.LibraryFolder.get( int( folder_id ) ) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( folder_id ) ) except: msg = "Invalid folder id: %s" % str( folder_id ) return trans.response.send_redirect( web.url_for( controller='library', @@ -766,12 +769,12 @@ messagetype='error' ) ) replace_id = params.get( 'replace_id', None ) if replace_id: - replace_dataset = trans.app.model.LibraryDataset.get( replace_id ) + replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( replace_id ) else: replace_dataset = None # See if the current history is empty history = trans.get_history() - history.refresh() + trans.sa_session.refresh( history ) if not history.active_datasets: msg = 'Your current history is empty' return trans.response.send_redirect( web.url_for( controller='library', @@ -785,7 +788,7 @@ dataset_names = [] created_ldda_ids = '' for hda_id in hda_ids: - hda = trans.app.model.HistoryDatasetAssociation.get( hda_id ) + hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( hda_id ) if hda: ldda = hda.to_library_dataset_dataset_association( target_folder=folder, replace_dataset=replace_dataset ) created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( ldda.id ) ) @@ -818,7 +821,7 @@ msg = "Added %d datasets to the folder '%s' ( each is selected ). 
" % ( total_added, folder.name ) # Since permissions on all LibraryDatasetDatasetAssociations must be the same at this point, we only need # to check one of them to see if the current user can manage permissions on them. - check_ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id_list[0] ) + check_ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id_list[0] ) user, roles = trans.get_user_and_roles() if trans.app.security_agent.can_manage_library_item( user, roles, check_ldda ): if replace_dataset: @@ -848,7 +851,9 @@ yield build_name, dbkey, ( dbkey==last_used_build ) dbkeys = get_dbkey_options( last_used_build ) # Send list of roles to the form so the dataset can be associated with 1 or more of them. - roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.c.name ).all() + roles = trans.sa_session.query( trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ) return trans.fill_template( "/library/upload.mako", upload_option=upload_option, library_id=library_id, @@ -887,7 +892,7 @@ if params.do_action == 'add': history = trans.get_history() for ldda_id in ldda_ids: - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) hda = ldda.to_history_dataset_association( target_history=history, add_to_history = True ) history.flush() msg = "%i dataset(s) have been imported into your history" % len( ldda_ids ) @@ -898,7 +903,7 @@ messagetype='done' ) ) elif params.do_action == 'manage_permissions': # We need the folder containing the LibraryDatasetDatasetAssociation(s) - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_ids[0] ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_ids[0] ) trans.response.send_redirect( web.url_for( controller='library', action='upload_library_dataset', library_id=library_id, @@ -933,7 +938,7 @@ seen = [] user, roles = trans.get_user_and_roles() for ldda_id in ldda_ids: - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) if not ldda or not trans.app.security_agent.can_access_dataset( roles, ldda.dataset ): continue path = "" diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/library_admin.py --- a/lib/galaxy/web/controllers/library_admin.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/library_admin.py Thu Oct 22 23:02:28 2009 -0400 @@ -19,8 +19,9 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) return trans.fill_template( '/admin/library/browse_libraries.mako', - libraries=trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted==False ) \ - .order_by( trans.app.model.Library.name ).all(), + libraries=trans.sa_session.query( trans.app.model.Library ) \ + .filter( trans.app.model.Library.table.c.deleted==False ) \ + .order_by( trans.app.model.Library.name ), deleted=False, show_deleted=False, msg=msg, @@ -41,7 +42,7 @@ messagetype='error' ) ) deleted = util.string_as_bool( params.get( 'deleted', False ) ) show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) ) - library = library=trans.app.model.Library.get( library_id ) + library = trans.sa_session.query( 
trans.app.model.Library ).get( library_id ) if not library: msg = "Invalid library id ( %s )." % str( library_id ) return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -82,7 +83,7 @@ msg=util.sanitize_text( msg ), messagetype='error' ) ) if not action == 'new': - library = trans.app.model.Library.get( int( library_id ) ) + library = trans.sa_session.query( trans.app.model.Library ).get( int( library_id ) ) if action == 'new': if params.new == 'submitted': library = trans.app.model.Library( name = util.restore_text( params.name ), @@ -134,14 +135,14 @@ messagetype=messagetype ) elif action == 'delete': def delete_folder( library_folder ): - library_folder.refresh() + trans.sa_session.refresh( library_folder ) for folder in library_folder.folders: delete_folder( folder ) for library_dataset in library_folder.datasets: - library_dataset.refresh() + trans.sa_session.refresh( library_dataset ) ldda = library_dataset.library_dataset_dataset_association if ldda: - ldda.refresh() + trans.sa_session.refresh( ldda ) # We don't set ldda.dataset.deleted to True here because the cleanup_dataset script # will eventually remove it from disk. The purge_library method below sets the dataset # to deleted. This allows for the library to be undeleted ( before it is purged ), @@ -152,7 +153,7 @@ library_dataset.flush() library_folder.deleted = True library_folder.flush() - library.refresh() + trans.sa_session.refresh( library ) delete_folder( library.root_folder ) library.deleted = True library.flush() @@ -163,10 +164,10 @@ # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( library, permissions ) - library.refresh() + trans.sa_session.refresh( library ) # Copy the permissions to the root folder trans.app.security_agent.copy_library_permissions( library, library.root_folder ) msg = "Permissions updated for library '%s'" % library.name @@ -186,9 +187,10 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - libraries=trans.app.model.Library.filter( and_( trans.app.model.Library.table.c.deleted==True, - trans.app.model.Library.table.c.purged==False ) ) \ - .order_by( trans.app.model.Library.table.c.name ).all() + libraries = trans.sa_session.query( trans.app.model.Library ) \ + .filter( and_( trans.app.model.Library.table.c.deleted==True, + trans.app.model.Library.table.c.purged==False ) ) \ + .order_by( trans.app.model.Library.table.c.name ) return trans.fill_template( '/admin/library/browse_libraries.mako', libraries=libraries, deleted=True, @@ -199,18 +201,18 @@ @web.require_admin def purge_library( self, trans, **kwd ): params = util.Params( kwd ) - library = trans.app.model.Library.get( int( params.obj_id ) ) + library = trans.sa_session.query( trans.app.model.Library ).get( int( params.obj_id ) ) def purge_folder( library_folder ): for lf in library_folder.folders: purge_folder( lf ) - library_folder.refresh() + trans.sa_session.refresh( library_folder ) for library_dataset in library_folder.datasets: - library_dataset.refresh() + trans.sa_session.refresh( 
library_dataset ) ldda = library_dataset.library_dataset_dataset_association if ldda: - ldda.refresh() + trans.sa_session.refresh( ldda ) dataset = ldda.dataset - dataset.refresh() + trans.sa_session.refresh( dataset ) # If the dataset is not associated with any additional undeleted folders, then we can delete it. # We don't set dataset.purged to True here because the cleanup_datasets script will do that for # us, as well as removing the file from disk. @@ -254,7 +256,7 @@ else: # 'information' will be the default action = 'information' - folder = trans.app.model.LibraryFolder.get( int( obj_id ) ) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( obj_id ) ) if not folder: msg = "Invalid folder specified, id: %s" % str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -331,10 +333,10 @@ # The user clicked the Save button on the 'Associate With Roles' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( int( x ) ) for x in util.listify( params.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( int( x ) ) for x in util.listify( params.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles trans.app.security_agent.set_all_library_permissions( folder, permissions ) - folder.refresh() + trans.sa_session.refresh( folder ) msg = "Permissions updated for folder '%s'" % folder.name return trans.response.send_redirect( web.url_for( controller='library_admin', action='folder', @@ -358,7 +360,7 @@ action = 'permissions' else: action = 'information' - library_dataset = trans.app.model.LibraryDataset.get( obj_id ) + library_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( obj_id ) if not library_dataset: msg = "Invalid library dataset specified, id: %s" %str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -390,15 +392,15 @@ # The user clicked the Save button on the 'Edit permissions and role associations' form permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( library_dataset, permissions ) - library_dataset.refresh() + trans.sa_session.refresh( library_dataset ) # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( library_dataset.library_dataset_dataset_association, permissions ) - library_dataset.library_dataset_dataset_association.refresh() + trans.sa_session.refresh( library_dataset.library_dataset_dataset_association ) msg = 'Permissions and roles have been updated for library dataset %s' % library_dataset.name return trans.fill_template( '/admin/library/library_dataset_permissions.mako', library_dataset=library_dataset, @@ -411,7 +413,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - ldda = 
trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if not ldda: msg = "Invalid LibraryDatasetDatasetAssociation specified, obj_id: %s" % str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -528,7 +530,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) ) - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if not ldda: msg = "Invalid LibraryDatasetDatasetAssociation specified, obj_id: %s" % str( obj_id ) return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -555,7 +557,7 @@ # Display permission form, permissions will be updated for all lddas simultaneously. lddas = [] for obj_id in [ int( obj_id ) for obj_id in obj_ids ]: - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if ldda is None: msg = 'You specified an invalid LibraryDatasetDatasetAssociation obj_id: %s' %str( obj_id ) trans.response.send_redirect( web.url_for( controller='library_admin', @@ -568,7 +570,7 @@ permissions = {} accessible = False for k, v in trans.app.model.Dataset.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( params.get( k + '_in', [] ) ) ] # At least 1 user must have every role associated with this dataset, or the dataset is inaccessible if v == trans.app.security_agent.permitted_actions.DATASET_ACCESS: if len( in_roles ) > 1: @@ -599,19 +601,19 @@ for ldda in lddas: # Set the DATASET permissions on the Dataset trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, permissions ) - ldda.dataset.refresh() + trans.sa_session.refresh( ldda.dataset ) permissions = {} for k, v in trans.app.model.Library.permitted_actions.items(): - in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] + in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ] permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles for ldda in lddas: # Set the LIBRARY permissions on the LibraryDataset # NOTE: the LibraryDataset and LibraryDatasetDatasetAssociation will be set with the same permissions trans.app.security_agent.set_all_library_permissions( ldda.library_dataset, permissions ) - ldda.library_dataset.refresh() + trans.sa_session.refresh( ldda.library_dataset ) # Set the LIBRARY permissions on the LibraryDatasetDatasetAssociation trans.app.security_agent.set_all_library_permissions( ldda, permissions ) - ldda.refresh() + trans.sa_session.refresh( ldda ) if not accessible: msg = "At least 1 user must have every role associated with accessing these %d datasets. 
" % len( lddas ) msg += "The roles you attempted to associate for access would make these datasets inaccessible by everyone, " @@ -666,12 +668,12 @@ last_used_build = dbkey[0] else: last_used_build = dbkey - folder = trans.app.model.LibraryFolder.get( folder_id ) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder_id ) if folder and last_used_build in [ 'None', None, '?' ]: last_used_build = folder.genome_build replace_id = params.get( 'replace_id', None ) if replace_id not in [ None, 'None' ]: - replace_dataset = trans.app.model.LibraryDataset.get( int( replace_id ) ) + replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( int( replace_id ) ) if not last_used_build: last_used_build = replace_dataset.library_dataset_dataset_association.dbkey # Don't allow multiple datasets to be uploaded when replacing a dataset with a new version @@ -729,10 +731,12 @@ yield build_name, dbkey, ( dbkey==last_used_build ) dbkeys = get_dbkey_options( last_used_build ) # Send list of roles to the form so the dataset can be associated with 1 or more of them. - roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.c.name ).all() + roles = trans.sa_session.query( trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ) # Send the current history to the form to enable importing datasets from history to library history = trans.get_history() - history.refresh() + trans.sa_session.refresh( history ) # If we're using nginx upload, override the form action action = web.url_for( controller='library_admin', action='upload_library_dataset' ) if upload_option == 'upload_file' and trans.app.config.nginx_upload_path: @@ -758,7 +762,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - folder = trans.app.model.LibraryFolder.get( int( folder_id ) ) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( folder_id ) ) except: msg = "Invalid folder id: %s" % str( folder_id ) return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -768,12 +772,12 @@ messagetype='error' ) ) replace_id = params.get( 'replace_id', None ) if replace_id: - replace_dataset = trans.app.model.LibraryDataset.get( replace_id ) + replace_dataset = trans.sa_session.query( trans.app.model.LibraryDataset ).get( replace_id ) else: replace_dataset = None # See if the current history is empty history = trans.get_history() - history.refresh() + trans.sa_session.refresh( history ) if not history.active_datasets: msg = 'Your current history is empty' return trans.response.send_redirect( web.url_for( controller='library_admin', @@ -787,7 +791,7 @@ dataset_names = [] created_ldda_ids = '' for hda_id in hda_ids: - hda = trans.app.model.HistoryDatasetAssociation.get( hda_id ) + hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( hda_id ) if hda: ldda = hda.to_library_dataset_dataset_association( target_folder=folder, replace_dataset=replace_dataset ) created_ldda_ids = '%s,%s' % ( created_ldda_ids, str( ldda.id ) ) @@ -838,7 +842,9 @@ yield build_name, dbkey, ( dbkey==last_used_build ) dbkeys = get_dbkey_options( last_used_build ) # Send list of roles to the form so the dataset can be associated with 1 or more of them. 
- roles = trans.app.model.Role.filter( trans.app.model.Role.table.c.deleted==False ).order_by( trans.app.model.Role.c.name ).all() + roles = trans.sa_session.query( trans.app.model.Role ) \ + .filter( trans.app.model.Role.table.c.deleted==False ) \ + .order_by( trans.app.model.Role.table.c.name ) return trans.fill_template( "/admin/library/upload.mako", upload_option=upload_option, library_id=library_id, @@ -871,7 +877,7 @@ messagetype='error' ) ) if params.action == 'manage_permissions': # We need the folder containing the LibraryDatasetDatasetAssociation(s) - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( int( ldda_ids[0] ) ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( int( ldda_ids[0] ) ) trans.response.send_redirect( web.url_for( controller='library_admin', action='ldda_manage_permissions', library_id=library_id, @@ -881,7 +887,7 @@ messagetype=messagetype ) ) elif params.action == 'delete': for ldda_id in ldda_ids: - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) ldda.deleted = True ldda.flush() msg = "The selected datasets have been removed from this data library" @@ -918,7 +924,7 @@ library_item_desc = 'Dataset' else: library_item_desc = library_item_type.capitalize() - library_item = library_item_types[ library_item_type ].get( int( library_item_id ) ) + library_item = trans.sa_session.query( library_item_types[ library_item_type ] ).get( int( library_item_id ) ) library_item.deleted = True library_item.flush() msg = util.sanitize_text( "%s '%s' has been marked deleted" % ( library_item_desc, library_item.name ) ) @@ -945,7 +951,7 @@ library_item_desc = 'Dataset' else: library_item_desc = library_item_type.capitalize() - library_item = library_item_types[ library_item_type ].get( int( library_item_id ) ) + library_item = trans.sa_session.query( library_item_types[ library_item_type ] ).get( int( library_item_id ) ) if library_item.purged: msg = '%s %s has been purged, so it cannot be undeleted' % ( library_item_desc, library_item.name ) messagetype = 'error' diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/library_common.py --- a/lib/galaxy/web/controllers/library_common.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/library_common.py Thu Oct 22 23:02:28 2009 -0400 @@ -22,7 +22,7 @@ ids = map( int, ids.split( "," ) ) states = states.split( "," ) for id, state in zip( ids, states ): - data = self.app.model.LibraryDatasetDatasetAssociation.get( id ) + data = trans.sa_session.query( self.app.model.LibraryDatasetDatasetAssociation ).get( id ) if data.state != state: job_ldda = data while job_ldda.copied_from_library_dataset_dataset_association: @@ -184,7 +184,7 @@ def download_dataset_from_folder( self, trans, cntrller, obj_id, library_id=None, **kwd ): """Catches the dataset id and displays file contents as directed""" # id must refer to a LibraryDatasetDatasetAssociation object - ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( obj_id ) + ldda = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( obj_id ) if not ldda.dataset: msg = 'Invalid LibraryDatasetDatasetAssociation id %s received for file downlaod' % str( obj_id ) return trans.response.send_redirect( web.url_for( controller=cntrller, @@ -218,19 +218,19 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) if obj_id: - 
library_item = trans.app.model.FormDefinition.get( int( obj_id ) ) + library_item = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( obj_id ) ) library_item_desc = 'information template' response_id = obj_id elif folder_id: - library_item = trans.app.model.LibraryFolder.get( int( folder_id ) ) + library_item = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( folder_id ) ) library_item_desc = 'folder' response_id = folder_id elif ldda_id: - library_item = trans.app.model.LibraryDatasetDatasetAssociation.get( int( ldda_id ) ) + library_item = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( int( ldda_id ) ) library_item_desc = 'library dataset' response_id = ldda_id else: - library_item = trans.app.model.Library.get( int( library_id ) ) + library_item = trans.sa_session.query( trans.app.model.Library ).get( int( library_id ) ) library_item_desc = 'library' response_id = library_id forms = get_all_forms( trans, @@ -244,7 +244,7 @@ msg=msg, messagetype='done' ) ) if params.get( 'add_info_template_button', False ): - form = trans.app.model.FormDefinition.get( int( kwd[ 'form_id' ] ) ) + form = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( kwd[ 'form_id' ] ) ) #fields = list( copy.deepcopy( form.fields ) ) form_values = trans.app.model.FormValues( form, [] ) form_values.flush() @@ -280,13 +280,13 @@ messagetype = params.get( 'messagetype', 'done' ) folder_id = None if library_item_type == 'library': - library_item = trans.app.model.Library.get( library_item_id ) + library_item = trans.sa_session.query( trans.app.model.Library ).get( library_item_id ) elif library_item_type == 'library_dataset': - library_item = trans.app.model.LibraryDataset.get( library_item_id ) + library_item = trans.sa_session.query( trans.app.model.LibraryDataset ).get( library_item_id ) elif library_item_type == 'folder': - library_item = trans.app.model.LibraryFolder.get( library_item_id ) + library_item = trans.sa_session.query( trans.app.model.LibraryFolder ).get( library_item_id ) elif library_item_type == 'library_dataset_dataset_association': - library_item = trans.app.model.LibraryDatasetDatasetAssociation.get( library_item_id ) + library_item = trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get( library_item_id ) # This response_action method requires a folder_id folder_id = library_item.library_dataset.folder.id else: @@ -310,7 +310,7 @@ if info_association: template = info_association.template info = info_association.info - form_values = trans.app.model.FormValues.get( info.id ) + form_values = trans.sa_session.query( trans.app.model.FormValues ).get( info.id ) # Update existing content only if it has changed if form_values.content != field_contents: form_values.content = field_contents diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/mobile.py --- a/lib/galaxy/web/controllers/mobile.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/mobile.py Thu Oct 22 23:02:28 2009 -0400 @@ -11,19 +11,19 @@ @web.expose def history_detail( self, trans, id ): - history = trans.app.model.History.get( id ) + history = trans.sa_session.query( trans.app.model.History ).get( id ) assert history.user == trans.user return trans.fill_template( "mobile/history/detail.mako", history=history ) @web.expose def dataset_detail( self, trans, id ): - dataset = trans.app.model.HistoryDatasetAssociation.get( id ) + dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id ) 
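The mobile controller's __login hunk just below swaps a column-expression filter for the filter_by() keyword shorthand; the two spellings produce the same WHERE clause. A small sketch over an assumed declaratively mapped User class (find_user is an illustrative helper, not Galaxy's API):

    def find_user(sa_session, User, email):
        # the old code filtered on the raw table column ...
        by_column = sa_session.query(User).filter(User.__table__.c.email == email).first()
        # ... the new code uses the keyword shorthand against the mapped attribute
        by_keyword = sa_session.query(User).filter_by(email=email).first()
        assert by_column is by_keyword    # same instance via the identity map (or both None)
        return by_keyword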
assert dataset.history.user == trans.user return trans.fill_template( "mobile/dataset/detail.mako", dataset=dataset ) @web.expose def dataset_peek( self, trans, id ): - dataset = trans.app.model.HistoryDatasetAssociation.get( id ) + dataset = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id ) assert dataset.history.user == trans.user return trans.fill_template( "mobile/dataset/peek.mako", dataset=dataset ) @@ -45,7 +45,7 @@ def __login( self, trans, email="", password="" ): error = password_error = None - user = model.User.filter( model.User.table.c.email==email ).first() + user = trans.sa_session.query( model.User ).filter_by( email = email ).first() if not user: error = "No such user" elif user.deleted: diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/page.py --- a/lib/galaxy/web/controllers/page.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/page.py Thu Oct 22 23:02:28 2009 -0400 @@ -65,7 +65,7 @@ page_slug_err = "Page id is required" elif not VALID_SLUG_RE.match( page_slug ): page_slug_err = "Page identifier must consist of only lowercase letters, numbers, and the '-' character" - elif model.Page.filter_by( user=user, slug=page_slug ).first(): + elif trans.sa_session.query( model.Page ).filter_by( user=user, slug=page_slug ).first(): page_slug_err = "Page id must be unique" else: # Create the new stored workflow diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/requests.py --- a/lib/galaxy/web/controllers/requests.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/requests.py Thu Oct 22 23:02:28 2009 -0400 @@ -108,7 +108,7 @@ def __show_request(self, trans, id, add_sample=False): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: return trans.response.send_redirect( web.url_for( controller='requests', action='list', @@ -130,7 +130,7 @@ ''' Shows the request details ''' - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) # list of widgets to be rendered on the request form request_details = [] # main details @@ -171,7 +171,7 @@ if field['type'] == 'AddressField': if request.values.content[index]: request_details.append(dict(label=field['label'], - value=trans.app.model.UserAddress.get(int(request.values.content[index])).get_html(), + value=trans.sa_session.query( trans.app.model.UserAddress ).get( int( request.values.content[index] ) ).get_html(), helptext=field['helptext']+' ('+req+')')) else: request_details.append(dict(label=field['label'], @@ -220,7 +220,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - request = trans.app.model.Request.get(int(params.get('request_id', None))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', None ) ) ) except: return trans.response.send_redirect( web.url_for( controller='requests', action='list', @@ -313,7 +313,7 @@ sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) )) sample = request.has_sample(sample_name) if sample: - form_values = trans.app.model.FormValues.get(sample.values.id) + form_values = trans.sa_session.query( trans.app.model.FormValues ).get( sample.values.id ) form_values.content = sample_values form_values.flush() sample.name = new_sample_name @@ -344,13 +344,13 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', 
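The __login change above also swaps a filter() on the raw table column for filter_by(), which takes keyword arguments named after mapped attributes. The two spellings below are equivalent; User, sa_session and email are assumed to be in scope (for instance the toy setup sketched earlier, extended with a User model):

    # Column-expression form: explicit comparison against the mapped column.
    user = sa_session.query(User).filter(User.email == email).first()

    # Keyword form: filter_by() matches on mapped attribute names and reads
    # closest to the old User.filter(...) call, but stays on the session.
    user = sa_session.query(User).filter_by(email=email).first()

    # .first() returns the first matching row or None, so the existing
    # "no such user" truthiness check keeps working unchanged.
    if not user:
        error = "No such user"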
'' ) ) messagetype = params.get( 'messagetype', 'done' ) - request = trans.app.model.Request.get(int(params.get('request_id', 0))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', 0 ) ) ) current_samples, details, edit_mode = self.__update_samples( request, **kwd ) sample_index = int(params.get('sample_id', 0)) sample_name = current_samples[sample_index][0] s = request.has_sample(sample_name) if s: - s.delete() + trans.sa_session.delete( s ) s.flush() request.flush() del current_samples[sample_index] @@ -368,7 +368,8 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - request = trans.app.model.Request.get(int(params.get('request_id', 0))) + # TODO: Fix the following - can we get a Request.id == 0??? + request = trans.sa_session.query( trans.app.model.Request ).get(int(params.get('request_id', 0))) current_samples, details, edit_mode = self.__update_samples( request, **kwd ) return trans.fill_template( '/requests/show_request.mako', request=request, @@ -379,7 +380,7 @@ edit_mode=edit_mode) def __select_request_type(self, trans, rtid): rt_ids = ['none'] - for rt in trans.app.model.RequestType.query().all(): + for rt in trans.sa_session.query( trans.app.model.RequestType ): if not rt.deleted: rt_ids.append(str(rt.id)) select_reqtype = SelectField('select_request_type', @@ -389,7 +390,7 @@ select_reqtype.add_option('Select one', 'none', selected=True) else: select_reqtype.add_option('Select one', 'none') - for rt in trans.app.model.RequestType.query().all(): + for rt in trans.sa_session.query( trans.app.model.RequestType ): if not rt.deleted: if rtid == rt.id: select_reqtype.add_option(rt.name, rt.id, selected=True) @@ -411,7 +412,7 @@ elif params.get('create', False) == 'True': if params.get('create_request_button', False) == 'Save' \ or params.get('create_request_samples_button', False) == 'Add samples': - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) if not util.restore_text(params.get('name', '')): msg = 'Please enter the <b>Name</b> of the request' kwd['create'] = 'True' @@ -448,7 +449,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) except: return trans.fill_template( '/requests/new_request.mako', select_request_type=self.__select_request_type(trans, 'none'), @@ -496,8 +497,9 @@ lib_id = str(request.library.id) selected_lib = request.library # get all permitted libraries for this user - all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \ - .order_by( trans.app.model.Library.name ).all() + all_libraries = trans.sa_session.query( trans.app.model.Library ) \ + .filter( trans.app.model.Library.table.c.deleted == False ) \ + .order_by( trans.app.model.Library.name ) user, roles = trans.get_user_and_roles() actions_to_check = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ] libraries = odict() @@ -600,16 +602,16 @@ This method saves a new request if request_id is None. 
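Deleting a sample now also goes through the session: the monkeypatched s.delete() becomes trans.sa_session.delete(s). A minimal sketch of the delete-and-flush sequence, with illustrative names (whether the subsequent flush is spelled on the object or on the session depends on which convenience wrappers remain):

    def remove_sample(sa_session, sample):
        """Flag a persistent object for deletion and emit the DELETE now.

        session.delete() only marks the object; nothing reaches the database
        until the session is flushed (or committed).
        """
        sa_session.delete(sample)
        sa_session.flush()  # emits DELETE for the sample, plus any configured cascades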
''' params = util.Params( kwd ) - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) name = util.restore_text(params.get('name', '')) desc = util.restore_text(params.get('desc', '')) # library try: - library = trans.app.model.Library.get(int(params.get('library_id', None))) + library = trans.sa_session.query( trans.app.model.Library ).get( int( params.get( 'library_id', None ) ) ) except: library = None try: - folder = trans.app.model.LibraryFolder.get(int(params.get('folder_id', None))) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( params.get( 'folder_id', None ) ) ) except: if library: folder = library.root_folder @@ -633,7 +635,7 @@ user_address.country = util.restore_text(params.get('field_%i_country' % index, '')) user_address.phone = util.restore_text(params.get('field_%i_phone' % index, '')) user_address.flush() - trans.user.refresh() + trans.sa_session.refresh( trans.user ) values.append(int(user_address.id)) elif value == unicode('none'): values.append('') @@ -667,7 +669,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - request = trans.app.model.Request.get(int(params.get('request_id', None))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', None ) ) ) except: return trans.response.send_redirect( web.url_for( controller='requests', action='list', @@ -678,7 +680,7 @@ return self.__edit_request(trans, request.id, **kwd) elif params.get('save_changes_request_button', False) == 'Save changes' \ or params.get('edit_samples_button', False) == 'Edit samples': - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) if not util.restore_text(params.get('name', '')): msg = 'Please enter the <b>Name</b> of the request' kwd['messagetype'] = 'error' @@ -708,7 +710,7 @@ def __edit_request(self, trans, id, **kwd): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -750,7 +752,7 @@ return self.__show_request_form(trans) def __delete_request(self, trans, id): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -777,7 +779,7 @@ **kwd) ) def __undelete_request(self, trans, id): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -798,7 +800,7 @@ **kwd) ) def __submit(self, trans, id): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -837,7 +839,7 @@ params = util.Params( kwd ) try: id = int(params.get('id', False)) - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -876,7 +878,7 @@ params = util.Params( kwd ) try: sample_id = int(params.get('sample_id', False)) - sample = trans.app.model.Sample.get(sample_id) + sample = trans.sa_session.query( trans.app.model.Sample ).get( sample_id 
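The address-saving code above replaces trans.user.refresh() with trans.sa_session.refresh(trans.user); the effect is the same, a fresh SELECT that re-populates the instance, but the call now lives on the session like the rest. A short sketch with stand-in names (user_address, user, sa_session):

    sa_session.add(user_address)  # or user_address.flush() where that wrapper still exists
    sa_session.flush()            # INSERT the address so user_address.id is assigned
    sa_session.refresh(user)      # expire and re-load the user; a later read of
                                  # user.addresses will see the new row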
) except: msg = "Invalid sample ID" return trans.response.send_redirect( web.url_for( controller='requests', @@ -897,7 +899,3 @@ events_list=events_list, sample_name=sample.name, request=sample.request) - - - - diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/requests_admin.py --- a/lib/galaxy/web/controllers/requests_admin.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/requests_admin.py Thu Oct 22 23:02:28 2009 -0400 @@ -50,11 +50,11 @@ grids.GridColumnFilter( "All", args=dict( deleted=False ) ) ] def get_user(self, trans, request): - return trans.app.model.User.get(request.user_id).email + return trans.sa_session.query( trans.app.model.User ).get( request.user_id ).email def get_current_item( self, trans ): return None def get_request_type(self, trans, request): - request_type = trans.app.model.RequestType.get(request.request_type_id) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( request.request_type_id ) return request_type.name def number_of_samples(self, trans, request): return str(len(request.samples)) @@ -116,7 +116,7 @@ return self.request_grid( trans, **kwargs ) def __show_request(self, trans, id, messagetype, msg): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: return trans.response.send_redirect( web.url_for( controller='requests_admin', action='list', @@ -136,7 +136,7 @@ def __edit_request(self, trans, id, **kwd): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -178,7 +178,7 @@ return self.__show_request_form(trans) def __delete_request(self, trans, id): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -206,7 +206,7 @@ **kwd) ) def __undelete_request(self, trans, id): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -228,7 +228,7 @@ **kwd) ) def __submit(self, trans, id): try: - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -267,7 +267,7 @@ # def __select_request_type(self, trans, rtid): rt_ids = ['none'] - for rt in trans.app.model.RequestType.query().all(): + for rt in trans.sa_session.query( trans.app.model.RequestType ): if not rt.deleted: rt_ids.append(str(rt.id)) select_reqtype = SelectField('select_request_type', @@ -277,7 +277,7 @@ select_reqtype.add_option('Select one', 'none', selected=True) else: select_reqtype.add_option('Select one', 'none') - for rt in trans.app.model.RequestType.query().all(): + for rt in trans.sa_session.query( trans.app.model.RequestType ): if not rt.deleted: if rtid == rt.id: select_reqtype.add_option(rt.name, rt.id, selected=True) @@ -299,7 +299,7 @@ elif params.get('create', False) == 'True': if params.get('create_request_button', False) == 'Save' \ or params.get('create_request_samples_button', False) == 'Add samples': - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) if not util.restore_text(params.get('name', '')) \ or util.restore_text(params.get('select_user', 
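Several loops above, such as the ones that build the request-type select list, now iterate the Query object directly instead of materializing it with .all(); a Query is iterable and the SELECT is issued when iteration begins. A sketch of that pattern, plus an optional variant that pushes the deleted test into SQL (the SQL-side filter is only a suggestion, not what the diff does; RequestType and sa_session are assumed):

    rt_ids = ['none']

    # As in the changeset: iterate the Query directly, no intermediate list.
    for rt in sa_session.query(RequestType):
        if not rt.deleted:
            rt_ids.append(str(rt.id))

    # Same result with the filtering done by the database instead of Python.
    rt_ids = ['none']
    for rt in sa_session.query(RequestType).filter(RequestType.deleted == False):
        rt_ids.append(str(rt.id))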
'')) == unicode('none'): msg = 'Please enter the <b>Name</b> of the request and the <b>user</b> on behalf of whom this request will be submitted before saving this request' @@ -338,7 +338,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) except: return trans.fill_template( '/admin/requests/new_request.mako', select_request_type=self.__select_request_type(trans, 'none'), @@ -350,7 +350,7 @@ # user user_id = params.get( 'select_user', 'none' ) try: - user = trans.app.model.User.get(int(user_id)) + user = trans.sa_session.query( trans.app.model.User ).get( int( user_id ) ) except: user = None # list of widgets to be rendered on the request form @@ -378,7 +378,7 @@ messagetype=messagetype) def __select_user(self, trans, userid): user_ids = ['none'] - for user in trans.app.model.User.query().all(): + for user in trans.sa_session.query( trans.app.model.User ): if not user.deleted: user_ids.append(str(user.id)) select_user = SelectField('select_user', @@ -390,7 +390,7 @@ select_user.add_option('Select one', 'none') def __get_email(user): return user.email - user_list = trans.app.model.User.query().all() + user_list = trans.sa_session.query( trans.app.model.User ) user_list.sort(key=__get_email) for user in user_list: if not user.deleted: @@ -423,8 +423,9 @@ libraries = {} else: # get all permitted libraries for this user - all_libraries = trans.app.model.Library.filter( trans.app.model.Library.table.c.deleted == False ) \ - .order_by( trans.app.model.Library.name ).all() + all_libraries = trans.sa_session.query( trans.app.model.Library ) \ + .filter( trans.app.model.Library.table.c.deleted == False ) \ + .order_by( trans.app.model.Library.name ) roles = user.all_roles() actions_to_check = [ trans.app.security_agent.permitted_actions.LIBRARY_ADD ] # The libraries dictionary looks like: { library : '1,2' }, library : '3' } @@ -539,20 +540,20 @@ This method saves a new request if request_id is None. 
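One spot in __select_user above is worth flagging when the .all() is dropped: the code goes on to call user_list.sort(key=...), which is a list method, and a bare Query object has no in-place sort(). Two ways to keep the ordering, sketched with the same assumed names (an illustration of the pitfall, not a description of what the changeset ships):

    # Option 1: materialize the list first, then sort in Python as before.
    user_list = sa_session.query(User).all()
    user_list.sort(key=lambda user: user.email)

    # Option 2: let the database order the rows and drop the Python sort.
    user_list = sa_session.query(User).order_by(User.email)

    for user in user_list:
        if not user.deleted:
            select_user.add_option(user.email, user.id)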
''' params = util.Params( kwd ) - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) if request: user = request.user else: - user = trans.app.model.User.get(int(params.get('select_user', ''))) + user = trans.sa_session.query( trans.app.model.User ).get( int( params.get( 'select_user', '' ) ) ) name = util.restore_text(params.get('name', '')) desc = util.restore_text(params.get('desc', '')) # library try: - library = trans.app.model.Library.get(int(params.get('library_id', None))) + library = trans.sa_session.query( trans.app.model.Library ).get( int( params.get( 'library_id', None ) ) ) except: library = None try: - folder = trans.app.model.LibraryFolder.get(int(params.get('folder_id', None))) + folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( int( params.get( 'folder_id', None ) ) ) except: if library: folder = library.root_folder @@ -576,7 +577,7 @@ user_address.country = util.restore_text(params.get('field_%i_country' % index, '')) user_address.phone = util.restore_text(params.get('field_%i_phone' % index, '')) user_address.flush() - trans.user.refresh() + trans.sa_session.refresh( trans.user ) values.append(int(user_address.id)) elif value == unicode('none'): values.append('') @@ -614,7 +615,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - request = trans.app.model.Request.get(int(params.get('request_id', None))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', None ) ) ) except: return trans.response.send_redirect( web.url_for( controller='requests_admin', action='list', @@ -625,7 +626,7 @@ return self.__edit_request(trans, request.id, **kwd) elif params.get('save_changes_request_button', False) == 'Save changes' \ or params.get('edit_samples_button', False) == 'Edit samples': - request_type = trans.app.model.RequestType.get(int(params.select_request_type)) + request_type = trans.sa_session.query( trans.app.model.RequestType ).get( int( params.select_request_type ) ) if not util.restore_text(params.get('name', '')): msg = 'Please enter the <b>Name</b> of the request' kwd['messagetype'] = 'error' @@ -658,7 +659,7 @@ params = util.Params( kwd ) try: id = int(params.get('id', False)) - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) except: msg = "Invalid request ID" log.warn( msg ) @@ -727,7 +728,7 @@ msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) try: - request = trans.app.model.Request.get(int(params.get('request_id', None))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', None ) ) ) except: return trans.response.send_redirect( web.url_for( controller='requests_admin', action='list', @@ -820,7 +821,7 @@ sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) )) sample = request.has_sample(sample_name) if sample: - form_values = trans.app.model.FormValues.get(sample.values.id) + form_values = trans.sa_session.query( trans.app.model.FormValues ).get( sample.values.id ) form_values.content = sample_values form_values.flush() sample.name = new_sample_name @@ -851,13 +852,13 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 
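In __save_request above, the try/except around the library and folder lookups is really guarding the int() conversion: params.get('library_id', None) can be None, and Query.get() itself does not raise for a missing id, it just returns None. A more explicit spelling of the same fallback, with illustrative names:

    library_id = params.get('library_id', None)
    try:
        # Returns the Library or None; int() is what raises on a bad or missing id.
        library = sa_session.query(Library).get(int(library_id))
    except (TypeError, ValueError):
        library = None

    folder_id = params.get('folder_id', None)
    try:
        folder = sa_session.query(LibraryFolder).get(int(folder_id))
    except (TypeError, ValueError):
        folder = library.root_folder if library else None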
'done' ) - request = trans.app.model.Request.get(int(params.get('request_id', 0))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', 0 ) ) ) current_samples, details, edit_mode = self.__update_samples( request, **kwd ) sample_index = int(params.get('sample_id', 0)) sample_name = current_samples[sample_index][0] s = request.has_sample(sample_name) if s: - s.delete() + trans.sa_session.delete( s ) s.flush() request.flush() del current_samples[sample_index] @@ -875,7 +876,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - request = trans.app.model.Request.get(int(params.get('request_id', 0))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', 0 ) ) ) current_samples, details, edit_mode = self.__update_samples( request, **kwd ) return trans.fill_template( '/admin/requests/show_request.mako', request=request, @@ -890,7 +891,7 @@ ''' Shows the request details ''' - request = trans.app.model.Request.get(id) + request = trans.sa_session.query( trans.app.model.Request ).get( id ) # list of widgets to be rendered on the request form request_details = [] # main details @@ -935,7 +936,7 @@ if field['type'] == 'AddressField': if request.values.content[index]: request_details.append(dict(label=field['label'], - value=trans.app.model.UserAddress.get(int(request.values.content[index])).get_html(), + value=trans.sa_session.query( trans.app.model.UserAddress ).get( int( request.values.content[index] ) ).get_html(), helptext=field['helptext']+' ('+req+')')) else: request_details.append(dict(label=field['label'], @@ -954,7 +955,7 @@ messagetype = params.get( 'messagetype', 'done' ) request_id = params.get( 'request_id', None ) if request_id: - request = trans.app.model.Request.get( int( request_id )) + request = trans.sa_session.query( trans.app.model.Request ).get( int( request_id )) if not request: return trans.response.send_redirect( web.url_for( controller='requests_admin', action='list', @@ -980,7 +981,7 @@ def save_bar_codes(self, trans, **kwd): params = util.Params( kwd ) try: - request = trans.app.model.Request.get(int(params.get('request_id', None))) + request = trans.sa_session.query( trans.app.model.Request ).get( int( params.get( 'request_id', None ) ) ) except: return trans.response.send_redirect( web.url_for( controller='requests_admin', action='list', @@ -1008,7 +1009,7 @@ (bar_code, request.samples[index].name) break # check all the saved bar codes - all_samples = trans.app.model.Sample.query.all() + all_samples = trans.sa_session.query( trans.app.model.Sample ) for sample in all_samples: if bar_code == sample.bar_code: msg = '''The bar code <b>%s</b> of sample <b>%s</b> already @@ -1077,7 +1078,7 @@ params = util.Params( kwd ) try: sample_id = int(params.get('sample_id', False)) - sample = trans.app.model.Sample.get(sample_id) + sample = trans.sa_session.query( trans.app.model.Sample ).get( sample_id ) except: msg = "Invalid sample ID" return trans.response.send_redirect( web.url_for( controller='requests_admin', @@ -1087,8 +1088,10 @@ **kwd) ) comments = util.restore_text( params.comment ) selected_state = int( params.select_state ) - new_state = trans.app.model.SampleState.filter(trans.app.model.SampleState.table.c.request_type_id == sample.request.type.id - and trans.app.model.SampleState.table.c.id == selected_state)[0] + new_state = trans.sa_session.query( trans.app.model.SampleState ) \ + .filter( and_( 
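The bar-code uniqueness check above now iterates sa_session.query(Sample) lazily, which works, but the same question can also be put to the database directly. The variant below is only a sketch of that alternative, not what the changeset implements; Sample, sa_session and bar_code are assumed to be in scope:

    # Ask the database for a clash instead of walking every Sample row in Python.
    existing = sa_session.query(Sample).filter_by(bar_code=bar_code).first()
    if existing is not None:
        msg = ("The bar code %s of sample %s already belongs to another sample."
               % (bar_code, existing.name))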
trans.app.model.SampleState.table.c.request_type_id == sample.request.type.id, + trans.app.model.SampleState.table.c.id == selected_state ) ) \ + .first() event = trans.app.model.SampleEvent(sample, new_state, comments) event.flush() self.__set_request_state(sample.request) @@ -1101,7 +1104,7 @@ params = util.Params( kwd ) try: sample_id = int(params.get('sample_id', False)) - sample = trans.app.model.Sample.get(sample_id) + sample = trans.sa_session.query( trans.app.model.Sample ).get( sample_id ) except: msg = "Invalid sample ID" return trans.response.send_redirect( web.url_for( controller='requests_admin', @@ -1134,7 +1137,7 @@ messagetype = params.get( 'messagetype', 'done' ) show_filter = util.restore_text( params.get( 'show_filter', 'Active' ) ) forms = get_all_forms(trans, all_versions=True) - request_types_list = trans.app.model.RequestType.query().all() + request_types_list = trans.sa_session.query( trans.app.model.RequestType ) if show_filter == 'All': request_types = request_types_list elif show_filter == 'Deleted': @@ -1184,8 +1187,8 @@ msg='Request type <b>%s</b> has been created' % st.name, messagetype='done') ) elif params.get('view', False): - rt = trans.app.model.RequestType.get(int(util.restore_text( params.id ))) - ss_list = trans.app.model.SampleState.filter(trans.app.model.SampleState.table.c.request_type_id == rt.id).all() + rt = trans.sa_session.query( trans.app.model.RequestType ).get( int( util.restore_text( params.id ) ) ) + ss_list = trans.sa_session.query( trans.app.model.SampleState ).filter( trans.app.model.SampleState.table.c.request_type_id == rt.id ) return trans.fill_template( '/admin/requests/view_request_type.mako', request_type=rt, forms=get_all_forms( trans ), @@ -1208,13 +1211,13 @@ rt = trans.app.model.RequestType() rt.name = util.restore_text( params.name ) rt.desc = util.restore_text( params.description ) or "" - rt.request_form = trans.app.model.FormDefinition.get(int( params.request_form_id )) - rt.sample_form = trans.app.model.FormDefinition.get(int( params.sample_form_id )) + rt.request_form = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( params.request_form_id ) ) + rt.sample_form = trans.sa_session.query( trans.app.model.FormDefinition ).get( int( params.sample_form_id ) ) rt.flush() # set sample states - ss_list = trans.app.model.SampleState.filter(trans.app.model.SampleState.table.c.request_type_id == rt.id).all() + ss_list = trans.sa_session.query( trans.app.model.SampleState ).filter( trans.app.model.SampleState.table.c.request_type_id == rt.id ) for ss in ss_list: - ss.delete() + trans.sa_session.delete( ss ) ss.flush() for i in range( num_states ): name = util.restore_text( params.get( 'state_name_%i' % i, None )) @@ -1229,7 +1232,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - rt = trans.app.model.RequestType.get(int(util.restore_text( params.request_type_id ))) + rt = trans.sa_session.query( trans.app.model.RequestType ).get( int( util.restore_text( params.request_type_id ) ) ) rt.deleted = True rt.flush() return trans.response.send_redirect( web.url_for( controller='requests_admin', @@ -1242,7 +1245,7 @@ params = util.Params( kwd ) msg = util.restore_text( params.get( 'msg', '' ) ) messagetype = params.get( 'messagetype', 'done' ) - rt = trans.app.model.RequestType.get(int(util.restore_text( params.request_type_id ))) + rt = trans.sa_session.query( trans.app.model.RequestType ).get( int( util.restore_text( 
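The SampleState lookup above fixes a genuine query bug: the old call joined its two conditions with Python's "and", which never builds a SQL conjunction (depending on the SQLAlchemy version it either collapses to a single condition or refuses to evaluate the clause as a boolean). Two equivalent corrected shapes, spelled with mapped attributes and the usual stand-in names:

    from sqlalchemy import and_

    # Explicit conjunction:
    new_state = sa_session.query(SampleState) \
                          .filter(and_(SampleState.request_type_id == sample.request.type.id,
                                       SampleState.id == selected_state)) \
                          .first()

    # Chained filter() calls are ANDed together as well:
    new_state = sa_session.query(SampleState) \
                          .filter(SampleState.request_type_id == sample.request.type.id) \
                          .filter(SampleState.id == selected_state) \
                          .first()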
params.request_type_id ) ) ) rt.deleted = False rt.flush() return trans.response.send_redirect( web.url_for( controller='requests_admin', diff -r 20b319780138 -r 2c4ed83f76ef lib/galaxy/web/controllers/root.py --- a/lib/galaxy/web/controllers/root.py Wed Oct 21 23:15:22 2009 -0400 +++ b/lib/galaxy/web/controllers/root.py Thu Oct 22 23:02:28 2009 -0400 @@ -85,7 +85,7 @@ def dataset_state ( self, trans, id=None, stamp=None ): if id is not None: try: - data = self.app.model.HistoryDatasetAssociation.get( id ) + data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) except: return trans.show_error_message( "Unable to check dataset %s." %str( id ) ) trans.response.headers['X-Dataset-State'] = data.state @@ -99,7 +99,7 @@ def dataset_code( self, trans, id=None, hid=None, stamp=None ): if id is not None: try: - data = self.app.model.HistoryDatasetAssociation.get( id ) + data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) except: return trans.show_error_message( "Unable to check dataset %s." %str( id ) ) trans.response.headers['Pragma'] = 'no-cache' @@ -119,7 +119,7 @@ ids = map( int, ids.split( "," ) ) states = states.split( "," ) for id, state in zip( ids, states ): - data = self.app.model.HistoryDatasetAssociation.get( id ) + data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id ) if data.state != state: job_hda = data while job_hda.copied_from_history_dataset_association:
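history_item_updates above still issues one .get() per dataset id while walking the zipped ids/states. As an aside, the same rows could be fetched in a single round trip with an IN clause; the sketch below is only that alternative, not what the changeset does (HistoryDatasetAssociation is shortened to HDA, and sa_session is assumed):

    ids = list(map(int, ids.split(",")))
    states = states.split(",")

    # One SELECT ... WHERE id IN (...) instead of a query per dataset.
    hdas_by_id = dict(
        (hda.id, hda)
        for hda in sa_session.query(HDA).filter(HDA.id.in_(ids))
    )
    for id, state in zip(ids, states):
        data = hdas_by_id.get(id)
        if data is not None and data.state != state:
            pass  # same per-dataset bookkeeping as before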