[hg] galaxy 2939: Bug fix for cleanup_datasets.py - use app.sa_session instead of self.sa_session
details:   http://www.bx.psu.edu/hg/galaxy/rev/6e8263ea83fa
changeset: 2939:6e8263ea83fa
user:      Greg Von Kuster <greg@bx.psu.edu>
date:      Mon Nov 02 09:54:18 2009 -0500
description:
Bug fix for cleanup_datasets.py - use app.sa_session instead of self.sa_session.

diffstat:

 scripts/cleanup_datasets/cleanup_datasets.py |  90 +++++++++++++++---------
 1 files changed, 45 insertions(+), 45 deletions(-)

diffs (156 lines):

diff -r 3aa07fc9512a -r 6e8263ea83fa scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py	Fri Oct 30 21:55:17 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py	Mon Nov 02 09:54:18 2009 -0500
@@ -93,14 +93,14 @@
     history_count = 0
     start = time.time()
     if force_retry:
-        histories = self.sa_session.query( app.model.History ) \
-                        .filter( and_( app.model.History.table.c.user_id==None,
-                                       app.model.History.table.c.update_time < cutoff_time ) )
+        histories = app.sa_session.query( app.model.History ) \
+                       .filter( and_( app.model.History.table.c.user_id==None,
+                                      app.model.History.table.c.update_time < cutoff_time ) )
     else:
-        histories = self.sa_session.query( app.model.History ) \
-                        .filter( and_( app.model.History.table.c.user_id==None,
-                                       app.model.History.table.c.deleted==False,
-                                       app.model.History.table.c.update_time < cutoff_time ) )
+        histories = app.sa_session.query( app.model.History ) \
+                       .filter( and_( app.model.History.table.c.user_id==None,
+                                      app.model.History.table.c.deleted==False,
+                                      app.model.History.table.c.update_time < cutoff_time ) )
     for history in histories:
         if not info_only:
             print "Deleting history id ", history.id
@@ -121,16 +121,16 @@
     history_count = 0
     start = time.time()
     if force_retry:
-        histories = self.sa_session.query( app.model.History ) \
-                        .filter( and_( app.model.History.table.c.deleted==True,
-                                       app.model.History.table.c.update_time < cutoff_time ) ) \
-                        .options( eagerload( 'datasets' ) )
+        histories = app.sa_session.query( app.model.History ) \
+                       .filter( and_( app.model.History.table.c.deleted==True,
+                                      app.model.History.table.c.update_time < cutoff_time ) ) \
+                       .options( eagerload( 'datasets' ) )
     else:
-        histories = self.sa_session.query( app.model.History ) \
-                        .filter( and_( app.model.History.table.c.deleted==True,
-                                       app.model.History.table.c.purged==False,
-                                       app.model.History.table.c.update_time < cutoff_time ) ) \
-                        .options( eagerload( 'datasets' ) )
+        histories = app.sa_session.query( app.model.History ) \
+                       .filter( and_( app.model.History.table.c.deleted==True,
+                                      app.model.History.table.c.purged==False,
+                                      app.model.History.table.c.update_time < cutoff_time ) ) \
+                       .options( eagerload( 'datasets' ) )
     for history in histories:
         for dataset_assoc in history.datasets:
             _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
@@ -158,14 +158,14 @@
     library_count = 0
     start = time.time()
     if force_retry:
-        libraries = self.sa_session.query( app.model.Library ) \
-                        .filter( and_( app.model.Library.table.c.deleted==True,
-                                       app.model.Library.table.c.update_time < cutoff_time ) )
+        libraries = app.sa_session.query( app.model.Library ) \
+                       .filter( and_( app.model.Library.table.c.deleted==True,
+                                      app.model.Library.table.c.update_time < cutoff_time ) )
     else:
-        libraries = self.sa_session.query( app.model.Library ) \
-                        .filter( and_( app.model.Library.table.c.deleted==True,
-                                       app.model.Library.table.c.purged==False,
-                                       app.model.Library.table.c.update_time < cutoff_time ) )
+        libraries = app.sa_session.query( app.model.Library ) \
+                       .filter( and_( app.model.Library.table.c.deleted==True,
+                                      app.model.Library.table.c.purged==False,
+                                      app.model.Library.table.c.update_time < cutoff_time ) )
     for library in libraries:
         _purge_folder( library.root_folder, app, remove_from_disk, info_only = info_only )
         if not info_only:
@@ -187,14 +187,14 @@
     folder_count = 0
     start = time.time()
     if force_retry:
-        folders = self.sa_session.query( app.model.LibraryFolder ) \
-                      .filter( and_( app.model.LibraryFolder.table.c.deleted==True,
-                                     app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
+        folders = app.sa_session.query( app.model.LibraryFolder ) \
+                     .filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                    app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
     else:
-        folders = self.sa_session.query( app.model.LibraryFolder ) \
-                      .filter( and_( app.model.LibraryFolder.table.c.deleted==True,
-                                     app.model.LibraryFolder.table.c.purged==False,
-                                     app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
+        folders = app.sa_session.query( app.model.LibraryFolder ) \
+                     .filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                    app.model.LibraryFolder.table.c.purged==False,
+                                    app.model.LibraryFolder.table.c.update_time < cutoff_time ) )
     for folder in folders:
         _purge_folder( folder, app, remove_from_disk, info_only = info_only )
         folder_count += 1
@@ -241,7 +241,7 @@
     deleted_instance_count = 0
     for dataset_id in dataset_ids:
         print "######### Processing dataset id:", dataset_id
-        dataset = self.sa_session.query( app.model.Dataset ).get( dataset_id )
+        dataset = app.sa_session.query( app.model.Dataset ).get( dataset_id )
         if dataset.id not in skip and _dataset_is_deletable( dataset ):
             deleted_dataset_count += 1
             for dataset_instance in dataset.history_associations + dataset.library_associations:
@@ -261,16 +261,16 @@
     disk_space = 0
     start = time.time()
     if force_retry:
-        datasets = self.sa_session.query( app.model.Dataset ) \
-                       .filter( and_( app.model.Dataset.table.c.deleted==True,
-                                      app.model.Dataset.table.c.purgable==True,
-                                      app.model.Dataset.table.c.update_time < cutoff_time ) )
+        datasets = app.sa_session.query( app.model.Dataset ) \
+                      .filter( and_( app.model.Dataset.table.c.deleted==True,
+                                     app.model.Dataset.table.c.purgable==True,
+                                     app.model.Dataset.table.c.update_time < cutoff_time ) )
     else:
-        datasets = self.sa_session.query( app.model.Dataset ) \
-                       .filter( and_( app.model.Dataset.table.c.deleted==True,
-                                      app.model.Dataset.table.c.purgable==True,
-                                      app.model.Dataset.table.c.purged==False,
-                                      app.model.Dataset.table.c.update_time < cutoff_time ) )
+        datasets = app.sa_session.query( app.model.Dataset ) \
+                      .filter( and_( app.model.Dataset.table.c.deleted==True,
+                                     app.model.Dataset.table.c.purgable==True,
+                                     app.model.Dataset.table.c.purged==False,
+                                     app.model.Dataset.table.c.update_time < cutoff_time ) )
     for dataset in datasets:
         file_size = dataset.file_size
         _purge_dataset( dataset, remove_from_disk, info_only = info_only )
@@ -294,7 +294,7 @@
         dataset_instance.mark_deleted( include_children = include_children )
         dataset_instance.clear_associated_files()
         dataset_instance.flush()
-        self.sa_session.refresh( dataset_instance.dataset )
+        app.sa_session.refresh( dataset_instance.dataset )
         if is_deletable or _dataset_is_deletable( dataset_instance.dataset ):
             # Calling methods may have already checked _dataset_is_deletable, if so, is_deletable should be True
             _delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only=info_only, is_deletable=is_deletable )
@@ -317,12 +317,12 @@
     metadata_files = []
     #lets create a list of metadata files, then perform actions on them
     for hda in dataset.history_associations:
-        for metadata_file in self.sa_session.query( app.model.MetadataFile ) \
-                                 .filter( app.model.MetadataFile.table.c.hda_id==hda.id ):
+        for metadata_file in app.sa_session.query( app.model.MetadataFile ) \
+                                .filter( app.model.MetadataFile.table.c.hda_id==hda.id ):
             metadata_files.append( metadata_file )
     for lda in dataset.library_associations:
-        for metadata_file in self.sa_session.query( app.model.MetadataFile ) \
-                                 .filter( app.model.MetadataFile.table.c.lda_id==lda.id ):
+        for metadata_file in app.sa_session.query( app.model.MetadataFile ) \
+                                .filter( app.model.MetadataFile.table.c.lda_id==lda.id ):
             metadata_files.append( metadata_file )
     for metadata_file in metadata_files:
         print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
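
For readers skimming the diff: the cleanup helpers touched above appear to be module-level functions that receive the Galaxy app object as a parameter (note the `app` argument threaded through `_purge_dataset_instance` and `_purge_folder`), so `self` was never defined in their scope and each `self.sa_session` reference raised a NameError as soon as that line executed. The snippet below is a minimal, self-contained sketch of that failure mode and the fix, in Python 3 syntax even though the script itself is Python 2; FakeSession, FakeApp, and the two helper functions are illustrative stand-ins, not Galaxy code.

    # Minimal sketch of the bug this changeset fixes.
    # FakeSession and FakeApp are illustrative stand-ins, not Galaxy classes.

    class FakeSession:
        # Stands in for the SQLAlchemy session exposed as app.sa_session.
        def query(self, model):
            return []          # pretend the query matched no rows

    class FakeApp:
        # Stands in for the Galaxy application object passed to each helper.
        sa_session = FakeSession()

    def purge_histories_broken(app):
        # Pre-fix form: a plain module-level function has no `self` in
        # scope, so this line raises NameError the moment the helper runs.
        return self.sa_session.query("History")

    def purge_histories_fixed(app):
        # Post-fix form: reach the session through the app argument.
        return app.sa_session.query("History")

    app = FakeApp()
    print(purge_histories_fixed(app))      # [] -- works
    try:
        purge_histories_broken(app)
    except NameError as err:
        print("pre-fix form fails:", err)  # name 'self' is not defined

Assuming the helpers really are module-level (as the diff suggests), every code path that reached one of these queries failed immediately before this change; swapping self.sa_session for app.sa_session on all 45 affected lines restores the session lookup without altering any query logic.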