[hg] galaxy 2448: Merge with main.
details:   http://www.bx.psu.edu/hg/galaxy/rev/c6031c4e6546
changeset: 2448:c6031c4e6546
user:      ianschenck@Thugunit.local
date:      Thu Apr 23 15:11:29 2009 -0400
description: Merge with main.

0 file(s) affected in this change:

diffs (1069 lines):

diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py	Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/model/__init__.py	Thu Apr 23 15:11:29 2009 -0400
@@ -229,7 +229,7 @@
         return des
     @property
     def activatable_datasets( self ):
-        return [ hda for hda in self.datasets if not hda.dataset.purged ] #this needs to be a list
+        return [ hda for hda in self.datasets if not hda.dataset.deleted ] #this needs to be a list
 
 class UserRoleAssociation( object ):
     def __init__( self, user, role ):
@@ -707,6 +707,12 @@
     @property
     def active_components( self ):
         return list( self.active_folders ) + list( self.active_datasets )
+    @property
+    def activatable_datasets( self ):
+        return [ ld for ld in self.datasets if not ld.library_dataset_dataset_association.dataset.deleted ] #this needs to be a list
+    @property #make this a relation
+    def activatable_folders( self ):
+        return [ folder for folder in self.folders if not folder.purged ] #this needs to be a list
 
 class LibraryDataset( object ):
     # This class acts as a proxy to the currently selected LDDA
@@ -743,6 +749,14 @@
     name = property( get_name, set_name )
     def display_name( self ):
         self.library_dataset_dataset_association.display_name()
+    def get_purged( self ):
+        return self.library_dataset_dataset_association.dataset.purged
+    def set_purged( self, purged ):
+        if purged:
+            raise Exception( "Not implemented" )
+        if not purged and self.purged:
+            raise Exception( "Cannot unpurge once purged" )
+    purged = property( get_purged, set_purged )
     def get_library_item_info_templates( self, template_list=[], restrict=False ):
         # If restrict is True, we'll return only those templates directly associated with this LibraryDataset
         if self.library_dataset_info_template_associations:
@@ -750,7 +764,7 @@
         if restrict not in [ 'True', True ]:
             self.folder.get_library_item_info_templates( template_list, restrict )
         return template_list
-    
+
 class LibraryDatasetDatasetAssociation( DatasetInstance ):
     def __init__( self,
                   copied_from_history_dataset_association=None,
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0005_cleanup_datasets_fix.py	Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,74 @@
+import sys, logging, os, time
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+from migrate import migrate_engine
+from sqlalchemy import and_
+
+# load existing galaxy model, we are only changing data
+import galaxy.model
+from galaxy.model import mapping
+model = mapping.init( galaxy.model.Dataset.file_path, str( migrate_engine.url ) )
+
+def __guess_dataset_by_filename( filename ):
+    """Return a guessed dataset by filename"""
+    try:
+        fields = os.path.split( filename )
+        if fields:
+            if fields[-1].startswith( 'dataset_' ) and fields[-1].endswith( '.dat' ): #dataset_%d.dat
+                return model.Dataset.get( int( fields[-1][ len( 'dataset_' ): -len( '.dat' ) ] ) )
+    except:
+        pass #some parsing error, we can't guess Dataset
+    return None
+
+def upgrade():
+    log.debug( "Fixing a discrepancy concerning deleted shared history items." )
+    affected_items = 0
+    start_time = time.time()
+    for dataset in model.Dataset.filter( and_( model.Dataset.c.deleted == True, model.Dataset.c.purged == False ) ).all():
+        for dataset_instance in dataset.history_associations + dataset.library_associations:
+            if not dataset_instance.deleted:
+                dataset.deleted = False
+                if dataset.file_size in [ None, 0 ]:
+                    dataset.set_size() #Restore filesize
+                affected_items += 1
+                break
+    galaxy.model.mapping.Session.flush()
+    log.debug( "%i items affected, and restored." % ( affected_items ) )
+    log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+    #fix share before hda
+    log.debug( "Fixing a discrepancy concerning cleaning up deleted history items shared before HDAs." )
+    dataset_by_filename = {}
+    changed_associations = 0
+    start_time = time.time()
+    for dataset in model.Dataset.filter( model.Dataset.external_filename.like( '%dataset_%.dat' ) ).all():
+        if dataset.file_name in dataset_by_filename:
+            guessed_dataset = dataset_by_filename[ dataset.file_name ]
+        else:
+            guessed_dataset = __guess_dataset_by_filename( dataset.file_name )
+            if guessed_dataset and dataset.file_name != guessed_dataset.file_name: #not os.path.samefile( dataset.file_name, guessed_dataset.file_name ):
+                guessed_dataset = None
+            dataset_by_filename[ dataset.file_name ] = guessed_dataset
+
+        if guessed_dataset is not None and guessed_dataset.id != dataset.id: #could we have a self referential dataset?
+            for dataset_instance in dataset.history_associations + dataset.library_associations:
+                dataset_instance.dataset = guessed_dataset
+                changed_associations += 1
+            #mark original Dataset as deleted and purged, it is no longer in use, but do not delete file_name contents
+            dataset.deleted = True
+            dataset.external_filename = "Dataset was result of share before HDA, and has been replaced: %s mapped to Dataset %s" % ( dataset.external_filename, guessed_dataset.id )
+            dataset.purged = True #we don't really purge the file here, but we mark it as purged, since this dataset is now defunct
+    galaxy.model.mapping.Session.flush()
+    log.debug( "%i items affected, and restored." % ( changed_associations ) )
+    log.debug( "Time elapsed: %s" % ( time.time() - start_time ) )
+
+def downgrade():
+    log.debug( "Downgrade is not possible." )
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py	Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/util/__init__.py	Thu Apr 23 15:11:29 2009 -0400
@@ -146,7 +146,7 @@
     elif isinstance( value, list ):
         return map(sanitize_text, value)
     else:
-        raise Exception, 'Unknown parameter type'
+        raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
 
 class Params:
     """
diff -r 0cf5c25d1d2b -r c6031c4e6546 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py	Thu Apr 23 14:42:35 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py	Thu Apr 23 15:11:29 2009 -0400
@@ -822,11 +822,13 @@
                                                           msg=util.sanitize_text( msg ),
                                                           messagetype='error' ) )
         created_ldda_ids = params.get( 'created_ldda_ids', '' )
+        show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) )
         return trans.fill_template( '/admin/library/browse_library.mako',
                                     library=trans.app.model.Library.get( id ),
                                     created_ldda_ids=created_ldda_ids,
                                     msg=msg,
-                                    messagetype=messagetype )
+                                    messagetype=messagetype,
+                                    show_deleted=show_deleted )
     @web.expose
     @web.require_admin
     def library( self, trans, id=None, **kwd ):
@@ -960,7 +962,8 @@
                                     libraries=libraries,
                                     deleted=True,
                                     msg=msg,
-                                    messagetype=messagetype )
+                                    messagetype=messagetype,
+                                    show_deleted = True )
     @web.expose
     @web.require_admin
     def undelete_library( self, trans, **kwd ):
@@ -2040,6 +2043,50 @@
                                                    id=library_id,
                                                    msg=util.sanitize_text( msg ),
                                                    messagetype=messagetype ) )
+
+    @web.expose
+    @web.require_admin
+    def delete_library_item( self, trans, library_id = None, library_item_id = None, library_item_type = None ):
+        #this action will handle deleting all types of library items in library browsing mode
+        library_item_types = { 'library': trans.app.model.Library, 'folder': trans.app.model.LibraryFolder, 'dataset': trans.app.model.LibraryDataset, }
+        if library_item_type not in library_item_types:
+            raise ValueError( 'Bad library_item_type specified: %s' % library_item_types )
+        if library_item_id is None:
+            raise ValueError( 'library_item_id not specified' )
+        library_item = library_item_types[ library_item_type ].get( int( library_item_id ) )
+        library_item.deleted = True
+        library_item.flush()
+        #need to str because unicode isn't accepted...
+        msg = str( "%s '%s' has been marked deleted" % ( library_item_type, library_item.name ) )
+        messagetype = str( "done" )
+        if library_item_type == 'library' or library_id is None:
+            return self.browse_libraries( trans, msg = msg, messagetype = messagetype )
+        else:
+            return self.browse_library( trans, id = library_id , msg = msg, messagetype = messagetype )
+
+    @web.expose
+    @web.require_admin
+    def undelete_library_item( self, trans, library_id = None, library_item_id = None, library_item_type = None ):
+        #this action will handle deleting all types of library items in library browsing mode
+        library_item_types = { 'library': trans.app.model.Library, 'folder': trans.app.model.LibraryFolder, 'dataset': trans.app.model.LibraryDataset, }
+        if library_item_type not in library_item_types:
+            raise ValueError( 'Bad library_item_type specified: %s' % library_item_types )
+        if library_item_id is None:
+            raise ValueError( 'library_item_id not specified' )
+        library_item = library_item_types[ library_item_type ].get( int( library_item_id ) )
+        if library_item.purged:
+            raise ValueError( '%s %s cannot be undeleted' % ( library_item_type, library_item.name ) )
+        library_item.deleted = False
+        library_item.flush()
+        msg = str( "%s '%s' has been undeleted" % ( library_item_type, library_item.name ) )
+        messagetype = str( "done" )
+        if library_item_type == 'library' or library_id is None:
+            return self.browse_libraries( trans, msg = msg, messagetype = messagetype )
+        else:
+            return self.browse_library( trans, id = library_id , msg = msg, messagetype = messagetype )
+
+
+
     #@web.expose
     #@web.require_admin
     #def delete_dataset( self, trans, id=None, **kwd):
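[Editor's note] The two new admin actions share one dispatch-table pattern: a dict maps the item-type string to a model class, and anything outside the table is rejected before any id lookup or flag change. A stripped-down sketch of that pattern with stand-in classes (Library, LibraryFolder, toggle_deleted and ITEM_TYPES here are invented for illustration, not Galaxy's API):

    class Library( object ):
        pass

    class LibraryFolder( object ):
        pass

    ITEM_TYPES = { 'library': Library, 'folder': LibraryFolder }

    def toggle_deleted( item_type, item, deleted=True ):
        """Validate the type string, then flip the 'deleted' flag on the item."""
        if item_type not in ITEM_TYPES:
            raise ValueError( 'Bad item_type specified: %s' % item_type )
        if not isinstance( item, ITEM_TYPES[ item_type ] ):
            raise ValueError( '%s is not a %s' % ( item, item_type ) )
        item.deleted = deleted
        return item

    folder = LibraryFolder()
    toggle_deleted( 'folder', folder )          # marks it deleted
    toggle_deleted( 'folder', folder, False )   # undeletes it

One table plus one guard keeps delete and undelete symmetric across all three item types; the only asymmetry in the real actions is the extra purged check on undelete.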
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py	Thu Apr 23 15:11:29 2009 -0400
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-import sys, os, time, ConfigParser
+import sys, os, time, ConfigParser, shutil
 from datetime import datetime, timedelta
 from time import strftime
 from optparse import OptionParser
@@ -15,7 +15,7 @@
 
 pkg_resources.require( "SQLAlchemy >= 0.4" )
 
-from galaxy.model.orm import *
+from galaxy.model.orm import and_, eagerload
 
 assert sys.version_info[:2] >= ( 2, 4 )
 
@@ -23,271 +23,172 @@
     parser = OptionParser()
     parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (60)", default=60 )
     parser.add_option( "-r", "--remove_from_disk", action="store_true", dest="remove_from_disk", help="remove datasets from disk when purged", default=False )
-    parser.add_option( "-1", "--info_delete_userless_histories", action="store_true", dest="info_delete_userless_histories", default=False, help="info about the histories and datasets that will be affected by delete_userless_histories()" )
-    parser.add_option( "-2", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
-    parser.add_option( "-3", "--info_purge_histories", action="store_true", dest="info_purge_histories", default=False, help="info about histories and datasets that will be affected by purge_histories()" )
-    parser.add_option( "-4", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
-    parser.add_option( "-5", "--info_purge_datasets", action="store_true", dest="info_purge_datasets", default=False, help="info about the datasets that will be affected by purge_datasets()" )
-    parser.add_option( "-6", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+    parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+
+    parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
+
+    parser.add_option( "-2", "--purge_histories", action="store_true", dest="purge_histories", default=False, help="purge deleted histories" )
+
+    parser.add_option( "-3", "--purge_datasets", action="store_true", dest="purge_datasets", default=False, help="purge deleted datasets" )
+
+    parser.add_option( "-4", "--purge_libraries", action="store_true", dest="purge_libraries", default=False, help="purge deleted libraries" )
+
+    parser.add_option( "-5", "--purge_folders", action="store_true", dest="purge_folders", default=False, help="purge deleted library folders" )
+
+
     ( options, args ) = parser.parse_args()
     ini_file = args[0]
-    if not ( options.info_delete_userless_histories ^ options.delete_userless_histories ^ \
-             options.info_purge_histories ^ options.purge_histories ^ \
-             options.info_purge_datasets ^ options.purge_datasets ):
+    if not ( options.purge_folders ^ options.delete_userless_histories ^ \
+             options.purge_libraries ^ options.purge_histories ^ \
+             options.purge_datasets ):
         parser.print_help()
         sys.exit(0)
+
+    if options.remove_from_disk and options.info_only:
+        parser.error( "remove_from_disk and info_only are mutually exclusive" )
 
     conf_parser = ConfigParser.ConfigParser( {'here':os.getcwd()} )
     conf_parser.read( ini_file )
     configuration = {}
     for key, value in conf_parser.items( "app:main" ):
         configuration[key] = value
-    database_connection = configuration['database_connection']
+
+    if 'database_connection' in configuration:
+        database_connection = configuration['database_connection']
+    else:
+        database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % configuration["database_file"]
     file_path = configuration['file_path']
     app = CleanupDatasetsApplication( database_connection=database_connection, file_path=file_path )
-    h = app.model.History
-    d = app.model.Dataset
-    m = app.model.MetadataFile
     cutoff_time = datetime.utcnow() - timedelta( days=options.days )
     now = strftime( "%Y-%m-%d %H:%M:%S" )
-    
+
     print "\n# %s - Handling stuff older than %i days\n" % ( now, options.days )
-    
-    if options.info_delete_userless_histories:
-        info_delete_userless_histories( h, cutoff_time )
-    elif options.delete_userless_histories:
-        delete_userless_histories( h, d, cutoff_time )
-    if options.info_purge_histories:
-        info_purge_histories( h, d, cutoff_time )
+
+    if options.info_only:
+        print "# Displaying info only ( --info_only )\n"
+    elif options.remove_from_disk:
+        print "# Datasets will be removed from disk.\n"
+    else:
+        print "# Datasets will NOT be removed from disk.\n"
+
+    if options.delete_userless_histories:
+        delete_userless_histories( app, cutoff_time, info_only = options.info_only )
    elif options.purge_histories:
-        if options.remove_from_disk:
-            print "# Datasets will be removed from disk...\n"
-        else:
-            print "# Datasets will NOT be removed from disk...\n"
-        purge_histories( h, d, m, cutoff_time, options.remove_from_disk )
-    elif options.info_purge_datasets:
-        info_purge_datasets( d, cutoff_time )
+        purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
    elif options.purge_datasets:
-        if options.remove_from_disk:
-            print "# Datasets will be removed from disk...\n"
-        else:
-            print "# Datasets will NOT be removed from disk...\n"
-        purge_datasets( d, m, cutoff_time, options.remove_from_disk )
+        purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+    elif options.purge_libraries:
+        purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+    elif options.purge_folders:
+        purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+
     sys.exit(0)
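[Editor's note] The rewritten main() keeps the script's XOR idiom for requiring a single action flag. Worth knowing when reusing it: a chained XOR over booleans is only an odd-parity test, so it rejects zero or two selected flags but would accept three; an explicit count is the stricter check. A standalone Python 2 sketch of both (optparse to match the script; the shortened flag set is illustrative):

    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False )
    parser.add_option( "-2", "--purge_histories", action="store_true", dest="purge_histories", default=False )
    parser.add_option( "-3", "--purge_datasets", action="store_true", dest="purge_datasets", default=False )
    ( options, args ) = parser.parse_args()

    flags = [ options.delete_userless_histories, options.purge_histories, options.purge_datasets ]
    xor_ok = reduce( lambda a, b: a ^ b, flags )          # the idiom above: True for an odd number of flags
    exactly_one = len( [ f for f in flags if f ] ) == 1   # stricter: exactly one action selected
    if not exactly_one:
        parser.print_help()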
 
-def info_delete_userless_histories( h, cutoff_time ):
-    # Provide info about the histories and datasets that will be affected if the delete_userless_histories function is executed.
+def delete_userless_histories( app, cutoff_time, info_only = False ):
+    # Deletes userless histories whose update_time value is older than the cutoff_time.
+    # The purge history script will handle marking DatasetInstances as deleted.
+    # Nothing is removed from disk yet.
     history_count = 0
-    dataset_count = 0
-    histories = h.filter( and_( h.table.c.user_id==None,
-                                h.table.c.deleted==False,
-                                h.table.c.update_time < cutoff_time ) ) \
-                 .options( eagerload( 'active_datasets' ) ).all()
-
-    print '# The following datasets and associated userless histories will be deleted'
+    print '# The following datasets and associated userless histories have been deleted'
     start = time.clock()
+    histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+                                                app.model.History.table.c.deleted==False,
+                                                app.model.History.table.c.update_time < cutoff_time ) ).all()# \
     for history in histories:
-        for dataset_assoc in history.active_datasets:
-            if not dataset_assoc.deleted:
-                # This check is not necessary since 'active_datasets' are not
-                # deleted, but just being cautious
-                print "dataset_%d" %dataset_assoc.dataset_id
-                dataset_count += 1
+        if not info_only:
+            history.deleted = True
         print "%d" % history.id
         history_count += 1
+    app.model.flush()
     stop = time.clock()
-    print "# %d histories ( including a total of %d datasets ) will be deleted\n" %( history_count, dataset_count )
-    print "Elapsed time: ", stop - start, "\n"
-
-def delete_userless_histories( h, d, cutoff_time ):
-    # Deletes userless histories whose update_time value is older than the cutoff_time.
-    # The datasets associated with each history are also deleted.  Nothing is removed from disk.
-    history_count = 0
-    dataset_count = 0
-
-    print '# The following datasets and associated userless histories have been deleted'
-    start = time.clock()
-    histories = h.filter( and_( h.table.c.user_id==None,
-                                h.table.c.deleted==False,
-                                h.table.c.update_time < cutoff_time ) ) \
-                 .options( eagerload( 'active_datasets' ) ).all()
-    for history in histories:
-        for dataset_assoc in history.active_datasets:
-            if not dataset_assoc.deleted:
-                # Mark all datasets as deleted
-                datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
-                for dataset in datasets:
-                    if not dataset.deleted:
-                        dataset.deleted = True
-                        dataset.flush()
-                # Mark the history_dataset_association as deleted
-                dataset_assoc.deleted = True
-                dataset_assoc.clear_associated_files()
-                dataset_assoc.flush()
-                print "dataset_%d" % dataset_assoc.dataset_id
-                dataset_count += 1
-        history.deleted = True
-        history.flush()
-        print "%d" % history.id
-        history_count += 1
-    stop = time.clock()
-    print "# Deleted %d histories ( including a total of %d datasets )\n" %( history_count, dataset_count )
+    print "# Deleted %d histories.\n" % ( history_count )
     print "Elapsed time: ", stop - start, "\n"
 
-def info_purge_histories( h, d, cutoff_time ):
-    # Provide info about the histories and datasets that will be affected if the purge_histories function is executed.
+
+def purge_histories( app, cutoff_time, remove_from_disk, info_only = False ):
+    # Purges deleted histories whose update_time is older than the cutoff_time.
+    # The dataset associations of each history are also marked as deleted.
+    # The Purge Dataset method will purge each Dataset as necessary
+    # history.purged == True simply means that it can no longer be undeleted
+    # i.e. all associated datasets are marked as deleted
     history_count = 0
-    dataset_count = 0
-    disk_space = 0
-    print '# The following datasets and associated deleted histories will be purged'
+    print '# The following datasets and associated deleted histories have been purged'
     start = time.clock()
-    histories = h.filter( and_( h.table.c.deleted==True,
-                                h.table.c.purged==False,
-                                h.table.c.update_time < cutoff_time ) ) \
+    histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+                                                app.model.History.table.c.purged==False,
+                                                app.model.History.table.c.update_time < cutoff_time ) ) \
                  .options( eagerload( 'datasets' ) ).all()
     for history in histories:
         for dataset_assoc in history.datasets:
-            # Datasets can only be purged if their HistoryDatasetAssociation has been deleted.
-            if dataset_assoc.deleted:
-                datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
-                for dataset in datasets:
-                    if dataset.purgable and not dataset.purged:
-                        print "%s" % dataset.file_name
-                        dataset_count += 1
-                        try:
-                            disk_space += dataset.file_size
-                        except:
-                            pass
+            _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+        if not info_only:
+            history.purged = True
         print "%d" % history.id
         history_count += 1
+    app.model.flush()
     stop = time.clock()
-    print '# %d histories ( including a total of %d datasets ) will be purged.  Freed disk space: ' %( history_count, dataset_count ), disk_space, '\n'
+    print '# Purged %d histories.' % ( history_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def purge_histories( h, d, m, cutoff_time, remove_from_disk ):
-    # Purges deleted histories whose update_time is older than the cutoff_time.
-    # The datasets associated with each history are also purged.
-    history_count = 0
-    dataset_count = 0
-    disk_space = 0
-    file_size = 0
-    errors = False
-    print '# The following datasets and associated deleted histories have been purged'
+def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False ):
+    # Purges deleted libraries whose update_time is older than the cutoff_time.
+    # The dataset associations of each library are also marked as deleted.
+    # The Purge Dataset method will purge each Dataset as necessary
+    # library.purged == True simply means that it can no longer be undeleted
+    # i.e. all associated LibraryDatasets/folders are marked as deleted
+    library_count = 0
+    print '# The following libraries and associated folders have been purged'
     start = time.clock()
-    histories = h.filter( and_( h.table.c.deleted==True,
-                                h.table.c.purged==False,
-                                h.table.c.update_time < cutoff_time ) ) \
-                 .options( eagerload( 'datasets' ) ).all()
-    for history in histories:
-        errors = False
-        for dataset_assoc in history.datasets:
-            if dataset_assoc.deleted:
-                datasets = d.filter( d.table.c.id==dataset_assoc.dataset_id ).all()
-                for dataset in datasets:
-                    if dataset.purgable and not dataset.purged:
-                        file_size = dataset.file_size
-                        dataset.deleted = True
-                        dataset.file_size = 0
-                        if remove_from_disk:
-                            dataset.flush()
-                            errmsg = purge_dataset( dataset, d, m )
-                            if errmsg:
-                                errors = True
-                                print errmsg
-                        else:
-                            dataset.purged = True
-                            dataset.flush()
-                            print "%s" % dataset.file_name
-                            # Mark all associated MetadataFiles as deleted and purged
-                            print "The following metadata files associated with dataset '%s' have been marked purged" % dataset.file_name
-                            for hda in dataset.history_associations:
-                                for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
-                                    metadata_file.deleted = True
-                                    metadata_file.purged = True
-                                    metadata_file.flush()
-                                    print "%s" % metadata_file.file_name()
-                            for lda in dataset.library_associations:
-                                for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
-                                    metadata_file.deleted = True
-                                    metadata_file.purged = True
-                                    metadata_file.flush()
-                                    print "%s" % metadata_file.file_name()
-                        dataset_count += 1
-                        try:
-                            disk_space += file_size
-                        except:
-                            pass
-        if not errors:
-            history.purged = True
-            history.flush()
-            print "%d" % history.id
-            history_count += 1
+    libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+                                                app.model.Library.table.c.purged==False,
+                                                app.model.Library.table.c.update_time < cutoff_time ) ).all()
+    for library in libraries:
+        _purge_folder( library.root_folder, app, remove_from_disk, info_only = info_only )
+        if not info_only:
+            library.purged = True
+        print "%d" % library.id
+        library_count += 1
+    app.model.flush()
     stop = time.clock()
-    print '# Purged %d histories ( including a total of %d datasets ).  Freed disk space: ' %( history_count, dataset_count ), disk_space, '\n'
+    print '# Purged %d libraries .' % ( library_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def info_purge_datasets( d, cutoff_time ):
-    # Provide info about the datasets that will be affected if the purge_datasets function is executed.
-    dataset_count = 0
-    disk_space = 0
-    print '# The following deleted datasets will be purged'
+def purge_folders( app, cutoff_time, remove_from_disk, info_only = False ):
+    # Purges deleted folders whose update_time is older than the cutoff_time.
+    # The dataset associations of each folder are also marked as deleted.
+    # The Purge Dataset method will purge each Dataset as necessary
+    # libraryFolder.purged == True simply means that it can no longer be undeleted
+    # i.e. all associated LibraryDatasets/folders are marked as deleted
+    folder_count = 0
+    print '# The following folders have been purged'
     start = time.clock()
-    datasets = d.filter( and_( d.table.c.deleted==True,
-                               d.table.c.purgable==True,
-                               d.table.c.purged==False,
-                               d.table.c.update_time < cutoff_time ) ).all()
-    for dataset in datasets:
-        print "%s" % dataset.file_name
-        dataset_count += 1
-        try:
-            disk_space += dataset.file_size
-        except:
-            pass
+    folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                                    app.model.LibraryFolder.table.c.purged==False,
+                                                    app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+    for folder in folders:
+        _purge_folder( folder, app, remove_from_disk, info_only = info_only )
+        print "%d" % folder.id
+        folder_count += 1
     stop = time.clock()
-    print '# %d datasets will be purged.  Freed disk space: ' %dataset_count, disk_space, '\n'
+    print '# Purged %d folders.' % ( folder_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def purge_datasets( d, m, cutoff_time, remove_from_disk ):
+def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False ):
     # Purges deleted datasets whose update_time is older than cutoff_time.  Files may or may
     # not be removed from disk.
     dataset_count = 0
     disk_space = 0
-    file_size = 0
     print '# The following deleted datasets have been purged'
     start = time.clock()
-    datasets = d.filter( and_( d.table.c.deleted==True,
-                               d.table.c.purgable==True,
-                               d.table.c.purged==False,
-                               d.table.c.update_time < cutoff_time ) ).all()
+    datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+                                               app.model.Dataset.table.c.purgable==True,
+                                               app.model.Dataset.table.c.purged==False,
+                                               app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
     for dataset in datasets:
         file_size = dataset.file_size
-        if remove_from_disk:
-            errmsg = purge_dataset( dataset, d, m )
-            if errmsg:
-                print errmsg
-            else:
-                dataset_count += 1
-        else:
-            dataset.purged = True
-            dataset.file_size = 0
-            dataset.flush()
-            print "%s" % dataset.file_name
-            # Mark all associated MetadataFiles as deleted and purged
-            print "The following metadata files associated with dataset '%s' have been marked purged" % dataset.file_name
-            for hda in dataset.history_associations:
-                for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
-                    metadata_file.deleted = True
-                    metadata_file.purged = True
-                    metadata_file.flush()
-                    print "%s" % metadata_file.file_name()
-            for lda in dataset.library_associations:
-                for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
-                    metadata_file.deleted = True
-                    metadata_file.purged = True
-                    metadata_file.flush()
-                    print "%s" % metadata_file.file_name()
-            dataset_count += 1
+        _purge_dataset( dataset, remove_from_disk, info_only = info_only )
+        dataset_count += 1
         try:
             disk_space += file_size
         except:
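[Editor's note] All of the rewritten purge_* entry points above share one convention: every mutating statement is guarded by "if not info_only:", while the reporting prints run unconditionally. That is exactly what makes the new -i flag a safe dry run. The shape of that pattern, reduced to a standalone Python 2 sketch (Item and purge_items are stand-ins, not Galaxy models):

    class Item( object ):
        def __init__( self, id ):
            self.id = id
            self.purged = False

    def purge_items( items, info_only=False ):
        """Report every candidate; only flip state when this is not a dry run."""
        count = 0
        for item in items:
            if not info_only:
                item.purged = True      # the only mutating statement
            print "%d" % item.id        # reporting happens either way
            count += 1
        print "# Purged %d items.\n" % count

    purge_items( [ Item( 1 ), Item( 2 ) ], info_only=True )   # dry run: nothing is modified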
@@ -298,68 +199,90 @@
     print '# Freed disk space: ', disk_space, '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def purge_dataset( dataset, d, m ):
-    # Removes the file from disk and updates the database accordingly.
+
+def _purge_dataset_instance( dataset_instance, app, remove_from_disk, include_children = True, info_only = False ):
+    #purging a dataset instance marks the instance as deleted,
+    #and marks the dataset as deleted if it is not associated with another DatsetInstance that is not deleted
+    if not info_only:
+        dataset_instance.mark_deleted( include_children = include_children )
+        dataset_instance.clear_associated_files()
+        dataset_instance.flush()
+        dataset_instance.dataset.refresh()
+    if _dataset_is_deletable( dataset_instance.dataset ):
+        _delete_dataset( dataset_instance.dataset, app, remove_from_disk, info_only = info_only )
+    #need to purge children here
+    if include_children:
+        for child in dataset_instance.children:
+            _purge_dataset_instance( child, app, remove_from_disk, include_children = include_children, info_only = info_only )
+
+def _dataset_is_deletable( dataset ):
+    #a dataset is deletable when it no longer has any non-deleted associations
+    return not bool( dataset.active_history_associations or dataset.active_library_associations )
+
+def _delete_dataset( dataset, app, remove_from_disk, info_only = False ):
+    #marks a base dataset as deleted, hdas/ldas associated with dataset can no longer be undeleted
+    #metadata files attached to associated dataset Instances is removed now
+    if not _dataset_is_deletable( dataset ):
+        print "# This Dataset (%i) is not deletable, associated Metadata Files will not be removed.\n" % ( dataset.id )
+    else:
+        # Mark all associated MetadataFiles as deleted and purged and remove them from disk
+        print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
+        metadata_files = []
+        #lets create a list of metadata files, then perform actions on them
+        for hda in dataset.history_associations:
+            for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.hda_id==hda.id ).all():
+                metadata_files.append( metadata_file )
+        for lda in dataset.library_associations:
+            for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.lda_id==lda.id ).all():
+                metadata_files.append( metadata_file )
+        for metadata_file in metadata_files:
+            if not info_only:
+                if remove_from_disk:
+                    try:
+                        os.unlink( metadata_file.file_name )
+                    except Exception, e:
+                        print "# Error, exception: %s caught attempting to purge metadata file %s\n" %( str( e ), metadata_file.file_name )
+                metadata_file.purged = True
+                metadata_file.deleted = True
+                #metadata_file.flush()
+            print "%s" % metadata_file.file_name
+        print
+        dataset.deleted = True
+        #dataset.flush()
+        app.model.flush()
+
+def _purge_dataset( dataset, remove_from_disk, info_only = False ):
     if dataset.deleted:
-        purgable = True
-        # Remove files from disk and update the database
         try:
-            # See if the dataset has been shared
-            if dataset.external_filename:
-                # This check handles the pre-history_dataset_association approach to sharing.
-                shared_data = d.filter( and_( d.table.c.external_filename==dataset.external_filename,
-                                              d.table.c.deleted==False ) ).all()
-                if shared_data:
-                    purgable = False
-            if purgable:
-                # This check handles the history_dataset_association approach to sharing.
-                for shared_data in dataset.history_associations:
-                    # Check to see if another dataset is using this file.  This happens when a user shares
-                    # their history with another user.  In this case, a new record is created in the dataset
-                    # table for each dataset, but the dataset records point to the same data file on disk.  So
-                    # if 1 of the 2 users deletes the dataset from their history but the other doesn't, we need
-                    # to keep the dataset on disk for the 2nd user.
-                    if not shared_data.deleted:
-                        purgable = False
-                        break
-            if purgable:
-                # This check handles the library_dataset_dataset_association approach to sharing.
-                for shared_data in dataset.library_associations:
-                    if not shared_data.deleted:
-                        purgable = False
-                        break
-            if purgable:
-                dataset.purged = True
-                dataset.file_size = 0
-                dataset.flush()
-                # Remove dataset file from disk
-                os.unlink( dataset.file_name )
+            if dataset.purgable and _dataset_is_deletable( dataset ):
                 print "%s" % dataset.file_name
-                # Mark all associated MetadataFiles as deleted and purged and remove them from disk
-                print "The following metadata files associated with dataset '%s' have been purged" % dataset.file_name
-                for hda in dataset.history_associations:
-                    for metadata_file in m.filter( m.table.c.hda_id==hda.id ).all():
-                        os.unlink( metadata_file.file_name() )
-                        metadata_file.deleted = True
-                        metadata_file.purged = True
-                        metadata_file.flush()
-                        print "%s" % metadata_file.file_name()
-                for lda in dataset.library_associations:
-                    for metadata_file in m.filter( m.table.c.lda_id==lda.id ).all():
-                        metadata_file.deleted = True
-                        metadata_file.purged = True
-                        metadata_file.flush()
-                        print "%s" % metadata_file.file_name()
-                try:
-                    # Remove associated extra files from disk if they exist
-                    os.unlink( dataset.extra_files_path )
-                except:
-                    pass
+                if not info_only:
+                    # Remove files from disk and update the database
+                    if remove_from_disk:
+                        os.unlink( dataset.file_name )
+                        # Remove associated extra files from disk if they exist
+                        if dataset.extra_files_path and os.path.exists( dataset.extra_files_path ):
+                            shutil.rmtree( dataset.extra_files_path ) #we need to delete the directory and its contents; os.unlink would always fail on a directory
+                    dataset.purged = True
+            else:
+                print "# This dataset (%i) is not purgable, the file (%s) will not be removed.\n" % ( dataset.id, dataset.file_name )
        except Exception, exc:
-            return "# Error, exception: %s caught attempting to purge %s\n" %( str( exc ), dataset.file_name )
+            print "# Error, exception: %s caught attempting to purge %s\n" %( str( exc ), dataset.file_name )
     else:
-        return "# Error: '%s' has not previously been deleted, so it cannot be purged\n" %dataset.file_name
-    return ""
+        print "# Error: '%s' has not previously been deleted, so it cannot be purged\n" % dataset.file_name
+    print ""
+
+def _purge_folder( folder, app, remove_from_disk, info_only = False ):
+    """Purges a folder and its contents, recursively"""
+    for ld in folder.datasets:
+        ld.deleted = True
+        for ldda in [ld.library_dataset_dataset_association] + ld.expired_datasets:
+            _purge_dataset_instance( ldda, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
+    for sub_folder in folder.folders:
+        _purge_folder( sub_folder, app, remove_from_disk, info_only = info_only )
+    if not info_only:
+        folder.purged = True
+    folder.flush()
 
 class CleanupDatasetsApplication( object ):
     """Encapsulates the state of a Universe application"""
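[Editor's note] The helper layer above hangs together around one rule: a Dataset may be deleted, and its metadata files removed, only once every history/library association pointing at it is itself deleted, and folder purging applies that rule recursively down the tree. A toy Python 2 sketch of the rule with plain stand-in objects rather than Galaxy models (Assoc, Dataset and dataset_is_deletable here are illustrative names):

    class Assoc( object ):
        def __init__( self, deleted ):
            self.deleted = deleted

    class Dataset( object ):
        def __init__( self, associations ):
            self.associations = associations

    def dataset_is_deletable( dataset ):
        """Deletable only when no live (non-deleted) association remains."""
        return not [ a for a in dataset.associations if not a.deleted ]

    print dataset_is_deletable( Dataset( [ Assoc( True ), Assoc( True ) ] ) )   # True
    print dataset_is_deletable( Dataset( [ Assoc( True ), Assoc( False ) ] ) )  # False: still shared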
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/delete_userless_histories.sh
--- a/scripts/cleanup_datasets/delete_userless_histories.sh	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/delete_userless_histories.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
 #!/bin/sh
 
 cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -2 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/delete_userless_histories_main.sh
--- a/scripts/cleanup_datasets/delete_userless_histories_main.sh	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/delete_userless_histories_main.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
 #!/bin/sh
 
 cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -2 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -1 $@ >> ./scripts/cleanup_datasets/delete_userless_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_datasets.sh
--- a/scripts/cleanup_datasets/purge_datasets.sh	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_datasets.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
 #!/bin/sh
 
 cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -6 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -3 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_datasets_main.sh
--- a/scripts/cleanup_datasets/purge_datasets_main.sh	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_datasets_main.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
 #!/bin/sh
 
 cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -6 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -3 -r $@ >> ./scripts/cleanup_datasets/purge_datasets.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_folders.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_folders.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -5 -r $@ >> ./scripts/cleanup_datasets/purge_folders.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_folders_main.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_folders_main.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -5 -r $@ >> ./scripts/cleanup_datasets/purge_folders.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_histories.sh
--- a/scripts/cleanup_datasets/purge_histories.sh	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_histories.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
 #!/bin/sh
 
 cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -2 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_histories_main.sh
--- a/scripts/cleanup_datasets/purge_histories_main.sh	Thu Apr 23 14:42:35 2009 -0400
+++ b/scripts/cleanup_datasets/purge_histories_main.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -1,4 +1,4 @@
 #!/bin/sh
 
 cd `dirname $0`/../..
-python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -4 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -2 -r $@ >> ./scripts/cleanup_datasets/purge_histories.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_libraries.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_libraries.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 10 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 scripts/cleanup_datasets/purge_libraries_main.sh
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/cleanup_datasets/purge_libraries_main.sh	Thu Apr 23 15:11:29 2009 -0400
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+cd `dirname $0`/../..
+python ./scripts/cleanup_datasets/cleanup_datasets.py ./universe_wsgi.ini -d 60 -4 -r $@ >> ./scripts/cleanup_datasets/purge_libraries.log
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/base.css.tmpl
--- a/static/june_2007_style/base.css.tmpl	Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/base.css.tmpl	Thu Apr 23 15:11:29 2009 -0400
@@ -563,6 +563,7 @@
     color: #333;
     font-size: 110%;
     font-weight: bold;
+    font-style: normal;
     white-space: nowrap;
     position: absolute;
     z-index: 20000;
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css	Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/blue/base.css	Thu Apr 23 15:11:29 2009 -0400
@@ -559,6 +559,7 @@
     color: #333;
     font-size: 110%;
     font-weight: bold;
+    font-style: normal;
     white-space: nowrap;
     position: absolute;
     z-index: 20000;
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/blue/library.css
--- a/static/june_2007_style/blue/library.css	Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/blue/library.css	Thu Apr 23 15:11:29 2009 -0400
@@ -4,6 +4,10 @@
 
 .datasetHighlighted {
     background-color: #C1C9E5;
+}
+
+.libraryItemDeleted-True {
+    font-style: italic;
 }
 
 div.historyItemBody {
diff -r 0cf5c25d1d2b -r c6031c4e6546 static/june_2007_style/library.css.tmpl
--- a/static/june_2007_style/library.css.tmpl	Thu Apr 23 14:42:35 2009 -0400
+++ b/static/june_2007_style/library.css.tmpl	Thu Apr 23 15:11:29 2009 -0400
@@ -4,6 +4,10 @@
 
 .datasetHighlighted {
     background-color: $menu_bg_over;
+}
+
+.libraryItemDeleted-True {
+    font-style: italic;
 }
 
 div.historyItemBody {
class="rowTitle"> + <div class="rowTitle libraryItemDeleted-${parent.deleted}"> <img src="${h.url_for( expander )}" class="expanderIcon"/><img src="${h.url_for( folder )}" class="rowIcon"/> ${parent.name} %if parent.description: @@ -101,7 +101,7 @@ %endif <a id="folder-${parent.id}-popup" class="popup-arrow" style="display: none;">▼</a> </div> - %if not deleted: + %if not parent.deleted: <% library_item_ids = {} library_item_ids[ 'folder' ] = parent.id @@ -117,10 +117,11 @@ <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, folder_id=parent.id, new_template=True )}">Add an information template to this folder</a> %endif <a class="action-button" href="${h.url_for( controller='admin', action='folder', permissions=True, id=parent.id, library_id=library_id )}">Edit this folder's permissions</a> - ## TODO: need to revamp the way folders and contained LibraryDatasets are deleted - ##%if subfolder: - ## <a class="action-button" confirm="Click OK to delete the folder '${parent.name}'" href="${h.url_for( action='folder', delete=True, id=parent.id, library_id=library_id )}">Remove this folder and its contents from the library</a> - ##%endif + <a class="action-button" confirm="Click OK to delete the folder '${parent.name}'" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library_id, library_item_id=parent.id, library_item_type='folder' )}">Remove this folder and its contents from the library</a> + </div> + %else: + <div popupmenu="folder-${parent.id}-popup"> + <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library_id, library_item_id=parent.id, library_item_type='folder' )}">Undelete this folder</a> </div> %endif </li> @@ -130,10 +131,10 @@ %else: <ul> %endif - %if library.deleted: + %if deleted: <% - parent_folders = parent.folders - parent_datasets = parent.datasets + parent_folders = parent.activatable_folders + parent_datasets = parent.activatable_datasets %> %else: <% @@ -142,7 +143,7 @@ %> %endif %for folder in name_sorted( parent_folders ): - ${render_folder( folder, pad, library.deleted, created_ldda_ids, library.id )} + ${render_folder( folder, pad, deleted, created_ldda_ids, library.id )} %endfor %for library_dataset in name_sorted( parent_datasets ): <% @@ -182,17 +183,20 @@ <table cellspacing="0" cellpadding="0" border="0" width="100%" class="libraryTitle"> <th width="*"> <img src="${h.url_for( '/static/images/silk/resultset_bottom.png' )}" class="expanderIcon"/><img src="${h.url_for( '/static/images/silk/book_open.png' )}" class="rowIcon"/> - ${library.name} - %if library.description: - <i>- ${library.description}</i> - %endif + <span class="libraryItemDeleted-${library.deleted}"> + ${library.name} + %if library.description: + <i>- ${library.description}</i> + %endif + </span> <a id="library-${library.id}-popup" class="popup-arrow" style="display: none;">▼</a> + <div popupmenu="library-${library.id}-popup"> %if not library.deleted: <% library_item_ids = {} library_item_ids[ 'library' ] = library.id %> - <div popupmenu="library-${library.id}-popup"> + <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this library's information</a> %if library.library_info_template_associations: <% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %> @@ -201,15 +205,16 @@ <a class="action-button" href="${h.url_for( controller='admin', 
action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a> %endif <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this library's permissions</a> - ## TODO: need to revamp the way libraries, folders, and contained LibraryDatasets are deleted - ##<a class="action-button" confirm="Current state will not be saved, so undeleting the library will restore all of its contents. Click OK to delete the library named '${library.name}'?" href="${h.url_for( controller='admin', action='library', delete=True, id=library.id )}">Delete this library and its contents</a> - </div> - ##%else: - ## <div popupmenu="library-${library.id}-popup"> - ## <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library', id=library.id )}">Undelete this library and its contents</a> - ## <a class="action-button" href="${h.url_for( controller='admin', action='purge_library', id=library.id )}">Purge this library and its contents</a> - ## </div> + <a class="action-button" confirm="Current state will not be saved, so undeleting the library will restore all of its contents. Click OK to delete the library named '${library.name}'?" href="${h.url_for( controller='admin', action='delete_library_item', library_item_type='library', library_item_id=library.id )}">Delete this library and its contents</a> + %if show_deleted: + <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted library items</a> + %else: + <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted library items</a> + %endif + %elif not library.purged: + <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_item_type='library', library_item_id=library.id )}">Undelete this library</a> %endif + </div> </th> <th width="300">Information</th> <th width="150">Uploaded By</th> @@ -218,7 +223,7 @@ </div> </li> <ul> - ${render_folder( library.root_folder, 0, library.deleted, created_ldda_ids, library.id )} + ${render_folder( library.root_folder, 0, library.deleted or show_deleted, created_ldda_ids, library.id )} </ul> <br/> </ul> diff -r 0cf5c25d1d2b -r c6031c4e6546 templates/admin/library/common.mako --- a/templates/admin/library/common.mako Thu Apr 23 14:42:35 2009 -0400 +++ b/templates/admin/library/common.mako Thu Apr 23 15:11:29 2009 -0400 @@ -1,6 +1,6 @@ <% from time import strftime %> -<%def name="render_dataset( library_dataset, selected, library )"> +<%def name="render_dataset( library_dataset, selected, library, show_deleted = False )"> <% ## The received data must always be a LibraryDataset object, but the object id passed to methods from the drop down menu ## should be the underlying ldda id to prevent id collision ( which could happen when displaying children, which are always @@ -27,13 +27,15 @@ %else: <input type="checkbox" name="ldda_ids" value="${ldda.id}"/> %endif - <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:50]}</b></a> - %if not library.deleted: + <span class="libraryItemDeleted-${library_dataset.deleted}"> + <a href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, 
folder_id=library_dataset.folder.id, id=ldda.id, info=True )}"><b>${ldda.name[:50]}</b></a> + </span> + <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a> + %if not library_dataset.deleted: <% library_item_ids = {} library_item_ids[ 'ldda' ] = ldda.id %> - <a id="dataset-${ldda.id}-popup" class="popup-arrow" style="display: none;">▼</a> <div popupmenu="dataset-${ldda.id}-popup"> <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, edit_info=True )}">Edit this dataset's information</a> ## We're disabling the ability to add templates at the LDDA and LibraryDataset level, but will leave this here for possible future use @@ -46,7 +48,11 @@ <a class="action-button" href="${h.url_for( controller='admin', action='download_dataset_from_folder', id=ldda.id, library_id=library.id )}">Download this dataset</a> %endif ##TODO: need to revamp the way we remove datasets from disk. - ##<a class="action-button" confirm="Click OK to remove dataset '${ldda.name}'?" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library_dataset.folder.id, id=ldda.id, delete=True )}">Remove this dataset from the library</a> + <a class="action-button" confirm="Click OK to remove dataset '${ldda.name}'?" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='dataset' )}">Remove this dataset from the library</a> + </div> + %else: + <div popupmenu="dataset-${ldda.id}-popup"> + <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library_dataset.id, library_item_type='dataset' )}">Undelete this dataset</a> </div> %endif </td>