commit/galaxy-central: greg: Several data library improvements:
1 new changeset in galaxy-central:

http://bitbucket.org/galaxy/galaxy-central/changeset/82107691d26e/
changeset: r4964:82107691d26e
user:      greg
date:      2011-01-31 22:41:03
summary:   Several data library improvements:

1. Enhance importing library datasets into the current history by allowing the user to select from a list of existing histories or to create a new named history ( similar to copying datasets ). This feature is available for a specific library dataset, for a folder ( or the root folder ), when selecting multiple datasets from within a library, and on the results page for searched library datasets.
2. Add Brad Chapman's latest patch for fixing the lucene search implementation.
3. Eliminate searches on dataset state for both whoosh and lucene searches.
4. Clarify the objects being searched in the regular search and advanced search boxes in the data libraries grids.
5. Add the ability to perform certain actions on one or more selected library datasets on the results page for searched library datasets.
6. Add a "select all" checkbox to the browse_library.mako template.
7. Fix the library functional tests accordingly.

Usage sketches of the search and archive-download changes follow the diff below.

affected #: 14 files (14.7 KB)

--- a/lib/galaxy/web/controllers/library.py Mon Jan 31 12:57:42 2011 -0500 +++ b/lib/galaxy/web/controllers/library.py Mon Jan 31 16:41:03 2011 -0500 @@ -6,7 +6,7 @@ from galaxy.datatypes import sniff from galaxy import model, util from galaxy.util.odict import odict -from library_common import lucene_search, whoosh_search +from library_common import get_comptypes, lucene_search, whoosh_search log = logging.getLogger( __name__ ) @@ -25,17 +25,17 @@ template='/library/grid.mako' default_sort_key = "name" columns = [ - NameColumn( "Name", + NameColumn( "Data library name", key="name", link=( lambda library: dict( operation="browse", id=library.id ) ), attach_popup=False, filterable="advanced" ), - DescriptionColumn( "Description", + DescriptionColumn( "Data library description", key="description", attach_popup=False, filterable="advanced" ), ] - columns.append( grids.MulticolFilterColumn( "Search", + columns.append( grids.MulticolFilterColumn( "search library dataset name, info, message, dbkey", cols_to_filter=[ columns[0], columns[1] ], key="free-text-search", visible=False, @@ -74,8 +74,9 @@ params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) + default_action = params.get( 'default_action', None ) return trans.fill_template( "/library/index.mako", - default_action=params.get( 'default_action', None ), + default_action=default_action, message=message, status=status ) @web.expose @@ -91,8 +92,9 @@ search_term = kwd[ "f-free-text-search" ] if trans.app.config.enable_lucene_library_search: indexed_search_enabled = True - search_url = trans.app.config.config_dict.get( "fulltext_find_url", "" ) + search_url = trans.app.config.config_dict.get( "fulltext_url", "" ) if search_url: + indexed_search_enabled = True status, message, lddas = lucene_search( trans, 'library', search_term, search_url, **kwd ) elif trans.app.config.enable_whoosh_library_search: indexed_search_enabled = True @@ -100,11 +102,15 @@ else: indexed_search_enabled = False if indexed_search_enabled: + comptypes = get_comptypes( trans ) + show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) ) use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) ) return trans.fill_template( '/library/common/library_dataset_search_results.mako', cntrller='library', search_term=search_term, + comptypes=comptypes,
lddas=lddas, + show_deleted=show_deleted, use_panels=use_panels, message=message, status=status ) --- a/lib/galaxy/web/controllers/library_admin.py Mon Jan 31 12:57:42 2011 -0500 +++ b/lib/galaxy/web/controllers/library_admin.py Mon Jan 31 16:41:03 2011 -0500 @@ -3,7 +3,7 @@ from galaxy.web.base.controller import * from galaxy.web.framework.helpers import time_ago, iff, grids from galaxy.model.orm import * -from library_common import lucene_search, whoosh_search +from library_common import get_comptypes, lucene_search, whoosh_search # Older py compatibility try: set() @@ -35,12 +35,12 @@ template='/admin/library/grid.mako' default_sort_key = "name" columns = [ - NameColumn( "Name", + NameColumn( "Data library name", key="name", link=( lambda library: dict( operation="browse", id=library.id ) ), attach_popup=False, filterable="advanced" ), - DescriptionColumn( "Description", + DescriptionColumn( "Data library description", key="description", attach_popup=False, filterable="advanced" ), @@ -50,7 +50,7 @@ # Columns that are valid for filtering but are not visible. grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" ) ] - columns.append( grids.MulticolFilterColumn( "Search", + columns.append( grids.MulticolFilterColumn( "search library dataset name, info, message, dbkey", cols_to_filter=[ columns[0], columns[1] ], key="free-text-search", visible=False, @@ -133,11 +133,15 @@ else: indexed_search_enabled = False if indexed_search_enabled: + comptypes = get_comptypes( trans ) + show_deleted = util.string_as_bool( kwd.get( 'show_deleted', False ) ) use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) ) return trans.fill_template( '/library/common/library_dataset_search_results.mako', cntrller='library_admin', search_term=search_term, + comptypes=comptypes, lddas=lddas, + show_deleted=show_deleted, use_panels=use_panels, message=message, status=status ) --- a/lib/galaxy/web/controllers/library_common.py Mon Jan 31 12:57:42 2011 -0500 +++ b/lib/galaxy/web/controllers/library_common.py Mon Jan 31 16:41:03 2011 -0500 @@ -1,4 +1,4 @@ -import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob, string +import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob, string, urllib2 from galaxy.web.base.controller import * from galaxy import util, jobs from galaxy.datatypes import sniff @@ -7,6 +7,7 @@ from galaxy.tools.actions import upload_common from galaxy.model.orm import * from galaxy.util.streamball import StreamBall +from galaxy.util import inflector from galaxy.web.form_builder import AddressField, CheckboxField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField import logging, tempfile, zipfile, tarfile, os, sys, operator from galaxy.eggs import require @@ -20,7 +21,7 @@ whoosh_search_enabled = True # The following must be defined exactly like the # schema in ~/scripts/data_libraries/build_whoosh_index.py - schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT, state=TEXT ) + schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT ) except ImportError, e: whoosh_search_enabled = False schema = None @@ -125,16 +126,7 @@ message += "Don't navigate away from Galaxy or use the browser's \"stop\" or \"reload\" buttons (on this tab) until the " message += "message \"This job is running\" is cleared from the \"Information\" column below for each selected dataset." 
status = "info" - comptypes_t = comptypes - if trans.app.config.nginx_x_archive_files_base: - comptypes_t = ['ngxzip'] - for comptype in trans.app.config.disable_library_comptypes: - # TODO: do this once, not every time (we're gonna raise an - # exception every time after the first time) - try: - comptypes_t.remove( comptype ) - except: - pass + comptypes = get_comptypes( trans ) try: return trans.fill_template( '/library/common/browse_library.mako', cntrller=cntrller, @@ -143,17 +135,18 @@ created_ldda_ids=created_ldda_ids, hidden_folder_ids=hidden_folder_ids, show_deleted=show_deleted, - comptypes=comptypes_t, + comptypes=comptypes, current_user_roles=current_user_roles, message=message, status=status ) except Exception, e: message = 'Error attempting to display contents of library (%s): %s.' % ( str( library.name ), str( e ) ) status = 'error' + default_action = params.get( 'default_action', None ) return trans.response.send_redirect( web.url_for( use_panels=use_panels, controller=cntrller, action='browse_libraries', - default_action=params.get( 'default_action', None ), + default_action=default_action, message=util.sanitize_text( message ), status=status ) ) @web.expose @@ -871,7 +864,7 @@ message += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary." default_action = 'manage_permissions' else: - default_action = 'add' + default_action = 'import_to_histories' trans.response.send_redirect( web.url_for( controller='library_common', action='browse_library', cntrller=cntrller, @@ -1247,7 +1240,7 @@ message += "Click the Go button at the bottom of this page to edit the permissions on these datasets if necessary." default_action = 'manage_permissions' else: - default_action = 'add' + default_action = 'import_to_histories' return trans.response.send_redirect( web.url_for( controller='library_common', action='browse_library', cntrller=cntrller, @@ -1512,7 +1505,12 @@ message=util.sanitize_text( message ), status=status ) ) @web.expose - def act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ): + def act_on_multiple_datasets( self, trans, cntrller, library_id=None, ldda_ids='', **kwd ): + # This method is called from 1 of 3 places: + # - this controller's download_dataset_from_folder() method + # - the browse_library.mako template + # - the library_dataset_search_results.mako template + # In the last case above, we will not have a library_id class NgxZip( object ): def __init__( self, url_base ): self.files = {} @@ -1534,6 +1532,16 @@ show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) ) use_panels = util.string_as_bool( params.get( 'use_panels', False ) ) action = params.get( 'do_action', None ) + if action == 'import_to_histories': + return trans.response.send_redirect( web.url_for( controller='library_common', + action='import_datasets_to_histories', + cntrller=cntrller, + library_id=library_id, + ldda_ids=ldda_ids, + use_panels=use_panels, + show_deleted=show_deleted, + message=message, + status=status ) ) lddas = [] error = False is_admin = trans.user_is_admin() and cntrller == 'library_admin' @@ -1557,27 +1565,177 @@ message = "Invalid library dataset id ( %s ) specified." % str( ldda_id ) break lddas.append( ldda ) - if action == 'import_to_history' or action == 'add': - if trans.get_history() is None: - # Must be a bot sending a request without having a history.
- error = True - message = "You do not have a current history" - elif action == 'manage_permissions': - if not is_admin: + if not is_admin: + if action == 'manage_permissions': for ldda in lddas: if not ( trans.app.security_agent.can_manage_library_item( current_user_roles, ldda ) and \ trans.app.security_agent.can_manage_dataset( current_user_roles, ldda.dataset ) ): error = True message = "You are not authorized to manage permissions on library dataset '%s'." % ldda.name break - elif action == 'delete': - if not is_admin: + elif action == 'delete': for ldda in lddas: if not trans.app.security_agent.can_modify_library_item( current_user_roles, ldda ): error = True message = "You are not authorized to modify library dataset '%s'." % ldda.name break - if error: + if not error: + if action == 'manage_permissions': + trans.response.send_redirect( web.url_for( controller='library_common', + action='ldda_permissions', + cntrller=cntrller, + use_panels=use_panels, + library_id=library_id, + folder_id=trans.security.encode_id( lddas[0].library_dataset.folder.id ), + id=",".join( ldda_ids ), + show_deleted=show_deleted, + message=util.sanitize_text( message ), + status=status ) ) + elif action == 'delete': + for ldda in lddas: + # Do not delete the association, just delete the library_dataset. The + # cleanup_datasets.py script handles everything else. + ld = ldda.library_dataset + ld.deleted = True + trans.sa_session.add( ld ) + trans.sa_session.flush() + message = "The selected datasets have been deleted." + elif action in ['zip','tgz','tbz','ngxzip']: + error = False + killme = string.punctuation + string.whitespace + trantab = string.maketrans(killme,'_'*len(killme)) + try: + outext = 'zip' + if action == 'zip': + # Can't use mkstemp - the file must not exist first + tmpd = tempfile.mkdtemp() + tmpf = os.path.join( tmpd, 'library_download.' 
+ action ) + if ziptype == '64' and trans.app.config.upstream_gzip: + archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True ) + elif ziptype == '64': + archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True ) + elif trans.app.config.upstream_gzip: + archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED ) + else: + archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED ) + archive.add = lambda x, y: archive.write( x, y.encode('CP437') ) + elif action == 'tgz': + if trans.app.config.upstream_gzip: + archive = util.streamball.StreamBall( 'w|' ) + outext = 'tar' + else: + archive = util.streamball.StreamBall( 'w|gz' ) + outext = 'tgz' + elif action == 'tbz': + archive = util.streamball.StreamBall( 'w|bz2' ) + outext = 'tbz2' + elif action == 'ngxzip': + archive = NgxZip( trans.app.config.nginx_x_archive_files_base ) + except ( OSError, zipfile.BadZipfile ): + error = True + log.exception( "Unable to create archive for download" ) + message = "Unable to create archive for download, please report this error" + status = 'error' + except: + error = True + log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[0] ) + message = "Unable to create archive for download, please report - %s" % sys.exc_info()[0] + status = 'error' + if not error: + composite_extensions = trans.app.datatypes_registry.get_composite_extensions() + seen = [] + for ldda in lddas: + if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]: + continue + ext = ldda.extension + is_composite = ext in composite_extensions + path = "" + parent_folder = ldda.library_dataset.folder + while parent_folder is not None: + # Exclude the now-hidden "root folder" + if parent_folder.parent is None: + path = os.path.join( parent_folder.library_root[0].name, path ) + break + path = os.path.join( parent_folder.name, path ) + parent_folder = parent_folder.parent + path += ldda.name + while path in seen: + path += '_' + seen.append( path ) + zpath = os.path.split(path)[-1] # comes as base_name/fname + outfname,zpathext = os.path.splitext(zpath) + if is_composite: + # need to add all the components from the extra_files_path to the zip + if zpathext == '': + zpath = '%s.html' % zpath # fake the real nature of the html file + try: + archive.add(ldda.dataset.file_name,zpath) # add the primary of a composite set + except IOError: + error = True + log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name) + message = "Unable to create archive for download, please report this error" + status = 'error' + continue + flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths + for fpath in flist: + efp,fname = os.path.split(fpath) + if fname > '': + fname = fname.translate(trantab) + try: + archive.add( fpath,fname ) + except IOError: + error = True + log.exception( "Unable to add %s to temporary library download archive %s" % (fname,outfname)) + message = "Unable to create archive for download, please report this error" + status = 'error' + continue + else: # simple case + try: + archive.add( ldda.dataset.file_name, path ) + except IOError: + error = True + log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name) + message = "Unable to create archive for download, please report this error" + status = 'error' + if not error: + if library_id: + lname = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( 
library_id ) ).name + else: + # Request must have come from the library_dataset_search_results page. + lname = 'selected_dataset' + fname = lname.replace( ' ', '_' ) + '_files' + if action == 'zip': + archive.close() + tmpfh = open( tmpf ) + # clean up now + try: + os.unlink( tmpf ) + os.rmdir( tmpd ) + except OSError: + error = True + log.exception( "Unable to remove temporary library download archive and directory" ) + message = "Unable to create archive for download, please report this error" + status = 'error' + if not error: + trans.response.set_content_type( "application/x-zip-compressed" ) + trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext) + return tmpfh + elif action == 'ngxzip': + trans.response.set_content_type( "application/zip" ) + trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext) + trans.response.headers[ "X-Archive-Files" ] = "zip" + return archive + else: + trans.response.set_content_type( "application/x-tar" ) + trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext) + archive.wsgi_status = trans.response.wsgi_status() + archive.wsgi_headeritems = trans.response.wsgi_headeritems() + return archive.stream + else: + status = 'error' + message = 'Invalid action ( %s ) specified.' % action + if library_id: + # If we have a library_id, browse the associated library return trans.response.send_redirect( web.url_for( controller='library_common', action='browse_library', cntrller=cntrller, @@ -1585,181 +1743,129 @@ id=library_id, show_deleted=show_deleted, message=util.sanitize_text( message ), - status='error' ) ) - if action == 'import_to_history' or action == 'add': - history = trans.get_history() - total_imported_lddas = 0 - message = '' - status = 'done' - for ldda in lddas: - if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]: - message += "Cannot import dataset '%s' since its state is '%s'. " % ( ldda.name, ldda.dataset.state ) + status=status ) ) + else: + # We must have arrived here from the library_dataset_search_results page, so redirect there.
+ search_term = params.get( 'search_term', '' ) + comptypes = get_comptypes( trans ) + return trans.fill_template( '/library/common/library_dataset_search_results.mako', + cntrller=cntrller, + search_term=search_term, + comptypes=comptypes, + lddas=lddas, + show_deleted=show_deleted, + use_panels=use_panels, + message=message, + status=status ) + + @web.expose + def import_datasets_to_histories( self, trans, cntrller, library_id='', folder_id='', ldda_ids='', target_history_ids='', new_history_name='', **kwd ): + # This method is called from one of the following places: + # - a menu option for a library dataset ( ldda_ids will be a single dataset id ) + # - a menu option for a library folder ( folder_id will have a value ) + # - a menu option for a library dataset search result set ( ldda_ids will be a comma separated string of dataset ids ) + params = util.Params( kwd ) + message = util.restore_text( params.get( 'message', '' ) ) + status = params.get( 'status', 'done' ) + show_deleted = util.string_as_bool( params.get( 'show_deleted', False ) ) + use_panels = util.string_as_bool( params.get( 'use_panels', False ) ) + user = trans.get_user() + current_history = trans.get_history() + if library_id: + library = trans.sa_session.query( trans.model.Library ).get( trans.security.decode_id( library_id ) ) + else: + library = None + if folder_id: + folder = trans.sa_session.query( trans.model.LibraryFolder ).get( trans.security.decode_id( folder_id ) ) + else: + folder = None + ldda_ids = util.listify( ldda_ids ) + if ldda_ids: + # Check boxes cause 2 copies of each id to be included in the request + ldda_ids = map( trans.security.decode_id, ldda_ids ) + unique_ldda_ids = [] + for ldda_id in ldda_ids: + if ldda_id not in unique_ldda_ids: + unique_ldda_ids.append( ldda_id ) + ldda_ids = unique_ldda_ids + target_history_ids = util.listify( target_history_ids ) + if target_history_ids: + target_history_ids = [ trans.security.decode_id( target_history_id ) for target_history_id in target_history_ids if target_history_id ] + if params.get( 'import_datasets_to_histories_button', False ): + invalid_datasets = 0 + if not ldda_ids or not ( target_history_ids or new_history_name ): + message = "You must provide one or more source library datasets and one or more target histories." + status = 'error' + else: + if new_history_name: + new_history = trans.app.model.History() + new_history.name = new_history_name + new_history.user = user + trans.sa_session.add( new_history ) + trans.sa_session.flush() + target_history_ids.append( new_history.id ) + if user: + target_histories = [ hist for hist in map( trans.sa_session.query( trans.app.model.History ).get, target_history_ids ) if ( hist is not None and hist.user == user )] + else: + target_histories = [ current_history ] + if len( target_histories ) != len( target_history_ids ): + message += "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) ) status = 'error' - elif ldda.dataset.state in [ 'ok', 'error' ]: - hda = ldda.to_history_dataset_association( target_history=history, add_to_history=True ) - total_imported_lddas += 1 - if total_imported_lddas: - trans.sa_session.add( history ) + for ldda in map( trans.sa_session.query( trans.app.model.LibraryDatasetDatasetAssociation ).get, ldda_ids ): + if ldda is None: + message += "You tried to import a library dataset that does not exist.
" + status = 'error' + invalid_datasets += 1 + elif ldda.dataset.state not in [ trans.model.Dataset.states.OK, trans.model.Dataset.states.ERROR ]: + message += "Cannot import dataset '%s' since its state is '%s'. " % ( ldda.name, ldda.dataset.state ) + status = 'error' + invalid_datasets += 1 + elif not ldda.has_data(): + message += "Cannot import empty dataset '%s'. " % ldda.name + status = 'error' + invalid_datasets += 1 + else: + for target_history in target_histories: + hda = ldda.to_history_dataset_association( target_history=target_history, add_to_history=True ) trans.sa_session.flush() - message += "%i dataset(s) have been imported into your history. " % total_imported_lddas - elif action == 'manage_permissions': - trans.response.send_redirect( web.url_for( controller='library_common', - action='ldda_permissions', - cntrller=cntrller, - use_panels=use_panels, - library_id=library_id, - folder_id=trans.security.encode_id( lddas[0].library_dataset.folder.id ), - id=",".join( ldda_ids ), - show_deleted=show_deleted, - message=util.sanitize_text( message ), - status=status ) ) - elif action == 'delete': - for ldda in lddas: - # Do not delete the association, just delete the library_dataset. The - # cleanup_datasets.py script handles everything else. - ld = ldda.library_dataset - ld.deleted = True - trans.sa_session.add( ld ) - trans.sa_session.flush() - message = "The selected datasets have been removed from this data library" - elif action in ['zip','tgz','tbz','ngxzip']: - error = False - killme = string.punctuation + string.whitespace - trantab = string.maketrans(killme,'_'*len(killme)) - try: - outext = 'zip' - if action == 'zip': - # Can't use mkstemp - the file must not exist first - tmpd = tempfile.mkdtemp() - tmpf = os.path.join( tmpd, 'library_download.' 
+ action ) - if ziptype == '64' and trans.app.config.upstream_gzip: - archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED, True ) - elif ziptype == '64': - archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True ) - elif trans.app.config.upstream_gzip: - archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_STORED ) - else: - archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED ) - archive.add = lambda x, y: archive.write( x, y.encode('CP437') ) - elif action == 'tgz': - if trans.app.config.upstream_gzip: - archive = util.streamball.StreamBall( 'w|' ) - outext = 'tar' - else: - archive = util.streamball.StreamBall( 'w|gz' ) - outext = 'tgz' - elif action == 'tbz': - archive = util.streamball.StreamBall( 'w|bz2' ) - outext = 'tbz2' - elif action == 'ngxzip': - archive = NgxZip( trans.app.config.nginx_x_archive_files_base ) - except (OSError, zipfile.BadZipfile): - error = True - log.exception( "Unable to create archive for download" ) - message = "Unable to create archive for download, please report this error" - status = 'error' - except: - error = True - log.exception( "Unexpected error %s in create archive for download" % sys.exc_info()[0]) - message = "Unable to create archive for download, please report - %s" % sys.exc_info()[0] - status = 'error' - if not error: - composite_extensions = trans.app.datatypes_registry.get_composite_extensions( ) - seen = [] - for ldda in lddas: - if ldda.dataset.state in [ 'new', 'upload', 'queued', 'running', 'empty', 'discarded' ]: - continue - ext = ldda.extension - is_composite = ext in composite_extensions - path = "" - parent_folder = ldda.library_dataset.folder - while parent_folder is not None: - # Exclude the now-hidden "root folder" - if parent_folder.parent is None: - path = os.path.join( parent_folder.library_root[0].name, path ) - break - path = os.path.join( parent_folder.name, path ) - parent_folder = parent_folder.parent - path += ldda.name - while path in seen: - path += '_' - seen.append( path ) - zpath = os.path.split(path)[-1] # comes as base_name/fname - outfname,zpathext = os.path.splitext(zpath) - if is_composite: - # need to add all the components from the extra_files_path to the zip - if zpathext == '': - zpath = '%s.html' % zpath # fake the real nature of the html file - try: - archive.add(ldda.dataset.file_name,zpath) # add the primary of a composite set - except IOError: - error = True - log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name) - message = "Unable to create archive for download, please report this error" - status = 'error' - continue - flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths - for fpath in flist: - efp,fname = os.path.split(fpath) - if fname > '': - fname = fname.translate(trantab) - try: - archive.add( fpath,fname ) - except IOError: - error = True - log.exception( "Unable to add %s to temporary library download archive %s" % (fname,outfname)) - message = "Unable to create archive for download, please report this error" - status = 'error' - continue - else: # simple case - try: - archive.add( ldda.dataset.file_name, path ) - except IOError: - error = True - log.exception( "Unable to write %s to temporary library download archive" % ldda.dataset.file_name) - message = "Unable to create archive for download, please report this error" - status = 'error' - if not error: - lname = trans.sa_session.query( trans.app.model.Library ).get( trans.security.decode_id( library_id ) ).name - fname 
= lname.replace( ' ', '_' ) + '_files' - if action == 'zip': - archive.close() - tmpfh = open( tmpf ) - # clean up now - try: - os.unlink( tmpf ) - os.rmdir( tmpd ) - except OSError: - error = True - log.exception( "Unable to remove temporary library download archive and directory" ) - message = "Unable to create archive for download, please report this error" - status = 'error' - if not error: - trans.response.set_content_type( "application/x-zip-compressed" ) - trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext) - return tmpfh - elif action == 'ngxzip': - trans.response.set_content_type( "application/zip" ) - trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext) - trans.response.headers[ "X-Archive-Files" ] = "zip" - return archive - else: - trans.response.set_content_type( "application/x-tar" ) - trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (fname,outext) - archive.wsgi_status = trans.response.wsgi_status() - archive.wsgi_headeritems = trans.response.wsgi_headeritems() - return archive.stream - else: - status = 'error' - message = 'Invalid action ( %s ) specified.' % action - return trans.response.send_redirect( web.url_for( controller='library_common', - action='browse_library', - cntrller=cntrller, - use_panels=use_panels, - id=library_id, - show_deleted=show_deleted, - message=util.sanitize_text( message ), - status=status ) ) + hist_names_str = ", ".join( [ target_history.name for target_history in target_histories ] ) + num_source = len( ldda_ids ) - invalid_datasets + num_target = len( target_histories ) + message = "%i %s have been imported into %i %s: %s" % ( num_source, + inflector.cond_plural( num_source, "dataset" ), + num_target, + inflector.cond_plural( num_target, "history" ), + hist_names_str ) + trans.sa_session.refresh( current_history ) + current_user_roles = trans.get_current_user_roles() + source_lddas = [] + if folder: + for library_dataset in folder.datasets: + ldda = library_dataset.library_dataset_dataset_association + if not ldda.deleted and trans.app.security_agent.can_access_library_item( current_user_roles, ldda, trans.user ): + source_lddas.append( ldda ) + elif ldda_ids: + for ldda_id in ldda_ids: + # Security access permission check is not needed here since the current user had access + # to the lddas in order for the menu option to be available.
+ ldda = trans.sa_session.query( trans.model.LibraryDatasetDatasetAssociation ).get( ldda_id ) + source_lddas.append( ldda ) + target_histories = [ current_history ] + if user: + target_histories = user.active_histories + return trans.fill_template( "/library/common/import_datasets_to_histories.mako", + cntrller=cntrller, + library=library, + current_history=trans.get_history(), + ldda_ids=ldda_ids, + target_history_ids=target_history_ids, + source_lddas=source_lddas, + target_histories=target_histories, + new_history_name=new_history_name, + show_deleted=show_deleted, + use_panels=use_panels, + message=message, + status=status ) @web.expose def manage_template_inheritance( self, trans, cntrller, item_type, library_id, folder_id=None, ldda_id=None, **kwd ): params = util.Params( kwd ) @@ -2064,6 +2170,31 @@ if library.root_folder == folder: return library return None +def get_comptypes( trans ): + comptypes_t = comptypes + if trans.app.config.nginx_x_archive_files_base: + comptypes_t = ['ngxzip'] + for comptype in trans.app.config.disable_library_comptypes: + # TODO: do this once, not every time (we're gonna raise an + # exception every time after the first time) + try: + comptypes_t.remove( comptype ) + except: + pass + return comptypes_t +def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ): + is_admin = trans.user_is_admin() and cntrller == 'library_admin' + if is_admin: + accessible_items = items + else: + # Enforce access permission settings + current_user_roles = trans.get_current_user_roles() + accessible_items = [] + for item in items: + if trans.app.security_agent.can_access_library_item( current_user_roles, item, trans.user ): + accessible_items.append( item ) + # Sort by name + return sort_by_attr( [ item for item in accessible_items ], sort_attr ) def sort_by_attr( seq, attr ): """ Sort the sequence of objects by object's attribute @@ -2079,25 +2210,12 @@ intermed = map( None, map( getattr, seq, ( attr, ) * len( seq ) ), xrange( len( seq ) ), seq ) intermed.sort() return map( operator.getitem, intermed, ( -1, ) * len( intermed ) ) -def get_sorted_accessible_library_items( trans, cntrller, items, sort_attr ): - is_admin = trans.user_is_admin() and cntrller == 'library_admin' - if is_admin: - accessible_items = items - else: - # Enforce access permission settings - current_user_roles = trans.get_current_user_roles() - accessible_items = [] - for item in items: - if trans.app.security_agent.can_access_library_item( current_user_roles, item, trans.user ): - accessible_items.append( item ) - # Sort by name - return sort_by_attr( [ item for item in accessible_items ], sort_attr ) def lucene_search( trans, cntrller, search_term, search_url, **kwd ): """Return display of results from a full-text lucene search of data libraries.""" params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) status = params.get( 'status', 'done' ) - full_url = "%s?%s" % ( search_url, urllib.urlencode( { "kwd" : search_term } ) ) + full_url = "%s/find?%s" % ( search_url, urllib.urlencode( { "kwd" : search_term } ) ) response = urllib2.urlopen( full_url ) ldda_ids = util.json.from_json_string( response.read() )[ "ids" ] response.close() @@ -2115,17 +2233,20 @@ if index_exists: index = whoosh.index.open_dir( whoosh_index_dir ) # Set field boosts for searcher to place equal weight on all search fields. 
- searcher = index.searcher( weighting=BM25F( field_B={ 'name_B' : 3.5, - 'info_B' : 2.3, - 'dbkey_B' : 3.1, - 'message_B' : 2.1, - 'state_B' : 1.2 } ) ) + searcher = index.searcher( weighting=BM25F( field_B={ 'name_B' : 3.4, + 'info_B' : 3.2, + 'dbkey_B' : 3.3, + 'message_B' : 3.5 } ) ) # Perform search - parser = MultifieldParser( [ 'name', 'info', 'dbkey', 'message', 'state' ], schema=schema ) + parser = MultifieldParser( [ 'name', 'info', 'dbkey', 'message' ], schema=schema ) # Search term with wildcards may be slow... - results = searcher.search( parser.parse( '*' + search_term + '*' ), minscore=1.0 ) + results = searcher.search( parser.parse( '*' + search_term + '*' ), minscore=0.1 ) ldda_ids = [ result[ 'id' ] for result in results ] - lddas = [ trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) for ldda_id in ldda_ids ] + lddas = [] + for ldda_id in ldda_ids: + ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id ) + if ldda: + lddas.append( ldda ) lddas = get_sorted_accessible_library_items( trans, cntrller, lddas, 'name' ) else: message = "Tell your Galaxy administrator that the directory %s does not contain valid whoosh indexes" % str( whoosh_index_dir ) --- a/scripts/data_libraries/build_lucene_index.py Mon Jan 31 12:57:42 2011 -0500 +++ b/scripts/data_libraries/build_lucene_index.py Mon Jan 31 16:41:03 2011 -0500 @@ -30,7 +30,7 @@ sa_session, gconfig = get_sa_session( ini_file ) max_size = float( gconfig.get( "fulltext_max_size", 100 ) ) * 1048576 ignore_exts = gconfig.get( "fulltext_noindex_filetypes", "" ).split( "," ) - search_url = gconfig.get( "fulltext_index_url", None ) + search_url = gconfig.get( "fulltext_url", None ) if not search_url: raise ValueError( "Need to specify search functionality in universe_wsgi.ini" ) dataset_file = create_dataset_file( get_lddas( sa_session, max_size, ignore_exts ) ) @@ -41,7 +41,7 @@ os.remove( dataset_file ) def build_index( search_url, dataset_file ): - url = "%s?%s" % ( search_url, urllib.urlencode( { "docfile" : dataset_file } ) ) + url = "%s/index?%s" % ( search_url, urllib.urlencode( { "docfile" : dataset_file } ) ) request = urllib2.Request( url ) request.get_method = lambda: "PUT" response = urllib2.urlopen( request ) @@ -57,21 +57,35 @@ def get_lddas( sa_session, max_size, ignore_exts ): for ldda in sa_session.query( model.LibraryDatasetDatasetAssociation ).filter_by( deleted=False ): - id = ldda.id - name = ldda.name - info = ldda.library_dataset.get_info() - if info and not info.startswith( 'upload' ): - info = info.replace( 'no info', '' ) - else: - info = '' - dbkey = ldda.metadata.dbkey - state = ldda.state - message = ldda.message if ( float( ldda.dataset.get_size() ) > max_size or ldda.extension in ignore_exts ): fname = "" else: fname = ldda.dataset.get_file_name() - yield id, name, info, dbkey, state, message, fname + yield ldda.id, fname, _get_dataset_metadata(ldda) + +def _get_dataset_metadata(ldda): + """Retrieve descriptions and information associated with a dataset. + """ + lds = ldda.library_dataset + folder_info = _get_folder_info(lds.folder) + lds_info = lds.get_info() + if lds_info and not lds_info.startswith("upload"): + lds_info = lds_info.replace("no info", "") + else: + lds_info = "" + return "%s %s %s %s %s" % (lds.name or "", lds_info, ldda.metadata.dbkey, + ldda.message, folder_info) + +def _get_folder_info(folder): + """Get names and descriptions for all parent folders except top level. 
+ """ + folder_info = "" + if folder and folder.parent: + folder_info = _get_folder_info(folder.parent) + folder_info += " %s %s" % ( + folder.name.replace("Unnamed folder", ""), + folder.description or "") + return folder_info def get_sa_session( ini_file ): conf_parser = ConfigParser.ConfigParser( { 'here':os.getcwd() } ) --- a/scripts/data_libraries/build_whoosh_index.py Mon Jan 31 12:57:42 2011 -0500 +++ b/scripts/data_libraries/build_whoosh_index.py Mon Jan 31 16:41:03 2011 -0500 @@ -24,7 +24,7 @@ from whoosh.fields import Schema, STORED, ID, KEYWORD, TEXT from whoosh.index import Index whoosh_search_enabled = True - schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT, state=TEXT ) + schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT ) import galaxy.model.mapping from galaxy import config, model import pkg_resources @@ -43,13 +43,12 @@ else: return a_basestr lddas_indexed = 0 - for id, name, info, dbkey, state, message in get_lddas( sa_session ): + for id, name, info, dbkey, message in get_lddas( sa_session ): writer.add_document( id=id, name=to_unicode( name ), info=to_unicode( info ), dbkey=to_unicode( dbkey ), - message=to_unicode( message ), - state=to_unicode( state ) ) + message=to_unicode( message ) ) lddas_indexed += 1 writer.commit() print "Number of active library datasets indexed: ", lddas_indexed @@ -64,9 +63,8 @@ else: info = '' dbkey = ldda.metadata.dbkey - state = ldda.state message = ldda.message - yield id, name, info, dbkey, state, message + yield id, name, info, dbkey, message def get_sa_session_and_needed_config_settings( ini_file ): conf_parser = ConfigParser.ConfigParser( { 'here' : os.getcwd() } ) --- a/templates/library/common/browse_library.mako Mon Jan 31 12:57:42 2011 -0500 +++ b/templates/library/common/browse_library.mako Mon Jan 31 16:41:03 2011 -0500 @@ -1,6 +1,7 @@ <%namespace file="/message.mako" import="render_msg" /><%namespace file="/library/common/library_item_info.mako" import="render_library_item_info" /><%namespace file="/library/common/common.mako" import="render_actions_on_multiple_items" /> +<%namespace file="/library/common/common.mako" import="common_javascripts" /><%! def inherit(context): @@ -46,6 +47,7 @@ <%def name="javascripts()"> ${parent.javascripts()} ${h.js("class", "jquery.jstore")} + ${common_javascripts()} ${self.grid_javascripts()} </%def> @@ -152,15 +154,6 @@ }); }); - function checkForm() { - if ( $("select#action_on_datasets_select option:selected").text() == "delete" ) { - if ( confirm( "Click OK to delete these datasets?" ) ) { - return true; - } else { - return false; - } - } - } // Looks for changes in dataset state using an async request. Keeps // calling itself (via setTimeout) until all datasets are in a terminal // state. @@ -212,7 +205,7 @@ </script></%def> -<%def name="render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, parent, row_counter, tracked_datasets, show_deleted=False, render_checkboxes=True )"> +<%def name="render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, parent, row_counter, tracked_datasets, show_deleted=False )"><% ## The received ldda must always be a LibraryDatasetDatasetAssociation object. 
The object id passed to methods ## from the drop down menu should be the ldda id to prevent id collision ( which could happen when displaying @@ -248,13 +241,11 @@ %endif id="libraryItem-${ldda.id}"><td style="padding-left: ${pad+20}px;"> - %if render_checkboxes: - <input style="float: left;" type="checkbox" name="ldda_ids" value="${trans.security.encode_id( ldda.id )}" - %if selected: - checked="checked" - %endif - /> + <input style="float: left;" type="checkbox" name="ldda_ids" value="${trans.security.encode_id( ldda.id )}" + %if selected: + checked="checked" %endif + /> %if ldda.library_dataset.deleted: <span class="libraryItem-error"> %endif @@ -288,7 +279,7 @@ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a> %endif %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data: - <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a> + <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into selected histories</a><a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a> %endif %if can_modify: @@ -379,6 +370,7 @@ <a class="action-button" href="${h.url_for( controller='library_common', action='create_folder', cntrller=cntrller, parent_id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add sub-folder</a> %endif %if not branch_deleted( folder ): + <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select folder datasets for import into selected histories</a> %if can_modify: <a class="action-button" href="${h.url_for( controller='library_common', action='folder_info', cntrller=cntrller, id=trans.security.encode_id( folder.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a> %else: @@ -515,6 +507,7 @@ %endif <a class="action-button" href="${h.url_for( controller='library_common', action='library_permissions', cntrller=cntrller, id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a> %endif + <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, 
library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select datasets for import into selected histories</a> %elif can_modify and not library.purged: <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library.id ), item_type='library', use_panels=use_panels )}">Undelete this data library</a> %elif library.purged: @@ -539,7 +532,10 @@ <table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid"><thead><tr class="libraryTitle"> - <th>Name</th> + <th> + <input type="checkbox" id="checkAll" name=select_all_datasets_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_datasets_checkbox value="true"/> + Name + </th><th>Message</th><th>Uploaded By</th><th>Date</th> --- a/templates/library/common/common.mako Mon Jan 31 12:57:42 2011 -0500 +++ b/templates/library/common/common.mako Mon Jan 31 16:41:03 2011 -0500 @@ -1,5 +1,48 @@ <%namespace file="/common/template_common.mako" import="render_template_field" /> +<%def name="common_javascripts()"> + <script type="text/javascript"> + function checkAllFields() + { + var chkAll = document.getElementById('checkAll'); + var checks = document.getElementsByTagName('input'); + var boxLength = checks.length; + var allChecked = false; + var totalChecked = 0; + if ( chkAll.checked == true ) + { + for ( i=0; i < boxLength; i++ ) + { + if ( checks[i].name.indexOf( 'ldda_ids' ) != -1) + { + checks[i].checked = true; + } + } + } + else + { + for ( i=0; i < boxLength; i++ ) + { + if ( checks[i].name.indexOf( 'ldda_ids' ) != -1) + { + checks[i].checked = false + } + } + } + } + + function checkForm() { + if ( $("select#action_on_datasets_select option:selected").text() == "delete" ) { + if ( confirm( "Click OK to delete these datasets?" 
) ) { + return true; + } else { + return false; + } + } + } + </script> +</%def> + <%def name="render_upload_form( cntrller, upload_option, action, library_id, folder_id, replace_dataset, file_formats, dbkeys, space_to_tab, link_data_only, widgets, roles_select_list, history, show_deleted )"><% import os, os.path @@ -319,26 +362,34 @@ %endif </%def> -<%def name="render_actions_on_multiple_items()"> +<%def name="render_actions_on_multiple_items( actions_to_exclude=[] )"> + <% + is_admin = trans.user_is_admin() and cntrller=='library_admin' + can_delete = 'delete' not in actions_to_exclude and is_admin + can_download = 'download' not in actions_to_exclude + can_import_to_histories = 'import_to_histories' not in actions_to_exclude + can_manage_permissions = 'manage_permissions' not in actions_to_exclude + %><tfoot><tr><td colspan="5" style="padding-left: 42px;"> For selected items: <select name="do_action" id="action_on_selected_items"> - %if ( trans.user_is_admin() and cntrller=='library_admin' ): - <option value="manage_permissions">Edit permissions</option> - <option value="delete">Delete</option> - %elif cntrller in ['library', 'library_search']: - %if default_action == 'add': - <option value="add" selected>Import into your current history</option> + %if can_import_to_histories: + %if not is_admin and default_action == 'import_to_histories': + <option value="import_to_histories" selected>Import selected datasets to histories</option> %else: - <option value="add">Import into your current history</option> + <option value="import_to_histories">Import selected datasets to histories</option> %endif - %if default_action == 'manage_permissions': + %endif + %if can_manage_permissions: + %if not is_admin and default_action == 'manage_permissions': <option value="manage_permissions" selected>Edit permissions</option> - # This condition should not contain an else clause because the user is not authorized - # to manage dataset permissions unless the default action is 'manage_permissions' + %else: + <option value="manage_permissions">Edit permissions</option> %endif + %endif + %if can_download: %if 'gz' in comptypes: <option value="tgz" %if default_action == 'download': @@ -361,6 +412,9 @@ >Download as a .zip file</option> %endif %endif + %if can_delete: + <option value="delete">Delete</option> + %endif </select><input type="submit" class="primary-button" name="action_on_datasets_button" id="action_on_datasets_button" value="Go"/></td> --- a/templates/library/common/ldda_info.mako Mon Jan 31 12:57:42 2011 -0500 +++ b/templates/library/common/ldda_info.mako Mon Jan 31 16:41:03 2011 -0500 @@ -66,7 +66,7 @@ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( ldda.library_dataset.folder.id ), replace_id=trans.security.encode_id( ldda.library_dataset.id ) )}">Upload a new version of this dataset</a> %endif %if cntrller=='library' and ldda.has_data(): - <a class="action-button" href="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), do_action='import_to_history', use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into your current history</a> + <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, 
library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Select histories to import this dataset</a><a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Download this dataset</a> %endif %if show_associated_hdas_and_lddas: --- a/templates/library/common/library_dataset_search_results.mako Mon Jan 31 12:57:42 2011 -0500 +++ b/templates/library/common/library_dataset_search_results.mako Mon Jan 31 16:41:03 2011 -0500 @@ -1,5 +1,7 @@ <%namespace file="/message.mako" import="render_msg" /><%namespace file="/library/common/browse_library.mako" import="render_dataset" /> +<%namespace file="/library/common/common.mako" import="render_actions_on_multiple_items" /> +<%namespace file="/library/common/common.mako" import="common_javascripts" /><%! def inherit(context): @@ -44,6 +46,7 @@ <%def name="javascripts()"> ${parent.javascripts()} ${h.js("class", "jquery.jstore")} + ${common_javascripts()} </%def><%def name="render_searched_components()"> @@ -52,7 +55,6 @@ <li>info</li><li>dbkey (genome build)</li><li>message</li> - <li>state</li> %if trans.app.config.enable_lucene_library_search: <li>disk file content</li> %endif @@ -91,34 +93,69 @@ %if lddas: <p>The string "${search_term}" was found in at least one of the following information components of the displayed library datasets.</p> ${render_searched_components()} - <table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid"> - <thead> - <tr class="libraryTitle"> - <th>Name</th> - <th>Message</th> - <th>Uploaded By</th> - <th>Date</th> - <th>File Size</th> - </tr> - </thead> - <% - tracked_datasets = {} - row_counter = RowCounter() - my_row = row_counter.count - %> - %for ldda in lddas: + <form name="act_on_multiple_datasets" action="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, use_panels=use_panels, show_deleted=show_deleted )}" onSubmit="javascript:return checkForm();" method="post"> + <table cellspacing="0" cellpadding="0" border="0" width="100%" class="grid" id="library-grid"> + <thead> + <tr class="libraryTitle"> + <th> + <input type="checkbox" id="checkAll" name=select_all_datasets_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_datasets_checkbox value="true"/> + Name + </th> + <th>Message</th> + <th>Uploaded By</th> + <th>Date</th> + <th>File Size</th> + </tr> + </thead><% - library_dataset = ldda.library_dataset - folder = library_dataset.folder - library = folder.parent_library + tracked_datasets = {} + row_counter = RowCounter() + my_row = row_counter.count %> - ${render_dataset( cntrller, ldda, library_dataset, False, library, folder, 0, my_row, row_counter, tracked_datasets, show_deleted=False, render_checkboxes=False )} - <% - my_row = row_counter.count - row_counter.increment() - %> - %endfor - </table> + %for ldda in lddas: + <% + library_dataset = ldda.library_dataset + folder = library_dataset.folder + library = folder.parent_library + %> + ${render_dataset( cntrller, ldda, library_dataset, False, library, folder, 0, my_row, row_counter, tracked_datasets, show_deleted=False )} + <% + my_row = row_counter.count + row_counter.increment() + %> + %endfor + ${render_actions_on_multiple_items( 
actions_to_exclude=[ 'manage_permissions' ] )} + </table> + </form> + ## Help about compression types + + <div class="libraryItemBody"> + <p class="infomark"> + TIP: You can download individual library files by selecting "Download this dataset" from the context menu (triangle) next to the dataset's name. + </p> + </div> + %if len( comptypes ) > 1: + <div class="libraryItemBody"> + <p class="infomark"> + TIP: Multiple compression options are available for downloading library datasets: + </p> + <ul style="padding-left: 1em; list-style-type: disc;"> + %if 'gz' in comptypes: + <li>gzip: Recommended for fast network connections + %if trans.app.config.upstream_gzip: + NOTE: The file you receive will be an uncompressed .tar file - this is because the Galaxy server compresses it and your browser decompresses it on the fly. + %endif + </li> + %endif + %if 'bz2' in comptypes: + <li>bzip2: Recommended for slower network connections (smaller size but takes longer to compress)</li> + %endif + %if 'zip' in comptypes: + <li>zip: Not recommended but is provided as an option for those who cannot open the above formats</li> + %endif + </ul> + </div> + %endif %elif status != 'error': <p>The string "${search_term}" was not found in any of the following information components for any library datasets that you can access.</p> ${render_searched_components()} --- a/test/base/twilltestcase.py Mon Jan 31 12:57:42 2011 -0500 +++ b/test/base/twilltestcase.py Mon Jan 31 16:41:03 2011 -0500 @@ -2106,6 +2106,13 @@ % ( self.url, cntrller, library_id, ldda_ids, do_action ) ) for check_str in strings_displayed: self.check_page_for_string( check_str ) + def import_datasets_to_histories( self, cntrller, library_id, ldda_ids='', new_history_name='Unnamed history', strings_displayed=[] ): + # Can't use the ~/library_admin/libraries form as twill barfs on it so we'll simulate the form submission + # by going directly to the form action + self.visit_url( '%s/library_common/import_datasets_to_histories?cntrller=%s&library_id=%s&ldda_ids=%s&new_history_name=%s&import_datasets_to_histories_button=Import+library+datasets' \ + % ( self.url, cntrller, library_id, ldda_ids, new_history_name ) ) + for check_str in strings_displayed: + self.check_page_for_string( check_str ) def download_archive_of_library_files( self, cntrller, library_id, ldda_ids, format ): self.home() # Here it would be ideal to have twill set form values and submit the form, but --- a/test/functional/test_library_security.py Mon Jan 31 12:57:42 2011 -0500 +++ b/test/functional/test_library_security.py Mon Jan 31 16:41:03 2011 -0500 @@ -296,11 +296,11 @@ strings_displayed=[ '2.bed', 'This is the latest version of this library dataset', 'Edit attributes of 2.bed' ] ) - self.act_on_multiple_datasets( 'library', - self.security.encode_id( library1.id ), - 'import_to_history', - ldda_ids=self.security.encode_id( ldda2.id ), - strings_displayed=[ '1 dataset(s) have been imported into your history' ] ) + self.import_datasets_to_histories( cntrller='library', + library_id=self.security.encode_id( library1.id ), + ldda_ids=self.security.encode_id( ldda2.id ), + new_history_name='goodbye', + strings_displayed=[ '1 dataset have been imported into 1 history' ] ) self.logout() # regular_user2 should not be able to see ldda2 self.login( email=regular_user2.email ) @@ -382,10 +382,10 @@ def check_edit_page( lddas, strings_displayed=[], strings_not_displayed=[] ): for ldda in lddas: # Import each library dataset into our history - self.act_on_multiple_datasets( 'library', - 
self.security.encode_id( library1.id ), - 'import_to_history', - ldda_ids=self.security.encode_id( ldda.id ) ) + self.import_datasets_to_histories( cntrller='library', + library_id=self.security.encode_id( library1.id ), + ldda_ids=self.security.encode_id( ldda.id ), + new_history_name='hello' ) # Determine the new HistoryDatasetAssociation id created when the library dataset was imported into our history last_hda_created = get_latest_hda() self.edit_hda_attribute_info( str( last_hda_created.id ), @@ -393,8 +393,8 @@ # admin_user is associated with role1, so should have all permissions on imported datasets check_edit_page( latest_3_lddas, strings_displayed=[ 'Manage dataset permissions on', - 'can manage the roles associated with permissions on this dataset', - 'can import this dataset into their history for analysis' ] ) + 'can manage the roles associated with permissions on this dataset', + 'can import this dataset into their history for analysis' ] ) self.logout() # regular_user1 is associated with role1, so should have all permissions on imported datasets self.login( email=regular_user1.email ) --- a/tools/data_source/access_libraries.xml Mon Jan 31 12:57:42 2011 -0500 +++ b/tools/data_source/access_libraries.xml Mon Jan 31 16:41:03 2011 -0500 @@ -2,7 +2,7 @@ <tool name="Access Libraries" id="library_access1"><description>stored locally</description><inputs action="/library/index" method="get" target="_parent"> - <param name="default_action" type="hidden" value="add" /> + <param name="default_action" type="hidden" value="import_to_histories" /></inputs><uihints minwidth="800"/></tool> --- a/universe_wsgi.ini.sample Mon Jan 31 12:57:42 2011 -0500 +++ b/universe_wsgi.ini.sample Mon Jan 31 16:41:03 2011 -0500 @@ -337,18 +337,17 @@ #transfer_manager_port = 8163 # Search data libraries with whoosh -enable_whoosh_library_search = True +#enable_whoosh_library_search = True # Whoosh indexes are stored in this directory. -whoosh_index_dir = database/whoosh_indexes +#whoosh_index_dir = database/whoosh_indexes # Search data libraries with lucene #enable_lucene_library_search = False # maximum file size to index for searching, in MB #fulltext_max_size = 500 #fulltext_noindex_filetypes=bam,sam,wig,bigwig,fasta,fastq,fastqsolexa,fastqillumina,fastqsanger -# base URL of server providing search functionality using whoosh. -#fulltext_index_url = http://localhost:8081/index -#fulltext_find_url = http://localhost:8081/find +# base URL of server providing search functionality using lucene +#fulltext_url = http://localhost:8081 # -- Users and Security Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
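For summary items 2 and 3: with the state field dropped from the schema, a whoosh query now touches only name, info, dbkey, and message. Below is a minimal standalone sketch of the updated whoosh_search() flow, assuming the whoosh version bundled with Galaxy's eggs (whose search() still accepts minscore) and an index previously built by scripts/data_libraries/build_whoosh_index.py under database/whoosh_indexes; the helper name search_library_datasets is hypothetical, while the schema, boosts, and calls mirror the diff above.

import whoosh.index
from whoosh.fields import Schema, STORED, TEXT
from whoosh.qparser import MultifieldParser
from whoosh.scoring import BM25F

# Must match the schema in ~/scripts/data_libraries/build_whoosh_index.py
# ( no 'state' field as of this changeset ).
schema = Schema( id=STORED, name=TEXT, info=TEXT, dbkey=TEXT, message=TEXT )

def search_library_datasets( whoosh_index_dir, search_term ):
    # Hypothetical helper mirroring whoosh_search() in library_common.py.
    index = whoosh.index.open_dir( whoosh_index_dir )
    # Near-equal field boosts place similar weight on all four search fields.
    searcher = index.searcher( weighting=BM25F( field_B={ 'name_B' : 3.4,
                                                          'info_B' : 3.2,
                                                          'dbkey_B' : 3.3,
                                                          'message_B' : 3.5 } ) )
    parser = MultifieldParser( [ 'name', 'info', 'dbkey', 'message' ], schema=schema )
    # Wildcards on both ends keep the search forgiving but may be slow.
    results = searcher.search( parser.parse( '*' + search_term + '*' ), minscore=0.1 )
    # 'id' is the only STORED field, so each hit yields a library dataset id.
    return [ result[ 'id' ] for result in results ]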
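On the lucene side, the former fulltext_index_url and fulltext_find_url settings collapse into the single fulltext_url base: build_lucene_index.py appends /index ( a PUT ) and lucene_search() appends /find ( a GET ). A condensed sketch of both calls follows; the helper names are hypothetical, the URL is the sample default from universe_wsgi.ini.sample, and json.loads stands in for Galaxy's util.json.from_json_string.

import json, urllib, urllib2

FULLTEXT_URL = 'http://localhost:8081'  # single base URL of the lucene search server

def put_index_request( dataset_file ):
    # urllib2 natively issues only GET/POST; overriding get_method turns
    # the request into a PUT, exactly as build_lucene_index.py does.
    url = '%s/index?%s' % ( FULLTEXT_URL, urllib.urlencode( { 'docfile' : dataset_file } ) )
    request = urllib2.Request( url )
    request.get_method = lambda: 'PUT'
    return urllib2.urlopen( request ).read()

def find_library_datasets( search_term ):
    # Mirrors lucene_search(): the server answers with a JSON dict whose
    # 'ids' member lists the matching library dataset ids.
    url = '%s/find?%s' % ( FULLTEXT_URL, urllib.urlencode( { 'kwd' : search_term } ) )
    response = urllib2.urlopen( url )
    ldda_ids = json.loads( response.read() )[ 'ids' ]
    response.close()
    return ldda_ids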
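Finally, the zip branch of act_on_multiple_datasets() ( now also reachable from the search results page ) has a few non-obvious details: the archive lives in a directory from mkdtemp() because zipfile must create the file itself, the fourth ZipFile argument enables Zip64 for archives over 2 GB, and member names are encoded as CP437, the encoding Python 2's zipfile stores most portably. A condensed, standalone sketch of just that branch; make_library_download_zip and its arguments are hypothetical.

import os, tempfile, zipfile

def make_library_download_zip( file_paths, allow_zip64=True ):
    # Can't use mkstemp - the archive file must not exist before ZipFile creates it.
    tmpd = tempfile.mkdtemp()
    tmpf = os.path.join( tmpd, 'library_download.zip' )
    # 4th argument is allowZip64, needed for archives larger than 2 GB.
    archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, allow_zip64 )
    seen = []
    for path in file_paths:
        arcname = os.path.basename( path )
        # Uniquify duplicate member names the same way the controller does.
        while arcname in seen:
            arcname += '_'
        seen.append( arcname )
        archive.write( path, arcname.encode( 'CP437' ) )
    archive.close()
    return tmpf  # caller streams this file back, then unlinks it and removes tmpd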