details: http://www.bx.psu.edu/hg/galaxy/rev/30a175ace572
changeset: 2794:30a175ace572
user: Nate Coraor <nate@bx.psu.edu>
date: Tue Sep 29 12:17:57 2009 -0400
description:
Asynchronous library upload.

11 file(s) affected in this change:

lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/actions/upload_common.py
lib/galaxy/web/controllers/library.py
lib/galaxy/web/controllers/library_admin.py
lib/galaxy/web/controllers/library_dataset.py
lib/galaxy/web/controllers/tool_runner.py
static/june_2007_style/blue/library.css
static/june_2007_style/library.css.tmpl
templates/base_panels.mako
templates/library/library_dataset_common.mako
templates/library/library_item_info.mako

diffs (678 lines):

diff -r 389226e41780 -r 30a175ace572 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Mon Sep 28 18:57:25 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Tue Sep 29 12:17:57 2009 -0400
@@ -15,14 +15,12 @@
         precreated_datasets = upload_common.get_precreated_datasets( trans, incoming, trans.app.model.HistoryDatasetAssociation )
         incoming = upload_common.persist_uploads( incoming )
-        json_file_path, data_list = upload_common.create_paramfile( trans, incoming, precreated_datasets, dataset_upload_inputs )
+        uploaded_datasets = upload_common.get_uploaded_datasets( trans, incoming, precreated_datasets, dataset_upload_inputs )
         upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
-        if not data_list:
-            try:
-                os.remove( json_file_path )
-            except:
-                pass
+        if not uploaded_datasets:
             return 'No data was entered in the upload form, please go back and choose data to upload.'
+        json_file_path = upload_common.create_paramfile( uploaded_datasets )
+        data_list = [ ud.data for ud in uploaded_datasets ]
         return upload_common.create_job( trans, incoming, tool, json_file_path, data_list )
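A note for readers following the refactor: the action above now hands create_paramfile() a list of already-created "uploaded dataset" bunches instead of raw form params, and each bunch carries its new HDA/LDDA as uploaded_dataset.data. A rough, hypothetical sketch of the shape of one such bunch for a plain (non-composite) file upload follows; attribute names come from the helpers changed below, but the literal values and the 'file' type string are illustrative only.

from galaxy.util.bunch import Bunch

# Hypothetical example only: the real bunches are built by the upload tool's
# parameter handling and by upload_common.get_uploaded_datasets(), which also
# attaches the created HDA/LDDA back onto the bunch as uploaded_dataset.data.
uploaded_dataset = Bunch(
    name = 'sample.bed',               # dataset name shown in the history/library
    file_type = 'bed',                 # datatype extension chosen on the form
    dbkey = 'hg18',                    # genome build
    type = 'file',                     # illustrative; composite uploads use 'composite'
    ext = 'bed',
    path = '/tmp/upload_file_XXXXXX',  # temp file persisted by persist_uploads()
    space_to_tab = False,
)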
diff -r 389226e41780 -r 30a175ace572 lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py Mon Sep 28 18:57:25 2009 -0400
+++ b/lib/galaxy/tools/actions/upload_common.py Tue Sep 29 12:17:57 2009 -0400
@@ -31,7 +31,32 @@
         params['files'] = new_files
     return params
-def get_precreated_datasets( trans, params, data_obj ):
+def handle_library_params( trans, params, folder_id, replace_dataset=None ):
+    library_bunch = util.bunch.Bunch()
+    library_bunch.replace_dataset = replace_dataset
+    library_bunch.message = params.get( 'message', '' )
+    # See if we have any template field contents
+    library_bunch.template_field_contents = []
+    template_id = params.get( 'template_id', None )
+    library_bunch.folder = trans.app.model.LibraryFolder.get( folder_id )
+    # We are inheriting the folder's info_association, so we did not
+    # receive any inherited contents, but we may have redirected here
+    # after the user entered template contents ( due to errors ).
+    if template_id not in [ None, 'None' ]:
+        library_bunch.template = trans.app.model.FormDefinition.get( template_id )
+        for field_index in range( len( library_bunch.template.fields ) ):
+            field_name = 'field_%i' % field_index
+            if params.get( field_name, False ):
+                field_value = util.restore_text( params.get( field_name, '' ) )
+                library_bunch.template_field_contents.append( field_value )
+    else:
+        library_bunch.template = None
+    library_bunch.roles = []
+    for role_id in util.listify( params.get( 'roles', [] ) ):
+        library_bunch.roles.append( trans.app.model.Role.get( role_id ) )
+    return library_bunch
+
+def get_precreated_datasets( trans, params, data_obj, controller='root' ):
     """
     Get any precreated datasets (when using asynchronous uploads).
     """
@@ -54,7 +79,7 @@
             else:
                 rval.append( data )
         elif data_obj is trans.app.model.LibraryDatasetDatasetAssociation:
-            if not trans.app.security_agent.can_add_library_item( user, roles, data.library_dataset.folder ):
+            if controller == 'library' and not trans.app.security_agent.can_add_library_item( user, roles, data.library_dataset.folder ):
                 log.error( 'Got a precreated dataset (%s) but this user (%s) is not allowed to write to it' % ( data.id, user.id ) )
             else:
                 rval.append( data )
@@ -78,127 +103,143 @@
         data.state = data.states.ERROR
         data.info = 'No file contents were available.'
-def new_history_upload( trans, uploaded_dataset ):
+def new_history_upload( trans, uploaded_dataset, state=None ):
     hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name,
                                                      extension = uploaded_dataset.file_type,
                                                      dbkey = uploaded_dataset.dbkey,
                                                      history = trans.history,
                                                      create_dataset = True )
-    hda.state = hda.states.QUEUED
+    if state:
+        hda.state = state
+    else:
+        hda.state = hda.states.QUEUED
     hda.flush()
     trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
     permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
     trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
     return hda
-def new_library_upload( trans, uploaded_dataset, replace_dataset, folder,
-                        template, template_field_contents, roles, message ):
-    if replace_dataset:
-        ld = replace_dataset
+def new_library_upload( trans, uploaded_dataset, library_bunch, state=None ):
+    user, roles = trans.get_user_and_roles()
+    if not ( trans.app.security_agent.can_add_library_item( user, roles, library_bunch.folder ) \
+             or trans.user.email in trans.app.config.get( "admin_users", "" ).split( "," ) ):
+        # This doesn't have to be pretty - the only time this should happen is if someone's being malicious.
+        raise Exception( "User is not authorized to add datasets to this library." )
+    if library_bunch.replace_dataset:
+        ld = library_bunch.replace_dataset
     else:
-        ld = trans.app.model.LibraryDataset( folder=folder, name=uploaded_dataset.name )
+        ld = trans.app.model.LibraryDataset( folder=library_bunch.folder, name=uploaded_dataset.name )
         ld.flush()
-        trans.app.security_agent.copy_library_permissions( folder, ld )
+        trans.app.security_agent.copy_library_permissions( library_bunch.folder, ld )
     ldda = trans.app.model.LibraryDatasetDatasetAssociation( name = uploaded_dataset.name,
                                                              extension = uploaded_dataset.file_type,
                                                              dbkey = uploaded_dataset.dbkey,
                                                              library_dataset = ld,
                                                              user = trans.user,
                                                              create_dataset = True )
-    ldda.state = ldda.states.QUEUED
-    ldda.message = message
+    if state:
+        ldda.state = state
+    else:
+        ldda.state = ldda.states.QUEUED
+    ldda.message = library_bunch.message
     ldda.flush()
     # Permissions must be the same on the LibraryDatasetDatasetAssociation and the associated LibraryDataset
     trans.app.security_agent.copy_library_permissions( ld, ldda )
-    if replace_dataset:
+    if library_bunch.replace_dataset:
         # Copy the Dataset level permissions from replace_dataset to the new LibraryDatasetDatasetAssociation.dataset
-        trans.app.security_agent.copy_dataset_permissions( replace_dataset.library_dataset_dataset_association.dataset, ldda.dataset )
+        trans.app.security_agent.copy_dataset_permissions( library_bunch.replace_dataset.library_dataset_dataset_association.dataset, ldda.dataset )
     else:
         # Copy the current user's DefaultUserPermissions to the new LibraryDatasetDatasetAssociation.dataset
         trans.app.security_agent.set_all_dataset_permissions( ldda.dataset, trans.app.security_agent.user_get_default_permissions( trans.user ) )
-    folder.add_library_dataset( ld, genome_build=uploaded_dataset.dbkey )
-    folder.flush()
+    library_bunch.folder.add_library_dataset( ld, genome_build=uploaded_dataset.dbkey )
+    library_bunch.folder.flush()
     ld.library_dataset_dataset_association_id = ldda.id
     ld.flush()
     # Handle template included in the upload form, if any
-    if template and template_field_contents:
+    if library_bunch.template and library_bunch.template_field_contents:
         # Since information templates are inherited, the template fields can be displayed on the upload form.
         # If the user has added field contents, we'll need to create a new form_values and info_association
        # for the new library_dataset_dataset_association object.
         # Create a new FormValues object, using the template we previously retrieved
-        form_values = trans.app.model.FormValues( template, template_field_contents )
+        form_values = trans.app.model.FormValues( library_bunch.template, library_bunch.template_field_contents )
         form_values.flush()
         # Create a new info_association between the current ldda and form_values
-        info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( ldda, template, form_values )
+        info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( ldda, library_bunch.template, form_values )
         info_association.flush()
     # If roles were selected upon upload, restrict access to the Dataset to those roles
-    if roles:
-        for role in roles:
+    if library_bunch.roles:
+        for role in library_bunch.roles:
             dp = trans.app.model.DatasetPermissions( trans.app.security_agent.permitted_actions.DATASET_ACCESS.action, ldda.dataset, role )
             dp.flush()
     return ldda
-def create_paramfile( trans, params, precreated_datasets, dataset_upload_inputs,
-                      replace_dataset=None, folder=None, template=None,
-                      template_field_contents=None, roles=None, message=None ):
+def new_upload( trans, uploaded_dataset, library_bunch=None, state=None ):
+    if library_bunch:
+        return new_library_upload( trans, uploaded_dataset, library_bunch, state )
+    else:
+        return new_history_upload( trans, uploaded_dataset, state )
+
+def get_uploaded_datasets( trans, params, precreated_datasets, dataset_upload_inputs, library_bunch=None ):
+    uploaded_datasets = []
+    for dataset_upload_input in dataset_upload_inputs:
+        uploaded_datasets.extend( dataset_upload_input.get_uploaded_datasets( trans, params ) )
+    for uploaded_dataset in uploaded_datasets:
+        data = get_precreated_dataset( precreated_datasets, uploaded_dataset.name )
+        if not data:
+            data = new_upload( trans, uploaded_dataset, library_bunch )
+        else:
+            data.extension = uploaded_dataset.file_type
+            data.dbkey = uploaded_dataset.dbkey
+            data.flush()
+            if library_bunch:
+                library_bunch.folder.genome_build = uploaded_dataset.dbkey
+                library_bunch.folder.flush()
+            else:
+                trans.history.genome_build = uploaded_dataset.dbkey
+        uploaded_dataset.data = data
+    return uploaded_datasets
+
+def create_paramfile( uploaded_datasets ):
     """
     Create the upload tool's JSON "param" file.
     """
-    data_list = []
     json_file = tempfile.mkstemp()
     json_file_path = json_file[1]
     json_file = os.fdopen( json_file[0], 'w' )
-    for dataset_upload_input in dataset_upload_inputs:
-        uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, params )
-        for uploaded_dataset in uploaded_datasets:
-            data = get_precreated_dataset( precreated_datasets, uploaded_dataset.name )
-            if not data:
-                if folder:
-                    data = new_library_upload( trans, uploaded_dataset, replace_dataset, folder, template, template_field_contents, roles, message )
-                else:
-                    data = new_history_upload( trans, uploaded_dataset )
-            else:
-                data.extension = uploaded_dataset.file_type
-                data.dbkey = uploaded_dataset.dbkey
-                data.flush()
-                if folder:
-                    folder.genome_build = uploaded_dataset.dbkey
-                    folder.flush()
-                else:
-                    trans.history.genome_build = uploaded_dataset.dbkey
-            if uploaded_dataset.type == 'composite':
-                # we need to init metadata before the job is dispatched
-                data.init_meta()
-                for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
-                    setattr( data.metadata, meta_name, meta_value )
-                data.flush()
-                json = dict( file_type = uploaded_dataset.file_type,
-                             dataset_id = data.dataset.id,
-                             dbkey = uploaded_dataset.dbkey,
-                             type = uploaded_dataset.type,
-                             metadata = uploaded_dataset.metadata,
-                             primary_file = uploaded_dataset.primary_file,
-                             extra_files_path = data.extra_files_path,
-                             composite_file_paths = uploaded_dataset.composite_files,
-                             composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
-            else:
-                try:
-                    is_binary = uploaded_dataset.datatype.is_binary
-                except:
-                    is_binary = None
-                json = dict( file_type = uploaded_dataset.file_type,
-                             ext = uploaded_dataset.ext,
-                             name = uploaded_dataset.name,
-                             dataset_id = data.dataset.id,
-                             dbkey = uploaded_dataset.dbkey,
-                             type = uploaded_dataset.type,
-                             is_binary = is_binary,
-                             space_to_tab = uploaded_dataset.space_to_tab,
-                             path = uploaded_dataset.path )
-            json_file.write( to_json_string( json ) + '\n' )
-            data_list.append( data )
+    for uploaded_dataset in uploaded_datasets:
+        data = uploaded_dataset.data
+        if uploaded_dataset.type == 'composite':
+            # we need to init metadata before the job is dispatched
+            data.init_meta()
+            for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
+                setattr( data.metadata, meta_name, meta_value )
+            data.flush()
+            json = dict( file_type = uploaded_dataset.file_type,
+                         dataset_id = data.dataset.id,
+                         dbkey = uploaded_dataset.dbkey,
+                         type = uploaded_dataset.type,
+                         metadata = uploaded_dataset.metadata,
+                         primary_file = uploaded_dataset.primary_file,
+                         extra_files_path = data.extra_files_path,
+                         composite_file_paths = uploaded_dataset.composite_files,
+                         composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+        else:
+            try:
+                is_binary = uploaded_dataset.datatype.is_binary
+            except:
+                is_binary = None
+            json = dict( file_type = uploaded_dataset.file_type,
+                         ext = uploaded_dataset.ext,
+                         name = uploaded_dataset.name,
+                         dataset_id = data.dataset.id,
+                         dbkey = uploaded_dataset.dbkey,
+                         type = uploaded_dataset.type,
+                         is_binary = is_binary,
+                         space_to_tab = uploaded_dataset.space_to_tab,
+                         path = uploaded_dataset.path )
+        json_file.write( to_json_string( json ) + '\n' )
     json_file.close()
-    return ( json_file_path, data_list )
+    return json_file_path
 def create_job( trans, params, tool, json_file_path, data_list, folder=None ):
     """
diff -r 389226e41780 -r 30a175ace572 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Mon Sep 28 18:57:25 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Tue Sep 29 12:17:57 2009 -0400
@@ -114,8 +114,11 @@
                                                               messagetype='error' ) )
         created_ldda_ids = params.get( 'created_ldda_ids', '' )
         hidden_folder_ids = util.listify( util.restore_text( params.get( 'hidden_folder_ids', '' ) ) )
+        if created_ldda_ids and not msg:
+            msg = "%d datasets are now uploading in the background to the library '%s' ( each is selected ). Please do not navigate away from Galaxy or use the browser's \"stop\" or \"reload\" buttons (on this tab) until the upload(s) change from the \"uploading\" state." % ( len( created_ldda_ids.split(',') ), library.name )
+            messagetype = "info"
         return trans.fill_template( '/library/browse_library.mako',
-                                    library=trans.app.model.Library.get( id ),
+                                    library=library,
                                     created_ldda_ids=created_ldda_ids,
                                     hidden_folder_ids=hidden_folder_ids,
                                     default_action=params.get( 'default_action', None ),
@@ -716,7 +719,7 @@
                                                                   messagetype=messagetype )
        if trans.app.security_agent.can_add_library_item( user, roles, folder ) or \
           ( replace_dataset and trans.app.security_agent.can_modify_library_item( user, roles, replace_dataset ) ):
-            if params.get( 'runtool_btn', False ):
+            if params.get( 'runtool_btn', False ) or params.get( 'ajax_upload', False ):
                 # See if we have any inherited templates, but do not inherit contents.
                 info_association, inherited = folder.get_info_association( inherited=True )
                 if info_association:
diff -r 389226e41780 -r 30a175ace572 lib/galaxy/web/controllers/library_admin.py
--- a/lib/galaxy/web/controllers/library_admin.py Mon Sep 28 18:57:25 2009 -0400
+++ b/lib/galaxy/web/controllers/library_admin.py Tue Sep 29 12:17:57 2009 -0400
@@ -49,8 +49,11 @@
                                                               msg=util.sanitize_text( msg ),
                                                               messagetype='error' ) )
         created_ldda_ids = params.get( 'created_ldda_ids', '' )
+        if created_ldda_ids and not msg:
+            msg = "%d datasets are now uploading in the background to the library '%s' ( each is selected ). Please do not navigate away from Galaxy or use the browser's \"stop\" or \"reload\" buttons (on this tab) until the upload(s) change from the \"uploading\" state." % ( len( created_ldda_ids.split(',') ), library.name )
+            messagetype = "info"
         return trans.fill_template( '/admin/library/browse_library.mako',
-                                    library=trans.app.model.Library.get( id ),
+                                    library=library,
                                     deleted=deleted,
                                     created_ldda_ids=created_ldda_ids,
                                     forms=get_all_forms( trans, filter=dict(deleted=False) ),
@@ -428,7 +431,7 @@
         # The built-in 'id' is overwritten in lots of places as well
         ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
         ldatatypes.sort()
-        if params.get( 'runtool_btn', False ):
+        if params.get( 'runtool_btn', False ) or params.get( 'ajax_upload', False ):
            # See if we have any inherited templates, but do not inherit contents.
            info_association, inherited = folder.get_info_association( inherited=True )
            if info_association:
diff -r 389226e41780 -r 30a175ace572 lib/galaxy/web/controllers/library_dataset.py
--- a/lib/galaxy/web/controllers/library_dataset.py Mon Sep 28 18:57:25 2009 -0400
+++ b/lib/galaxy/web/controllers/library_dataset.py Tue Sep 29 12:17:57 2009 -0400
@@ -32,7 +32,6 @@
                 #"force_history_refresh": force_history_refresh
                 }
         return rval
-    @web.expose
     def upload_dataset( self, trans, controller, library_id, folder_id, replace_dataset=None, **kwd ):
         # Set up the traditional tool state/params
         tool_id = 'upload1'
@@ -46,6 +45,7 @@
                 dataset_upload_inputs.append( input )
         # Library-specific params
         params = util.Params( kwd ) # is this filetoolparam safe?
+        library_bunch = upload_common.handle_library_params( trans, params, folder_id, replace_dataset )
         msg = util.restore_text( params.get( 'msg', '' ) )
         messagetype = params.get( 'messagetype', 'done' )
         server_dir = util.restore_text( params.get( 'server_dir', '' ) )
@@ -53,25 +53,8 @@
             replace_id = replace_dataset.id
         else:
             replace_id = None
-        message = params.get( 'message', '' )
         upload_option = params.get( 'upload_option', 'upload_file' )
         err_redirect = False
-        # See if we have any template field contents
-        template_field_contents = []
-        template_id = params.get( 'template_id', None )
-        folder = trans.app.model.LibraryFolder.get( folder_id )
-        # We are inheriting the folder's info_association, so we did not
-        # receive any inherited contents, but we may have redirected here
-        # after the user entered template contents ( due to errors ).
-        if template_id not in [ None, 'None' ]:
-            template = trans.app.model.FormDefinition.get( template_id )
-            for field_index in range( len( template.fields ) ):
-                field_name = 'field_%i' % field_index
-                if params.get( field_name, False ):
-                    field_value = util.restore_text( params.get( field_name, '' ) )
-                    template_field_contents.append( field_value )
-        else:
-            template = None
         if upload_option == 'upload_directory':
             if server_dir in [ None, 'None', '' ]:
                 err_redirect = True
@@ -90,25 +73,18 @@
                     msg = 'Select a directory'
                 else:
                     msg = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
-        roles = []
-        for role_id in util.listify( params.get( 'roles', [] ) ):
-            roles.append( trans.app.model.Role.get( role_id ) )
         # Proceed with (mostly) regular upload processing
-        precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.HistoryDatasetAssociation )
+        precreated_datasets = upload_common.get_precreated_datasets( trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=controller )
         if upload_option == 'upload_file':
             tool_params = upload_common.persist_uploads( tool_params )
-            json_file_path, data_list = upload_common.create_paramfile( trans, tool_params, precreated_datasets, dataset_upload_inputs, replace_dataset, folder, template, template_field_contents, roles, message )
+            uploaded_datasets = upload_common.get_uploaded_datasets( trans, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch )
         elif upload_option == 'upload_directory':
-            json_file_path, data_list = self.create_server_dir_paramfile( trans, params, full_dir, import_dir_desc, folder, template, template_field_contents, roles, message, err_redirect, msg )
+            uploaded_datasets = self.get_server_dir_uploaded_datasets( trans, params, full_dir, import_dir_desc, library_bunch, err_redirect, msg )
         upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
-        if upload_option == 'upload_file' and not data_list:
+        if upload_option == 'upload_file' and not uploaded_datasets:
             msg = 'Select a file, enter a URL or enter text'
             err_redirect = True
         if err_redirect:
-            try:
-                os.remove( json_file_path )
-            except:
-                pass
             trans.response.send_redirect( web.url_for( controller=controller,
                                                        action='library_dataset_dataset_association',
                                                        library_id=library_id,
@@ -117,12 +93,10 @@
                                                        upload_option=upload_option,
                                                        msg=util.sanitize_text( msg ),
                                                        messagetype='error' ) )
-        return upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=folder )
-    def create_server_dir_paramfile( self, trans, params, full_dir, import_dir_desc, folder, template,
-                                     template_field_contents, roles, message, err_redirect, msg ):
-        """
-        Create JSON param file for the upload tool when using the server_dir upload.
-        """
+        json_file_path = upload_common.create_paramfile( uploaded_datasets )
+        data_list = [ ud.data for ud in uploaded_datasets ]
+        return upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder )
+    def get_server_dir_uploaded_datasets( self, trans, params, full_dir, import_dir_desc, library_bunch, err_redirect, msg ):
         files = []
         try:
             for entry in os.listdir( full_dir ):
@@ -132,34 +106,24 @@
         except Exception, e:
             msg = "Unable to get file list for configured %s, error: %s" % ( import_dir_desc, str( e ) )
             err_redirect = True
-            return ( None, None )
+            return None
         if not files:
             msg = "The directory '%s' contains no valid files" % full_dir
             err_redirect = True
-            return ( None, None )
-        data_list = []
-        json_file = tempfile.mkstemp()
-        json_file_path = json_file[1]
-        json_file = os.fdopen( json_file[0], 'w' )
+            return None
+        uploaded_datasets = []
         for file in files:
-            full_file = os.path.join( full_dir, file )
-            if not os.path.isfile( full_file ):
+            library_bunch.replace_dataset = None
+            uploaded_dataset = util.bunch.Bunch()
+            uploaded_dataset.path = os.path.join( full_dir, file )
+            if not os.path.isfile( uploaded_dataset.path ):
                 continue
-            uploaded_dataset = util.bunch.Bunch()
+            uploaded_dataset.type = 'server_dir'
             uploaded_dataset.name = file
+            uploaded_dataset.ext = None
             uploaded_dataset.file_type = params.file_type
             uploaded_dataset.dbkey = params.dbkey
-            data = upload_common.new_library_upload( trans, uploaded_dataset, None, folder, template, template_field_contents, roles, message )
-            json = dict( file_type = uploaded_dataset.file_type,
-                         ext = None,
-                         name = uploaded_dataset.name,
-                         dataset_id = data.dataset.id,
-                         dbkey = uploaded_dataset.dbkey,
-                         type = 'server_dir',
-                         is_binary = None,
-                         space_to_tab = params.space_to_tab,
-                         path = full_file )
-            json_file.write( to_json_string( json ) + '\n' )
-            data_list.append( data )
-        json_file.close()
-        return ( json_file_path, data_list )
+            uploaded_dataset.space_to_tab = params.space_to_tab
+            uploaded_dataset.data = upload_common.new_upload( trans, uploaded_dataset, library_bunch )
+            uploaded_datasets.append( uploaded_dataset )
+        return uploaded_datasets
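A note on the "param" file both controllers above now delegate to upload_common.create_paramfile(): it describes each pending dataset as one JSON object per line, which the upload tool later reads back to perform the actual transfer. The sketch below is a hypothetical reader for that layout; the real code writes it with Galaxy's to_json_string helper, so the use of the standard json module and the function name here are assumptions for illustration only.

import json

def read_upload_paramfile( json_file_path ):
    # Sketch only: each non-empty line is one dataset description, with keys
    # such as 'file_type', 'dbkey', 'dataset_id', 'type' and 'path' (or the
    # composite-file keys for composite datatypes).
    dataset_params = []
    for line in open( json_file_path ):
        line = line.strip()
        if line:
            dataset_params.append( json.loads( line ) )
    return dataset_params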
diff -r 389226e41780 -r 30a175ace572 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Mon Sep 28 18:57:25 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Tue Sep 29 12:17:57 2009 -0400
@@ -6,6 +6,7 @@
 from galaxy.util.bunch import Bunch
 from galaxy.tools import DefaultToolState
 from galaxy.tools.parameters.basic import UnvalidatedValue
+from galaxy.tools.actions import upload_common
 import logging
 log = logging.getLogger( __name__ )
@@ -137,20 +138,24 @@
         Precreate datasets for asynchronous uploading.
         """
         permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
-        def create_dataset( name, history ):
-            data = trans.app.model.HistoryDatasetAssociation( create_dataset = True )
-            data.name = name
-            data.state = data.states.UPLOAD
-            data.history = history
-            data.flush()
-            history.add_dataset( data )
-            trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
-            return data
+        def create_dataset( name ):
+            ud = Bunch( name=name, file_type=None, dbkey=None )
+            # Okay, time to make this crap actually use the upload_common functions, which means making them get called from outside the json_paramfile method.
+            if nonfile_params.get( 'folder_id', False ):
+                replace_id = nonfile_params.get( 'replace_id', None )
+                if replace_id not in [ None, 'None' ]:
+                    replace_dataset = trans.app.model.LibraryDataset.get( int( replace_id ) )
+                else:
+                    replace_dataset = None
+                library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
+            else:
+                library_bunch = None
+            return upload_common.new_upload( trans, ud, library_bunch=library_bunch, state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )
         tool = self.get_toolbox().tools_by_id.get( tool_id, None )
         if not tool:
             return False # bad tool_id
-        #params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
-        if "tool_state" in kwd:
+        nonfile_params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+        if kwd.get( 'tool_state', None ) not in ( None, 'None' ):
             encoded_state = util.string_to_object( kwd["tool_state"] )
             tool_state = DefaultToolState()
             tool_state.decode( encoded_state, tool, trans.app )
@@ -167,7 +172,7 @@
             d_type = dataset_upload_input.get_datatype( trans, kwd )
             if d_type.composite_type is not None:
-                datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ), trans.history ) )
+                datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ) ) )
             else:
                 params = Bunch( ** tool_state.inputs[dataset_upload_input.name][0] )
                 if params.file_data not in [ None, "" ]:
@@ -176,7 +181,7 @@
                         name = name.rsplit('/',1)[1]
                     if name.count('\\'):
                         name = name.rsplit('\\',1)[1]
-                    datasets.append( create_dataset( name, trans.history ) )
+                    datasets.append( create_dataset( name ) )
                 if params.url_paste not in [ None, "" ]:
                     url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
                     url = False
@@ -186,13 +191,13 @@
                             continue
                         elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
                             url = True
-                            datasets.append( create_dataset( line, trans.history ) )
+                            datasets.append( create_dataset( line ) )
                         else:
                             if url:
                                 continue # non-url when we've already processed some urls
                             else:
                                 # pasted data
-                                datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
+                                datasets.append( create_dataset( 'Pasted Entry' ) )
                                 break
         if datasets:
             trans.model.flush()
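The tool_runner change above is what makes library uploads asynchronous: upload_async_create() now precreates placeholder datasets in the new 'upload' state through upload_common.new_upload(), building a library_bunch when the posted form carries a folder_id. The following is only a condensed restatement of the new create_dataset() closure from the hunk above, with error handling omitted; it is not runnable on its own because trans, nonfile_params, Bunch and upload_common all come from the surrounding controller.

def create_dataset( name ):
    # Placeholder for one pending upload; real metadata arrives when the job runs.
    ud = Bunch( name=name, file_type=None, dbkey=None )
    library_bunch = None
    if nonfile_params.get( 'folder_id', False ):
        # Library upload: resolve the optional replace target, then bundle the
        # folder, template and role selections into a library_bunch.
        replace_id = nonfile_params.get( 'replace_id', None )
        replace_dataset = None
        if replace_id not in [ None, 'None' ]:
            replace_dataset = trans.app.model.LibraryDataset.get( int( replace_id ) )
        library_bunch = upload_common.handle_library_params( trans, nonfile_params, nonfile_params.folder_id, replace_dataset )
    # The 'upload' state is what the templates below render as "uploading".
    return upload_common.new_upload( trans, ud, library_bunch=library_bunch,
                                     state=trans.app.model.HistoryDatasetAssociation.states.UPLOAD )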
diff -r 389226e41780 -r 30a175ace572 static/june_2007_style/blue/library.css
--- a/static/june_2007_style/blue/library.css Mon Sep 28 18:57:25 2009 -0400
+++ b/static/june_2007_style/blue/library.css Tue Sep 29 12:17:57 2009 -0400
@@ -18,3 +18,4 @@
 .libraryItem-error{margin-right:2px;padding:0 2px 0 2px;border:1px solid #AA6666;background:#FFCCCC;}
 .libraryItem-queued{margin-right:2px;padding:0 2px 0 2px;border:1px solid #888888;background:#EEEEEE;}
 .libraryItem-running{margin-right:2px;padding:0 2px 0 2px;border:1px solid #AAAA66;background:#FFFFCC;}
+.libraryItem-upload{margin-right:2px;padding:0 2px 0 2px;border:1px solid #6666AA;background:#CCCCFF;}
diff -r 389226e41780 -r 30a175ace572 static/june_2007_style/library.css.tmpl
--- a/static/june_2007_style/library.css.tmpl Mon Sep 28 18:57:25 2009 -0400
+++ b/static/june_2007_style/library.css.tmpl Tue Sep 29 12:17:57 2009 -0400
@@ -109,3 +109,9 @@
     background: $history_running_bg;
 }
 
+.libraryItem-upload {
+    margin-right: 2px;
+    padding: 0 2px 0 2px;
+    border: 1px solid $history_upload_border;
+    background: $history_upload_bg;
+}
diff -r 389226e41780 -r 30a175ace572 templates/base_panels.mako
--- a/templates/base_panels.mako Mon Sep 28 18:57:25 2009 -0400
+++ b/templates/base_panels.mako Tue Sep 29 12:17:57 2009 -0400
@@ -70,12 +70,28 @@
         ## Handle AJAX (actually hidden iframe) upload tool
         <![if !IE]>
         <script type="text/javascript">
+        var upload_form_error = function( msg ) {
+            if ( ! $("iframe#galaxy_main").contents().find("body").find("div[name='upload_error']").size() ) {
+                $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage" name="upload_error">' + msg + '</div><p/>' );
+            } else {
+                $("iframe#galaxy_main").contents().find("body").find("div[name='upload_error']").text( msg );
+            }
+        }
         jQuery( function() {
             $("iframe#galaxy_main").load( function() {
                 $(this).contents().find("form").each( function() {
                     if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
                         $(this).submit( function() {
-                            var error_set = false;
+                            // Only bother using a hidden iframe if there's a file (e.g. big data) upload
+                            var file_upload = false;
+                            $(this).find("input[galaxy-ajax-upload]").each( function() {
+                                if ( $(this).val() != '' ) {
+                                    file_upload = true;
+                                }
+                            });
+                            if ( ! file_upload ) {
+                                return true;
+                            }
                             // Make a synchronous request to create the datasets first
                             var async_datasets;
                             $.ajax( {
@@ -87,10 +103,7 @@
                                 success: function( d, s ) { async_datasets = d.join() }
                             } );
                             if (async_datasets == '') {
-                                if (! error_set) {
-                                    $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' );
-                                    error_set = true;
-                                }
+                                upload_form_error( 'No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.' );
                                 return false;
                             } else {
                                 $(this).find("input[name=async_datasets]").val( async_datasets );
@@ -98,7 +111,16 @@
                             }
                             // iframe submit is required for nginx (otherwise the encoding is wrong)
                             $(this).ajaxSubmit( { iframe: true } );
-                            $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
+                            if ( $(this).find("input[name='folder_id']").val() != undefined ) {
+                                var library_id = $(this).find("input[name='library_id']").val();
+                                if ( location.pathname.indexOf( 'library_admin' ) ) {
+                                    $("iframe#galaxy_main").attr("src","${h.url_for(controller='library_admin', action='browse_library' )}?id=" + library_id + "&created_ldda_ids=" + async_datasets);
+                                } else {
+                                    $("iframe#galaxy_main").attr("src","${h.url_for(controller='library', action='browse_library' )}?id=" + library_id + "&created_ldda_ids=" + async_datasets);
+                                }
+                            } else {
+                                $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
+                            }
                             return false;
                         });
                     }
diff -r 389226e41780 -r 30a175ace572 templates/library/library_dataset_common.mako
--- a/templates/library/library_dataset_common.mako Mon Sep 28 18:57:25 2009 -0400
+++ b/templates/library/library_dataset_common.mako Tue Sep 29 12:17:57 2009 -0400
@@ -8,9 +8,10 @@
         <div class="toolFormTitle">Upload a directory of files</div>
     %endif
     <div class="toolFormBody">
-        <form name="upload_library_dataset" action="${h.url_for( controller=controller, action='library_dataset_dataset_association', library_id=library_id )}" enctype="multipart/form-data" method="post">
-            <input type="hidden" name="tool_id" value="upload_library_dataset"/>
+        <form name="upload_library_dataset" action="${h.url_for( controller=controller, action='library_dataset_dataset_association' )}" enctype="multipart/form-data" method="post">
+            <input type="hidden" name="tool_id" value="upload1"/>
             <input type="hidden" name="tool_state" value="None"/>
+            <input type="hidden" name="library_id" value="${library_id}"/>
             <input type="hidden" name="folder_id" value="${folder_id}"/>
             <input type="hidden" name="upload_option" value="${upload_option}"/>
             %if replace_dataset not in [ None, 'None' ]:
@@ -40,8 +41,7 @@
             <div class="form-row">
                 <label>File:</label>
                 <div class="form-row-input">
-                    ##<input type="file" name="files_0|file_data" galaxy-ajax-upload="true"/>
-                    <input type="file" name="files_0|file_data"/>
+                    <input type="file" name="files_0|file_data" galaxy-ajax-upload="true"/>
                 </div>
                 <div style="clear: both"></div>
             </div>
diff -r 389226e41780 -r 30a175ace572 templates/library/library_item_info.mako
--- a/templates/library/library_item_info.mako Mon Sep 28 18:57:25 2009 -0400
+++ b/templates/library/library_item_info.mako Tue Sep 29 12:17:57 2009 -0400
@@ -5,6 +5,8 @@
         <div class="libraryItem-${ldda.state}">This job is queued</div>
     %elif ldda.state == 'running':
         <div class="libraryItem-${ldda.state}">This job is running</div>
+    %elif ldda.state == 'upload':
+        <div class="libraryItem-${ldda.state}">This dataset is uploading</div>
     %else:
         ${ldda.message}
     %endif