[hg] galaxy 2433: Allow the uploading of composite datatypes. A...
details:   http://www.bx.psu.edu/hg/galaxy/rev/73a8b43f1d97
changeset: 2433:73a8b43f1d97
user:      Dan Blankenberg <dan@bx.psu.edu>
date:      Mon Jun 08 12:49:26 2009 -0400
description:
Allow the uploading of composite datatypes.

A new grouping parameter, UploadDataset, contains and processes the file_data/url_paste/space_to_tab fields used to upload a file; when uploading a composite datatype, multiple sets of these fields are displayed (similar to a repeat). Composite files can now be declared in the datatypes registry (required for proper uploading), but they are stored in the same manner as before (under the extra_files_path), so this should be backwards compatible.

When uploading a composite datatype, only one dataset can be uploaded at a time. The ability to upload multiple datasets (url_paste contents or urls, plus file_data) for non-composite datatypes remains unchanged. A more structured way of storing these files (rather than dumping them into a directory) is worth considering.

15 file(s) affected in this change:

lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/genetics.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/__init__.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/tools/parameters/grouping.py
lib/galaxy/tools/parameters/validation.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/tool_runner.py
lib/galaxy/web/form_builder.py
templates/base_panels.mako
templates/tool_form.mako
tools/data_source/upload.xml
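For illustration only (not part of this changeset): a minimal sketch of how a datatype would use the composite-file API added below. The class name, extension, file names, and the import path for MetadataElement are assumptions; the add_composite_file / substitute_name_with_metadata calls mirror the Lped and Pbed declarations in lib/galaxy/datatypes/genetics.py.

    # Hypothetical composite datatype built on the API this changeset adds.
    from galaxy.datatypes.data import Data
    from galaxy.datatypes.metadata import MetadataElement

    class ExampleComposite( Data ):
        """One generated primary (index) file plus named component files."""
        MetadataElement( name="base_name", desc="base name for this dataset", default="galaxy", readonly=True )
        file_ext = "example"
        composite_type = 'auto_primary_file' #Galaxy generates the primary file itself
        def __init__( self, **kwd ):
            Data.__init__( self, **kwd )
            #'%s' is filled in from dataset.metadata.base_name at upload time
            self.add_composite_file( '%s.data', description = 'Data File', substitute_name_with_metadata = 'base_name' )
            self.add_composite_file( '%s.extra', description = 'Extra File', substitute_name_with_metadata = 'base_name', optional = True )
        def generate_primary_file( self, dataset = None ):
            #minimal index page listing the component files, as Rgenetics does below
            links = [ '<li><a href="%s">%s</a>' % ( name, name ) for name in self.get_composite_files( dataset = dataset ).keys() ]
            return '<html><head></head><body><ul>\n%s\n</ul></body></html>' % "\n".join( links )

The upload form then shows one set of upload fields per entry in writable_files, and required-versus-optional is enforced when the upload runs.

diffs (1641 lines):

diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Mon Jun 08 12:49:26 2009 -0400
@@ -1,5 +1,7 @@
 import logging, os, sys, time, sets, tempfile
 from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.util.bunch import Bunch
 from cgi import escape
 import metadata
 from metadata import MetadataElement #import directly to maintain ease of use in Datatype class definitions
@@ -48,10 +50,16 @@
     """If False, the peek is regenerated whenever a dataset of this type is copied"""
     copy_safe_peek = True
+    #Composite datatypes
+    composite_type = None
+    composite_files = odict()
+    primary_file_name = 'index'
+
     def __init__(self, **kwd):
         """Initialize the datatype"""
         object.__init__(self, **kwd)
         self.supported_display_apps = self.supported_display_apps.copy()
+        self.composite_files = self.composite_files.copy()
     def write_from_stream(self, dataset, stream):
         """Writes data from a stream"""
         fd = open(dataset.file_name, 'wb')
@@ -242,7 +250,49 @@
     def after_edit( self, dataset ):
         """This function is called on the dataset after metadata is edited."""
         dataset.clear_associated_files( metadata_safe = True )
-
+    def __new_composite_file( self, optional = False, mimetype = None, description = None, substitute_name_with_metadata = None, **kwds ):
+        kwds[ 'optional' ] = optional
+        kwds[ 'mimetype' ] = mimetype
+        kwds[ 'description' ] = description
+        kwds[ 'substitute_name_with_metadata' ] = substitute_name_with_metadata
+        return Bunch( **kwds )
+    def add_composite_file( self, name, **kwds ):
+        #self.composite_files = self.composite_files.copy()
+        self.composite_files[ name ] = self.__new_composite_file( **kwds )
+
+
+    def __substitute_composite_key( self, key, composite_file, dataset = None ):
+        if composite_file.substitute_name_with_metadata:
+            if dataset:
+                meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+            else:
+                meta_value = self.spec[composite_file.substitute_name_with_metadata].default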
+            return key % meta_value
+        return key
+    @property
+    def writable_files( self, dataset = None ):
+        files = odict()
+        if self.composite_type != 'auto_primary_file':
+            files[ self.primary_file_name ] = self.__new_composite_file()
+        for key, value in self.get_composite_files( dataset = dataset ).iteritems():
+            files[ key ] = value
+        return files
+    def get_composite_files( self, dataset = None ):
+        def substitute_composite_key( key, composite_file ):
+            if composite_file.substitute_name_with_metadata:
+                if dataset:
+                    meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+                else:
+                    meta_value = self.metadata_spec[ composite_file.substitute_name_with_metadata ].default
+                return key % meta_value
+            return key
+        files = odict()
+        for key, value in self.composite_files.iteritems():
+            files[ substitute_composite_key( key, value ) ] = value
+        return files
+    def generate_auto_primary_file( self, dataset = None ):
+        raise Exception( "generate_auto_primary_file is not implemented for this datatype." )
+
     @property
     def has_resolution(self):
         return False
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Mon Jun 08 12:49:26 2009 -0400
@@ -117,15 +117,26 @@
     """class to use for rgenetics"""
     """Add metadata elements"""
-    MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", readonly=True)
+    MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="galaxy", readonly=True)
     file_ext="html"
-
+    composite_type = 'auto_primary_file'
+
     def missing_meta( self, dataset ):
         """Checks for empty meta values"""
         for key, value in dataset.metadata.items():
             if not value:
                 return True
         return False
+
+    def generate_primary_file( self, dataset = None ):
+        rval = ['<html><head><title>Files for Composite Dataset (%s)</title></head><p/>This composite dataset is composed of the following files:<p/><ul>' % ( self.file_ext ) ]
+        for composite_name, composite_file in self.get_composite_files( dataset = dataset ).iteritems():
+            opt_text = ''
+            if composite_file.optional:
+                opt_text = ' (optional)'
+            rval.append( '<li><a href="%s">%s</a>%s' % ( composite_name, composite_name, opt_text ) )
+        rval.append( '</ul></html>' )
+        return "\n".join( rval )
 
 class SNPMatrix(Rgenetics):
     """fake class to distinguish different species of Rgenetics data collections
@@ -148,6 +159,12 @@
     """fake class to distinguish different species of Rgenetics data collections
     """
     file_ext="lped"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__( self, **kwd )
+        self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name' )
+
 
 class Pphe(Rgenetics):
     """fake class to distinguish different species of Rgenetics data collections
@@ -180,11 +197,33 @@
     """fake class to distinguish different species of Rgenetics data collections
     """
     file_ext="pbed"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__( self, **kwd )
+        self.add_composite_file( '%s.bim', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.bed', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name' )
+
 
 class Eigenstratgeno(Rgenetics):
     """fake class to distinguish different species of Rgenetics data collections
     """
     file_ext="eigenstratgeno"
+
+    def __init__( self, **kwd ):
+        Rgenetics.__init__( self, **kwd )
+        self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name' )
+        self.add_composite_file( '%s_fo.eigenstratgeno', substitute_name_with_metadata = 'base_name', optional = 'True' )
+        self.add_composite_file( '%s_fo.ind', substitute_name_with_metadata = 'base_name', optional = 'True' )
+        self.add_composite_file( '%s_fo.map', substitute_name_with_metadata = 'base_name', optional = 'True' )
+        self.add_composite_file( '%s_oo.eigenstratgeno', substitute_name_with_metadata = 'base_name', optional = 'True' )
+        self.add_composite_file( '%s_oo.ind', substitute_name_with_metadata = 'base_name', optional = 'True' )
+        self.add_composite_file( '%s_oo.map', substitute_name_with_metadata = 'base_name', optional = 'True' )
+
+
 class Eigenstratpca(Rgenetics):
     """fake class to distinguish different species of Rgenetics data collections
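For illustration only (an assumption inferred from the elem.findall( 'composite_file' ) loop below; the exact datatypes_conf.xml layout is not shown in this diff): a composite datatype would be declared to the registry roughly as follows, parsed here from a string with ElementTree.

    # Sketch: the registry reads <composite_file> children of a <datatype> tag.
    from xml.etree import ElementTree

    elem = ElementTree.fromstring( """
    <datatype extension="lped" type="galaxy.datatypes.genetics:Lped">
        <composite_file name="%s.ped" optional="false" mimetype="text/plain"/>
        <composite_file name="%s.map" optional="false" mimetype="text/plain"/>
    </datatype>
    """ )
    for composite_file in elem.findall( 'composite_file' ):
        # mirrors the loop added below; note that get() returns strings, so an
        # 'optional' attribute arrives as text rather than a boolean
        print( ( composite_file.get( 'name' ), composite_file.get( 'optional', False ), composite_file.get( 'mimetype', None ) ) )

diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Mon Jun 08 12:49:26 2009 -0400
@@ -67,6 +67,15 @@
                         indexer_config = indexer.get( 'file', None )
                         if indexer_config:
                             self.indexers.append( (indexer_config, extension) )
+                    for composite_file in elem.findall( 'composite_file' ):
+                        # add composite files
+                        name = composite_file.get( 'name', None )
+                        if name is None:
+                            log.warning( "You must provide a name for your composite_file (%s)."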
% composite_file ) + optional = composite_file.get( 'optional', False ) + mimetype = composite_file.get( 'mimetype', None ) + self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype ) + except Exception, e: self.log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) ) # Load datatype sniffers from the config @@ -294,3 +303,7 @@ ret_data = None return ( convert_ext, ret_data ) return ( None, None ) + + def get_composite_extensions( self ): + return [ ext for ( ext, d_type ) in self.datatypes_by_extension.iteritems() if d_type.composite_type is not None ] + diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/tools/__init__.py Mon Jun 08 12:49:26 2009 -0400 @@ -591,7 +591,7 @@ group = Repeat() group.name = elem.get( "name" ) group.title = elem.get( "title" ) - group.inputs = self.parse_input_elem( elem, enctypes, context ) + group.inputs = self.parse_input_elem( elem, enctypes, context ) rval[group.name] = group elif elem.tag == "conditional": group = Conditional() @@ -609,6 +609,16 @@ case.inputs = self.parse_input_elem( case_elem, enctypes, context ) group.cases.append( case ) rval[group.name] = group + elif elem.tag == "upload_dataset": + group = UploadDataset() + group.name = elem.get( "name" ) + group.title = elem.get( "title" ) + group.file_type_name = elem.get( 'file_type_name', group.file_type_name ) + group.default_file_type = elem.get( 'default_file_type', group.default_file_type ) + rval[ group.file_type_name ].refresh_on_change = True + rval[ group.file_type_name ].refresh_on_change_values = self.app.datatypes_registry.get_composite_extensions() + group.inputs = self.parse_input_elem( elem, enctypes, context ) + rval[ group.name ] = group elif elem.tag == "param": param = self.parse_param_elem( elem, enctypes, context ) rval[param.name] = param @@ -951,6 +961,56 @@ group_state['__current_case__'] = current_case # Store the value of the test element group_state[ input.test_param.name ] = value + elif isinstance( input, UploadDataset ): + group_state = state[input.name] + group_errors = [] + group_old_errors = old_errors.get( input.name, None ) + any_group_errors = False + d_type = input.get_datatype( trans, context ) + writable_files = d_type.writable_files + #remove extra files + while len( group_state ) > len( writable_files ): + del group_state[-1] + if group_old_errors: + del group_old_errors[-1] + # Update state + max_index = -1 + for i, rep_state in enumerate( group_state ): + rep_index = rep_state['__index__'] + max_index = max( max_index, rep_index ) + rep_prefix = "%s_%d|" % ( key, rep_index ) + if group_old_errors: + rep_old_errors = group_old_errors[i] + else: + rep_old_errors = {} + rep_errors = self.update_state( trans, + input.inputs, + rep_state, + incoming, + prefix=rep_prefix, + context=context, + update_only=update_only, + old_errors=rep_old_errors, + changed_dependencies=changed_dependencies, + item_callback=item_callback ) + if rep_errors: + any_group_errors = True + group_errors.append( rep_errors ) + else: + group_errors.append( {} ) + #add new fileupload as needed + offset = 1 + while len( writable_files ) > len( group_state ): + new_state = {} + new_state['__index__'] = max_index + offset + offset += 1 + self.fill_in_new_state( trans, input.inputs, new_state, context ) + group_state.append( new_state ) + if any_group_errors: + group_errors.append( {} ) + # Were there *any* errors for any 
repetition? + if any_group_errors: + errors[input.name] = group_errors else: if key not in incoming \ and "__force_update__" + key not in incoming \ diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/actions/upload.py --- a/lib/galaxy/tools/actions/upload.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/tools/actions/upload.py Mon Jun 08 12:49:26 2009 -0400 @@ -19,19 +19,15 @@ except: log.exception( 'failure removing temporary file: %s' % filename ) def execute( self, tool, trans, incoming={}, set_output_hid = True ): - data_file = incoming['file_data'] - file_type = incoming['file_type'] - dbkey = incoming['dbkey'] - url_paste = incoming['url_paste'] - is_multi_byte = False - space_to_tab = False - if 'space_to_tab' in incoming: - if incoming['space_to_tab'] not in ["None", None]: - space_to_tab = True + dataset_upload_inputs = [] + for input_name, input in tool.inputs.iteritems(): + if input.type == "upload_dataset": + dataset_upload_inputs.append( input ) + assert dataset_upload_inputs, Exception( "No dataset upload groups were found." ) # Get any precreated datasets (when using asynchronous uploads) async_datasets = [] self.precreated_datasets = [] - if incoming['async_datasets'] not in ["None", "", None]: + if incoming.get( 'async_datasets', None ) not in ["None", "", None]: async_datasets = incoming['async_datasets'].split(',') for id in async_datasets: try: @@ -45,8 +41,39 @@ log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) ) else: self.precreated_datasets.append( data ) - temp_name = "" data_list = [] + for dataset_upload_input in dataset_upload_inputs: + uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming ) + for uploaded_dataset in uploaded_datasets: + precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name ) + dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset ) + if uploaded_dataset.composite_files: + os.mkdir( dataset.extra_files_path ) #make extra files path + for name, value in uploaded_dataset.composite_files.iteritems(): + #what about binary files here, need to skip converting newlines + if value is None and not dataset.datatype.writable_files[ name ].optional: + dataset.info = "A required composite data file was not provided (%s)" % name + dataset.state = dataset.states.ERROR + break + elif value is not None: + if value.space_to_tab: + sniff.convert_newlines_sep2tabs( value.filename ) + else: + sniff.convert_newlines( value.filename ) + shutil.move( value.filename, os.path.join( dataset.extra_files_path, name ) ) + data_list.append( dataset ) + #clean up extra temp names + uploaded_dataset.clean_up_temp_files() + + #cleanup unclaimed precreated datasets: + for data in self.precreated_datasets: + log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) ) + data.state = data.states.ERROR + data.info = 'No file contents were available.' + + if data_list: + trans.app.model.flush() + # Create the job object job = trans.app.model.Job() job.session_id = trans.get_galaxy_session().id @@ -56,104 +83,14 @@ # For backward compatibility, some tools may not have versions yet. 
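[Editorial aside, a sketch rather than part of the diff: the composite branch of execute() above enforces the per-file optional flag declared with add_composite_file. Roughly, with illustrative names:]

    # For each slot in the datatype's writable_files, a missing upload is an
    # error unless that composite file was declared optional.
    # 'supplied' maps composite file name -> uploaded temp file (or None).
    def missing_required_parts( datatype, supplied ):
        missing = []
        for name, composite_file in datatype.writable_files.iteritems():
            if supplied.get( name ) is None and not composite_file.optional:
                missing.append( name )
        return missing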
job.tool_version = tool.version except: - job.tool_version = "1.0.0" + job.tool_version = "1.0.1" job.state = trans.app.model.Job.states.UPLOAD job.flush() log.info( 'tool %s created job id %d' % ( tool.id, job.id ) ) trans.log_event( 'created job id %d' % job.id, tool_id=tool.id ) - if 'local_filename' in dir( data_file ): - # Use the existing file - file_name = data_file.filename - file_name = file_name.split( '\\' )[-1] - file_name = file_name.split( '/' )[-1] - precreated_dataset = self.get_precreated_dataset( file_name ) - try: - data_list.append( self.add_file( trans, data_file.local_filename, file_name, file_type, is_multi_byte, dbkey, space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) ) - except Exception, e: - log.exception( 'exception in add_file using datafile.local_filename %s: %s' % ( data_file.local_filename, str( e ) ) ) - self.remove_tempfile( data_file.local_filename ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - elif 'filename' in dir( data_file ): - file_name = data_file.filename - file_name = file_name.split( '\\' )[-1] - file_name = file_name.split( '/' )[-1] - precreated_dataset = self.get_precreated_dataset( file_name ) - try: - temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' ) - except Exception, e: - log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) ) - self.remove_tempfile( temp_name ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - try: - data_list.append( self.add_file( trans, temp_name, file_name, file_type, is_multi_byte, dbkey, space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) ) - except Exception, e: - log.exception( 'exception in add_file using file temp_name %s: %s' % ( str( temp_name ), str( e ) ) ) - self.remove_tempfile( temp_name ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - if url_paste not in [ None, "" ]: - if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ): - # If we were sent a DATA_URL from an external application in a post, NAME and INFO - # values should be in the request - if 'NAME' in incoming and incoming[ 'NAME' ] not in [ "None", None ]: - NAME = incoming[ 'NAME' ] - else: - NAME = '' - if 'INFO' in incoming and incoming[ 'INFO' ] not in [ "None", None ]: - INFO = incoming[ 'INFO' ] - else: - INFO = "uploaded url" - url_paste = url_paste.replace( '\r', '' ).split( '\n' ) - name_set_from_line = False #if we are setting the name from the line, it needs to be the line that creates that dataset - for line in url_paste: - line = line.strip() - if line: - if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ): - continue # non-url line, ignore - if not NAME or name_set_from_line: - NAME = line - name_set_from_line = True - precreated_dataset = self.get_precreated_dataset( NAME ) - try: - temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' ) - except Exception, e: - log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) ) - self.remove_tempfile( temp_name ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - try: - data_list.append( self.add_file( trans, temp_name, NAME, file_type, is_multi_byte, dbkey, info="uploaded url", 
space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) ) - except Exception, e: - log.exception( 'exception in add_file using url_paste temp_name %s: %s' % ( str( temp_name ), str( e ) ) ) - self.remove_tempfile( temp_name ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - else: - precreated_dataset = self.get_precreated_dataset( 'Pasted Entry' ) - is_valid = False - for line in url_paste: - line = line.rstrip( '\r\n' ) - if line: - is_valid = True - break - if is_valid: - try: - temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' ) - except Exception, e: - log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) ) - self.remove_tempfile( temp_name ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - try: - data_list.append( self.add_file( trans, temp_name, 'Pasted Entry', file_type, is_multi_byte, dbkey, info="pasted entry", space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) ) - except Exception, e: - log.exception( 'exception in add_file using StringIO.StringIO( url_paste ) temp_name %s: %s' % ( str( temp_name ), str( e ) ) ) - self.remove_tempfile( temp_name ) - return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset ) - else: - return self.upload_empty( trans, job, "No data error:", "you pasted no data.", precreated_dataset=precreated_dataset ) - if self.empty: - return self.upload_empty( trans, job, "Empty file error:", "you attempted to upload an empty file." ) - elif len( data_list ) < 1: - return self.upload_empty( trans, job, "No data error:", "either you pasted no data, the url you specified is invalid, or you have not specified a file." ) + #if we could make a 'real' job here, then metadata could be set before job.finish() is called - hda = data_list[0] #only our first hda is being added as input for the job, why? + hda = data_list[0] #only our first hda is being added as output for the job, why? job.state = trans.app.model.Job.states.OK file_size_str = datatypes.data.nice_size( hda.dataset.file_size ) job.info = "%s, size: %s" % ( hda.info, file_size_str ) @@ -162,7 +99,7 @@ log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) ) trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id ) return dict( output=hda ) - + def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None): if precreated_dataset is not None: data = precreated_dataset @@ -188,7 +125,7 @@ trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id ) return dict( output=data ) - def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ): + def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ): data_type = None # See if we have an empty file if not os.path.getsize( temp_name ) > 0: @@ -254,7 +191,7 @@ data_type = 'binary' if not data_type: # We must have a text file - if self.check_html( temp_name ): + if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ): raise BadFileException( "you attempted to upload an inappropriate file." 
) if data_type != 'binary' and data_type != 'zip': if space_to_tab: @@ -404,7 +341,7 @@ return self.precreated_datasets.pop( names.index( name ) ) else: return None - + class BadFileException( Exception ): pass diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/__init__.py --- a/lib/galaxy/tools/parameters/__init__.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/tools/parameters/__init__.py Mon Jun 08 12:49:26 2009 -0400 @@ -20,7 +20,7 @@ be nice to unify all the places that recursively visit inputs. """ for input in inputs.itervalues(): - if isinstance( input, Repeat ): + if isinstance( input, Repeat ) or isinstance( input, UploadDataset ): for i, d in enumerate( input_values[ input.name ] ): index = d['__index__'] new_name_prefix = name_prefix + "%s_%d|" % ( input.name, index ) diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/basic.py --- a/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:49:26 2009 -0400 @@ -23,6 +23,7 @@ def __init__( self, tool, param, context=None ): self.tool = tool self.refresh_on_change = False + self.refresh_on_change_values = [] self.name = param.get("name") self.type = param.get("type") self.label = util.xml_text(param, "label") @@ -301,7 +302,7 @@ self.name = elem.get( 'name' ) self.ajax = str_bool( elem.get( 'ajax-upload' ) ) def get_html_field( self, trans=None, value=None, other_values={} ): - return form_builder.FileField( self.name, self.ajax ) + return form_builder.FileField( self.name, ajax = self.ajax, value = value ) def from_html( self, value, trans=None, other_values={} ): # Middleware or proxies may encode files in special ways (TODO: this # should be pluggable) @@ -325,10 +326,11 @@ """ return "multipart/form-data" def to_string( self, value, app ): - if value is None: + if value in [ None, '' ]: return None - else: - raise Exception( "FileToolParameter cannot be persisted" ) + elif isinstance( value, unicode ) or isinstance( value, str ): + return value + raise Exception( "FileToolParameter cannot be persisted" ) def to_python( self, value, app ): if value is None: return None @@ -401,13 +403,13 @@ >>> print p.name blah >>> print p.get_html() - <select name="blah"> + <select name="blah" last_selected_value="y"> <option value="x">I am X</option> <option value="y" selected>I am Y</option> <option value="z">I am Z</option> </select> >>> print p.get_html( value="z" ) - <select name="blah"> + <select name="blah" last_selected_value="z"> <option value="x">I am X</option> <option value="y">I am Y</option> <option value="z" selected>I am Z</option> @@ -426,13 +428,13 @@ >>> print p.name blah >>> print p.get_html() - <select name="blah" multiple> + <select name="blah" multiple last_selected_value="z"> <option value="x">I am X</option> <option value="y" selected>I am Y</option> <option value="z" selected>I am Z</option> </select> >>> print p.get_html( value=["x","y"]) - <select name="blah" multiple> + <select name="blah" multiple last_selected_value="y"> <option value="x" selected>I am X</option> <option value="y" selected>I am Y</option> <option value="z">I am Z</option> @@ -520,7 +522,7 @@ return form_builder.TextField( self.name, value=(value or "") ) if value is not None: if not isinstance( value, list ): value = [ value ] - field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change ) + field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values 
= self.refresh_on_change_values ) options = self.get_options( trans, context ) for text, optval, selected in options: if isinstance( optval, UnvalidatedValue ): @@ -676,7 +678,7 @@ >>> # hg17 should be selected by default >>> print p.get_html( trans ) # doctest: +ELLIPSIS - <select name="blah"> + <select name="blah" last_selected_value="hg17"> <option value="?">unspecified (?)</option> ... <option value="hg18">Human Mar. 2006 (hg18)</option> @@ -687,7 +689,7 @@ >>> # If the user selected something else already, that should be used >>> # instead >>> print p.get_html( trans, value='hg18' ) # doctest: +ELLIPSIS - <select name="blah"> + <select name="blah" last_selected_value="hg18"> <option value="?">unspecified (?)</option> ... <option value="hg18" selected>Human Mar. 2006 (hg18)</option> @@ -942,7 +944,7 @@ return form_builder.TextArea( self.name, value=value ) else: return form_builder.TextField( self.name, value=(value or "") ) - return form_builder.DrillDownField( self.name, self.multiple, self.display, self.refresh_on_change, self.get_options( trans, value, other_values ), value ) + return form_builder.DrillDownField( self.name, self.multiple, self.display, self.refresh_on_change, self.get_options( trans, value, other_values ), value, refresh_on_change_values = self.refresh_on_change_values ) def from_html( self, value, trans=None, other_values={} ): if self.need_late_validation( trans, other_values ): @@ -1108,7 +1110,7 @@ if value is not None: if type( value ) != list: value = [ value ] - field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change ) + field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values ) # CRUCIAL: the dataset_collector function needs to be local to DataToolParameter.get_html_field() def dataset_collector( hdas, parent_hid ): for i, hda in enumerate( hdas ): diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/grouping.py --- a/lib/galaxy/tools/parameters/grouping.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/tools/parameters/grouping.py Mon Jun 08 12:49:26 2009 -0400 @@ -4,6 +4,14 @@ from basic import ToolParameter from galaxy.util.expressions import ExpressionContext + +import logging +log = logging.getLogger( __name__ ) + +import StringIO, os, urllib +from galaxy.datatypes import sniff +from galaxy.util.bunch import Bunch +from galaxy.util.odict import odict class Group( object ): def __init__( self ): @@ -74,7 +82,322 @@ input.visit_inputs( new_prefix, d[input.name], callback ) def get_initial_value( self, trans, context ): return [] + +class UploadDataset( Group ): + type = "upload_dataset" + def __init__( self ): + Group.__init__( self ) + self.title = None + self.inputs = None + self.file_type_name = 'file_type' + self.default_file_type = 'txt' + self.file_type_to_ext = { 'auto':self.default_file_type } + def get_file_type( self, context ): + return context.get( self.file_type_name, self.default_file_type ) + def get_datatype_ext( self, trans, context ): + ext = self.get_file_type( context ) + if ext in self.file_type_to_ext: + ext = self.file_type_to_ext[ext] #when using autodetect, we will use composite info from 'text', i.e. 
only the main file + return ext + def get_datatype( self, trans, context ): + ext = self.get_datatype_ext( trans, context ) + return trans.app.datatypes_registry.get_datatype_by_extension( ext ) + @property + def title_plural( self ): + if self.title.endswith( "s" ): + return self.title + else: + return self.title + "s" + def group_title( self, context ): + return "%s (%s)" % ( self.title, context.get( self.file_type_name, self.default_file_type ) ) + def title_by_index( self, trans, index, context ): + d_type = self.get_datatype( trans, context ) + for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ): + if i == index: + rval = composite_name + if composite_file.description: + rval = "%s (%s)" % ( rval, composite_file.description ) + if composite_file.optional: + rval = "%s [optional]" % rval + return rval + return None + def value_to_basic( self, value, app ): + rval = [] + for d in value: + rval_dict = {} + # Propogate __index__ + if '__index__' in d: + rval_dict['__index__'] = d['__index__'] + for input in self.inputs.itervalues(): + rval_dict[ input.name ] = input.value_to_basic( d[input.name], app ) + rval.append( rval_dict ) + return rval + def value_from_basic( self, value, app, ignore_errors=False ): + rval = [] + for i, d in enumerate( value ): + rval_dict = {} + # If the special __index__ key is not set, create it (for backward + # compatibility) + rval_dict['__index__'] = d.get( '__index__', i ) + # Restore child inputs + for input in self.inputs.itervalues(): + if ignore_errors and input.name not in d: #this wasn't tested + rval_dict[ input.name ] = input.get_initial_value( None, d ) + else: + rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors ) + rval.append( rval_dict ) + return rval + def visit_inputs( self, prefix, value, callback ): + for i, d in enumerate( value ): + for input in self.inputs.itervalues(): + new_prefix = prefix + "%s_%d|" % ( self.name, i ) + if isinstance( input, ToolParameter ): + callback( new_prefix, input, d[input.name], parent = d ) + else: + input.visit_inputs( new_prefix, d[input.name], callback ) + def get_initial_value( self, trans, context ): + d_type = self.get_datatype( trans, context ) + rval = [] + for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ): + rval_dict = {} + rval_dict['__index__'] = i # create __index__ + for input in self.inputs.itervalues(): + rval_dict[ input.name ] = input.get_initial_value( trans, context ) #input.value_to_basic( d[input.name], app ) + rval.append( rval_dict ) + return rval + def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ): + def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ): + dataset_name = override_name + dataset_info = override_info + def get_file_name( file_name ): + file_name = file_name.split( '\\' )[-1] + file_name = file_name.split( '/' )[-1] + return file_name + if 'local_filename' in dir( data_file ): + # Use the existing file + return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte + elif 'filename' in dir( data_file ): + #create a new tempfile + try: + temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' ) + precreated_name = get_file_name( data_file.filename ) + if not dataset_name: + dataset_name = precreated_name + if not dataset_info: + dataset_info = 'uploaded file' + return temp_name, get_file_name( data_file.filename ), 
is_multi_byte, dataset_name, dataset_info + except Exception, e: + log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) ) + self.remove_temp_file( temp_name ) + return None, None, is_multi_byte, None, None + def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ): + filenames = [] + if url_paste not in [ None, "" ]: + if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ): + url_paste = url_paste.replace( '\r', '' ).split( '\n' ) + for line in url_paste: + line = line.strip() + if line: + if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ): + continue # non-url line, ignore + precreated_name = line + dataset_name = override_name + if not dataset_name: + dataset_name = line + dataset_info = override_info + if not dataset_info: + dataset_info = 'uploaded url' + try: + temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' ) + except Exception, e: + temp_name = None + precreated_name = str( e ) + log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) ) + try: + self.remove_temp_file( temp_name ) + except: + pass + yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info ) + #yield ( None, str( e ), False, dataset_name, dataset_info ) + else: + dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here + if override_name: + dataset_name = override_name + if override_info: + dataset_info = override_info + is_valid = False + for line in url_paste: #Trim off empty lines from begining + line = line.rstrip( '\r\n' ) + if line: + is_valid = True + break + if is_valid: + try: + temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' ) + except Exception, e: + log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) ) + temp_name = None + precreated_name = str( e ) + try: + self.remove_temp_file( temp_name ) + except: + pass + yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info ) + #yield ( None, str( e ), False, dataset_name, dataset_info ) + def get_one_filename( context ): + data_file = context['file_data'] + url_paste = context['url_paste'] + name = context.get( 'NAME', None ) + info = context.get( 'INFO', None ) + warnings = [] + is_multi_byte = False + space_to_tab = False + if context.get( 'space_to_tab', None ) not in ["None", None]: + space_to_tab = True + temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info ) + if temp_name: + if url_paste.strip(): + warnings.append( "All file contents specified in the paste box were ignored." 
) + else: #we need to use url_paste + #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ) + for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names: + if temp_name: + break + ###this check will cause an additional file to be retrieved and created...so lets not do that + #try: #check to see if additional paste contents were available + # file_names.next() + # warnings.append( "Additional file contents were specified in the paste box, but ignored." ) + #except StopIteration: + # pass + return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings + + def get_filenames( context ): + rval = [] + data_file = context['file_data'] + url_paste = context['url_paste'] + name = context.get( 'NAME', None ) + info = context.get( 'INFO', None ) + warnings = [] + is_multi_byte = False + space_to_tab = False + if context.get( 'space_to_tab', None ) not in ["None", None]: + space_to_tab = True + temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info ) + if temp_name: + rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) ) + for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ): + if temp_name: + rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) ) + return rval + class UploadedDataset( Bunch ): + def __init__( self, **kwd ): + Bunch.__init__( self, **kwd ) + self.primary_file = None + self.composite_files = odict() + self.dbkey = None + self.warnings = [] + + self._temp_filenames = [] #store all created filenames here, delete on cleanup + def register_temp_file( self, filename ): + if isinstance( filename, list ): + self._temp_filenames.extend( filename ) + else: + self._temp_filenames.append( filename ) + def remove_temp_file( self, filename ): + try: + os.unlink( filename ) + except Exception, e: + pass + #log.warning( str( e ) ) + def clean_up_temp_files( self ): + for filename in self._temp_filenames: + self.remove_temp_file( filename ) + + file_type = self.get_file_type( context ) + d_type = self.get_datatype( trans, context ) + dbkey = context.get( 'dbkey', None ) + writable_files = d_type.writable_files + writable_files_offset = 0 + groups_incoming = [ None for filename in writable_files ] + for group_incoming in context.get( self.name, [] ): + i = int( group_incoming['__index__'] ) + groups_incoming[ i ] = group_incoming + + if d_type.composite_type is not None: + #handle uploading of composite datatypes + #Only one Dataset can be created + + dataset = UploadedDataset() + dataset.file_type = file_type + dataset.datatype = d_type + dataset.dbkey = dbkey + + temp_name = None + precreated_name = None + is_multi_byte = False + space_to_tab = False + warnings = [] + + dataset_name = None + dataset_info = None + if dataset.datatype.composite_type == 'auto_primary_file': + #replace sniff here with just creating an empty file + temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' ) + precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type ) + else: + 
temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] ) + writable_files_offset = 1 + if temp_name is None:#remove this before finish, this should create an empty dataset + raise Exception( 'No primary dataset file was available for composite upload' ) + dataset.primary_file = temp_name + dataset.is_multi_byte = is_multi_byte + dataset.space_to_tab = space_to_tab + dataset.precreated_name = precreated_name + dataset.name = dataset_name + dataset.info = dataset_info + dataset.warnings.extend( warnings ) + dataset.register_temp_file( temp_name ) + + keys = writable_files.keys() + for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ): + key = keys[ i + writable_files_offset ] + if group_incoming is None and not writable_files[ key ].optional: + dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) ) + dataset.composite_files[ key ] = None + else: + temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming ) + if temp_name: + dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name ) + dataset.register_temp_file( temp_name ) + else: + dataset.composite_files[ key ] = None + if not writable_files[ key ].optional: + dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) ) + return [ dataset ] + else: + rval = [] + for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ): + dataset = UploadedDataset() + dataset.file_type = file_type + dataset.datatype = d_type + dataset.dbkey = dbkey + dataset.primary_file = temp_name + dataset.is_multi_byte = is_multi_byte + dataset.space_to_tab = space_to_tab + dataset.name = dataset_name + dataset.info = dataset_info + dataset.precreated_name = precreated_name + dataset.register_temp_file( temp_name ) + rval.append( dataset ) + return rval + def remove_temp_file( self, filename ): + try: + os.unlink( filename ) + except Exception, e: + log.warning( str( e ) ) + + class Conditional( Group ): type = "conditional" def __init__( self ): diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/validation.py --- a/lib/galaxy/tools/parameters/validation.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/tools/parameters/validation.py Mon Jun 08 12:49:26 2009 -0400 @@ -72,14 +72,18 @@ """ @classmethod def from_element( cls, param, elem ): - return cls( elem.get( 'message' ), elem.text ) - def __init__( self, message, expression ): + return cls( elem.get( 'message' ), elem.text, elem.get( 'substitute_value_in_message' ) ) + def __init__( self, message, expression, substitute_value_in_message ): self.message = message + self.substitute_value_in_message = substitute_value_in_message # Save compiled expression, code objects are thread safe (right?) 
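[Editorial aside, a sketch of the new substitute_value_in_message behavior with a hypothetical message and value; the interpolation matches the validate() change just below:]

    message = "Value '%s' is not valid."
    substitute_value_in_message = True
    value = "foo"
    if substitute_value_in_message:
        message = message % value
    # raise ValueError( message ) would now report: Value 'foo' is not valid.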
-        self.expression = compile( expression, '<string>', 'eval' )
+        self.expression = compile( expression, '<string>', 'eval' )
     def validate( self, value, history=None ):
         if not( eval( self.expression, dict( value=value ) ) ):
-            raise ValueError( self.message )
+            message = self.message
+            if self.substitute_value_in_message:
+                message = message % value
+            raise ValueError( message )
 
 class InRangeValidator( Validator ):
     """
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/util/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -146,6 +146,7 @@
     elif isinstance( value, list ):
         return map(sanitize_text, value)
     else:
+        print value
         raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
 
 class Params:
@@ -222,7 +223,7 @@
                     pass
                 if not value and not new_value:
                     new_value = tool.param_trans_dict[ key ][1]
-                if key not in self.NEVER_SANITIZE and sanitize:
+                if sanitize and not ( key in self.NEVER_SANITIZE or True in [ key.endswith( "|%s" % nonsanitize_parameter ) for nonsanitize_parameter in self.NEVER_SANITIZE ] ): #sanitize check both ungrouped and grouped parameters by name
                     self.__dict__[ new_key ] = sanitize_param( new_value )
                 else:
                     self.__dict__[ new_key ] = new_value
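[Editorial aside on the sanitize change above, a sketch; the NEVER_SANITIZE contents and the grouped key are illustrative assumptions. Grouped parameters arrive keyed as "<group>_<index>|<name>", so a plain membership test misses them:]

    NEVER_SANITIZE = [ 'file_data', 'url_paste' ]
    key = 'files_0|url_paste' # hypothetical grouped key
    exempt = key in NEVER_SANITIZE or True in [ key.endswith( "|%s" % name ) for name in NEVER_SANITIZE ]
    print( exempt ) # True: the grouped url_paste field skips sanitization

diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Mon Jun 08 12:49:26 2009 -0400
@@ -3,6 +3,8 @@
 """
 from galaxy.web.base.controller import *
+from galaxy.util.bunch import Bunch
+from galaxy.tools import DefaultToolState
 import logging
 log = logging.getLogger( __name__ )
@@ -75,32 +77,51 @@
         tool = self.get_toolbox().tools_by_id.get( tool_id, None )
         if not tool:
             return False # bad tool_id
-        params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+        #params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+        if "tool_state" in kwd:
+            encoded_state = util.string_to_object( kwd["tool_state"] )
+            tool_state = DefaultToolState()
+            tool_state.decode( encoded_state, tool, trans.app )
+        else:
+            tool_state = tool.new_state( trans )
+        errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only = True )
         datasets = []
-        if params.file_data not in [ None, "" ]:
-            name = params.file_data
-            if name.count('/'):
-                name = name.rsplit('/',1)[1]
-            if name.count('\\'):
-                name = name.rsplit('\\',1)[1]
-            datasets.append( create_dataset( name, trans.history ) )
-        if params.url_paste not in [ None, "" ]:
-            url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
-            url = False
-            for line in url_paste:
-                line = line.rstrip( '\r\n' ).strip()
-                if not line:
-                    continue
-                elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
-                    url = True
-                    datasets.append( create_dataset( line, trans.history ) )
-                else:
-                    if url:
-                        continue # non-url when we've already processed some urls
-                    else:
-                        # pasted data
-                        datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
-                        break
+        dataset_upload_inputs = []
+        for input_name, input in tool.inputs.iteritems():
+            if input.type == "upload_dataset":
+                dataset_upload_inputs.append( input )
+        assert dataset_upload_inputs, Exception( "No dataset upload groups were found."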
) + for dataset_upload_input in dataset_upload_inputs: + d_type = dataset_upload_input.get_datatype( trans, kwd ) + + if d_type.composite_type is not None: + datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ), trans.history ) ) + else: + params = Bunch( ** tool_state.inputs[dataset_upload_input.name][0] ) + if params.file_data not in [ None, "" ]: + name = params.file_data + if name.count('/'): + name = name.rsplit('/',1)[1] + if name.count('\\'): + name = name.rsplit('\\',1)[1] + datasets.append( create_dataset( name, trans.history ) ) + if params.url_paste not in [ None, "" ]: + url_paste = params.url_paste.replace( '\r', '' ).split( '\n' ) + url = False + for line in url_paste: + line = line.rstrip( '\r\n' ).strip() + if not line: + continue + elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ): + url = True + datasets.append( create_dataset( line, trans.history ) ) + else: + if url: + continue # non-url when we've already processed some urls + else: + # pasted data + datasets.append( create_dataset( 'Pasted Entry', trans.history ) ) + break if datasets: trans.model.flush() return [ d.id for d in datasets ] diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/web/form_builder.py --- a/lib/galaxy/web/form_builder.py Mon Jun 08 12:35:38 2009 -0400 +++ b/lib/galaxy/web/form_builder.py Mon Jun 08 12:49:26 2009 -0400 @@ -78,17 +78,21 @@ >>> print FileField( "foo" ).get_html() <input type="file" name="foo"> - >>> print FileField( "foo", True ).get_html() + >>> print FileField( "foo", ajax = True ).get_html() <input type="file" name="foo" galaxy-ajax-upload="true"> """ - def __init__( self, name, ajax=False ): + def __init__( self, name, value = None, ajax=False ): self.name = name self.ajax = ajax + self.value = value def get_html( self, prefix="" ): + value_text = "" + if self.value: + value_text = ' value="%s"' % self.value + ajax_text = "" if self.ajax: - return '<input type="file" name="%s%s" galaxy-ajax-upload="true">' % ( prefix, self.name ) - else: - return '<input type="file" name="%s%s">' % ( prefix, self.name ) + ajax_text = ' galaxy-ajax-upload="true"' + return '<input type="file" name="%s%s"%s%s>' % ( prefix, self.name, ajax_text, value_text ) class HiddenField(BaseField): """ @@ -120,7 +124,7 @@ >>> t.add_option( "automatic", 3 ) >>> t.add_option( "bazooty", 4, selected=True ) >>> print t.get_html() - <select name="bar"> + <select name="bar" last_selected_value="4"> <option value="3">automatic</option> <option value="4" selected>bazooty</option> </select> @@ -140,7 +144,7 @@ <div><input type="checkbox" name="bar" value="3">automatic</div> <div><input type="checkbox" name="bar" value="4" checked>bazooty</div> """ - def __init__( self, name, multiple=None, display=None, refresh_on_change=False ): + def __init__( self, name, multiple=None, display=None, refresh_on_change = False, refresh_on_change_values = [] ): self.name = name self.multiple = multiple or False self.options = list() @@ -152,8 +156,11 @@ raise Exception, "Unknown display type: %s" % display self.display = display self.refresh_on_change = refresh_on_change + self.refresh_on_change_values = refresh_on_change_values if self.refresh_on_change: self.refresh_on_change_text = ' refresh_on_change="true"' + if self.refresh_on_change_values: + self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) ) else: self.refresh_on_change_text = '' def 
add_option( self, text, value, selected = False ): @@ -195,11 +202,17 @@ def get_html_default( self, prefix="" ): if self.multiple: multiple = " multiple" else: multiple = "" - rval = [ '<select name="%s%s"%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text ) ] + rval = [] + last_selected_value = "" for text, value, selected in self.options: - if selected: selected_text = " selected" + if selected: + selected_text = " selected" + last_selected_value = value else: selected_text = "" rval.append( '<option value="%s"%s>%s</option>' % ( value, selected_text, text ) ) + if last_selected_value: + last_selected_value = ' last_selected_value="%s"' % last_selected_value + rval.insert( 0, '<select name="%s%s"%s%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text, last_selected_value ) ) rval.append( '</select>' ) return "\n".join( rval ) @@ -253,7 +266,7 @@ </li> </ul></div> """ - def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options = [], value = [] ): + def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options = [], value = [], refresh_on_change_values = [] ): self.name = name self.multiple = multiple or False self.options = options @@ -270,8 +283,11 @@ raise Exception, "Unknown display type: %s" % display self.display = display self.refresh_on_change = refresh_on_change + self.refresh_on_change_values = refresh_on_change_values if self.refresh_on_change: self.refresh_on_change_text = ' refresh_on_change="true"' + if self.refresh_on_change_values: + self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) ) else: self.refresh_on_change_text = '' def get_html( self, prefix="" ): @@ -308,6 +324,7 @@ rval.append( '</ul></div>' ) return '\n'.join( rval ) + def get_suite(): """Get unittest suite for this module""" import doctest, sys diff -r c0c50620b89d -r 73a8b43f1d97 templates/base_panels.mako --- a/templates/base_panels.mako Mon Jun 08 12:35:38 2009 -0400 +++ b/templates/base_panels.mako Mon Jun 08 12:49:26 2009 -0400 @@ -59,19 +59,19 @@ <script type="text/javascript" src="${h.url_for('/static/scripts/galaxy.panels.js')}"></script> <script type="text/javascript"> - ensure_dd_helper(); + ensure_dd_helper(); - %if self.has_left_panel: + %if self.has_left_panel: var lp = make_left_panel( $("#left"), $("#center"), $("#left-border" ) ); force_left_panel = lp.force_panel; %endif - %if self.has_right_panel: + %if self.has_right_panel: var rp = make_right_panel( $("#right"), $("#center"), $("#right-border" ) ); handle_minwidth_hint = rp.handle_minwidth_hint; force_right_panel = rp.force_panel; %endif - + </script> ## Handle AJAX (actually hidden iframe) upload tool <![if !IE]> @@ -81,34 +81,36 @@ ##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() { ##$("iframe")[0].contentDocument.body.innerHTML = "HELLO" ##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() { - $(this).contents().find("input[galaxy-ajax-upload]").each( function() { - var error_set = false; - $(this).parents("form").submit( function() { - // Make a synchronous request to create the datasets first - var async_datasets; - $.ajax( { - async: false, - type: "POST", - url: "${h.url_for(controller='tool_runner', action='upload_async_create')}", - data: $(this).formSerialize(), - dataType: "json", - success: function( d, s ) { async_datasets = d.join() } - } ); - if (async_datasets == '') { - if (! 
error_set) { - $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' ); - error_set = true; + $(this).contents().find("form").each( function() { + if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){ + $(this).submit( function() { + var error_set = false; + // Make a synchronous request to create the datasets first + var async_datasets; + $.ajax( { + async: false, + type: "POST", + url: "${h.url_for(controller='tool_runner', action='upload_async_create')}", + data: $(this).formSerialize(), + dataType: "json", + success: function( d, s ) { async_datasets = d.join() } + } ); + if (async_datasets == '') { + if (! error_set) { + $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' ); + error_set = true; + } + return false; + } else { + $(this).find("input[name=async_datasets]").val( async_datasets ); + $(this).append("<input type='hidden' name='ajax_upload' value='true'>"); } + // iframe submit is required for nginx (otherwise the encoding is wrong) + $(this).ajaxSubmit( { iframe: true } ); + $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}"); return false; - } else { - $(this).find("input[name=async_datasets]").val( async_datasets ); - $(this).append("<input type='hidden' name='ajax_upload' value='true'>"); - } - // iframe submit is required for nginx (otherwise the encoding is wrong) - $(this).ajaxSubmit( { iframe: true } ); - $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}"); - return false; - }); + }); + } }); }); }); @@ -120,88 +122,88 @@ <%def name="masthead()"> <div class="title" style="float: left;"> - <a target="_blank" href="${app.config.wiki_url}"> - <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;"> - </a> - Galaxy - %if app.config.brand: - <span class='brand'>/${app.config.brand}</span> - %endif + <a target="_blank" href="${app.config.wiki_url}"> + <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;"> + </a> + Galaxy + %if app.config.brand: + <span class='brand'>/${app.config.brand}</span> + %endif </div> <div style="position: absolute; left: 50%;"> <div class="tab-group" style="position: relative; left: -50%;"> - - <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )"> - <% - cls = "tab" - if extra_class: - cls += " " + extra_class - if self.active_view == id: - cls += " active" - style = "" - if not visible: - style = "display: none;" - %> - <span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span> - </%def> - ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")} + <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )"> + <% + cls = "tab" + if extra_class: + cls += " " + extra_class + if self.active_view == id: + cls += " active" + style = "" + if not visible: + style = "display: none;" + %> + <span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span> + </%def> + + ## 
${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")} - ${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))} + ${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))} - ${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))} + ${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))} ${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))} - ${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )} - - <span class="tab"> - <a>Help</a> - <div class="submenu"> - <ul> - <li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li> - <li><a target="_blank" href="${app.config.get( "wiki_url", "http://g2.trac.bx.psu.edu/" )}">Galaxy Wiki</a></li> - <li><a target="_blank" href="${app.config.get( "screencasts_url", "http://g2.trac.bx.psu.edu/wiki/ScreenCasts" )}">Video tutorials (screencasts)</a></li> - </ul> - </div> - </span> + ${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )} - <span class="tab"> - <a>User</a> - <% - if trans.user: - user_email = trans.user.email - style1 = "display: none;" - style2 = ""; - else: - user_email = "" - style1 = "" - style2 = "display: none;" - %> - <div class="submenu"> - <ul class="loggedout-only" style="${style1}"> - <li><a target="galaxy_main" href="${h.url_for( controller='user', action='login' )}">Login</a></li> - %if app.config.allow_user_creation: - <li><a target="galaxy_main" href="${h.url_for( controller='user', action='create' )}">Register</a></li> - %endif - </ul> - <ul class="loggedin-only" style="${style2}"> - <li>Logged in as <span id="user-email">${user_email}</span></li> - <li><a target="galaxy_main" href="${h.url_for( controller='user', action='index' )}">Preferences</a></li> - <% - if app.config.require_login: - logout_target = "" - logout_url = h.url_for( controller='root', action='index', m_c='user', m_a='logout' ) - else: - logout_target = "galaxy_main" - logout_url = h.url_for( controller='user', action='logout' ) - %> - <li><a target="${logout_target}" href="${logout_url}">Logout</a></li> - </ul> - </div> - </span> - + <span class="tab"> + <a>Help</a> + <div class="submenu"> + <ul> + <li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li> + <li><a target="_blank" href="${app.config.get( "wiki_url", "http://g2.trac.bx.psu.edu/" )}">Galaxy Wiki</a></li> + <li><a target="_blank" href="${app.config.get( "screencasts_url", "http://g2.trac.bx.psu.edu/wiki/ScreenCasts" )}">Video tutorials (screencasts)</a></li> + </ul> + </div> + </span> + + <span class="tab"> + <a>User</a> + <% + if trans.user: + user_email = trans.user.email + style1 = "display: none;" + style2 = ""; + else: + user_email = "" + style1 = "" + style2 = "display: none;" + %> + <div class="submenu"> + <ul class="loggedout-only" style="${style1}"> + <li><a target="galaxy_main" href="${h.url_for( controller='user', action='login' )}">Login</a></li> + %if app.config.allow_user_creation: + <li><a target="galaxy_main" href="${h.url_for( controller='user', action='create' )}">Register</a></li> + %endif + </ul> + <ul 
class="loggedin-only" style="${style2}"> + <li>Logged in as <span id="user-email">${user_email}</span></li> + <li><a target="galaxy_main" href="${h.url_for( controller='user', action='index' )}">Preferences</a></li> + <% + if app.config.require_login: + logout_target = "" + logout_url = h.url_for( controller='root', action='index', m_c='user', m_a='logout' ) + else: + logout_target = "galaxy_main" + logout_url = h.url_for( controller='user', action='logout' ) + %> + <li><a target="${logout_target}" href="${logout_url}">Logout</a></li> + </ul> + </div> + </span> + </div> </div> @@ -213,32 +215,32 @@ <div id="overlay" %if not self.overlay_visible: - style="display: none;" + style="display: none;" %endif > - ## - <div id="overlay-background" style="position: absolute; width: 100%; height: 100%;"></div> - - ## Need a table here for centering in IE6 - <table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0" - %if not self.overlay_visible: - style="display: none;" - %endif - ><tr><td> - <div class="dialog-box-wrapper"> - <div class="dialog-box"> - <div class="unified-panel-header"> - <div class="unified-panel-header-inner"><span class='title'>${title}</span></div> - </div> - <div class="body" style="max-height: 600px; overflow: auto;">${content}</div> - <div> - <div class="buttons" style="display: none; float: right;"></div> - <div class="extra_buttons" style="display: none; padding: 5px;"></div> - <div style="clear: both;"></div> - </div> - </div> - </div> - </td></tr></table> + ## + <div id="overlay-background" style="position: absolute; width: 100%; height: 100%;"></div> + + ## Need a table here for centering in IE6 + <table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0" + %if not self.overlay_visible: + style="display: none;" + %endif + ><tr><td> + <div class="dialog-box-wrapper"> + <div class="dialog-box"> + <div class="unified-panel-header"> + <div class="unified-panel-header-inner"><span class='title'>${title}</span></div> + </div> + <div class="body" style="max-height: 600px; overflow: auto;">${content}</div> + <div> + <div class="buttons" style="display: none; float: right;"></div> + <div class="extra_buttons" style="display: none; padding: 5px;"></div> + <div style="clear: both;"></div> + </div> + </div> + </div> + </td></tr></table> </div> </%def> @@ -268,7 +270,7 @@ ${self.message_box_content()} %endif </div> - ${self.overlay()} + ${self.overlay()} %if self.has_left_panel: <div id="left"> ${self.left_panel()} diff -r c0c50620b89d -r 73a8b43f1d97 templates/tool_form.mako --- a/templates/tool_form.mako Mon Jun 08 12:35:38 2009 -0400 +++ b/templates/tool_form.mako Mon Jun 08 12:49:26 2009 -0400 @@ -15,7 +15,39 @@ <script type="text/javascript"> $( function() { $( "select[refresh_on_change='true']").change( function() { - $( "#tool_form" ).submit(); + var refresh = false; + var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' ) + if ( refresh_on_change_values ) { + refresh_on_change_values = refresh_on_change_values.value.split( ',' ); + var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' ); + for( i= 0; i < refresh_on_change_values.length; i++ ) { + if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){ + refresh = true; + break; + } + } + } + else { + refresh = true; + } + if ( refresh ){ + $( ':file' ).each( function() { + var file_value = $( this )[0].value; + if ( 
file_value ) { + //disable file input, since we don't want to upload the file on refresh + var file_name = $( this )[0].name; + $( this )[0].name = 'replaced_file_input_' + file_name + $( this )[0].disable = true; + //create a new hidden field which stores the filename and has the original name of the file input + var new_file_input = document.createElement( 'input' ); + new_file_input.type = 'hidden'; + new_file_input.value = file_value; + new_file_input.name = file_name; + document.getElementById( 'tool_form' ).appendChild( new_file_input ); + } + } ); + $( "#tool_form" ).submit(); + } }); }); %if not add_frame.debug: @@ -72,6 +104,38 @@ %> ${row_for_param( group_prefix, input.test_param, group_state, group_errors, other_values )} ${do_inputs( input.cases[current_case].inputs, group_state, group_errors, group_prefix, other_values )} + %elif input.type == "upload_dataset": + %if input.get_datatype( trans, other_values ).composite_type is None: #have non-composite upload appear as before + <% + if input.name in errors: + rep_errors = errors[input.name][0] + else: + rep_errors = dict() + %> + ${do_inputs( input.inputs, tool_state[input.name][0], rep_errors, prefix + input.name + "_" + str( 0 ) + "|", other_values )} + %else: + <div class="repeat-group"> + <div class="form-title-row"><b>${input.group_title( other_values )}</b></div> + <% + repeat_state = tool_state[input.name] + %> + %for i in range( len( repeat_state ) ): + <div class="repeat-group-item"> + <% + if input.name in errors: + rep_errors = errors[input.name][i] + else: + rep_errors = dict() + index = repeat_state[i]['__index__'] + %> + <div class="form-title-row"><b>File Contents for ${input.title_by_index( trans, i, other_values )}</b></div> + ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )} + ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div> + </div> + %endfor + ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div> + </div> + %endif %else: ${row_for_param( prefix, input, tool_state, errors, other_values )} %endif @@ -127,7 +191,7 @@ <br/> %endif -<div class="toolForm" id="$tool.id"> +<div class="toolForm" id="${tool.id}"> %if tool.has_multiple_pages: <div class="toolFormTitle">${tool.name} (step ${tool_state.page+1} of ${tool.npages})</div> %else: diff -r c0c50620b89d -r 73a8b43f1d97 tools/data_source/upload.xml --- a/tools/data_source/upload.xml Mon Jun 08 12:35:38 2009 -0400 +++ b/tools/data_source/upload.xml Mon Jun 08 12:49:26 2009 -0400 @@ -1,17 +1,11 @@ <?xml version="1.0"?> -<tool name="Upload File" id="upload1"> +<tool name="Upload File" id="upload1" version="1.0.1"> <description> from your computer </description> <action module="galaxy.tools.actions.upload" class="UploadToolAction"/> <inputs> - <param name="async_datasets" type="hidden" value="None"/> - <param name="file_data" type="file" size="30" label="File" ajax-upload="true"/> - <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/> - <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."> - <option value="Yes">Yes</option> - </param> <param name="file_type" type="select" label="File Format" help="Which format? 
See help below"> <options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ "%s%s%s" % ( line, self.separator, line ) for line in obj ]"> <column name="value" index="1"/> @@ -20,6 +14,16 @@ <filter type="add_value" name="Auto-detect" value="auto" index="0"/> </options> </param> + <param name="async_datasets" type="hidden" value="None"/> + <upload_dataset name="files" title="Specify Files for Dataset" file_type_name="file_type"> + <param name="file_data" type="file" size="30" label="File" ajax-upload="true"> + <validator type="expression" message="You will need to reselect the file you specified (%s)." substitute_value_in_message="True">not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )</validator> <!-- use validator to post message to user about needing to reselect the file, since most browsers won't accept the value attribute for file inputs --> + </param> + <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/> + <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."> + <option value="Yes">Yes</option> + </param> + </upload_dataset> <param name="dbkey" type="genomebuild" label="Genome" /> </inputs> <help>
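A note on the form_builder.py hunk above: the select tag now carries two extra attributes, refresh_on_change_values (the comma-joined list of values that should trigger a refresh) and, when an option is preselected, last_selected_value. The tool form script compares the current and last-selected values against the refresh list before resubmitting. A minimal standalone sketch of that rendering logic, in Python 2 to match the code base (an approximation for illustration, not Galaxy's actual SelectField; the "svg"/"rgenetics" trigger values in the usage line are hypothetical):

    # Sketch of the patched SelectField rendering: collect the selected
    # option's value while emitting <option> tags, then prepend a <select>
    # tag whose attributes include refresh_on_change_values and, if any
    # option was preselected, last_selected_value.
    class SelectField( object ):
        def __init__( self, name, refresh_on_change=False, refresh_on_change_values=[] ):
            self.name = name
            self.options = []  # ( text, value, selected ) tuples
            if refresh_on_change:
                self.refresh_on_change_text = ' refresh_on_change="true"'
                if refresh_on_change_values:
                    self.refresh_on_change_text += ' refresh_on_change_values="%s"' % ",".join( refresh_on_change_values )
            else:
                self.refresh_on_change_text = ''
        def add_option( self, text, value, selected=False ):
            self.options.append( ( text, value, selected ) )
        def get_html( self ):
            rval = []
            last_selected_value = ""
            for text, value, selected in self.options:
                if selected:
                    last_selected_value = value
                rval.append( '<option value="%s"%s>%s</option>' % ( value, selected and " selected" or "", text ) )
            attrs = self.refresh_on_change_text
            if last_selected_value:
                attrs += ' last_selected_value="%s"' % last_selected_value
            rval.insert( 0, '<select name="%s"%s>' % ( self.name, attrs ) )
            rval.append( '</select>' )
            return "\n".join( rval )

    field = SelectField( "file_type", refresh_on_change=True, refresh_on_change_values=[ "svg", "rgenetics" ] )
    field.add_option( "Auto-detect", "auto", selected=True )
    print field.get_html()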
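The tool_form.mako and upload.xml hunks work as a pair. When a refresh fires, the new script renames each non-empty file input to replaced_file_input_<name> and posts the chosen filename in a hidden field under the original name (note it sets the nonstandard "disable" property; the DOM property is "disabled"). Since browsers won't repopulate a file input from a value attribute, upload.xml attaches an expression validator to file_data that catches the posted string and asks the user to reselect the file. Assuming the usual Galaxy convention that an expression validator raises its message when the expression evaluates False, the logic behaves like this quick Python 2 check (the filename is hypothetical; a real upload posts a cgi.FieldStorage-like object, not a string):

    # The validator expression, copied from the upload.xml diff, is True for
    # anything that is not a non-empty string: real uploads pass, but a stale
    # filename posted back by the refresh trick fails and triggers the
    # "reselect the file" message.
    def needs_reselect( value ):
        ok = not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )
        return not ok

    class FakeFieldStorage( object ):
        pass  # stand-in for an actual multipart file upload

    print needs_reselect( "" )                        # False: nothing entered yet
    print needs_reselect( u"/home/user/reads.fastq" ) # True: stale filename after a refresh
    print needs_reselect( FakeFieldStorage() )        # False: an actual file upload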
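Finally, the tool_form.mako template branches on input.get_datatype( trans, other_values ).composite_type: non-composite types render a single file_data/url_paste/space_to_tab group exactly as before, while composite types render one titled repeat-group item per declared composite file. A rough sketch of that grouping decision, with hypothetical stand-in classes rather than Galaxy's actual UploadDataset or datatype classes:

    # One upload group for ordinary datatypes, one labeled group per declared
    # composite file otherwise. Galaxy keys composite files in an odict to
    # preserve declaration order; a plain dict is used here for brevity.
    def group_titles( datatype ):
        if datatype.composite_type is None:
            return [ None ]  # single anonymous group, rendered as before
        titles = []
        for name, info in datatype.composite_files.items():
            titles.append( info.get( 'optional' ) and "%s (optional)" % name or name )
        return titles

    class Tabular( object ):          # hypothetical non-composite datatype
        composite_type = None
        composite_files = {}

    class SomeComposite( object ):    # hypothetical composite datatype
        composite_type = 'basic'
        composite_files = { 'index': { 'optional': False }, 'index.log': { 'optional': True } }

    print group_titles( Tabular() )        # [None]
    print group_titles( SomeComposite() )  # one title per composite file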