[hg] galaxy 3357: Change naming of converter to conversion for 3356:c64ef44ed4c5

details: http://www.bx.psu.edu/hg/galaxy/rev/dedb7be9aa44 changeset: 3357:dedb7be9aa44 user: Dan Blankenberg <dan@bx.psu.edu> date: Mon Feb 08 21:33:12 2010 -0500 description: Change naming of converter to conversion for 3356:c64ef44ed4c5 to more properly reflect the function. diffstat: lib/galaxy/tools/__init__.py | 18 ++++++++-------- lib/galaxy/tools/actions/__init__.py | 40 ++++++++++++++++++------------------ lib/galaxy/tools/parameters/basic.py | 8 +++--- 3 files changed, 33 insertions(+), 33 deletions(-) diffs (128 lines): diff -r c64ef44ed4c5 -r dedb7be9aa44 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py Mon Feb 08 12:45:28 2010 -0500 +++ b/lib/galaxy/tools/__init__.py Mon Feb 08 21:33:12 2010 -0500 @@ -1198,27 +1198,27 @@ current = values["__current_case__"] wrap_values( input.cases[current].inputs, values ) elif isinstance( input, DataToolParameter ): - ##FIXME: We're populating param_dict with converters when wrapping values, + ##FIXME: We're populating param_dict with conversions when wrapping values, ##this should happen as a separate step before wrapping (or call this wrapping step something more generic) ##(but iterating this same list twice would be wasteful) - #add explicit converters by name to current parent - for converter_name, converter_extensions, converter_datatypes in input.converters: + #add explicit conversions by name to current parent + for conversion_name, conversion_extensions, conversion_datatypes in input.conversions: #if we are at building cmdline step, then converters have already executed - conv_ext, converted_dataset = input_values[ input.name ].find_conversion_destination( converter_datatypes ) + conv_ext, converted_dataset = input_values[ input.name ].find_conversion_destination( conversion_datatypes ) #when dealing with optional inputs, we'll provide a valid extension to be used for None converted dataset if not conv_ext: - conv_ext = converter_extensions[0] + conv_ext = conversion_extensions[0] 
#input_values[ input.name ] is None when optional dataset, #'conversion' of optional dataset should create wrapper around NoneDataset for converter output if input_values[ input.name ] and not converted_dataset: #input that converter is based from has a value, but converted dataset does not exist - raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_values[ input.name ].extension, converter_extensions ) + raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_values[ input.name ].extension, conversion_extensions ) else: - input_values[ converter_name ] = \ + input_values[ conversion_name ] = \ DatasetFilenameWrapper( converted_dataset, datatypes_registry = self.app.datatypes_registry, - tool = Bunch( converter_name = Bunch( extensions = conv_ext ) ), #trick wrapper into using target conv ext (when None) without actually being a tool parameter - name = converter_name ) + tool = Bunch( conversion_name = Bunch( extensions = conv_ext ) ), #trick wrapper into using target conv ext (when None) without actually being a tool parameter + name = conversion_name ) #wrap actual input dataset input_values[ input.name ] = \ DatasetFilenameWrapper( input_values[ input.name ], diff -r c64ef44ed4c5 -r dedb7be9aa44 lib/galaxy/tools/actions/__init__.py --- a/lib/galaxy/tools/actions/__init__.py Mon Feb 08 12:45:28 2010 -0500 +++ b/lib/galaxy/tools/actions/__init__.py Mon Feb 08 21:33:12 2010 -0500 @@ -63,41 +63,41 @@ # are stored as name1, name2, ... 
for i, v in enumerate( value ): input_datasets[ prefix + input.name + str( i + 1 ) ] = process_dataset( v ) - converters = [] - for converter_name, converter_extensions, converter_datatypes in input.converters: - new_data = process_dataset( input_datasets[ prefix + input.name + str( i + 1 ) ], converter_datatypes ) - if not new_data or isinstance( new_data.datatype, converter_datatypes ): - input_datasets[ prefix + converter_name + str( i + 1 ) ] = new_data - converters.append( ( converter_name, new_data ) ) + conversions = [] + for conversion_name, conversion_extensions, conversion_datatypes in input.conversions: + new_data = process_dataset( input_datasets[ prefix + input.name + str( i + 1 ) ], conversion_datatypes ) + if not new_data or isinstance( new_data.datatype, conversion_datatypes ): + input_datasets[ prefix + conversion_name + str( i + 1 ) ] = new_data + conversions.append( ( conversion_name, new_data ) ) else: - raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, converter_extensions ) + raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name + str( i + 1 ) ].extension, conversion_extensions ) if parent: parent[input.name] = input_datasets[ prefix + input.name + str( i + 1 ) ] - for converter_name, converter_data in converters: + for conversion_name, conversion_data in conversions: #allow explicit conversion to be stored in job_parameter table - parent[ converter_name ] = converter_data.id #a more robust way to determine JSONable value is desired + parent[ conversion_name ] = conversion_data.id #a more robust way to determine JSONable value is desired else: param_values[input.name][i] = input_datasets[ prefix + input.name + str( i + 1 ) ] - for converter_name, converter_data in converters: + for conversion_name, conversion_data in conversions: #allow explicit conversion to 
be stored in job_parameter table - param_values[ converter_name ][i] = converter_data.id #a more robust way to determine JSONable value is desired + param_values[ conversion_name ][i] = conversion_data.id #a more robust way to determine JSONable value is desired else: input_datasets[ prefix + input.name ] = process_dataset( value ) - converters = [] - for converter_name, converter_extensions, converter_datatypes in input.converters: - new_data = process_dataset( input_datasets[ prefix + input.name ], converter_datatypes ) - if not new_data or isinstance( new_data.datatype, converter_datatypes ): - input_datasets[ prefix + converter_name ] = new_data - converters.append( ( converter_name, new_data ) ) + conversions = [] + for conversion_name, conversion_extensions, conversion_datatypes in input.conversions: + new_data = process_dataset( input_datasets[ prefix + input.name ], conversion_datatypes ) + if not new_data or isinstance( new_data.datatype, conversion_datatypes ): + input_datasets[ prefix + conversion_name ] = new_data + conversions.append( ( conversion_name, new_data ) ) else: - raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name ].extension, converter_extensions ) + raise Exception, 'A path for explicit datatype conversion has not been found: %s --/--> %s' % ( input_datasets[ prefix + input.name ].extension, conversion_extensions ) target_dict = parent if not target_dict: target_dict = param_values target_dict[ input.name ] = input_datasets[ prefix + input.name ] - for converter_name, converter_data in converters: + for conversion_name, conversion_data in conversions: #allow explicit conversion to be stored in job_parameter table - target_dict[ converter_name ] = converter_data.id #a more robust way to determine JSONable value is desired + target_dict[ conversion_name ] = conversion_data.id #a more robust way to determine JSONable value is desired tool.visit_inputs( 
param_values, visitor ) return input_datasets diff -r c64ef44ed4c5 -r dedb7be9aa44 lib/galaxy/tools/parameters/basic.py --- a/lib/galaxy/tools/parameters/basic.py Mon Feb 08 12:45:28 2010 -0500 +++ b/lib/galaxy/tools/parameters/basic.py Mon Feb 08 21:33:12 2010 -0500 @@ -1168,15 +1168,15 @@ else: self.options = dynamic_options.DynamicOptions( options, self ) self.is_dynamic = self.options is not None - # Load converters required for the dataset input - self.converters = [] - for conv_elem in elem.findall( "converter" ): + # Load conversions required for the dataset input + self.conversions = [] + for conv_elem in elem.findall( "conversion" ): name = conv_elem.get( "name" ) #name for commandline substitution conv_extensions = conv_elem.get( "type" ) #target datatype extension # FIXME: conv_extensions should be able to be an ordered list assert None not in [ name, type ], 'A name (%s) and type (%s) are required for explicit conversion' % ( name, type ) conv_types = tool.app.datatypes_registry.get_datatype_by_extension( conv_extensions.lower() ).__class__ - self.converters.append( ( name, conv_extensions, conv_types ) ) + self.conversions.append( ( name, conv_extensions, conv_types ) ) def get_html_field( self, trans=None, value=None, other_values={} ): filter_value = None
participants (1)
-
Greg Von Kuster