commit/galaxy-central: 6 new changesets
6 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/changeset/c2c7e22ee51c/
changeset: c2c7e22ee51c
user:      jmchilton
date:      2012-11-14 22:49:53
summary:   Fixes for multiple input data parameters: they would fail if only a single input was selected, and in production settings where multiple Galaxy processes are used (the input parameters must take a different path through the code in that case). This changeset addresses both of those issues.

The introduction of the DatasetListWrapper class to address this may seem like overkill right now, but I think over the coming months it will prove useful. Once there are multiple ways of selecting many files (be it this, by tag, or the composite dataset work I am doing), it will be valuable to have uniform ways of accessing those files in Cheetah templates; this class helps pave the way for that.

affected #: 2 files

diff -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 -r c2c7e22ee51c001e4a19ce397c90ccb61e4d4ca2 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2256,12 +2256,11 @@
                 current = values["__current_case__"]
                 wrap_values( input.cases[current].inputs, values )
             elif isinstance( input, DataToolParameter ) and input.multiple:
-                values = input_values[ input.name ]
                 input_values[ input.name ] = \
-                    [DatasetFilenameWrapper( value,
-                                             datatypes_registry = self.app.datatypes_registry,
-                                             tool = self,
-                                             name = input.name ) for value in values]
+                    DatasetListWrapper( input_values[ input.name ],
+                                        datatypes_registry = self.app.datatypes_registry,
+                                        tool = self,
+                                        name = input.name )
             elif isinstance( input, DataToolParameter ):
                 ## FIXME: We're populating param_dict with conversions when
                 ## wrapping values, this should happen as a separate
@@ -2333,10 +2332,13 @@
         # but this should be considered DEPRECATED, instead use:
         #   $dataset.get_child( 'name' ).filename
         for name, data in input_datasets.items():
-            param_dict[name] = DatasetFilenameWrapper( data,
-                                                       datatypes_registry = self.app.datatypes_registry,
-                                                       tool = self,
-                                                       name = name )
+            param_dict_value = param_dict.get(name, None)
+            if not isinstance(param_dict_value, (DatasetFilenameWrapper, DatasetListWrapper)):
+                param_dict[name] = DatasetFilenameWrapper( data,
+                                                           datatypes_registry = self.app.datatypes_registry,
+                                                           tool = self,
+                                                           name = name )
+
             if data:
                 for child in data.children:
                     param_dict[ "_CHILD___%s___%s" % ( name, child.designation ) ] = DatasetFilenameWrapper( child )
@@ -3102,7 +3104,16 @@
         return getattr( self.dataset, key )
     def __nonzero__( self ):
         return bool( self.dataset )
-
+
+class DatasetListWrapper( list ):
+    """
+    """
+    def __init__( self, datasets, **kwargs ):
+        if not isinstance(datasets, list):
+            datasets = [datasets]
+        list.__init__( self, [DatasetFilenameWrapper(dataset, **kwargs) for dataset in datasets] )
+
+
 def json_fix( val ):
     if isinstance( val, list ):
         return [ json_fix( v ) for v in val ]

diff -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 -r c2c7e22ee51c001e4a19ce397c90ccb61e4d4ca2 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -145,6 +145,12 @@
                 values = input_values[ input.name ]
                 current = values[ "__current_case__" ]
                 wrap_values( input.cases[current].inputs, values, skip_missing_values = skip_missing_values )
+            elif isinstance( input, DataToolParameter ) and input.multiple:
+                input_values[ input.name ] = \
+                    galaxy.tools.DatasetListWrapper( input_values[ input.name ],
+                                                     datatypes_registry = trans.app.datatypes_registry,
+                                                     tool = tool,
+                                                     name = input.name )
             elif isinstance( input, DataToolParameter ):
                 input_values[ input.name ] = \
                     galaxy.tools.DatasetFilenameWrapper( input_values[ input.name ],
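For illustration only (not part of the changeset), here is a minimal, self-contained sketch of the idea behind DatasetListWrapper: normalize "one dataset or many" into a list-like object so downstream code such as Cheetah templates can always iterate. The FakeDatasetWrapper class and the fake file path are invented stand-ins for Galaxy's DatasetFilenameWrapper.

class FakeDatasetWrapper(object):
    # Invented stand-in for DatasetFilenameWrapper, just for this sketch.
    def __init__(self, dataset, **kwargs):
        self.dataset = dataset
    def __str__(self):
        # The real wrapper renders as the dataset's file name; this fakes it.
        return "/fake/files/%s.dat" % self.dataset

class FakeDatasetListWrapper(list):
    # Mirrors the DatasetListWrapper added above: accept one item or a list,
    # wrap each element, and behave like a plain list of wrappers.
    def __init__(self, datasets, **kwargs):
        if not isinstance(datasets, list):
            datasets = [datasets]
        list.__init__(self, [FakeDatasetWrapper(d, **kwargs) for d in datasets])

# A template (or any consumer) can now iterate the same way whether the user
# selected one dataset or several:
for wrapper in FakeDatasetListWrapper("a"):
    print(wrapper)
for wrapper in FakeDatasetListWrapper(["a", "b"]):
    print(wrapper)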
https://bitbucket.org/galaxy/galaxy-central/changeset/e8c84dd71578/
changeset: e8c84dd71578
user:      jmchilton
date:      2012-11-14 23:10:31
summary:   Allow output data attributes `format` and `metadata_source` to work with multiple input data parameters - the first item selected will be used as the source. This decision was discussed with Dannon in IRC and he thought it was an acceptable approach.

affected #: 1 file

diff -r c2c7e22ee51c001e4a19ce397c90ccb61e4d4ca2 -r e8c84dd715782e7c1d709d8068e6033b835f7f39 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -62,7 +62,11 @@
                     # If there are multiple inputs with the same name, they
                     # are stored as name1, name2, ...
                     for i, v in enumerate( value ):
-                        input_datasets[ prefix + input.name + str( i + 1 ) ] = process_dataset( v )
+                        processed_dataset = process_dataset( v )
+                        if i == 0:
+                            # Allow copying metadata to output, first item will be source.
+                            input_datasets[ prefix + input.name ] = processed_dataset
+                        input_datasets[ prefix + input.name + str( i + 1 ) ] = processed_dataset
                         conversions = []
                         for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
                             new_data = process_dataset( input_datasets[ prefix + input.name + str( i + 1 ) ], conversion_datatypes )

https://bitbucket.org/galaxy/galaxy-central/changeset/df2eb3960fd9/
changeset: df2eb3960fd9
user:      jmchilton
date:      2012-11-15 05:17:55
summary:   Fix another error encountered only in multiple process mode, this one related to job rerunning.

affected #: 1 file

diff -r e8c84dd715782e7c1d709d8068e6033b835f7f39 -r df2eb3960fd92e7877bdd31cf5368cb062a9471c lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1551,6 +1551,8 @@
                 raise ValueError( "History does not include a dataset of the required format / build" )
         if value in [None, "None"]:
             return None
+        if isinstance( value, str ) and value.find( "," ) > 0:
+            value = [ int( value_part ) for value_part in value.split( "," ) ]
         if isinstance( value, list ):
             rval = [ trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( v ) for v in value ]
         elif isinstance( value, trans.app.model.HistoryDatasetAssociation ):
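As an aside (not from the changeset), the normalization added above can be sketched in isolation. The function name normalize_dataset_ids is invented here, and the exact rerun scenario is an assumption based on the commit summary: presumably, in multiple-process mode the rerun form can hand back the selected dataset ids as a single comma-separated string rather than a list.

def normalize_dataset_ids(value):
    # Turn a comma-separated id string like "4,7,9" into [4, 7, 9].
    if isinstance(value, str) and value.find(",") > 0:
        return [int(part) for part in value.split(",")]
    # Lists and single values pass through to the existing handling.
    return value

assert normalize_dataset_ids("4,7,9") == [4, 7, 9]
assert normalize_dataset_ids("4") == "4"
assert normalize_dataset_ids([4, 7]) == [4, 7]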
https://bitbucket.org/galaxy/galaxy-central/changeset/71dbc3556ee6/
changeset: 71dbc3556ee6
user:      jmchilton
date:      2012-11-28 18:24:20
summary:   Fix for the "View Details" display of jobs run with multiple input data parameters.

affected #: 1 file

diff -r df2eb3960fd92e7877bdd31cf5368cb062a9471c -r 71dbc3556ee69959d35bae887e7b783561d779f6 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1600,8 +1600,10 @@
             return value.file_name

     def value_to_display_text( self, value, app ):
+        if not isinstance(value, list):
+            value = [ value ]
         if value:
-            return "%s: %s" % ( value.hid, value.name )
+            return ", ".join( [ "%s: %s" % ( item.hid, item.name ) for item in value ] )
         else:
             return "No dataset"

https://bitbucket.org/galaxy/galaxy-central/changeset/caa480e454b3/
changeset: caa480e454b3
user:      jmchilton
date:      2012-12-03 20:10:30
summary:   Fix for this issue: http://dev.list.galaxyproject.org/workflow-input-param-issue-td4657311.html. The reporter of the bug, Marc Logghe, verified that this fixes the immediate problem caused by my previous pull request. The underlying issues with workflows when parameter names are the same still appear to be a problem, but that is outside the scope of this pull request.

affected #: 2 files

diff -r 71dbc3556ee69959d35bae887e7b783561d779f6 -r caa480e454b377a94a5b1ebc545c253b0594643e templates/workflow/display.mako
--- a/templates/workflow/display.mako
+++ b/templates/workflow/display.mako
@@ -45,9 +45,12 @@
                         %if isinstance( param, DataToolParameter ):
                             %if ( prefix + param.name ) in step.input_connections_by_name:
                                 <%
-                                    conn = step.input_connections_by_name[ prefix + param.name ]
+                                    conns = step.input_connections_by_name[ prefix + param.name ]
+                                    if not isinstance(conns, list):
+                                        conns = [conns]
+                                    vals = ["Output dataset '%s' from step %d" % (conn.output_name, int(conn.output_step.order_index)+1) for conn in conns]
                                 %>
-                                Output dataset '${conn.output_name}' from step ${int(conn.output_step.order_index)+1}
+                                ${",".join(vals)}
                             %else:
                                 <i>select at runtime</i>
                             %endif

diff -r 71dbc3556ee69959d35bae887e7b783561d779f6 -r caa480e454b377a94a5b1ebc545c253b0594643e templates/workflow/run.mako
--- a/templates/workflow/run.mako
+++ b/templates/workflow/run.mako
@@ -243,9 +243,12 @@
                 %if isinstance( param, DataToolParameter ):
                     %if ( prefix + param.name ) in step.input_connections_by_name:
                         <%
-                            conn = step.input_connections_by_name[ prefix + param.name ]
+                            conns = step.input_connections_by_name[ prefix + param.name ]
+                            if not isinstance(conns, list):
+                                conns = [conns]
+                            vals = ["Output dataset '%s' from step %d" % (conn.output_name, int(conn.output_step.order_index)+1) for conn in conns]
                         %>
-                        Output dataset '${conn.output_name}' from step ${int(conn.output_step.order_index)+1}
+                        ${",".join(vals)}
                     %else:
                         ## FIXME: Initialize in the controller
                         <%
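For reference only (not part of the changeset), the template logic above can be expressed as plain Python. FakeConnection is an invented stand-in for a workflow input connection object; the point is that input_connections_by_name may now map a parameter name to a list of connections, so the display code normalizes to a list and joins one label per connection.

class FakeConnection(object):
    # Invented stand-in for a workflow input connection.
    def __init__(self, output_name, order_index):
        self.output_name = output_name
        self.order_index = order_index

def connection_labels(conns):
    # Normalize a single connection or a list of connections to a list,
    # then build one "Output dataset ... from step N" label per connection.
    if not isinstance(conns, list):
        conns = [conns]
    return ",".join("Output dataset '%s' from step %d" % (c.output_name, int(c.order_index) + 1)
                    for c in conns)

print(connection_labels(FakeConnection("out_file1", 0)))
print(connection_labels([FakeConnection("out_file1", 0), FakeConnection("out_file1", 1)]))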
https://bitbucket.org/galaxy/galaxy-central/changeset/1ac27213bafb/
changeset: 1ac27213bafb
user:      dannon
date:      2012-12-03 23:19:52
summary:   Merged in jmchilton/galaxy-central-multi-input-tool-fixes-2 (pull request #85)

affected #: 5 files

diff -r 8a3f874b8e0a20afb362b8c4e92989a6cf76509b -r 1ac27213bafb6d3fef210c17728eeef0338bd8ad lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2256,12 +2256,11 @@
                 current = values["__current_case__"]
                 wrap_values( input.cases[current].inputs, values )
             elif isinstance( input, DataToolParameter ) and input.multiple:
-                values = input_values[ input.name ]
                 input_values[ input.name ] = \
-                    [DatasetFilenameWrapper( value,
-                                             datatypes_registry = self.app.datatypes_registry,
-                                             tool = self,
-                                             name = input.name ) for value in values]
+                    DatasetListWrapper( input_values[ input.name ],
+                                        datatypes_registry = self.app.datatypes_registry,
+                                        tool = self,
+                                        name = input.name )
             elif isinstance( input, DataToolParameter ):
                 ## FIXME: We're populating param_dict with conversions when
                 ## wrapping values, this should happen as a separate
@@ -2333,10 +2332,13 @@
         # but this should be considered DEPRECATED, instead use:
         #   $dataset.get_child( 'name' ).filename
         for name, data in input_datasets.items():
-            param_dict[name] = DatasetFilenameWrapper( data,
-                                                       datatypes_registry = self.app.datatypes_registry,
-                                                       tool = self,
-                                                       name = name )
+            param_dict_value = param_dict.get(name, None)
+            if not isinstance(param_dict_value, (DatasetFilenameWrapper, DatasetListWrapper)):
+                param_dict[name] = DatasetFilenameWrapper( data,
+                                                           datatypes_registry = self.app.datatypes_registry,
+                                                           tool = self,
+                                                           name = name )
+
             if data:
                 for child in data.children:
                     param_dict[ "_CHILD___%s___%s" % ( name, child.designation ) ] = DatasetFilenameWrapper( child )
@@ -3102,7 +3104,16 @@
         return getattr( self.dataset, key )
     def __nonzero__( self ):
         return bool( self.dataset )
-
+
+class DatasetListWrapper( list ):
+    """
+    """
+    def __init__( self, datasets, **kwargs ):
+        if not isinstance(datasets, list):
+            datasets = [datasets]
+        list.__init__( self, [DatasetFilenameWrapper(dataset, **kwargs) for dataset in datasets] )
+
+
 def json_fix( val ):
     if isinstance( val, list ):
         return [ json_fix( v ) for v in val ]

diff -r 8a3f874b8e0a20afb362b8c4e92989a6cf76509b -r 1ac27213bafb6d3fef210c17728eeef0338bd8ad lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -62,7 +62,11 @@
                     # If there are multiple inputs with the same name, they
                     # are stored as name1, name2, ...
                     for i, v in enumerate( value ):
-                        input_datasets[ prefix + input.name + str( i + 1 ) ] = process_dataset( v )
+                        processed_dataset = process_dataset( v )
+                        if i == 0:
+                            # Allow copying metadata to output, first item will be source.
+                            input_datasets[ prefix + input.name ] = processed_dataset
+                        input_datasets[ prefix + input.name + str( i + 1 ) ] = processed_dataset
                         conversions = []
                         for conversion_name, conversion_extensions, conversion_datatypes in input.conversions:
                             new_data = process_dataset( input_datasets[ prefix + input.name + str( i + 1 ) ], conversion_datatypes )
@@ -145,6 +149,12 @@
                 values = input_values[ input.name ]
                 current = values[ "__current_case__" ]
                 wrap_values( input.cases[current].inputs, values, skip_missing_values = skip_missing_values )
+            elif isinstance( input, DataToolParameter ) and input.multiple:
+                input_values[ input.name ] = \
+                    galaxy.tools.DatasetListWrapper( input_values[ input.name ],
+                                                     datatypes_registry = trans.app.datatypes_registry,
+                                                     tool = tool,
+                                                     name = input.name )
             elif isinstance( input, DataToolParameter ):
                 input_values[ input.name ] = \
                     galaxy.tools.DatasetFilenameWrapper( input_values[ input.name ],

diff -r 8a3f874b8e0a20afb362b8c4e92989a6cf76509b -r 1ac27213bafb6d3fef210c17728eeef0338bd8ad lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1551,6 +1551,8 @@
                 raise ValueError( "History does not include a dataset of the required format / build" )
         if value in [None, "None"]:
             return None
+        if isinstance( value, str ) and value.find( "," ) > 0:
+            value = [ int( value_part ) for value_part in value.split( "," ) ]
         if isinstance( value, list ):
             rval = [ trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( v ) for v in value ]
         elif isinstance( value, trans.app.model.HistoryDatasetAssociation ):
@@ -1598,8 +1600,10 @@
             return value.file_name

     def value_to_display_text( self, value, app ):
+        if not isinstance(value, list):
+            value = [ value ]
         if value:
-            return "%s: %s" % ( value.hid, value.name )
+            return ", ".join( [ "%s: %s" % ( item.hid, item.name ) for item in value ] )
         else:
             return "No dataset"
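As an illustration only (not part of the merge), the value_to_display_text change included just above can be exercised in isolation. FakeDataset is an invented stand-in for a HistoryDatasetAssociation, carrying only the attributes the display code reads.

class FakeDataset(object):
    # Invented stand-in for a HistoryDatasetAssociation.
    def __init__(self, hid, name):
        self.hid = hid
        self.name = name

def value_to_display_text(value):
    # Accept either a single dataset-like object or a list of them and
    # render comma-separated "hid: name" pairs.
    if not isinstance(value, list):
        value = [value]
    if value:
        return ", ".join("%s: %s" % (item.hid, item.name) for item in value)
    return "No dataset"

print(value_to_display_text(FakeDataset(1, "reads.fastq")))
print(value_to_display_text([FakeDataset(1, "a.bed"), FakeDataset(2, "b.bed")]))
print(value_to_display_text([]))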
diff -r 8a3f874b8e0a20afb362b8c4e92989a6cf76509b -r 1ac27213bafb6d3fef210c17728eeef0338bd8ad templates/workflow/display.mako
--- a/templates/workflow/display.mako
+++ b/templates/workflow/display.mako
@@ -45,9 +45,12 @@
                         %if isinstance( param, DataToolParameter ):
                             %if ( prefix + param.name ) in step.input_connections_by_name:
                                 <%
-                                    conn = step.input_connections_by_name[ prefix + param.name ]
+                                    conns = step.input_connections_by_name[ prefix + param.name ]
+                                    if not isinstance(conns, list):
+                                        conns = [conns]
+                                    vals = ["Output dataset '%s' from step %d" % (conn.output_name, int(conn.output_step.order_index)+1) for conn in conns]
                                 %>
-                                Output dataset '${conn.output_name}' from step ${int(conn.output_step.order_index)+1}
+                                ${",".join(vals)}
                             %else:
                                 <i>select at runtime</i>
                             %endif

diff -r 8a3f874b8e0a20afb362b8c4e92989a6cf76509b -r 1ac27213bafb6d3fef210c17728eeef0338bd8ad templates/workflow/run.mako
--- a/templates/workflow/run.mako
+++ b/templates/workflow/run.mako
@@ -243,9 +243,12 @@
                 %if isinstance( param, DataToolParameter ):
                     %if ( prefix + param.name ) in step.input_connections_by_name:
                         <%
-                            conn = step.input_connections_by_name[ prefix + param.name ]
+                            conns = step.input_connections_by_name[ prefix + param.name ]
+                            if not isinstance(conns, list):
+                                conns = [conns]
+                            vals = ["Output dataset '%s' from step %d" % (conn.output_name, int(conn.output_step.order_index)+1) for conn in conns]
                         %>
-                        Output dataset '${conn.output_name}' from step ${int(conn.output_step.order_index)+1}
+                        ${",".join(vals)}
                     %else:
                         ## FIXME: Initialize in the controller
                         <%

Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.