2 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/d2f7780bd1b1/
Changeset:   d2f7780bd1b1
User:        jmchilton
Date:        2014-11-11 18:41:00+00:00
Summary:     Refactor dataset matcher stuff for use outside of field generation. (i.e. for reuse in filtering datasets on the client side)
Affected #:  2 files

diff -r d519ad65435883112008e90ec06ebf44a91586e7 -r d2f7780bd1b1d5b545c6c222fb383d6a35833855 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -1796,26 +1796,52 @@
         field_name = "%s%s" % ( self.name, suffix )
         field = form_builder.SelectField( field_name, multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values )
+        for history_dataset_collection in self.match_collections( history, dataset_matcher, reduction=reduction ):
+            name = history_dataset_collection.name
+            hid = str( history_dataset_collection.hid )
+            hidden_text = "" # TODO
+            id = value_modifier( dataset_matcher.trans.security.encode_id( history_dataset_collection.id ) )
+            selected = value and history_dataset_collection in value
+            text = "%s:%s %s" % ( hid, hidden_text, name )
+            field.add_option( text, id, selected )
+
+        return field
+
+    def _get_select_dataset_field( self, history, dataset_matcher, multiple=False, suffix="" ):
+        field_name = "%s%s" % ( self.name, suffix )
+        field = form_builder.SelectField( field_name, multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values )
+
+        for hda_match, hid in self.match_datasets( history, dataset_matcher ):
+            if not hda_match.implicit_conversion:
+                hda = hda_match.hda
+                hda_name = hda.name
+                selected = dataset_matcher.selected( hda )
+                if hda.visible:
+                    hidden_text = ""
+                else:
+                    hidden_text = " (hidden)"
+                field.add_option( "%s:%s %s" % ( hid, hidden_text, hda_name ), hda.id, selected )
+            else:
+                hda_name = hda_match.original_hda.name
+                hda = hda_match.hda # Get converted dataset
+                target_ext = hda_match.target_ext
+                selected = dataset_matcher.selected( hda )
+                field.add_option( "%s: (as %s) %s" % ( hid, target_ext, hda_name ), hda.id, selected )
+
+        self._ensure_selection( field )
+        return field
+
+    def match_collections( self, history, dataset_matcher, reduction=True ):
         dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher )
         for history_dataset_collection in history.active_dataset_collections:
             if dataset_collection_matcher.hdca_match( history_dataset_collection, reduction=reduction ):
-                name = history_dataset_collection.name
-                hid = str( history_dataset_collection.hid )
-                hidden_text = "" # TODO
-                id = value_modifier( dataset_matcher.trans.security.encode_id( history_dataset_collection.id ) )
-                selected = value and history_dataset_collection in value
-                text = "%s:%s %s" % ( hid, hidden_text, name )
-                field.add_option( text, id, selected )
+                yield history_dataset_collection
 
-        return field
+    def match_datasets( self, history, dataset_matcher ):
 
-    def _get_select_dataset_field( self, history, dataset_matcher, multiple=False, suffix="" ):
-
-        # CRUCIAL: the dataset_collector function needs to be local to DataToolParameter.get_html_field()
         def dataset_collector( hdas, parent_hid ):
             for i, hda in enumerate( hdas ):
-                hda_name = hda.name
                 if parent_hid is not None:
                     hid = "%s.%d" % ( parent_hid, i + 1 )
                 else:
@@ -1823,27 +1849,13 @@
                 hda_match = dataset_matcher.hda_match( hda )
                 if not hda_match:
                     continue
-                if not hda_match.implicit_conversion:
-                    selected = dataset_matcher.selected( hda )
-                    if hda.visible:
hidden_text = "" - else: - hidden_text = " (hidden)" - field.add_option( "%s:%s %s" % ( hid, hidden_text, hda_name ), hda.id, selected ) - else: - hda = hda_match.hda # Get converted dataset - target_ext = hda_match.target_ext - selected = dataset_matcher.selected( hda ) - field.add_option( "%s: (as %s) %s" % ( hid, target_ext, hda_name ), hda.id, selected ) + yield (hda_match, hid) # Also collect children via association object - dataset_collector( hda.children, hid ) + for item in dataset_collector( hda.children, hid ): + yield item - field_name = "%s%s" % ( self.name, suffix ) - field = form_builder.SelectField( field_name, multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values ) - - dataset_collector( history.active_datasets_children_and_roles, None ) - self._ensure_selection( field ) - return field + for item in dataset_collector( history.active_datasets_children_and_roles, None ): + yield item def get_initial_value( self, trans, context, history=None ): return self.get_initial_value_from_history_prevent_repeats(trans, context, None, history=history) @@ -2098,15 +2110,31 @@ return self._switch_fields( fields, default_field=default_field ) - def _get_single_collection_field( self, trans, history, value, other_values ): - field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values ) + def match_collections( self, trans, history, dataset_matcher ): dataset_collections = trans.app.dataset_collections_service.history_dataset_collections( history, self._history_query( trans ) ) - dataset_matcher = DatasetMatcher( trans, self, value, other_values ) dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher ) for dataset_collection_instance in dataset_collections: if not dataset_collection_matcher.hdca_match( dataset_collection_instance ): continue + yield dataset_collection_instance + + def match_multirun_collections( self, trans, history, dataset_matcher ): + dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher ) + + for history_dataset_collection in history.active_dataset_collections: + if not self._history_query( trans ).can_map_over( history_dataset_collection ): + continue + + datasets_match = dataset_collection_matcher.hdca_match( history_dataset_collection ) + if datasets_match: + yield history_dataset_collection + + def _get_single_collection_field( self, trans, history, value, other_values ): + field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values ) + dataset_matcher = DatasetMatcher( trans, self, value, other_values ) + + for dataset_collection_instance in self.match_collections( trans, history, dataset_matcher ): instance_id = dataset_collection_instance.hid instance_name = dataset_collection_instance.name selected = ( value and ( dataset_collection_instance == value ) ) @@ -2122,22 +2150,16 @@ field_name = "%s%s" % ( self.name, suffix ) field = form_builder.SelectField( field_name, multiple, None, self.refresh_on_change, refresh_on_change_values=self.refresh_on_change_values ) dataset_matcher = DatasetMatcher( trans, self, value, other_values ) - dataset_collection_matcher = DatasetCollectionMatcher( dataset_matcher ) - for history_dataset_collection in history.active_dataset_collections: - if not self._history_query( trans ).can_map_over( history_dataset_collection ): - continue + for history_dataset_collection in 
https://bitbucket.org/galaxy/galaxy-central/commits/e4c9f0b5a738/
Changeset:   e4c9f0b5a738
User:        jmchilton
Date:        2014-11-11 18:41:00+00:00
Summary:     Rework ToolDataParameter and ToolDataCollectionParameter to_dict to filter over valid matches.
             Doesn't do anything with them yet - passing this work off to Sam to populate the dictionaries in whatever manner makes most sense for UI consumption.
Affected #:  1 file

diff -r d2f7780bd1b1d5b545c6c222fb383d6a35833855 -r e4c9f0b5a73851ab2d8644889d305aaab80208d9 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -2066,12 +2066,26 @@
             ref = ref()
         return ref
 
-    def to_dict( self, trans, view='collection', value_mapper=None ):
+    def to_dict( self, trans, view='collection', value_mapper=None, other_values=None ):
         d = super( DataToolParameter, self ).to_dict( trans )
         d['extensions'] = self.extensions
         d['multiple'] = self.multiple
+        if other_values is None:
+            # No need to produce lists of datasets for history.
+            return d
+
+        dataset_matcher = DatasetMatcher( trans, self, None, other_values )
+        history = trans.history
+        multiple = self.multiple
+        for hda_match, hid in self.match_datasets( history, dataset_matcher ):
+            # hda_match not an hda - it is a description of the match, may
+            # describe match after implicit conversion.
+            pass
+        for history_dataset_collection in self.match_collections( history, dataset_matcher, reduction=multiple ):
+            pass
         return d
+
 
 class DataCollectionToolParameter( BaseDataToolParameter ):
     """
     """
@@ -2238,6 +2252,22 @@
     def validate( self, value, history=None ):
         return True # TODO
 
+    def to_dict( self, trans, view='collection', value_mapper=None, other_values=None ):
+        d = super( DataCollectionToolParameter, self ).to_dict( trans )
+        if other_values is None:
+            # No need to produce lists of datasets for history.
+            return d
+
+        dataset_matcher = DatasetMatcher( trans, self, None, other_values )
+        history = trans.history
+
+        for hdca in self.match_collections( trans, history, dataset_matcher ):
+            pass
+
+        for hdca in self.match_multirun_collections( trans, history, dataset_matcher ):
+            subcollection_type = self._history_query( trans ).collection_type_description.collection_type
+            pass
+
 
 class HiddenDataToolParameter( HiddenToolParameter, DataToolParameter ):
     """
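Again purely illustrative, and not the follow-up work the commit message hands off: one possible shape for the data the stubbed DataCollectionToolParameter.to_dict loops could collect for UI consumption. The helper name matching_collections_dict and the 'hdca' / 'map_over' keys are assumptions; only the methods and attributes introduced in the two changesets above are used.

from galaxy.tools.parameters.dataset_matcher import DatasetMatcher


def matching_collections_dict( param, trans, other_values ):
    # Gather the matches the stubbed to_dict loops iterate over, as plain dicts.
    history = trans.history
    dataset_matcher = DatasetMatcher( trans, param, None, other_values )
    options = { 'hdca': [], 'map_over': [] }

    # Collections usable directly as the parameter value.
    for hdca in param.match_collections( trans, history, dataset_matcher ):
        options['hdca'].append( {
            'id': trans.security.encode_id( hdca.id ),
            'hid': hdca.hid,
            'name': hdca.name,
        } )

    # Collections that could be mapped over this parameter (multirun).
    for hdca in param.match_multirun_collections( trans, history, dataset_matcher ):
        subcollection_type = param._history_query( trans ).collection_type_description.collection_type
        options['map_over'].append( {
            'id': trans.security.encode_id( hdca.id ),
            'hid': hdca.hid,
            'name': hdca.name,
            'map_over_type': subcollection_type,
        } )

    return options

The DataToolParameter side could be populated the same way from the match_datasets / match_collections loops shown in the hunk above.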
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.