4 new commits in galaxy-central:

https://bitbucket.org/galaxy/galaxy-central/commits/6ef71c8b55a5/
Changeset:   6ef71c8b55a5
User:        jmchilton
Date:        2014-05-15 19:04:58
Summary:     Bugfix: Copying datasets to existing and new history at same time.
             Previously would give error about set not having an append method.
Affected #:  1 file

diff -r d2300bcd600861bd4b2e5b5ec2d0e0303f7da1f0 -r 6ef71c8b55a56be844a5a57aad9fe91a4222663e lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -1000,7 +1000,7 @@
         elif target_history_ids:
             if not isinstance( target_history_ids, list ):
                 target_history_ids = target_history_ids.split(",")
-            target_history_ids = set([ trans.security.decode_id(h) for h in target_history_ids if h ])
+            target_history_ids = list(set([ trans.security.decode_id(h) for h in target_history_ids if h ]))
         else:
             target_history_ids = []
         done_msg = error_msg = ""
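The fix above deduplicates the decoded history ids and then converts the set back into a list, because Python sets have no append method; copying to an existing history and a new history at the same time presumably appends the newly created history's id to target_history_ids later in the handler, which is what raised the error. A minimal sketch of the failure and the fix, using a hypothetical decode_id in place of Galaxy's trans.security helper:

    # Hypothetical stand-in for trans.security.decode_id (not Galaxy code).
    def decode_id(encoded):
        return int(encoded, 16)

    raw = "a1,b2,a1"                                   # duplicate ids are possible
    decoded = set(decode_id(h) for h in raw.split(",") if h)

    try:
        decoded.append(999)                            # roughly what the old code hit
    except AttributeError as err:
        print(err)                                     # 'set' object has no attribute 'append'

    # The committed fix: dedupe first, then go back to a list so that appending
    # (e.g. the id of a freshly created target history) keeps working.
    target_history_ids = list(decoded)
    target_history_ids.append(999)
    print(sorted(target_history_ids))                  # [161, 178, 999]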
https://bitbucket.org/galaxy/galaxy-central/commits/e1a1ffe4bfaf/
Changeset:   e1a1ffe4bfaf
User:        jmchilton
Date:        2014-05-15 19:04:58
Summary:     Allow 'Copy Datasets' page to copy collections as well.
Affected #:  2 files

diff -r 6ef71c8b55a56be844a5a57aad9fe91a4222663e -r e1a1ffe4bfaf06b1c0ab293d0057c6e793fee493 lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -981,7 +981,7 @@
         return trans.fill_template( "show_params.mako", inherit_chain=inherit_chain, history=trans.get_history(), hda=hda, job=job, tool=tool, params_objects=params_objects, upgrade_messages=upgrade_messages, has_parameter_errors=has_parameter_errors )

     @web.expose
-    def copy_datasets( self, trans, source_history=None, source_dataset_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
+    def copy_datasets( self, trans, source_history=None, source_content_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
         user = trans.get_user()
         if source_history is not None:
             history = self.get_history(trans, source_history)
@@ -989,12 +989,16 @@
         else:
             history = current_history = trans.get_history()
         refresh_frames = []
-        if source_dataset_ids:
-            if not isinstance( source_dataset_ids, list ):
-                source_dataset_ids = source_dataset_ids.split(",")
-            source_dataset_ids = set(map( trans.security.decode_id, source_dataset_ids ))
+        if source_content_ids:
+            if not isinstance( source_content_ids, list ):
+                source_content_ids = source_content_ids.split(",")
+            encoded_dataset_collection_ids = [ s[ len("dataset_collection|"): ] for s in source_content_ids if s.startswith("dataset_collection|") ]
+            encoded_dataset_ids = [ s[ len("dataset|"): ] for s in source_content_ids if s.startswith("dataset|") ]
+            decoded_dataset_collection_ids = set(map( trans.security.decode_id, encoded_dataset_collection_ids ))
+            decoded_dataset_ids = set(map( trans.security.decode_id, encoded_dataset_ids ))
         else:
-            source_dataset_ids = []
+            decoded_dataset_collection_ids = []
+            decoded_dataset_ids = []
         if target_history_id:
             target_history_ids = [ trans.security.decode_id(target_history_id) ]
         elif target_history_ids:
@@ -1006,8 +1010,8 @@
         done_msg = error_msg = ""
         new_history = None
         if do_copy:
-            invalid_datasets = 0
-            if not source_dataset_ids or not ( target_history_ids or new_history_name ):
+            invalid_contents = 0
+            if not ( decoded_dataset_ids or decoded_dataset_collection_ids ) or not ( target_history_ids or new_history_name ):
                 error_msg = "You must provide both source datasets and target histories. "
             else:
                 if new_history_name:
@@ -1023,18 +1027,22 @@
                     target_histories = [ history ]
                 if len( target_histories ) != len( target_history_ids ):
                     error_msg = error_msg + "You do not have permission to add datasets to %i requested histories. " % ( len( target_history_ids ) - len( target_histories ) )
-                source_hdas = map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, source_dataset_ids )
-                source_hdas.sort(key=lambda hda: hda.hid)
-                for hda in source_hdas:
-                    if hda is None:
+                source_contents = map( trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get, decoded_dataset_ids )
+                source_contents.extend( map( trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get, decoded_dataset_collection_ids ) )
+                source_contents.sort(key=lambda content: content.hid)
+                for content in source_contents:
+                    if content is None:
                         error_msg = error_msg + "You tried to copy a dataset that does not exist. "
-                        invalid_datasets += 1
-                    elif hda.history != history:
+                        invalid_contents += 1
+                    elif content.history != history:
                         error_msg = error_msg + "You tried to copy a dataset which is not in your current history. "
-                        invalid_datasets += 1
+                        invalid_contents += 1
                     else:
                         for hist in target_histories:
-                            hist.add_dataset( hda.copy( copy_children = True ) )
+                            if content.history_content_type == "dataset":
+                                hist.add_dataset( content.copy( copy_children=True ) )
+                            else:
+                                hist.add_dataset_collection( content.copy( ) )
                 if current_history in target_histories:
                     refresh_frames = ['history']
                 trans.sa_session.flush()
@@ -1042,21 +1050,21 @@
                                    ( url_for( controller="history", action="switch_to_history", \
                                               hist_id=trans.security.encode_id( hist.id ) ), hist.name ) \
                                    for hist in target_histories ] )
-                num_source = len( source_dataset_ids ) - invalid_datasets
+                num_source = len( source_content_ids ) - invalid_contents
                 num_target = len(target_histories)
                 done_msg = "%i %s copied to %i %s: %s." % (num_source, inflector.cond_plural(num_source, "dataset"), num_target, inflector.cond_plural(num_target, "history"), hist_names_str )
         trans.sa_session.refresh( history )
-        source_datasets = history.visible_datasets
+        source_contents = history.active_contents
         target_histories = [history]
         if user:
             target_histories = user.active_histories
         return trans.fill_template( "/dataset/copy_view.mako",
                                     source_history = history,
                                     current_history = current_history,
-                                    source_dataset_ids = source_dataset_ids,
+                                    source_content_ids = source_content_ids,
                                     target_history_id = target_history_id,
                                     target_history_ids = target_history_ids,
-                                    source_datasets = source_datasets,
+                                    source_contents = source_contents,
                                     target_histories = target_histories,
                                     new_history_name = new_history_name,
                                     done_msg = done_msg,

diff -r 6ef71c8b55a56be844a5a57aad9fe91a4222663e -r e1a1ffe4bfaf06b1c0ab293d0057c6e793fee493 templates/webapps/galaxy/dataset/copy_view.mako
--- a/templates/webapps/galaxy/dataset/copy_view.mako
+++ b/templates/webapps/galaxy/dataset/copy_view.mako
@@ -59,17 +59,18 @@
         </select>
     </div>
     <div class="toolFormBody">
-        %if len(source_datasets) > 0:
-            %for data in source_datasets:
+        %if source_contents:
+            %for data in source_contents:
                 <%
                     checked = ""
                     encoded_id = trans.security.encode_id(data.id)
-                    if data.id in source_dataset_ids:
+                    input_id = "%s|%s" % ( data.history_content_type, encoded_id )
+                    if input_id in source_content_ids:
                         checked = " checked='checked'"
                 %>
                 <div class="form-row">
-                    <input type="checkbox" name="source_dataset_ids" id="dataset_${encoded_id}" value="${encoded_id}"${checked}/>
-                    <label for="dataset_${encoded_id}" style="display: inline;font-weight:normal;"> ${data.hid}: ${h.to_unicode(data.name)}</label>
+                    <input type="checkbox" name="source_content_ids" id="${input_id}" value="${input_id}"${checked}/>
+                    <label for="${input_id}" style="display: inline;font-weight:normal;"> ${data.hid}: ${h.to_unicode(data.name)}</label>
                 </div>
             %endfor
         %else:
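This change folds datasets and dataset collections into a single source_content_ids parameter by prefixing each encoded id with its history_content_type ("dataset|..." or "dataset_collection|..."), and the copy loop then dispatches to add_dataset or add_dataset_collection accordingly; the checkbox value built in copy_view.mako carries the same prefix. A minimal sketch of the id splitting, with a hypothetical decode_id and helper name standing in for Galaxy's real code:

    # Hypothetical stand-in for trans.security.decode_id (not Galaxy code).
    def decode_id(encoded):
        return int(encoded)

    def split_content_ids(source_content_ids):
        """Split 'dataset|<id>' / 'dataset_collection|<id>' values into two id sets."""
        if not isinstance(source_content_ids, list):
            source_content_ids = source_content_ids.split(",")
        collection_ids = set(decode_id(s[len("dataset_collection|"):])
                             for s in source_content_ids if s.startswith("dataset_collection|"))
        dataset_ids = set(decode_id(s[len("dataset|"):])
                          for s in source_content_ids if s.startswith("dataset|"))
        return dataset_ids, collection_ids

    dataset_ids, collection_ids = split_content_ids("dataset|3,dataset_collection|7,dataset|5")
    print(sorted(dataset_ids), sorted(collection_ids))   # [3, 5] [7]

Note that "dataset_collection|..." values never match the "dataset|" prefix (the strings diverge right after "dataset"), so the two filters cannot overlap.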
https://bitbucket.org/galaxy/galaxy-central/commits/1f35c462c606/
Changeset:   1f35c462c606
User:        jmchilton
Date:        2014-05-15 19:04:58
Summary:     PEP-8 fixes for DatasetInterface.copy_datasets.
Affected #:  1 file

diff -r e1a1ffe4bfaf06b1c0ab293d0057c6e793fee493 -r 1f35c462c606dca541e5f5f16d2c3f4e33334cfe lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -1047,9 +1047,9 @@
                     refresh_frames = ['history']
                 trans.sa_session.flush()
                 hist_names_str = ", ".join( ['<a href="%s" target="_top">%s</a>' %
-                                   ( url_for( controller="history", action="switch_to_history", \
-                                              hist_id=trans.security.encode_id( hist.id ) ), hist.name ) \
-                                   for hist in target_histories ] )
+                                   ( url_for( controller="history", action="switch_to_history",
+                                              hist_id=trans.security.encode_id( hist.id ) ), hist.name )
+                                   for hist in target_histories ] )
                 num_source = len( source_content_ids ) - invalid_contents
                 num_target = len(target_histories)
                 done_msg = "%i %s copied to %i %s: %s." % (num_source, inflector.cond_plural(num_source, "dataset"), num_target, inflector.cond_plural(num_target, "history"), hist_names_str )
@@ -1057,19 +1057,19 @@
         source_contents = history.active_contents
         target_histories = [history]
         if user:
-            target_histories = user.active_histories
+            target_histories = user.active_histories
         return trans.fill_template( "/dataset/copy_view.mako",
-                                    source_history = history,
-                                    current_history = current_history,
-                                    source_content_ids = source_content_ids,
-                                    target_history_id = target_history_id,
-                                    target_history_ids = target_history_ids,
-                                    source_contents = source_contents,
-                                    target_histories = target_histories,
-                                    new_history_name = new_history_name,
-                                    done_msg = done_msg,
-                                    error_msg = error_msg,
-                                    refresh_frames = refresh_frames )
+                                    source_history=history,
+                                    current_history=current_history,
+                                    source_content_ids=source_content_ids,
+                                    target_history_id=target_history_id,
+                                    target_history_ids=target_history_ids,
+                                    source_contents=source_contents,
+                                    target_histories=target_histories,
+                                    new_history_name=new_history_name,
+                                    done_msg=done_msg,
+                                    error_msg=error_msg,
+                                    refresh_frames=refresh_frames )

     def _copy_datasets( self, trans, dataset_ids, target_histories, imported=False ):
         """ Helper method for copying datasets. """

https://bitbucket.org/galaxy/galaxy-central/commits/b5a1cd130f71/
Changeset:   b5a1cd130f71
User:        jmchilton
Date:        2014-05-15 19:04:58
Summary:     Order history.{active,visible}_dataset_collections by hid instead of id.
Affected #:  1 file

diff -r 1f35c462c606dca541e5f5f16d2c3f4e33334cfe -r b5a1cd130f71b20a25f1bf110b0e1e24843df4b7 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1461,9 +1461,9 @@
        model.HistoryDatasetCollectionAssociation,
        primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.history_id ) == model.History.table.c.id ) & not_( model.HistoryDatasetCollectionAssociation.table.c.deleted ),
-       order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.id ),
+       order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
        viewonly=True,
-    ), # TODO:orderbyhid
+    ),
     visible_datasets=relation( model.HistoryDatasetAssociation,
        primaryjoin=( ( model.HistoryDatasetAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetAssociation.table.c.deleted ) & model.HistoryDatasetAssociation.table.c.visible ),
@@ -1473,7 +1473,7 @@
     visible_dataset_collections=relation( model.HistoryDatasetCollectionAssociation,
        primaryjoin=( ( model.HistoryDatasetCollectionAssociation.table.c.history_id == model.History.table.c.id ) & not_( model.HistoryDatasetCollectionAssociation.table.c.deleted ) & model.HistoryDatasetCollectionAssociation.table.c.visible ),
-       order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.id ),
+       order_by=asc( model.HistoryDatasetCollectionAssociation.table.c.hid ),
        viewonly=True,
     ),
     tags=relation( model.HistoryTagAssociation, order_by=model.HistoryTagAssociation.table.c.id, backref="histories" ),
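Ordering the collection relations by hid rather than by primary-key id makes dataset collections come back in the order they occupy in the history, which is what the history panel and the copy page display. A minimal SQLAlchemy sketch of the same order_by idea, with hypothetical models standing in for Galaxy's History / HistoryDatasetCollectionAssociation mappings:

    # Hypothetical models illustrating order_by hid vs. id; not Galaxy's mapping.py.
    from sqlalchemy import Column, ForeignKey, Integer, asc, create_engine
    from sqlalchemy.orm import Session, declarative_base, relationship

    Base = declarative_base()

    class HistoryDatasetCollectionAssociation(Base):
        __tablename__ = "history_dataset_collection_association"
        id = Column(Integer, primary_key=True)
        history_id = Column(Integer, ForeignKey("history.id"))
        hid = Column(Integer)

    class History(Base):
        __tablename__ = "history"
        id = Column(Integer, primary_key=True)
        # Sort by hid (position in the history) instead of id (insertion order).
        active_dataset_collections = relationship(
            HistoryDatasetCollectionAssociation,
            order_by=asc(HistoryDatasetCollectionAssociation.hid),
            viewonly=True,
        )

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(History(id=1))
        # Rows inserted out of hid order; the relation still yields hid order.
        session.add_all([
            HistoryDatasetCollectionAssociation(id=1, history_id=1, hid=5),
            HistoryDatasetCollectionAssociation(id=2, history_id=1, hid=2),
        ])
        session.commit()
        print([c.hid for c in session.get(History, 1).active_dataset_collections])  # [2, 5]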
Repository URL: https://bitbucket.org/galaxy/galaxy-central/

--

This is a commit notification from bitbucket.org. You are receiving this
because you have the service enabled, addressing the recipient of this email.