details:
http://www.bx.psu.edu/hg/galaxy/rev/07dffb2735dd
changeset: 2518:07dffb2735dd
user: Kelly Vincent <kpvincent@bx.psu.edu>
date: Fri Jul 31 12:01:02 2009 -0400
description:
merging heads
0 file(s) affected in this change:
diffs (528 lines):
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Fri Jul 31 12:01:02 2009 -0400
@@ -51,6 +51,8 @@
     copy_safe_peek = True
     is_binary = True #The dataset contains binary data --> do not space_to_tab or convert newlines, etc. Allow binary file uploads of this type when True.
+
+    allow_datatype_change = True #Allow user to change between this datatype and others. If False, this datatype cannot be changed from or into.
     #Composite datatypes
     composite_type = None
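
The new class attribute defaults to True on the base Data class, so every existing datatype stays changeable unless a subclass opts out. A minimal sketch of opting out (the subclass name and extension here are hypothetical; the genetics.py hunks below apply the same pattern to real composite datatypes):

    from galaxy.datatypes.data import Data

    class OpaqueBinary( Data ):
        # With allow_datatype_change = False this type can be neither the
        # source nor the target of a 'Change data type' request.
        file_ext = "opaque"
        is_binary = True
        allow_datatype_change = False
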
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Fri Jul 31 12:01:02 2009 -0400
@@ -122,6 +122,7 @@
     file_ext="html"
     composite_type = 'auto_primary_file'
+    allow_datatype_change = False

     def missing_meta( self, dataset ):
         """Checks for empty meta values"""
@@ -255,6 +256,8 @@
     file_ext = None
     is_binary = True
+
+    allow_datatype_change = False
     composite_type = 'basic'
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py Fri Jul 31 12:01:02 2009 -0400
@@ -0,0 +1,51 @@
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exceptions import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+
+HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "site", TrimmedString( 255 ) ) )
+
+def upgrade():
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_hdadaa_history_dataset_association_id", HistoryDatasetAssociationDisplayAtAuthorization_table.c.history_dataset_association_id )
+        try:
+            i.create()
+        except Exception, e:
+            log.debug( "Adding index 'ix_hdadaa_history_dataset_association_id' to table 'history_dataset_association_display_at_authorization' failed: %s" % str( e ) )
+
+def downgrade():
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_hdadaa_history_dataset_association_id", HistoryDatasetAssociationDisplayAtAuthorization_table.c.history_dataset_association_id )
+        try:
+            i.drop()
+        except Exception, e:
+            log.debug( "Removing index 'ix_hdadaa_history_dataset_association_id' from table 'history_dataset_association_display_at_authorization' failed: %s" % str( e ) )
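
The hand-picked short index name is the point of this migration: MySQL caps identifier names at 64 characters, so a convention-style name for this table's column would not fit (the auto_name below illustrates the ix_<table>_<column> convention and is an assumption, not a name generated anywhere in this changeset):

    auto_name = "ix_history_dataset_association_display_at_authorization_history_dataset_association_id"
    short_name = "ix_hdadaa_history_dataset_association_id"
    print len( auto_name ), len( short_name )   # 86 40 -- only the short name fits under MySQL's 64-character limit
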
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Fri Jul 31 12:01:02 2009 -0400
@@ -1094,7 +1094,7 @@
         replace_dataset = None
         # Let's not overwrite the imported datatypes module with the variable datatypes?
         # The built-in 'id' is overwritten in lots of places as well
-        ldatatypes = [ x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys() ]
+        ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
         ldatatypes.sort()
         if params.get( 'new_dataset_button', False ):
             upload_option = params.get( 'upload_option', 'upload_file' )
@@ -1247,17 +1247,20 @@
         elif action == 'edit_info':
             if params.get( 'change', False ):
                 # The user clicked the Save button on the 'Change data type' form
-                trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
-                trans.app.model.flush()
-                msg = "Data type changed for library dataset '%s'" % ldda.name
-                return trans.fill_template( "/admin/library/ldda_edit_info.mako",
-                                            ldda=ldda,
-                                            library_id=library_id,
-                                            datatypes=ldatatypes,
-                                            restrict=params.get( 'restrict', True ),
-                                            render_templates=params.get( 'render_templates', False ),
-                                            msg=msg,
-                                            messagetype=messagetype )
+                if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                    trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
+                    trans.app.model.flush()
+                    msg = "Data type changed for library dataset '%s'" % ldda.name
+                    return trans.fill_template( "/admin/library/ldda_edit_info.mako",
+                                                ldda=ldda,
+                                                library_id=library_id,
+                                                datatypes=ldatatypes,
+                                                restrict=params.get( 'restrict', True ),
+                                                render_templates=params.get( 'render_templates', False ),
+                                                msg=msg,
+                                                messagetype=messagetype )
+                else:
+                    return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( ldda.extension, params.datatype ) )
             elif params.get( 'save', False ):
                 # The user clicked the Save button on the 'Edit Attributes' form
                 old_name = ldda.name
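
The admin, library, and root controllers now apply the same two-sided test before honoring a 'Change data type' request: both the dataset's current datatype and the requested one must permit the change. A minimal sketch of the shared predicate (the helper name is hypothetical; the registry call is the one used above):

    # Both ends must opt in: a datatype with allow_datatype_change = False
    # may be neither the source nor the target of a change.
    def can_change_datatype( datatypes_registry, current_datatype, target_extension ):
        target_datatype = datatypes_registry.get_datatype_by_extension( target_extension )
        return current_datatype.allow_datatype_change and target_datatype.allow_datatype_change
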
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Fri Jul 31 12:01:02 2009 -0400
@@ -144,7 +144,7 @@
         redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
         if trans.app.security_agent.allow_action( None, data.permitted_actions.DATASET_ACCESS, dataset = data ):
             return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
-        if trans.app.security_agent.allow_action( trans.user, data.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset = data ):
+        if trans.app.security_agent.allow_action( trans.user, data.permitted_actions.DATASET_ACCESS, dataset = data ):
             trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
             return trans.response.send_redirect( redirect_url )
         else:
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Fri Jul 31 12:01:02 2009 -0400
@@ -166,7 +166,7 @@
                 default_permissions[ default_action ] = [ private_user_role ]
                 trans.app.security_agent.history_set_default_permissions( history, default_permissions )
                 n_undeleted += 1
-                trans.log_event( "History (%s) %d marked as undeleted" % history.name )
+                trans.log_event( "History (%s) %d marked as undeleted" % ( history.name, history.id ) )
         status = SUCCESS
         message_parts = []
         if n_undeleted:
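
The removed line was a latent crash: Python's % operator needs a tuple when the format string has more than one conversion specifier, so passing history.name alone raised a TypeError whenever a history was actually undeleted. For illustration (plain Python, not part of the diff):

    "History (%s) %d marked as undeleted" % "test"             # TypeError: not enough arguments for format string
    "History (%s) %d marked as undeleted" % ( "test", 42 )     # OK: 'History (test) 42 marked as undeleted'
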
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Fri Jul 31 12:01:02 2009 -0400
@@ -430,7 +430,7 @@
         replace_dataset = None
         # Let's not overwrite the imported datatypes module with the variable datatypes?
         # The built-in 'id' is overwritten in lots of places as well
-        ldatatypes = [ x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys() ]
+        ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
         ldatatypes.sort()
         if id:
             if params.get( 'permissions', False ):
@@ -505,10 +505,14 @@
                 if trans.app.security_agent.allow_action( trans.user,
                                                           trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
                                                           library_item=ldda ):
-                    trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
-                    trans.app.model.flush()
-                    msg = "Data type changed for library dataset '%s'" % ldda.name
-                    messagetype = 'done'
+                    if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                        trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
+                        trans.app.model.flush()
+                        msg = "Data type changed for library dataset '%s'" % ldda.name
+                        messagetype = 'done'
+                    else:
+                        msg = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( ldda.extension, params.datatype )
+                        messagetype = 'error'
                 else:
                     msg = "You are not authorized to change the data type of dataset '%s'" % ldda.name
                     messagetype = 'error'
diff -r 61dd78cafa09 -r 07dffb2735dd lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Fri Jul 31 11:48:37 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Fri Jul 31 12:01:02 2009 -0400
@@ -247,8 +247,11 @@
         params = util.Params( kwd, safe=False )
         if params.change:
             # The user clicked the Save button on the 'Change data type' form
-            trans.app.datatypes_registry.change_datatype( data, params.datatype )
-            trans.app.model.flush()
+            if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                trans.app.datatypes_registry.change_datatype( data, params.datatype )
+                trans.app.model.flush()
+            else:
+                return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
         elif params.save:
             # The user clicked the Save button on the 'Edit Attributes' form
             data.name = params.name
@@ -314,7 +317,7 @@
         data.metadata.dbkey = data.dbkey
         # let's not overwrite the imported datatypes module with the variable datatypes?
         # the built-in 'id' is overwritten in lots of places as well
-        ldatatypes = [x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys()]
+        ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
         ldatatypes.sort()
         trans.log_event( "Opened edit view on dataset %s" % str(id) )
         return trans.fill_template( "/dataset/edit_attributes.mako", data=data, datatypes=ldatatypes )
diff -r 61dd78cafa09 -r 07dffb2735dd scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Fri Jul 31 11:48:37 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Fri Jul 31 12:01:02 2009 -0400
@@ -24,6 +24,7 @@
     parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (60)", default=60 )
     parser.add_option( "-r", "--remove_from_disk", action="store_true", dest="remove_from_disk", help="remove datasets from disk when purged", default=False )
     parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+    parser.add_option( "-f", "--force_retry", action="store_true", dest="force_retry", help="performs the requested actions, but ignores whether it might have been done before. Useful when -r wasn't used, but should have been", default=False )
     parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
@@ -73,28 +74,32 @@
         print "# Datasets will NOT be removed from disk.\n"
     if options.delete_userless_histories:
-        delete_userless_histories( app, cutoff_time, info_only = options.info_only )
+        delete_userless_histories( app, cutoff_time, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_histories:
-        purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_datasets:
-        purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_libraries:
-        purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_folders:
-        purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     sys.exit(0)

-def delete_userless_histories( app, cutoff_time, info_only = False ):
+def delete_userless_histories( app, cutoff_time, info_only = False, force_retry = False ):
     # Deletes userless histories whose update_time value is older than the cutoff_time.
     # The purge history script will handle marking DatasetInstances as deleted.
     # Nothing is removed from disk yet.
     history_count = 0
     print '# The following datasets and associated userless histories have been deleted'
     start = time.clock()
-    histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
-                                                app.model.History.table.c.deleted==False,
-                                                app.model.History.table.c.update_time < cutoff_time ) ).all()# \
+    if force_retry:
+        histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+                                                    app.model.History.table.c.update_time < cutoff_time ) ).all()
+    else:
+        histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+                                                    app.model.History.table.c.deleted==False,
+                                                    app.model.History.table.c.update_time < cutoff_time ) ).all()
     for history in histories:
         if not info_only:
             history.deleted = True
@@ -106,7 +111,7 @@
     print "Elapsed time: ", stop - start, "\n"

-def purge_histories( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_histories( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
     # Purges deleted histories whose update_time is older than the cutoff_time.
     # The dataset associations of each history are also marked as deleted.
     # The Purge Dataset method will purge each Dataset as necessary
@@ -115,10 +120,15 @@
     history_count = 0
     print '# The following datasets and associated deleted histories have been purged'
     start = time.clock()
-    histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
-                                                app.model.History.table.c.purged==False,
-                                                app.model.History.table.c.update_time < cutoff_time ) ) \
-                                 .options( eagerload( 'datasets' ) ).all()
+    if force_retry:
+        histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+                                                    app.model.History.table.c.update_time < cutoff_time ) ) \
+                                     .options( eagerload( 'datasets' ) ).all()
+    else:
+        histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+                                                    app.model.History.table.c.purged==False,
+                                                    app.model.History.table.c.update_time < cutoff_time ) ) \
+                                     .options( eagerload( 'datasets' ) ).all()
     for history in histories:
         for dataset_assoc in history.datasets:
             _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
@@ -136,7 +146,7 @@
     print '# Purged %d histories.' % ( history_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"

-def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
     # Purges deleted libraries whose update_time is older than the cutoff_time.
     # The dataset associations of each library are also marked as deleted.
     # The Purge Dataset method will purge each Dataset as necessary
@@ -145,9 +155,13 @@
     library_count = 0
     print '# The following libraries and associated folders have been purged'
     start = time.clock()
-    libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
-                                                app.model.Library.table.c.purged==False,
-                                                app.model.Library.table.c.update_time < cutoff_time ) ).all()
+    if force_retry:
+        libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+                                                    app.model.Library.table.c.update_time < cutoff_time ) ).all()
+    else:
+        libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+                                                    app.model.Library.table.c.purged==False,
+                                                    app.model.Library.table.c.update_time < cutoff_time ) ).all()
     for library in libraries:
         _purge_folder( library.root_folder, app, remove_from_disk, info_only = info_only )
         if not info_only:
@@ -159,7 +173,7 @@
     print '# Purged %d libraries .' % ( library_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"

-def purge_folders( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_folders( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
     # Purges deleted folders whose update_time is older than the cutoff_time.
     # The dataset associations of each folder are also marked as deleted.
     # The Purge Dataset method will purge each Dataset as necessary
@@ -168,9 +182,13 @@
     folder_count = 0
     print '# The following folders have been purged'
     start = time.clock()
-    folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
-                                                    app.model.LibraryFolder.table.c.purged==False,
-                                                    app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+    if force_retry:
+        folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                                        app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+    else:
+        folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                                        app.model.LibraryFolder.table.c.purged==False,
+                                                        app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
     for folder in folders:
         _purge_folder( folder, app, remove_from_disk, info_only = info_only )
         print "%d" % folder.id
@@ -179,17 +197,22 @@
     print '# Purged %d folders.' % ( folder_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"

-def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False, repurge = False, force_retry = False ):
     # Purges deleted datasets whose update_time is older than cutoff_time. Files may or may
     # not be removed from disk.
     dataset_count = 0
     disk_space = 0
     print '# The following deleted datasets have been purged'
     start = time.clock()
-    datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
-                                               app.model.Dataset.table.c.purgable==True,
-                                               app.model.Dataset.table.c.purged==False,
-                                               app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
+    if force_retry:
+        datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+                                                   app.model.Dataset.table.c.purgable==True,
+                                                   app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
+    else:
+        datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+                                                   app.model.Dataset.table.c.purgable==True,
+                                                   app.model.Dataset.table.c.purged==False,
+                                                   app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
     for dataset in datasets:
         file_size = dataset.file_size
         _purge_dataset( dataset, remove_from_disk, info_only = info_only )
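
In each of the five query sites above, -f/--force_retry simply drops the purged==False term (or deleted==False for userless histories), so rows handled by an earlier run are selected again, which is useful when a previous run omitted -r and left files on disk. A sketch of the shared pattern (build_retry_filter is a hypothetical helper, not part of the script):

    from sqlalchemy import and_

    def build_retry_filter( table, cutoff_time, force_retry ):
        # Always require deleted rows older than the cutoff; only skip
        # rows already marked purged when we are not retrying.
        clauses = [ table.c.deleted == True, table.c.update_time < cutoff_time ]
        if not force_retry:
            clauses.append( table.c.purged == False )
        return and_( *clauses )
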
diff -r 61dd78cafa09 -r 07dffb2735dd templates/admin/library/ldda_edit_info.mako
--- a/templates/admin/library/ldda_edit_info.mako Fri Jul 31 11:48:37 2009 -0400
+++ b/templates/admin/library/ldda_edit_info.mako Fri Jul 31 12:01:02 2009 -0400
@@ -99,24 +99,30 @@
 <div class="toolForm">
     <div class="toolFormTitle">Change data type of ${ldda.name}</div>
     <div class="toolFormBody">
-        <form name="change_datatype" action="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
-            <input type="hidden" name="id" value="${ldda.id}"/>
+        %if ldda.datatype.allow_datatype_change:
+            <form name="change_datatype" action="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
+                <input type="hidden" name="id" value="${ldda.id}"/>
+                <div class="form-row">
+                    <label>New Type:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${datatype( ldda, datatypes )}
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This will change the datatype of the existing dataset
+                        but <i>not</i> modify its contents. Use this if Galaxy
+                        has incorrectly guessed the type of your dataset.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="change" value="Save"/>
+                </div>
+            </form>
+        %else:
             <div class="form-row">
-                <label>New Type:</label>
-                <div style="float: left; width: 250px; margin-right: 10px;">
-                    ${datatype( ldda, datatypes )}
-                </div>
-                <div class="toolParamHelp" style="clear: both;">
-                    This will change the datatype of the existing dataset
-                    but <i>not</i> modify its contents. Use this if Galaxy
-                    has incorrectly guessed the type of your dataset.
-                </div>
-                <div style="clear: both"></div>
+                <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
             </div>
-            <div class="form-row">
-                <input type="submit" name="change" value="Save"/>
-            </div>
-        </form>
+        %endif
     </div>
 </div>
diff -r 61dd78cafa09 -r 07dffb2735dd templates/dataset/edit_attributes.mako
--- a/templates/dataset/edit_attributes.mako Fri Jul 31 11:48:37 2009 -0400
+++ b/templates/dataset/edit_attributes.mako Fri Jul 31 12:01:02 2009 -0400
@@ -102,27 +102,34 @@
     </div>
     <p />
 %endif
+
 <div class="toolForm">
     <div class="toolFormTitle">${_('Change data type')}</div>
     <div class="toolFormBody">
-        <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
-            <input type="hidden" name="id" value="${data.id}"/>
+        %if data.datatype.allow_datatype_change:
+            <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
+                <input type="hidden" name="id" value="${data.id}"/>
+                <div class="form-row">
+                    <label>
+                        ${_('New Type')}:
+                    </label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${datatype( data, datatypes )}
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${_('This will change the datatype of the existing dataset but <i>not</i> modify its contents. Use this if Galaxy has incorrectly guessed the type of your dataset.')}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="change" value="${_('Save')}"/>
+                </div>
+            </form>
+        %else:
             <div class="form-row">
-                <label>
-                    ${_('New Type')}:
-                </label>
-                <div style="float: left; width: 250px; margin-right: 10px;">
-                    ${datatype( data, datatypes )}
-                </div>
-                <div class="toolParamHelp" style="clear: both;">
-                    ${_('This will change the datatype of the existing dataset but <i>not</i> modify its contents. Use this if Galaxy has incorrectly guessed the type of your dataset.')}
-                </div>
-                <div style="clear: both"></div>
+                <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
             </div>
-            <div class="form-row">
-                <input type="submit" name="change" value="${_('Save')}"/>
-            </div>
-        </form>
+        %endif
     </div>
 </div>
 <p />
diff -r 61dd78cafa09 -r 07dffb2735dd templates/library/ldda_edit_info.mako
--- a/templates/library/ldda_edit_info.mako Fri Jul 31 11:48:37 2009 -0400
+++ b/templates/library/ldda_edit_info.mako Fri Jul 31 12:01:02 2009 -0400
@@ -99,24 +99,30 @@
 <div class="toolForm">
     <div class="toolFormTitle">Change data type</div>
     <div class="toolFormBody">
-        <form name="change_datatype" action="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
-            <input type="hidden" name="id" value="${ldda.id}"/>
+        %if ldda.datatype.allow_datatype_change:
+            <form name="change_datatype" action="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
+                <input type="hidden" name="id" value="${ldda.id}"/>
+                <div class="form-row">
+                    <label>New Type:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${datatype( ldda, datatypes )}
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This will change the datatype of the existing dataset
+                        but <i>not</i> modify its contents. Use this if Galaxy
+                        has incorrectly guessed the type of your dataset.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="change" value="Save"/>
+                </div>
+            </form>
+        %else:
             <div class="form-row">
-                <label>New Type:</label>
-                <div style="float: left; width: 250px; margin-right: 10px;">
-                    ${datatype( ldda, datatypes )}
-                </div>
-                <div class="toolParamHelp" style="clear: both;">
-                    This will change the datatype of the existing dataset
-                    but <i>not</i> modify its contents. Use this if Galaxy
-                    has incorrectly guessed the type of your dataset.
-                </div>
-                <div style="clear: both"></div>
+                <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
             </div>
-            <div class="form-row">
-                <input type="submit" name="change" value="Save"/>
-            </div>
-        </form>
+        %endif
     </div>
 </div>
 <p/>