details: http://www.bx.psu.edu/hg/galaxy/rev/f06777cbd5bb
changeset: 2509:f06777cbd5bb
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Thu Jul 30 11:05:03 2009 -0400
description:
Add a new config setting to universe_wsgi.ini: new_user_dataset_access_role_default_private.
When set to True, new users will have default dataset access permissions for histories set to their Private role. Default is False (original behavior); datasets are left as public.
Resolves ticket #111.
4 file(s) affected in this change:
lib/galaxy/config.py
lib/galaxy/security/__init__.py
lib/galaxy/web/controllers/user.py
universe_wsgi.ini.sample
diffs (58 lines):
diff -r e01bfc281e09 -r f06777cbd5bb lib/galaxy/config.py
--- a/lib/galaxy/config.py Tue Jul 28 14:16:19 2009 -0400
+++ b/lib/galaxy/config.py Thu Jul 30 11:05:03 2009 -0400
@@ -46,6 +46,7 @@
self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
+ self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
diff -r e01bfc281e09 -r f06777cbd5bb lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Tue Jul 28 14:16:19 2009 -0400
+++ b/lib/galaxy/security/__init__.py Thu Jul 30 11:05:03 2009 -0400
@@ -206,12 +206,16 @@
else:
return None
return role
- def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False ):
+ def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False, default_access_private = False ):
# bypass_manage_permission is used to change permissions of datasets in a userless history when logging in
if user is None:
return None
if not permissions:
- permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS : [ self.get_private_user_role( user, auto_create=True ) ] }
+ #default permissions
+ permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS : [ self.get_private_user_role( user, auto_create=True ) ] }
+ #new_user_dataset_access_role_default_private is set as True in config file
+ if default_access_private:
+ permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ]
# Delete all of the current default permissions for the user
for dup in user.default_permissions:
dup.delete()
diff -r e01bfc281e09 -r f06777cbd5bb lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Tue Jul 28 14:16:19 2009 -0400
+++ b/lib/galaxy/web/controllers/user.py Thu Jul 30 11:05:03 2009 -0400
@@ -157,7 +157,7 @@
user.flush()
trans.app.security_agent.create_private_user_role( user )
# We set default user permissions, before we log in and set the default history permissions
- trans.app.security_agent.user_set_default_permissions( user )
+ trans.app.security_agent.user_set_default_permissions( user, default_access_private = trans.app.config.new_user_dataset_access_role_default_private )
# The handle_user_login() method has a call to the history_set_default_permissions() method
# (needed when logging in with a history), user needs to have default permissions set before logging in
trans.handle_user_login( user )
diff -r e01bfc281e09 -r f06777cbd5bb universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample Tue Jul 28 14:16:19 2009 -0400
+++ b/universe_wsgi.ini.sample Thu Jul 30 11:05:03 2009 -0400
@@ -156,6 +156,9 @@
# Can an admin user delete user accounts?
#allow_user_deletion = False
+# Should default dataset access permissions be private for new users; default is False (datasets are public)
+new_user_dataset_access_role_default_private = False
+
# ---- Job Execution --------------------------------------------------------
# Number of concurrent jobs to run (local job runner)
Hi,
After Googling and browsing the tool config files for some time I
found the syntax for adding a link to the help section... To make a
future lookup a bit faster: could this please be added to http://g2.trac.bx.psu.edu/wiki/ToolConfigSyntax
on the Wiki?
Thanks,
Pi
-------------------------------------------------------------
Biomolecular Mass Spectrometry and Proteomics
Utrecht University
Visiting address:
H.R. Kruyt building room O607
Padualaan 8
3584 CH Utrecht
The Netherlands
Mail address:
P.O. box 80.082
3508 TB Utrecht
The Netherlands
phone: +31 (0)6-143 66 783
email: pieter.neerincx(a)gmail.com
skype: pieter.online
------------------------------------------------------------
details: http://www.bx.psu.edu/hg/galaxy/rev/ae750790079a
changeset: 2506:ae750790079a
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jul 27 11:33:12 2009 -0400
description:
Allow the upload of PDF files to libraries via the admin interface.
A more generic method of determining binary files' type and whether they are allowed to be uploaded via the various upload methods should be created.
1 file(s) affected in this change:
lib/galaxy/web/controllers/library_dataset.py
diffs (29 lines):
diff -r 5a92b5877cf7 -r ae750790079a lib/galaxy/web/controllers/library_dataset.py
--- a/lib/galaxy/web/controllers/library_dataset.py Mon Jul 27 10:46:25 2009 -0400
+++ b/lib/galaxy/web/controllers/library_dataset.py Mon Jul 27 11:33:12 2009 -0400
@@ -73,12 +73,19 @@
ext = name.split( "." )[1].strip().lower()
except:
ext = ''
- if not( ext == 'ab1' or ext == 'scf' ):
- raise BadFileException( "you attempted to upload an inappropriate file." )
- if ext == 'ab1' and file_format != 'ab1':
- raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
- elif ext == 'scf' and file_format != 'scf':
- raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
+ try:
+ is_pdf = open( temp_name ).read( len( '%PDF' ) ) == '%PDF'
+ except:
+ is_pdf = False #file failed to open or contents are smaller than pdf header
+ if is_pdf:
+ file_format = 'pdf' #allow the upload of PDFs to library via the admin interface.
+ else:
+ if not( ext == 'ab1' or ext == 'scf' ):
+ raise BadFileException( "you attempted to upload an inappropriate file." )
+ if ext == 'ab1' and file_format != 'ab1':
+ raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
+ elif ext == 'scf' and file_format != 'scf':
+ raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
data_type = 'binary'
if not data_type:
# We must have a text file
details: http://www.bx.psu.edu/hg/galaxy/rev/0f18a77ca03e
changeset: 2507:0f18a77ca03e
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jul 27 15:55:32 2009 -0400
description:
A new flag, force_history_refresh, has been added to Tool Configurations.
When set to "True", this flag will cause the entire history to reload when one of its outputs has finished running. Default is False.
This is useful, e.g., when a tool creates an indeterminate number of output datasets by dumping them into new_file_path.
Example:
<tool id="someTool" name="Some tool name" version="1.0.1" force_history_refresh="True">
...
</tool>
3 file(s) affected in this change:
lib/galaxy/tools/__init__.py
lib/galaxy/web/controllers/root.py
templates/root/history.mako
diffs (68 lines):
diff -r ae750790079a -r 0f18a77ca03e lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Mon Jul 27 11:33:12 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Mon Jul 27 15:55:32 2009 -0400
@@ -295,6 +295,8 @@
self.is_multi_byte = util.string_as_bool( root.get( "is_multi_byte", False ) )
# Type of tool
self.tool_type = root.get( "tool_type", None )
+ #Force history to fully refresh after job execution for this tool. Useful i.e. when an indeterminate number of outputs are created by a tool.
+ self.force_history_refresh = util.string_as_bool( root.get( 'force_history_refresh', 'False' ) )
# data_source tool
if self.tool_type == "data_source":
self.URL_method = root.get( "URL_method", "get" ) # get is the default
diff -r ae750790079a -r 0f18a77ca03e lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Mon Jul 27 11:33:12 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Mon Jul 27 15:55:32 2009 -0400
@@ -114,9 +114,16 @@
for id, state in zip( ids, states ):
data = self.app.model.HistoryDatasetAssociation.get( id )
if data.state != state:
+ job_hda = data
+ while job_hda.copied_from_history_dataset_association:
+ job_hda = job_hda.copied_from_history_dataset_association
+ force_history_refresh = False
+ if job_hda.creating_job_associations:
+ force_history_refresh = trans.app.toolbox.tools_by_id[ job_hda.creating_job_associations[ 0 ].job.tool_id ].force_history_refresh
rval[id] = {
"state": data.state,
- "html": unicode( trans.fill_template( "root/history_item.mako", data=data, hid=data.hid ), 'utf-8' )
+ "html": unicode( trans.fill_template( "root/history_item.mako", data=data, hid=data.hid ), 'utf-8' ),
+ "force_history_refresh": force_history_refresh
}
return rval
diff -r ae750790079a -r 0f18a77ca03e templates/root/history.mako
--- a/templates/root/history.mako Mon Jul 27 11:33:12 2009 -0400
+++ b/templates/root/history.mako Mon Jul 27 15:55:32 2009 -0400
@@ -186,6 +186,7 @@
// Build request data
var ids = []
var states = []
+ var force_history_refresh = false
$.each( tracked_datasets, function ( id, state ) {
ids.push( id );
states.push( state );
@@ -205,13 +206,21 @@
initShowHide();
// If new state was terminal, stop tracking
if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
+ if ( val.force_history_refresh ){
+ force_history_refresh = true;
+ }
delete tracked_datasets[ parseInt(id) ];
} else {
tracked_datasets[ parseInt(id) ] = val.state;
}
});
- // Keep going (if there are still any items to track)
- updater( tracked_datasets );
+ if ( force_history_refresh ) {
+ parent.frames.galaxy_history.location.reload();
+ }
+ else {
+ // Keep going (if there are still any items to track)
+ updater( tracked_datasets );
+ }
},
error: function() {
// Just retry, like the old method, should try to be smarter
details: http://www.bx.psu.edu/hg/galaxy/rev/5a92b5877cf7
changeset: 2505:5a92b5877cf7
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jul 27 10:46:25 2009 -0400
description:
Allow for multiple optional outputs to be specified in a tool's xml file by specifying 'filter' tags. The text contents of each filter tag are evaluated; if the result is False, the output will not be created. A NoneDataset object is provided in the param_dict for ease of constructing command lines.
This allows a variable number of output files to be created before the tool is run in cases when the number of outputs is static and can be determined from tool parameters.
Example of an output with filter:
<data format="txt" name="optional_output">
<filter>some_parameter_name == 'some_parameter_value'</filter>
</data>
The output dataset, optional_output, will only be created when the tool parameter 'some_parameter_name' is 'some_parameter_value'.
2 file(s) affected in this change:
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/__init__.py
diffs (192 lines):
diff -r 857d3a8ebd3d -r 5a92b5877cf7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Sun Jul 26 12:08:44 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Mon Jul 27 10:46:25 2009 -0400
@@ -235,12 +235,13 @@
(format, metadata_source, parent)
"""
def __init__( self, name, format=None, metadata_source=None,
- parent=None, label=None ):
+ parent=None, label=None, filters = None ):
self.name = name
self.format = format
self.metadata_source = metadata_source
self.parent = parent
self.label = label
+ self.filters = filters or []
# Tuple emulation
@@ -413,6 +414,7 @@
output.metadata_source = data_elem.get("metadata_source", "")
output.parent = data_elem.get("parent", None)
output.label = util.xml_text( data_elem, "label" )
+ output.filters = data_elem.findall( 'filter' )
self.outputs[ output.name ] = output
# Any extra generated config files for the tool
self.config_files = []
@@ -1294,6 +1296,10 @@
param_dict[name].files_path = os.path.abspath(os.path.join( job_working_directory, "dataset_%s_files" % (hda.dataset.id) ))
for child in hda.children:
param_dict[ "_CHILD___%s___%s" % ( name, child.designation ) ] = DatasetFilenameWrapper( child )
+ for out_name, output in self.outputs.iteritems():
+ if out_name not in param_dict and output.filters:
+ #assume the reason we lack this output is because a filter failed to pass; for tool writing convienence, provide a NoneDataset
+ param_dict[ out_name ] = NoneDataset( datatypes_registry = self.app.datatypes_registry, ext = output.format )
# We add access to app here, this allows access to app.config, etc
param_dict['__app__'] = RawObjectWrapper( self.app )
# More convienent access to app.config.new_file_path; we don't need to wrap a string
diff -r 857d3a8ebd3d -r 5a92b5877cf7 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py Sun Jul 26 12:08:44 2009 -0400
+++ b/lib/galaxy/tools/actions/__init__.py Mon Jul 27 10:46:25 2009 -0400
@@ -156,76 +156,83 @@
parent_to_child_pairs = []
child_dataset_names = set()
for name, output in tool.outputs.items():
- if output.parent:
- parent_to_child_pairs.append( ( output.parent, name ) )
- child_dataset_names.add( name )
- ## What is the following hack for? Need to document under what
- ## conditions can the following occur? (james(a)bx.psu.edu)
- # HACK: the output data has already been created
- # this happens i.e. as a result of the async controller
- if name in incoming:
- dataid = incoming[name]
- data = trans.app.model.HistoryDatasetAssociation.get( dataid )
- assert data != None
- out_data[name] = data
- else:
- # the type should match the input
- ext = output.format
- if ext == "input":
- ext = input_ext
- #process change_format tags
- if output.change_format:
- for change_elem in output.change_format:
- for when_elem in change_elem.findall( 'when' ):
- check = incoming.get( when_elem.get( 'input' ), None )
- if check is not None:
- if check == when_elem.get( 'value', None ):
- ext = when_elem.get( 'format', ext )
- else:
- check = when_elem.get( 'input_dataset', None )
+ for filter in output.filters:
+ try:
+ if not eval( filter.text, globals(), incoming ):
+ break #do not create this dataset
+ except Exception, e:
+ log.debug( 'Dataset output filter failed: %s' % e )
+ else: #all filters passed
+ if output.parent:
+ parent_to_child_pairs.append( ( output.parent, name ) )
+ child_dataset_names.add( name )
+ ## What is the following hack for? Need to document under what
+ ## conditions can the following occur? (james(a)bx.psu.edu)
+ # HACK: the output data has already been created
+ # this happens i.e. as a result of the async controller
+ if name in incoming:
+ dataid = incoming[name]
+ data = trans.app.model.HistoryDatasetAssociation.get( dataid )
+ assert data != None
+ out_data[name] = data
+ else:
+ # the type should match the input
+ ext = output.format
+ if ext == "input":
+ ext = input_ext
+ #process change_format tags
+ if output.change_format:
+ for change_elem in output.change_format:
+ for when_elem in change_elem.findall( 'when' ):
+ check = incoming.get( when_elem.get( 'input' ), None )
if check is not None:
- check = inp_data.get( check, None )
+ if check == when_elem.get( 'value', None ):
+ ext = when_elem.get( 'format', ext )
+ else:
+ check = when_elem.get( 'input_dataset', None )
if check is not None:
- if str( getattr( check, when_elem.get( 'attribute' ) ) ) == when_elem.get( 'value', None ):
- ext = when_elem.get( 'format', ext )
- data = trans.app.model.HistoryDatasetAssociation( extension=ext, create_dataset=True )
- # Commit the dataset immediately so it gets database assigned unique id
- data.flush()
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
- # Create an empty file immediately
- open( data.file_name, "w" ).close()
- # This may not be neccesary with the new parent/child associations
- data.designation = name
- # Copy metadata from one of the inputs if requested.
- if output.metadata_source:
- data.init_meta( copy_from=inp_data[output.metadata_source] )
- else:
- data.init_meta()
- # Take dbkey from LAST input
- data.dbkey = str(input_dbkey)
- # Set state
- # FIXME: shouldn't this be NEW until the job runner changes it?
- data.state = data.states.QUEUED
- data.blurb = "queued"
- # Set output label
- if output.label:
- params = make_dict_copy( incoming )
- # wrapping the params allows the tool config to contain things like
- # <outputs>
- # <data format="input" name="output" label="Blat on ${<input_param>.name}" />
- # </outputs>
- wrap_values( tool.inputs, params )
- params['tool'] = tool
- params['on_string'] = on_text
- data.name = fill_template( output.label, context=params )
- else:
- data.name = tool.name
- if on_text:
- data.name += ( " on " + on_text )
- # Store output
- out_data[ name ] = data
- # Store all changes to database
- trans.app.model.flush()
+ check = inp_data.get( check, None )
+ if check is not None:
+ if str( getattr( check, when_elem.get( 'attribute' ) ) ) == when_elem.get( 'value', None ):
+ ext = when_elem.get( 'format', ext )
+ data = trans.app.model.HistoryDatasetAssociation( extension=ext, create_dataset=True )
+ # Commit the dataset immediately so it gets database assigned unique id
+ data.flush()
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, output_permissions )
+ # Create an empty file immediately
+ open( data.file_name, "w" ).close()
+ # This may not be neccesary with the new parent/child associations
+ data.designation = name
+ # Copy metadata from one of the inputs if requested.
+ if output.metadata_source:
+ data.init_meta( copy_from=inp_data[output.metadata_source] )
+ else:
+ data.init_meta()
+ # Take dbkey from LAST input
+ data.dbkey = str(input_dbkey)
+ # Set state
+ # FIXME: shouldn't this be NEW until the job runner changes it?
+ data.state = data.states.QUEUED
+ data.blurb = "queued"
+ # Set output label
+ if output.label:
+ params = make_dict_copy( incoming )
+ # wrapping the params allows the tool config to contain things like
+ # <outputs>
+ # <data format="input" name="output" label="Blat on ${<input_param>.name}" />
+ # </outputs>
+ wrap_values( tool.inputs, params )
+ params['tool'] = tool
+ params['on_string'] = on_text
+ data.name = fill_template( output.label, context=params )
+ else:
+ data.name = tool.name
+ if on_text:
+ data.name += ( " on " + on_text )
+ # Store output
+ out_data[ name ] = data
+ # Store all changes to database
+ trans.app.model.flush()
# Add all the top-level (non-child) datasets to the history
for name in out_data.keys():
if name not in child_dataset_names and name not in incoming: #don't add children; or already existing datasets, i.e. async created