1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/a0fc8c81ff09/
changeset: a0fc8c81ff09
user: dannon
date: 2011-10-14 20:21:07
summary: Workflows: Fix for 5cfec3f4f17c to accommodate workflows with 'input dataset' steps.
affected #: 1 file (-1 bytes)
--- a/lib/galaxy/web/controllers/workflow.py Fri Oct 14 12:14:54 2011 -0400
+++ b/lib/galaxy/web/controllers/workflow.py Fri Oct 14 14:21:07 2011 -0400
@@ -1764,7 +1764,7 @@
# FIXME: Position should be handled inside module
step.position = step_dict['position']
module = module_factory.from_dict( trans, step_dict, secure=False )
- if module.tool is None:
+ if module.type == 'tool' and module.tool is None:
# A required tool is not available in the local Galaxy instance.
missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
if missing_tool_tup not in missing_tool_tups:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/5cfec3f4f17c/
changeset: 5cfec3f4f17c
user: greg
date: 2011-10-14 18:14:54
summary: Add the ability to upload exported Galaxy workflow files. Make a first pass at fixing bugs when importing a workflow into a Galaxy instance that does not have all of the required tools. This code is still full of bugs.
affected #: 8 files (-1 bytes)
--- a/lib/galaxy/web/base/controller.py Fri Oct 14 09:58:59 2011 -0400
+++ b/lib/galaxy/web/base/controller.py Fri Oct 14 12:14:54 2011 -0400
@@ -1299,9 +1299,8 @@
@web.require_admin
def index( self, trans, **kwd ):
webapp = kwd.get( 'webapp', 'galaxy' )
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if webapp == 'galaxy':
cloned_repositories = trans.sa_session.query( trans.model.ToolShedRepository ) \
.filter( trans.model.ToolShedRepository.deleted == False ) \
@@ -1320,10 +1319,16 @@
@web.require_admin
def center( self, trans, **kwd ):
webapp = kwd.get( 'webapp', 'galaxy' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
if webapp == 'galaxy':
- return trans.fill_template( '/webapps/galaxy/admin/center.mako' )
+ return trans.fill_template( '/webapps/galaxy/admin/center.mako',
+ message=message,
+ status=status )
else:
- return trans.fill_template( '/webapps/community/admin/center.mako' )
+ return trans.fill_template( '/webapps/community/admin/center.mako',
+ message=message,
+ status=status )
@web.expose
@web.require_admin
def reload_tool( self, trans, **kwd ):
--- a/lib/galaxy/web/controllers/workflow.py Fri Oct 14 09:58:59 2011 -0400
+++ b/lib/galaxy/web/controllers/workflow.py Fri Oct 14 12:14:54 2011 -0400
@@ -792,10 +792,10 @@
'data_outputs': [],
'form_html': invalid_tool_form_html,
'annotation' : annotation_str,
+ 'input_connections' : {},
'post_job_actions' : {},
'workflow_outputs' : []
}
- step_dict['input_connections'] = input_conn_dict
# Position
step_dict['position'] = step.position
# Add to return value
@@ -958,19 +958,13 @@
"""
stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
return trans.fill_template( "/workflow/export.mako", item=stored, use_panels=True )
-
-
@web.expose
@web.require_login( "use workflows" )
def import_from_myexp( self, trans, myexp_id, myexp_username=None, myexp_password=None ):
"""
Imports a workflow from the myExperiment website.
"""
-
- #
# Get workflow XML.
- #
-
# Get workflow content.
conn = httplib.HTTPConnection( self.__myexp_url )
# NOTE: blocks web thread.
@@ -985,17 +979,16 @@
parser = SingleTagContentsParser( "content" )
parser.feed( workflow_xml )
workflow_content = base64.b64decode( parser.tag_content )
-
- #
# Process workflow XML and create workflow.
- #
parser = SingleTagContentsParser( "galaxy_json" )
parser.feed( workflow_content )
workflow_dict = from_json_string( parser.tag_content )
-
# Create workflow.
- workflow = self._workflow_from_dict( trans, workflow_dict, source="myExperiment" ).latest_workflow
-
+ workflow, missing_tool_tups = self._workflow_from_dict( trans, workflow_dict, source="myExperiment" ).latest_workflow
+ if missing_tool_tups:
+ # TODO: handle the case where the imported workflow requires tools that are not available in
+ # the local Galaxy instance.
+ pass
# Provide user feedback.
if workflow.has_errors:
return trans.show_warn_message( "Imported, but some steps in this workflow have validation errors" )
@@ -1003,7 +996,6 @@
return trans.show_warn_message( "Imported, but this workflow contains cycles" )
else:
return trans.show_message( "Workflow '%s' imported" % workflow.name )
-
@web.expose
@web.require_login( "use workflows" )
def export_to_myexp( self, trans, id, myexp_username, myexp_password ):
@@ -1102,40 +1094,91 @@
return stored_dict
@web.expose
- def import_workflow( self, trans, workflow_text=None, url=None ):
- if workflow_text is None and url is None:
- return form( url_for(), "Import Workflow", submit_text="Import", use_panels=True ) \
- .add_text( "url", "Workflow URL", "" ) \
- .add_input( "textarea", "Encoded workflow (as generated by export workflow)", "workflow_text", "" )
- if url:
- # Load workflow from external URL
- # NOTE: blocks the web thread.
- try:
- workflow_data = urllib2.urlopen( url ).read()
- except Exception, e:
- return trans.show_error_message( "Failed to open URL %s<br><br>Message: %s" % ( url, str( e ) ) )
- else:
- workflow_data = workflow_text
- # Convert incoming workflow data from json
- try:
- data = simplejson.loads( workflow_data )
- except Exception, e:
- return trans.show_error_message( "Data at '%s' does not appear to be a Galaxy workflow<br><br>Message: %s" % ( url, str( e ) ) )
-
- # Create workflow.
- workflow = self._workflow_from_dict( trans, data, source="uploaded file" ).latest_workflow
-
- # Provide user feedback and show workflow list.
- if workflow.has_errors:
- trans.set_message( "Imported, but some steps in this workflow have validation errors",
- type="warning" )
- if workflow.has_cycles:
- trans.set_message( "Imported, but this workflow contains cycles",
- type="warning" )
- else:
- trans.set_message( "Workflow '%s' imported" % workflow.name )
- return self.list( trans )
-
+ def import_workflow( self, trans, **kwd ):
+ url = kwd.get( 'url', '' )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ if kwd.get( 'import_button', False ):
+ workflow_data = None
+ if url:
+ # Load workflow from external URL
+ # NOTE: blocks the web thread.
+ try:
+ workflow_data = urllib2.urlopen( url ).read()
+ except Exception, e:
+ message = "Failed to open URL: <b>%s</b><br>Exception: %s" % ( url, str( e ) )
+ status = 'error'
+ else:
+ # Load workflow from browsed file.
+ file_data = kwd.get( 'file_data', '' )
+ if file_data in ( '', None ):
+ message = 'No exported Galaxy workflow files were selected.'
+ status = 'error'
+ else:
+ uploaded_file = file_data.file
+ uploaded_file_name = uploaded_file.name
+ uploaded_file_filename = file_data.filename
+ if os.path.getsize( os.path.abspath( uploaded_file_name ) ) > 0:
+ # We're reading the file as text so we can re-use the existing code below.
+ # This may not be ideal...
+ workflow_data = uploaded_file.read()
+ else:
+ message= 'You attempted to upload an empty file.'
+ status = 'error'
+ if workflow_data:
+ # Convert incoming workflow data from json
+ try:
+ data = simplejson.loads( workflow_data )
+ except Exception, e:
+ data = None
+ message = "The data content does not appear to be a Galaxy workflow.<br/>Exception: %s" % str( e )
+ status = 'error'
+ if data:
+ # Create workflow if possible. If a required tool is not available in the local
+ # Galaxy instance, the tool information will be available in the step_dict.
+ workflow, missing_tool_tups = self._workflow_from_dict( trans, data, source="uploaded file" )
+ workflow = workflow.latest_workflow
+ # Provide user feedback and show workflow list.
+ if workflow.has_errors:
+ message += "Imported, but some steps in this workflow have validation errors. "
+ status = "error"
+ if workflow.has_cycles:
+ message += "Imported, but this workflow contains cycles. "
+ status = "error"
+ else:
+ message += "Workflow '%s' imported successfully. " % workflow.name
+ if missing_tool_tups:
+ if trans.user_is_admin():
+ # A required tool is not available in the local Galaxy instance.
+ # TODO: It would sure be nice to be able to redirect to a mako template here that displays a nice
+ # page including the links to the configured tool shed instead of this stupid message, but trying
+ # to get the panels back is a nightmare since workflow eliminates the Galaxy panels. Someone
+ # involved in workflow development needs to figure out what it will take to get the Galaxy panels back...
+ galaxy_url = trans.request.host
+ message += "The workflow requires the following tools that are not available in this Galaxy instance."
+ message += "You can likely install the required tools from one of the Galaxy tool sheds listed below.<br/><br/>"
+ for missing_tool_tup in missing_tool_tups:
+ missing_tool_id = missing_tool_tup[0]
+ for tool_shed_name, tool_shed_url in trans.app.tool_shed_registry.tool_sheds.items():
+ if tool_shed_url.endswith( '/' ):
+ tool_shed_url = tool_shed_url.rstrip( '/' )
+ url = '%s/repository/find_tools?tool_id=%s&galaxy_url=%s&webapp=community' % ( tool_shed_url, missing_tool_id, galaxy_url )
+ message += '<a href="%s">%s</a><br/>' % ( url, tool_shed_name )
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='index',
+ webapp='galaxy',
+ message=message,
+ status=status ) )
+ else:
+ # TODO: Figure out what to do here...
+ pass
+ return self.list( trans )
+ return trans.fill_template( "workflow/import.mako",
+ url=url,
+ message=message,
+ status=status,
+ use_panels=True )
@web.json
def get_datatypes( self, trans ):
ext_to_class_name = dict()
@@ -1605,8 +1648,7 @@
step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step )
annotation_str = ""
if step_annotation:
- annotation_str = step_annotation.annotation
-
+ annotation_str = step_annotation.annotation
# Step info
step_dict = {
'id': step.order_index,
@@ -1620,7 +1662,6 @@
## 'data_outputs': module.get_data_outputs(),
'annotation' : annotation_str
}
-
# Add post-job actions to step dict.
if module.type == 'tool':
pja_dict = {}
@@ -1629,7 +1670,6 @@
output_name = pja.output_name,
action_arguments = pja.action_arguments )
step_dict[ 'post_job_actions' ] = pja_dict
-
# Data inputs
step_dict['inputs'] = []
if module.type == "data_input":
@@ -1647,7 +1687,6 @@
for partname, partval in val.items():
if type( partval ) == RuntimeValue:
step_dict['inputs'].append( { "name" : name, "description" : "runtime parameter for tool %s" % module.get_name() } )
-
# User outputs
step_dict['user_outputs'] = []
"""
@@ -1668,7 +1707,6 @@
if type( module ) is ToolModule:
for output in module.get_data_outputs():
step_dict['outputs'].append( { 'name' : output['name'], 'type' : output['extensions'][0] } )
-
# Connections
input_connections = step.input_connections
if step.type is None or step.type == 'tool':
@@ -1692,7 +1730,6 @@
# Add to return value
data['steps'][step.order_index] = step_dict
return data
-
def _workflow_from_dict( self, trans, data, source=None ):
"""
Creates a workflow from a dict. Created workflow is stored in the database and returned.
@@ -1714,8 +1751,12 @@
# The editor will provide ids for each step that we don't need to save,
# but do need to use to make connections
steps_by_external_id = {}
+ # Keep track of tools required by the workflow that are not available in
+ # the local Galaxy instance. Each tuple in the list of missing_tool_tups
+ # will be ( tool_id, tool_name, tool_version ).
+ missing_tool_tups = []
# First pass to build step objects and populate basic values
- for key, step_dict in data['steps'].iteritems():
+ for key, step_dict in data[ 'steps' ].iteritems():
# Create the model class for the step
step = model.WorkflowStep()
steps.append( step )
@@ -1723,6 +1764,11 @@
# FIXME: Position should be handled inside module
step.position = step_dict['position']
module = module_factory.from_dict( trans, step_dict, secure=False )
+ if module.tool is None:
+ # A required tool is not available in the local Galaxy instance.
+ missing_tool_tup = ( step_dict[ 'tool_id' ], step_dict[ 'name' ], step_dict[ 'tool_version' ] )
+ if missing_tool_tup not in missing_tool_tups:
+ missing_tool_tups.append( missing_tool_tup )
module.save_to_step( step )
if step.tool_errors:
workflow.has_errors = True
@@ -1761,7 +1807,7 @@
# Persist
trans.sa_session.add( stored )
trans.sa_session.flush()
- return stored
+ return stored, missing_tool_tups
## ---- Utility methods -------------------------------------------------------
--- a/lib/galaxy/webapps/community/controllers/repository.py Fri Oct 14 09:58:59 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Fri Oct 14 12:14:54 2011 -0400
@@ -251,8 +251,7 @@
RevisionColumn( "Revision" ),
UserColumn( "Owner",
model_class=model.User,
- attach_popup=False,
- key="User.username" )
+ attach_popup=False )
]
operations = []
standard_filters = []
--- a/lib/galaxy/workflow/modules.py Fri Oct 14 09:58:59 2011 -0400
+++ b/lib/galaxy/workflow/modules.py Fri Oct 14 12:14:54 2011 -0400
@@ -8,6 +8,9 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.jobs.actions.post import ActionBox
from galaxy.model import PostJobAction
+import logging
+
+log = logging.getLogger( __name__ )
class WorkflowModule( object ):
@@ -163,29 +166,46 @@
def __init__( self, trans, tool_id ):
self.trans = trans
self.tool_id = tool_id
- self.tool = trans.app.toolbox.tools_by_id[ tool_id ]
+ try:
+ self.tool = trans.app.toolbox.tools_by_id[ tool_id ]
+ except KeyError, e:
+ # Handle the case where the workflow requires a tool not available in the local Galaxy instance.
+ self.tool = None
+ # TODO: Instead of parsing the guid, get the tool_id and version from the shed_tool_conf.xml,
+ # which requires enhancements to the tool loading process.
+ for available_tool_id, available_tool in trans.app.toolbox.tools_by_id.items():
+ if available_tool_id.find( tool_id ) >=0:
+ # We're attempting to match tool id against a tool guid.
+ # TODO: match by tool_id (and version if we attempt that, but
+ # workflows will break) is not good enough because
+ # 2 tools installed from a tool shed could both match this. We
+ # need to present a select list here.
+ self.tool = available_tool
+ break
self.post_job_actions = {}
self.workflow_outputs = []
self.state = None
- self.errors = None
-
+ if self.tool:
+ self.errors = None
+ else:
+ self.errors = {}
+ self.errors[ tool_id ] = 'Tool unavailable'
@classmethod
def new( Class, trans, tool_id=None ):
module = Class( trans, tool_id )
module.state = module.tool.new_state( trans, all_pages=True )
return module
-
@classmethod
def from_dict( Class, trans, d, secure=True ):
- tool_id = d['tool_id']
+ tool_id = d[ 'tool_id' ]
module = Class( trans, tool_id )
module.state = DefaultToolState()
- module.state.decode( d["tool_state"], module.tool, module.trans.app, secure=secure )
+ if module.tool is not None:
+ module.state.decode( d[ "tool_state" ], module.tool, module.trans.app, secure=secure )
module.errors = d.get( "tool_errors", None )
- module.post_job_actions = d.get("post_job_actions", {})
- module.workflow_outputs = d.get("workflow_outputs", [])
+ module.post_job_actions = d.get( "post_job_actions", {} )
+ module.workflow_outputs = d.get( "workflow_outputs", [] )
return module
-
@classmethod
def from_workflow_step( Class, trans, step ):
tool_id = step.tool_id
@@ -203,12 +223,15 @@
pjadict[pja.action_type] = pja
module.post_job_actions = pjadict
return module
-
def save_to_step( self, step ):
step.type = self.type
step.tool_id = self.tool_id
- step.tool_version = self.get_tool_version()
- step.tool_inputs = self.tool.params_to_strings( self.state.inputs, self.trans.app )
+ if self.tool:
+ step.tool_version = self.get_tool_version()
+ step.tool_inputs = self.tool.params_to_strings( self.state.inputs, self.trans.app )
+ else:
+ step.tool_version = None
+ step.tool_inputs = None
step.tool_errors = self.errors
for k, v in self.post_job_actions.iteritems():
# Must have action_type, step. output and a_args are optional.
@@ -221,7 +244,6 @@
else:
action_arguments = None
n_p = PostJobAction(v['action_type'], step, output_name, action_arguments)
-
def get_name( self ):
return self.tool.name
def get_tool_id( self ):
@@ -234,7 +256,6 @@
return self.errors
def get_tooltip( self ):
return self.tool.help
-
def get_data_inputs( self ):
data_inputs = []
def callback( input, value, prefixed_name, prefixed_label ):
@@ -268,15 +289,12 @@
formats.append( format )
data_outputs.append( dict( name=name, extensions=formats ) )
return data_outputs
-
def get_post_job_actions( self ):
return self.post_job_actions
-
def get_config_form( self ):
self.add_dummy_datasets()
return self.trans.fill_template( "workflow/editor_tool_form.mako",
tool=self.tool, values=self.state.inputs, errors=( self.errors or {} ) )
-
def update_state( self, incoming ):
# Build a callback that handles setting an input to be required at
# runtime. We still process all other parameters the user might have
@@ -306,10 +324,8 @@
# Update state using incoming values
errors = self.tool.update_state( self.trans, self.tool.inputs, self.state.inputs, incoming, item_callback=item_callback )
self.errors = errors or None
-
def check_and_update_state( self ):
return self.tool.check_and_update_param_values( self.state.inputs, self.trans )
-
def add_dummy_datasets( self, connections=None):
if connections:
# Store connections by input name
@@ -324,8 +340,7 @@
if connections is None or prefixed_name in input_connections_by_name:
return DummyDataset()
visit_input_values( self.tool.inputs, self.state.inputs, callback )
-
-
+
class WorkflowModuleFactory( object ):
def __init__( self, module_types ):
self.module_types = module_types
--- a/templates/webapps/galaxy/admin/center.mako Fri Oct 14 09:58:59 2011 -0400
+++ b/templates/webapps/galaxy/admin/center.mako Fri Oct 14 12:14:54 2011 -0400
@@ -1,187 +1,192 @@
<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" /><%def name="title()">Galaxy Administration</%def><h2>Administration</h2>
-<p>The menu on the left provides the following features</p>
-<ul>
- <li><strong>Security</strong> - see the <strong>Data Security and Data Libraries</strong> section below for details
+%if message:
+ ${render_msg( message, status )}
+%else:
+ <p>The menu on the left provides the following features</p>
+ <ul>
+ <li><strong>Security</strong> - see the <strong>Data Security and Data Libraries</strong> section below for details
+ <p/>
+ <ul>
+ <li>
+ <strong>Manage users</strong> - provides a view of the registered users and all groups and non-private roles associated
+ with each user.
+ </li>
+ <p/>
+ <li>
+ <strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
+ each group (both private and non-private roles). The group names include a link to a page that allows you to manage the users and
+ roles that are associated with the group.
+ </li>
+ <p/>
+ <li>
+ <strong>Manage roles</strong> - provides a view of all non-private roles along with the role type, and the users and groups that
+ are associated with the role. The role names include a link to a page that allows you to manage the users and groups that are associated
+ with the role. The page also includes a view of the data library datasets that are associated with the role and the permissions applied
+ to each dataset.
+ </li>
+ </ul>
+ </li><p/>
- <ul>
- <li>
- <strong>Manage users</strong> - provides a view of the registered users and all groups and non-private roles associated
- with each user.
- </li>
+ <li><strong>Data</strong><p/>
- <li>
- <strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
- each group (both private and non-private roles). The group names include a link to a page that allows you to manage the users and
- roles that are associated with the group.
- </li>
+ <ul>
+ <li>
+ <strong>Manage data libraries</strong> - Data libraries enable a Galaxy administrator to upload datasets into a data library. Currently,
+ only administrators can create data libraries.
+ <p/>
+ When a data library is first created, it is considered "public" since it will be displayed in the "Data Libraries" view for any user, even
+ those that are not logged in. The Galaxy administrator can restrict access to a data library by associating roles with the data library's
+ "access library" permission. This permission will conservatively override the [dataset] "access" permission for the data library's contained
+ datasets.
+ <p/>
+ For example, if a data library's "access library" permission is associated with Role1 and the data library contains "public" datasets, the
+ data library will still only be displayed to those users that have Role1. However, if the data library's "access library" permission is
+ associated with both Role1 and Role2 and the data library contains datasets whose [dataset] "access" permission is associated with only Role1,
+ then users that have Role2 will be able to access the library, but will not see those contained datasets whose [dataset] "access" permission
+ is associated with only Role1.
+ <p/>
+ In addition to the "access library" permission, permission to perform the following functions on the data library (and it's contents) can
+ be granted to users (a library item is one of: a data library, a library folder, a library dataset).
+ <p/>
+ <ul>
+ <li><strong>add library item</strong> - Users that have the role can add library items to this data library or folder</li>
+ <li><strong>modify library item</strong> - Users that have the role can modify this library item</li>
+ <li><strong>manage library permissions</strong> - Users that have the role can manage permissions applied to this library item</li>
+ </ul>
+ <p/>
+ The default behavior is for no permissions to be applied to a data library item, but applied permissions are inherited downward (with the exception
+ of the "access library" permission, which is only available on the data library itself). Because of this, it is important to set desired permissions
+ on a new data library when it is created. When this is done, new folders and datasets added to the data library will automatically inherit those
+ permissions. In the same way, permissions can be applied to a folder, which will be automatically inherited by all contained datasets and sub-folders.
+ <p/>
+ The "Data Libraries" menu item allows users to access the datasets in a data library as long as they are not restricted from accessing them.
+ Importing a library dataset into a history will not make a copy of the dataset, but will be a "pointer" to the dataset on disk. This
+ approach allows for multiple users to use a single (possibly very large) dataset file.
+ </li>
+ </ul>
+ </li>
+ <p/>
+ <li><strong>Server</strong><p/>
- <li>
- <strong>Manage roles</strong> - provides a view of all non-private roles along with the role type, and the users and groups that
- are associated with the role. The role names include a link to a page that allows you to manage the users and groups that are associated
- with the role. The page also includes a view of the data library datasets that are associated with the role and the permissions applied
- to each dataset.
- </li>
- </ul>
- </li>
+ <ul>
+ <li>
+ <strong>Reload a tool's configuration</strong> - allows a new version of a tool to be loaded while the server is running
+ </li>
+ <p/>
+ <li>
+ <strong>Profile memory usage</strong> - measures system memory used for certain Galaxy functions
+ </li>
+ <p/>
+ <li>
+ <strong>Manage jobs</strong> - displays all jobs that are currently not finished (i.e., their state is new, waiting, queued, or
+ running). Administrators are able to cleanly stop long-running jobs.
+ </li>
+ </ul>
+ </li>
+ <p/>
+ <li><strong>Forms</strong>
+ <p/>To be completed
+ </li>
+ <p/>
+ <li><strong>Sequencing Requests</strong>
+ <p/>To be completed
+ </li>
+ <p/>
+ <li><strong>Cloud</strong>
+ <p/>To be completed
+ </li>
+ </ul><p/>
- <li><strong>Data</strong>
+ <p><strong>Data Security and Data Libraries</strong></p>
+ <p/>
+ <strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
+ here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
+ process incorporates users, groups and roles, and enables the application of certain permissions on datasets, specifically "access"
+ and "manage permissions". By default, the "manage permissions" permission is associated with the dataset owner's private role, and
+ the "access" permission is not set, making the dataset public. With these default permissions, users should not see any difference
+ in the way Galaxy has behaved in the past.
+ <ul>
+ <li>
+ <strong>Users</strong> - registered Galaxy users that have created a Galaxy account. Users can belong to groups and can
+ be associated with 1 or more roles. If a user is not authenticated during a Galaxy session, they will not have access to any
+ of the security features, and datasets they create during that session will have no permissions applied to them (i.e., they
+ will be considered "public", and no one will be allowed to change permissions on them).
+ </li><p/>
- <ul>
- <li>
- <strong>Manage data libraries</strong> - Data libraries enable a Galaxy administrator to upload datasets into a data library. Currently,
- only administrators can create data libraries.
- <p/>
- When a data library is first created, it is considered "public" since it will be displayed in the "Data Libraries" view for any user, even
- those that are not logged in. The Galaxy administrator can restrict access to a data library by associating roles with the data library's
- "access library" permission. This permission will conservatively override the [dataset] "access" permission for the data library's contained
- datasets.
- <p/>
- For example, if a data library's "access library" permission is associated with Role1 and the data library contains "public" datasets, the
- data library will still only be displayed to those users that have Role1. However, if the data library's "access library" permission is
- associated with both Role1 and Role2 and the data library contains datasets whose [dataset] "access" permission is associated with only Role1,
- then users that have Role2 will be able to access the library, but will not see those contained datasets whose [dataset] "access" permission
- is associated with only Role1.
- <p/>
- In addition to the "access library" permission, permission to perform the following functions on the data library (and it's contents) can
- be granted to users (a library item is one of: a data library, a library folder, a library dataset).
- <p/>
- <ul>
- <li><strong>add library item</strong> - Users that have the role can add library items to this data library or folder</li>
- <li><strong>modify library item</strong> - Users that have the role can modify this library item</li>
- <li><strong>manage library permissions</strong> - Users that have the role can manage permissions applied to this library item</li>
- </ul>
- <p/>
- The default behavior is for no permissions to be applied to a data library item, but applied permissions are inherited downward (with the exception
- of the "access library" permission, which is only available on the data library itself). Because of this, it is important to set desired permissions
- on a new data library when it is created. When this is done, new folders and datasets added to the data library will automatically inherit those
- permissions. In the same way, permissions can be applied to a folder, which will be automatically inherited by all contained datasets and sub-folders.
- <p/>
- The "Data Libraries" menu item allows users to access the datasets in a data library as long as they are not restricted from accessing them.
- Importing a library dataset into a history will not make a copy of the dataset, but will be a "pointer" to the dataset on disk. This
- approach allows for multiple users to use a single (possibly very large) dataset file.
- </li>
- </ul>
- </li>
- <p/>
- <li><strong>Server</strong>
+ <li>
+ <strong>Groups</strong> - a set of 0 or more users which are considered members of the group. Groups can be associated with 0
+ or more roles, simplifying the process of applying permissions to the data between a select group of users.
+ </li><p/>
- <ul>
- <li>
- <strong>Reload a tool's configuration</strong> - allows a new version of a tool to be loaded while the server is running
- </li>
- <p/>
- <li>
- <strong>Profile memory usage</strong> - measures system memory used for certain Galaxy functions
- </li>
- <p/>
- <li>
- <strong>Manage jobs</strong> - displays all jobs that are currently not finished (i.e., their state is new, waiting, queued, or
- running). Administrators are able to cleanly stop long-running jobs.
- </li>
- </ul>
- </li>
- <p/>
- <li><strong>Forms</strong>
- <p/>To be completed
- </li>
- <p/>
- <li><strong>Sequencing Requests</strong>
- <p/>To be completed
- </li>
- <p/>
- <li><strong>Cloud</strong>
- <p/>To be completed
- </li>
-</ul>
-<p/>
-<p><strong>Data Security and Data Libraries</strong></p>
-<p/>
-<strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
-here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
-process incorporates users, groups and roles, and enables the application of certain permissions on datasets, specifically "access"
-and "manage permissions". By default, the "manage permissions" permission is associated with the dataset owner's private role, and
-the "access" permission is not set, making the dataset public. With these default permissions, users should not see any difference
-in the way Galaxy has behaved in the past.
-<ul>
- <li>
- <strong>Users</strong> - registered Galaxy users that have created a Galaxy account. Users can belong to groups and can
- be associated with 1 or more roles. If a user is not authenticated during a Galaxy session, they will not have access to any
- of the security features, and datasets they create during that session will have no permissions applied to them (i.e., they
- will be considered "public", and no one will be allowed to change permissions on them).
- </li>
- <p/>
- <li>
- <strong>Groups</strong> - a set of 0 or more users which are considered members of the group. Groups can be associated with 0
- or more roles, simplifying the process of applying permissions to the data between a select group of users.
- </li>
- <p/>
- <li>
- <strong>Roles</strong> - associate users and groups with specific permissions on datasets. For example, users in groups A and B
- can be associated with role C which gives them the "access" permission on datasets D, E and F. Roles have a type which is currently
- one of the following:
- <ul>
- <li>
- <strong>private</strong> - every user is associated automatically with their own private role. Administrators cannot
- manage private roles.
- </li>
- <li>
- <strong>user</strong> - this is currently not used, but eventually any registered user will be able to create a new role
- and this will be it's type.
- </li>
- <li>
- <strong>sharing</strong> - a role created automatically during a Galaxy session that enables a user to share data with
- another user. This can generally be considered a temporary role.
- </li>
- <li><strong>admin</strong> - a role created by a Galaxy administrator.</li>
- </ul>
- </li>
- <p/>
- <li>
- <strong>Dataset Permissions</strong> - applying the following permissions will to a dataset will result in the behavior described.
- <ul>
- <li>
- <strong>access</strong> - users associated with the role can import this dataset into their history for analysis.
- <p>
- If no roles with the "access" permission are associated with a dataset, the dataset is "public" and may be accessed by anyone
- that can access the data library in which it is contained. See the <strong>Manage data libraries</strong> section above for
- details. Public datasets contained in public data libraries will be accessible to all users (as well as anyone not logged in
- during a Galaxy session) from the list of data libraries displayed when the "Data Libraries" menu item is selected.
- </p>
- <p>
- Associating a dataset with a role that includes the "access" permission restricts the set of users that can access it.
- For example, if 'Role A' includes the "access" permission and 'Role A' is associated with the dataset, only those users
- and groups who are associated with 'Role A' may access the dataset.
- </p>
- <p>
- If multiple roles that include the "access" permission are associated with a dataset, access to the dataset is derived
- from the intersection of the users associated with the roles. For example, if 'Role A' and 'Role B' are associated with
- a dataset, only those users and groups who are associated with both 'Role A' AND 'Role B' may access the dataset. When
- the "access" permission is applied to a dataset, Galaxy checks to make sure that at least 1 user belongs to all groups and
- roles associated with the "access" permission (otherwise the dataset would be restricted from everyone).
- </p>
- <p>
- In order for a user to make a dataset private (i.e., only they can access it), they should associate the dataset with
- their private role (the role identical to their Galaxy user name / email address). Associating additional roles that
- include the "access" permission is not possible, since it would render the dataset inaccessible to everyone.
- <p>
- To make a dataset private to themselves and one or more other users, the user can create a new role and associate the dataset
- with that role, not their "private role". Galaxy makes this easy by telling the user they are about to share a private dataset
- and giving them the option of doing so. If they respond positively, the sharing role is automatically created for them.
- </p>
- <p>
- Private data (data associated with roles that include the "access" permission) must be made public in order to be used
- with external applications like the "view at UCSC" link, or the "Perform genome analysis and prediction with EpiGRAPH"
- tool. Being made publically accessible means removing the association of all roles that include the "access" permission
- from the dataset.
- <p>
- </li>
- <li><strong>manage permissions</strong> - Role members can manage the permissions applied to this dataset</li>
- </ul>
- </li>
-</ul>
-<br/>
+ <li>
+ <strong>Roles</strong> - associate users and groups with specific permissions on datasets. For example, users in groups A and B
+ can be associated with role C which gives them the "access" permission on datasets D, E and F. Roles have a type which is currently
+ one of the following:
+ <ul>
+ <li>
+ <strong>private</strong> - every user is associated automatically with their own private role. Administrators cannot
+ manage private roles.
+ </li>
+ <li>
+ <strong>user</strong> - this is currently not used, but eventually any registered user will be able to create a new role
+             and this will be its type.
+ </li>
+ <li>
+ <strong>sharing</strong> - a role created automatically during a Galaxy session that enables a user to share data with
+ another user. This can generally be considered a temporary role.
+ </li>
+ <li><strong>admin</strong> - a role created by a Galaxy administrator.</li>
+ </ul>
+ </li>
+ <p/>
+ <li>
+        <strong>Dataset Permissions</strong> - applying the following permissions to a dataset will result in the behavior described.
+ <ul>
+ <li>
+ <strong>access</strong> - users associated with the role can import this dataset into their history for analysis.
+ <p>
+ If no roles with the "access" permission are associated with a dataset, the dataset is "public" and may be accessed by anyone
+ that can access the data library in which it is contained. See the <strong>Manage data libraries</strong> section above for
+ details. Public datasets contained in public data libraries will be accessible to all users (as well as anyone not logged in
+ during a Galaxy session) from the list of data libraries displayed when the "Data Libraries" menu item is selected.
+ </p>
+ <p>
+ Associating a dataset with a role that includes the "access" permission restricts the set of users that can access it.
+ For example, if 'Role A' includes the "access" permission and 'Role A' is associated with the dataset, only those users
+ and groups who are associated with 'Role A' may access the dataset.
+ </p>
+ <p>
+ If multiple roles that include the "access" permission are associated with a dataset, access to the dataset is derived
+ from the intersection of the users associated with the roles. For example, if 'Role A' and 'Role B' are associated with
+ a dataset, only those users and groups who are associated with both 'Role A' AND 'Role B' may access the dataset. When
+ the "access" permission is applied to a dataset, Galaxy checks to make sure that at least 1 user belongs to all groups and
+ roles associated with the "access" permission (otherwise the dataset would be restricted from everyone).
+ </p>
+ <p>
+ In order for a user to make a dataset private (i.e., only they can access it), they should associate the dataset with
+ their private role (the role identical to their Galaxy user name / email address). Associating additional roles that
+ include the "access" permission is not possible, since it would render the dataset inaccessible to everyone.
+          </p>
+          <p>
+ To make a dataset private to themselves and one or more other users, the user can create a new role and associate the dataset
+ with that role, not their "private role". Galaxy makes this easy by telling the user they are about to share a private dataset
+ and giving them the option of doing so. If they respond positively, the sharing role is automatically created for them.
+ </p>
+ <p>
+ Private data (data associated with roles that include the "access" permission) must be made public in order to be used
+ with external applications like the "view at UCSC" link, or the "Perform genome analysis and prediction with EpiGRAPH"
+            tool. Being made publicly accessible means removing the association of all roles that include the "access" permission
+ from the dataset.
+          </p>
+ </li>
+ <li><strong>manage permissions</strong> - Role members can manage the permissions applied to this dataset</li>
+ </ul>
+ </li>
+ </ul>
+ <br/>
+%endif
--- a/templates/webapps/galaxy/admin/index.mako Fri Oct 14 09:58:59 2011 -0400
+++ b/templates/webapps/galaxy/admin/index.mako Fri Oct 14 12:14:54 2011 -0400
@@ -1,4 +1,5 @@
<%inherit file="/webapps/galaxy/base_panels.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
## Default title
<%def name="title()">Galaxy Administration</%def>
@@ -102,6 +103,6 @@
</%def><%def name="center_panel()">
- <% center_url = h.url_for( action='center', webapp='galaxy' ) %>
+ <% center_url = h.url_for( controller='admin', action='center', webapp='galaxy', message=message, status=status ) %><iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${center_url}"></iframe></%def>
--- a/templates/workflow/list.mako Fri Oct 14 09:58:59 2011 -0400
+++ b/templates/workflow/list.mako Fri Oct 14 12:14:54 2011 -0400
@@ -37,7 +37,7 @@
</a></li><li>
- <a class="action-button" href="${h.url_for( action='import_workflow' )}">
+ <a class="action-button" href="${h.url_for( controller='workflow', action='import_workflow' )}"><img src="${h.url_for('/static/images/fugue/arrow-090.png')}" /><span>Upload or import workflow</span></a>
@@ -63,22 +63,20 @@
## <td>${str(workflow.update_time)[:19]}</td><td><div popupmenu="wf-${i}-popup">
- <a class="action-button" href="${h.url_for( action='editor', id=trans.security.encode_id(workflow.id) )}" target="_parent">Edit</a>
- <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id(workflow.id) )}" target="_parent">Run</a>
- <a class="action-button" href="${h.url_for( action='sharing', id=trans.security.encode_id(workflow.id) )}">Share or Publish</a>
- <a class="action-button" href="${h.url_for( action='export', id=trans.security.encode_id(workflow.id) )}">Download or Export</a>
- <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
- <a class="action-button" href="${h.url_for( action='rename', id=trans.security.encode_id(workflow.id) )}">Rename</a>
- <a class="action-button" confirm="Are you sure you want to delete workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( action='delete', id=trans.security.encode_id(workflow.id) )}">Delete</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='editor', id=trans.security.encode_id( workflow.id ) )}" target="_parent">Edit</a>
+ <a class="action-button" href="${h.url_for( controller='root', action='index', workflow_id=trans.security.encode_id( workflow.id ) )}" target="_parent">Run</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='sharing', id=trans.security.encode_id( workflow.id ) )}">Share or Publish</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='export', id=trans.security.encode_id( workflow.id ) )}">Download or Export</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='clone', id=trans.security.encode_id( workflow.id ) )}">Clone</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='rename', id=trans.security.encode_id( workflow.id ) )}">Rename</a>
+ <a class="action-button" confirm="Are you sure you want to delete workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( controller='workflow', action='delete', id=trans.security.encode_id( workflow.id ) )}">Delete</a></div></td></tr>
%endfor
</table>
%else:
-
You have no workflows.
-
%endif
<h2>Workflows shared with you by others</h2>
@@ -101,18 +99,16 @@
<td>${len(workflow.latest_workflow.steps)}</td><td><div popupmenu="shared-${i}-popup">
- <a class="action-button" href="${h.url_for( action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug)}" target="_top">View</a>
- <a class="action-button" href="${h.url_for( action='run', id=trans.security.encode_id(workflow.id) )}">Run</a>
- <a class="action-button" href="${h.url_for( action='clone', id=trans.security.encode_id(workflow.id) )}">Clone</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug )}" target="_top">View</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id( workflow.id ) )}">Run</a>
+ <a class="action-button" href="${h.url_for( controller='workflow', action='clone', id=trans.security.encode_id( workflow.id ) )}">Clone</a></div></td></tr>
%endfor
</table>
%else:
-
No workflows have been shared with you.
-
%endif
<h2>Other options</h2>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/d711a9201414/
changeset: d711a9201414
user: natefoo
date: 2011-10-14 15:58:59
summary: Remove incorrect #-S /bin/bash (SGE shell override) from drmaa job runner. If you were depending on this to get jobs to run on your SGE cluster under bash, please add '-S /bin/bash' to your Galaxy user's ~/.sge_request.
affected #: 1 file (-1 bytes)
--- a/lib/galaxy/jobs/runners/drmaa.py Thu Oct 13 16:09:28 2011 -0400
+++ b/lib/galaxy/jobs/runners/drmaa.py Fri Oct 14 09:58:59 2011 -0400
@@ -33,7 +33,6 @@
}
drm_template = """#!/bin/sh
-#$ -S /bin/sh
GALAXY_LIB="%s"
if [ "$GALAXY_LIB" != "None" ]; then
if [ -n "$PYTHONPATH" ]; then
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/98041300775e/
changeset: 98041300775e
user: greg
date: 2011-10-13 22:09:28
summary: Clarify tool searches in the tool shed. Searches are restricted to only tools that properly load in Galaxy.
affected #: 3 files (-1 bytes)
--- a/lib/galaxy/webapps/community/controllers/repository.py Thu Oct 13 15:35:45 2011 -0400
+++ b/lib/galaxy/webapps/community/controllers/repository.py Thu Oct 13 16:09:28 2011 -0400
@@ -293,7 +293,12 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- return trans.fill_template( '/webapps/community/index.mako', message=message, status=status )
+ # See if there are any RepositoryMetadata records since menu items require them.
+ repository_metadata = trans.sa_session.query( model.RepositoryMetadata ).first()
+ return trans.fill_template( '/webapps/community/index.mako',
+ repository_metadata=repository_metadata,
+ message=message,
+ status=status )
@web.expose
def browse_categories( self, trans, **kwd ):
if 'f-free-text-search' in kwd:
@@ -400,7 +405,7 @@
exact_matches_checked = CheckboxField.is_checked( exact_matches )
match_tuples = []
if tool_id or tool_name or tool_version:
- for repository_metadata in trans.sa_session.query( model.RepositoryMetadata.table ).all():
+ for repository_metadata in trans.sa_session.query( model.RepositoryMetadata ):
metadata = repository_metadata.metadata
tools = metadata[ 'tools' ]
found = False
--- a/templates/webapps/community/index.mako Thu Oct 13 15:35:45 2011 -0400
+++ b/templates/webapps/community/index.mako Thu Oct 13 16:09:28 2011 -0400
@@ -61,9 +61,11 @@
<a target="galaxy_main" href="${h.url_for( controller='repository', action='browse_repositories', operation='my_repositories', webapp='community' )}">Browse my repositories</a></div>
%endif
- <div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='repository', action='find_tools', webapp='community' )}">Search for tools</a>
- </div>
+ %if repository_metadata:
+ <div class="toolTitle">
+ <a target="galaxy_main" href="${h.url_for( controller='repository', action='find_tools', webapp='community' )}">Search for valid tools</a>
+ </div>
+ %endif
</div></div><div class="toolSectionBody">
--- a/templates/webapps/community/repository/find_tools.mako Thu Oct 13 15:35:45 2011 -0400
+++ b/templates/webapps/community/repository/find_tools.mako Thu Oct 13 16:09:28 2011 -0400
@@ -15,10 +15,10 @@
%endif
<div class="toolForm">
- <div class="toolFormTitle">Search repositories for tools</div>
+ <div class="toolFormTitle">Search repositories for valid tools</div><div class="toolFormBody"><div class="form-row">
- Enter any combination of the following tool attributes to locate matching tools.
+ Valid tools are those that properly load in Galaxy. Enter any combination of the following tool attributes to locate matching valid tools.
</div><div style="clear: both"></div><form name="find_tools" id="find_tools" action="${h.url_for( controller='repository', action='find_tools' )}" method="post" >
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/ac84504bc6a1/
changeset: ac84504bc6a1
user: jgoecks
date: 2011-10-12 21:50:02
summary: Force absolute path when creating temporary directory used when importing histories. Fixes #667
affected #: 1 file (-1 bytes)
--- a/lib/galaxy/tools/actions/history_imp_exp.py Wed Oct 12 11:19:58 2011 -0400
+++ b/lib/galaxy/tools/actions/history_imp_exp.py Wed Oct 12 15:50:02 2011 -0400
@@ -28,7 +28,10 @@
#
# Add association for keeping track of job, history relationship.
- archive_dir = tempfile.mkdtemp()
+
+ # Use abspath because mkdtemp() does not, contrary to the documentation,
+ # always return an absolute path.
+ archive_dir = os.path.abspath( tempfile.mkdtemp() )
jiha = trans.app.model.JobImportHistoryArchive( job=job, archive_dir=archive_dir )
trans.sa_session.add( jiha )
job_wrapper = JobImportHistoryArchiveWrapper( job )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.