galaxy-dev
November 2009: 26 participants, 233 discussions
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/4fdf952e413e
changeset: 3006:4fdf952e413e
user: Kanwei Li <kanwei(a)gmail.com>
date: Tue Nov 10 16:49:05 2009 -0500
description:
Add Paste#gzip and visualization module options to universe_wsgi.ini.sample
diffstat:
universe_wsgi.ini.sample | 17 ++++++++++++++++-
1 files changed, 16 insertions(+), 1 deletions(-)
diffs (29 lines):
diff -r 6eddc13b1d3b -r 4fdf952e413e universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample Tue Nov 10 16:02:49 2009 -0500
+++ b/universe_wsgi.ini.sample Tue Nov 10 16:49:05 2009 -0500
@@ -8,9 +8,24 @@
use_threadpool = true
threadpool_workers = 10
+# ---- HTTP gzip compression ----
+# If planning to run Galaxy as a production service, we recommend running Galaxy
+# through a proxy and enabling gzip compression there (instructions at
+# http://bitbucket.org/galaxy/galaxy-central/wiki/Config/ProductionServer )
+# but you may also turn on Paste's built-in gzip compressor by uncommenting the following lines
+# and also the 'filter-with = gzip' line under [app:main]. This will reduce network traffic
+# and should speed up the interface, especially the visualization module.
+# [filter:gzip]
+# use = egg:Paste#gzip
+
# ---- Galaxy Web Interface -------------------------------------------------
-[app:main]
+[app:main]
+# Uncomment following line to enable Paste gzip compression
+# filter-with = gzip
+
+# Uncomment following line below to enable visualization module
+# enable_tracks = True
# Specifies the factory for the universe WSGI application
paste.app_factory = galaxy.web.buildapp:app_factory
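
For reference, with the two new comment blocks uncommented as the sample file suggests, the relevant parts of a universe_wsgi.ini would read roughly as follows. This is a sketch assembled from the lines added above; both the gzip filter and enable_tracks remain off by default in the shipped sample:

[filter:gzip]
use = egg:Paste#gzip

[app:main]
# Route the app through Paste's built-in gzip middleware
filter-with = gzip
# Turn on the visualization (tracks) module
enable_tracks = True
# Specifies the factory for the universe WSGI application
paste.app_factory = galaxy.web.buildapp:app_factory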
details: http://www.bx.psu.edu/hg/galaxy/rev/6b23bba26001
changeset: 3009:6b23bba26001
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 11:49:08 2009 -0500
description:
HostAgent needs an sa_session
diffstat:
lib/galaxy/security/__init__.py | 3 +++
1 files changed, 3 insertions(+), 0 deletions(-)
diffs (13 lines):
diff -r ba4ad1b7a746 -r 6b23bba26001 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Wed Nov 11 11:38:48 2009 -0500
+++ b/lib/galaxy/security/__init__.py Wed Nov 11 11:49:08 2009 -0500
@@ -569,6 +569,9 @@
self.model = model
if permitted_actions:
self.permitted_actions = permitted_actions
+ @property
+ def sa_session( self ):
+ return self.model.context.current
def allow_action( self, addr, action, **kwd ):
if 'dataset' in kwd and action == self.permitted_actions.DATASET_ACCESS:
hda = kwd['dataset']
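
The added property mirrors the sa_session property already defined on GalaxyRBACAgent (visible in the larger diff in the next message): the agent never caches a session, it looks up the current threadlocal one on each access. A minimal standalone sketch of that pattern, using stand-in classes rather than Galaxy code:

class _SessionContext(object):
    """Stand-in for the threadlocal SQLAlchemy session registry."""
    def __init__(self, session):
        self.current = session

class _Model(object):
    """Stand-in for galaxy.model; exposes the session context."""
    def __init__(self, session):
        self.context = _SessionContext(session)

class _Agent(object):
    def __init__(self, model):
        self.model = model
    @property
    def sa_session(self):
        # Resolve the session at access time, not at construction time,
        # so each thread/request sees its own current session.
        return self.model.context.current

agent = _Agent(_Model(session='fake-session'))
print(agent.sa_session)  # -> 'fake-session'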
12 Nov '09
details: http://www.bx.psu.edu/hg/galaxy/rev/ba4ad1b7a746
changeset: 3008:ba4ad1b7a746
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 11:38:48 2009 -0500
description:
Convert lib/galaxy/security/__init__.py from DOS to UNIX line endings (no other changes)
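
The commit message doesn't record how the conversion was performed; a whitespace-only change like this can be reproduced with a short Python snippet along the following lines (a sketch, not necessarily the committer's actual method):

path = 'lib/galaxy/security/__init__.py'
with open(path, 'rb') as f:
    data = f.read()
# Rewrite CRLF (DOS) line endings as LF (UNIX); no other bytes change.
with open(path, 'wb') as f:
    f.write(data.replace(b'\r\n', b'\n'))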
diffstat:
lib/galaxy/security/__init__.py | 1236 ++++++++++++++++++++++----------------------
1 files changed, 618 insertions(+), 618 deletions(-)
diffs (1244 lines):
diff -r f9bd28601cba -r ba4ad1b7a746 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Tue Nov 10 17:09:02 2009 -0500
+++ b/lib/galaxy/security/__init__.py Wed Nov 11 11:38:48 2009 -0500
@@ -1,622 +1,622 @@
-"""
-Galaxy Security
-
-"""
-import logging, socket
-from datetime import datetime, timedelta
-from galaxy.util.bunch import Bunch
-from galaxy.model.orm import *
-
-log = logging.getLogger(__name__)
-
-class Action( object ):
- def __init__( self, action, description, model ):
- self.action = action
- self.description = description
- self.model = model
-
-class RBACAgent:
- """Class that handles galaxy security"""
- permitted_actions = Bunch(
- DATASET_MANAGE_PERMISSIONS = Action( "manage permissions", "Role members can manage the roles associated with this dataset", "grant" ),
- DATASET_ACCESS = Action( "access", "Role members can import this dataset into their history for analysis", "restrict" ),
- LIBRARY_ADD = Action( "add library item", "Role members can add library items to this library item", "grant" ),
- LIBRARY_MODIFY = Action( "modify library item", "Role members can modify this library item", "grant" ),
- LIBRARY_MANAGE = Action( "manage library permissions", "Role members can manage roles associated with this library item", "grant" )
- )
- def get_action( self, name, default=None ):
- """Get a permitted action by its dict key or action name"""
- for k, v in self.permitted_actions.items():
- if k == name or v.action == name:
- return v
- return default
- def get_actions( self ):
- """Get all permitted actions as a list of Action objects"""
- return self.permitted_actions.__dict__.values()
- def get_item_actions( self, action, item ):
- raise 'No valid method of retrieving action (%s) for item %s.' % ( action, item )
- def guess_derived_permissions_for_datasets( self, datasets = [] ):
- raise "Unimplemented Method"
- def can_access_dataset( self, roles, dataset ):
- raise "Unimplemented Method"
- def can_manage_dataset( self, roles, dataset ):
- raise "Unimplemented Method"
- def can_add_library_item( self, user, roles, item ):
- raise "Unimplemented Method"
- def can_modify_library_item( self, user, roles, item ):
- raise "Unimplemented Method"
- def can_manage_library_item( self, user, roles, item ):
- raise "Unimplemented Method"
- def associate_components( self, **kwd ):
- raise 'No valid method of associating provided components: %s' % kwd
- def create_private_user_role( self, user ):
- raise "Unimplemented Method"
- def get_private_user_role( self, user ):
- raise "Unimplemented Method"
- def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False ):
- raise "Unimplemented Method"
- def history_set_default_permissions( self, history, permissions=None, dataset=False, bypass_manage_permission=False ):
- raise "Unimplemented Method"
- def set_all_dataset_permissions( self, dataset, permissions ):
- raise "Unimplemented Method"
- def set_dataset_permission( self, dataset, permission ):
- raise "Unimplemented Method"
- def set_all_library_permissions( self, dataset, permissions ):
- raise "Unimplemented Method"
- def dataset_is_public( self, dataset ):
- raise "Unimplemented Method"
- def make_dataset_public( self, dataset ):
- raise "Unimplemented Method"
- def get_component_associations( self, **kwd ):
- raise "Unimplemented Method"
- def components_are_associated( self, **kwd ):
- return bool( self.get_component_associations( **kwd ) )
- def convert_permitted_action_strings( self, permitted_action_strings ):
- """
- When getting permitted actions from an untrusted source like a
- form, ensure that they match our actual permitted actions.
- """
- return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] )
-
-class GalaxyRBACAgent( RBACAgent ):
- def __init__( self, model, permitted_actions=None ):
- self.model = model
- if permitted_actions:
- self.permitted_actions = permitted_actions
- # List of "library_item" objects and their associated permissions and info template objects
- self.library_item_assocs = (
- ( self.model.Library, self.model.LibraryPermissions ),
- ( self.model.LibraryFolder, self.model.LibraryFolderPermissions ),
- ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions ),
- ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions ) )
- @property
- def sa_session( self ):
- """
- Returns a SQLAlchemy session -- currently just gets the current
- session from the threadlocal session context, but this is provided
- to allow migration toward a more SQLAlchemy 0.4 style of use.
- """
- return self.model.context.current
- def allow_dataset_action( self, roles, action, dataset ):
- """
- Returns true when user has permission to perform an action on an
- instance of Dataset.
- """
- dataset_actions = self.get_item_actions( action, dataset )
- if not dataset_actions:
- return action.model == 'restrict'
- ret_val = False
- for dataset_action in dataset_actions:
- if dataset_action.role in roles:
- ret_val = True
- break
- return ret_val
- def can_access_dataset( self, roles, dataset ):
- return self.allow_dataset_action( roles, self.permitted_actions.DATASET_ACCESS, dataset )
- def can_manage_dataset( self, roles, dataset ):
- return self.allow_dataset_action( roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset )
- def allow_library_item_action( self, user, roles, action, item ):
- """
- Method for checking a permission for the current user to perform a
- specific library action on a library item, which must be one of:
- Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
- """
- if user is None:
- # All permissions are granted, so non-users cannot have permissions
- return False
- # Check to see if user has access to any of the roles associated with action
- item_actions = self.get_item_actions( action, item )
- if not item_actions:
- # All permissions are granted, so item must have action
- return False
- ret_val = False
- for item_action in item_actions:
- if item_action.role in roles:
- ret_val = True
- break
- return ret_val
- def can_add_library_item( self, user, roles, item ):
- return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_ADD, item )
- def can_modify_library_item( self, user, roles, item ):
- return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, item )
- def can_manage_library_item( self, user, roles, item ):
- return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, item )
- def get_item_actions( self, action, item ):
- # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
- return [ permission for permission in item.actions if permission.action == action.action ]
- def guess_derived_permissions_for_datasets( self, datasets=[] ):
- """Returns a dict of { action : [ role, role, ... ] } for the output dataset based upon provided datasets"""
- perms = {}
- for dataset in datasets:
- if not isinstance( dataset, self.model.Dataset ):
- dataset = dataset.dataset
- these_perms = {}
- # initialize blank perms
- for action in self.get_actions():
- these_perms[ action ] = []
- # collect this dataset's perms
- these_perms = self.get_dataset_permissions( dataset )
- # join or intersect this dataset's permissions with others
- for action, roles in these_perms.items():
- if action not in perms.keys():
- perms[ action ] = roles
- else:
- if action.model == 'grant':
- # intersect existing roles with new roles
- perms[ action ] = filter( lambda x: x in perms[ action ], roles )
- elif action.model == 'restrict':
- # join existing roles with new roles
- perms[ action ].extend( filter( lambda x: x not in perms[ action ], roles ) )
- return perms
- def associate_components( self, **kwd ):
- if 'user' in kwd:
- if 'group' in kwd:
- return self.associate_user_group( kwd['user'], kwd['group'] )
- elif 'role' in kwd:
- return self.associate_user_role( kwd['user'], kwd['role'] )
- elif 'role' in kwd:
- if 'group' in kwd:
- return self.associate_group_role( kwd['group'], kwd['role'] )
- if 'action' in kwd:
- if 'dataset' in kwd and 'role' in kwd:
- return self.associate_action_dataset_role( kwd['action'], kwd['dataset'], kwd['role'] )
- raise 'No valid method of associating provided components: %s' % kwd
- def associate_user_group( self, user, group ):
- assoc = self.model.UserGroupAssociation( user, group )
- assoc.flush()
- return assoc
- def associate_user_role( self, user, role ):
- assoc = self.model.UserRoleAssociation( user, role )
- assoc.flush()
- return assoc
- def associate_group_role( self, group, role ):
- assoc = self.model.GroupRoleAssociation( group, role )
- assoc.flush()
- return assoc
- def associate_action_dataset_role( self, action, dataset, role ):
- assoc = self.model.DatasetPermissions( action, dataset, role )
- assoc.flush()
- return assoc
- def create_private_user_role( self, user ):
- # Create private role
- role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE )
- role.flush()
- # Add user to role
- self.associate_components( role=role, user=user )
- return role
- def get_private_user_role( self, user, auto_create=False ):
- role = self.sa_session.query( self.model.Role ) \
- .filter( and_( self.model.Role.table.c.name == user.email,
- self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
- .first()
- if not role:
- if auto_create:
- return self.create_private_user_role( user )
- else:
- return None
- return role
- def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False, default_access_private = False ):
- # bypass_manage_permission is used to change permissions of datasets in a userless history when logging in
- if user is None:
- return None
- if not permissions:
+"""
+Galaxy Security
+
+"""
+import logging, socket
+from datetime import datetime, timedelta
+from galaxy.util.bunch import Bunch
+from galaxy.model.orm import *
+
+log = logging.getLogger(__name__)
+
+class Action( object ):
+ def __init__( self, action, description, model ):
+ self.action = action
+ self.description = description
+ self.model = model
+
+class RBACAgent:
+ """Class that handles galaxy security"""
+ permitted_actions = Bunch(
+ DATASET_MANAGE_PERMISSIONS = Action( "manage permissions", "Role members can manage the roles associated with this dataset", "grant" ),
+ DATASET_ACCESS = Action( "access", "Role members can import this dataset into their history for analysis", "restrict" ),
+ LIBRARY_ADD = Action( "add library item", "Role members can add library items to this library item", "grant" ),
+ LIBRARY_MODIFY = Action( "modify library item", "Role members can modify this library item", "grant" ),
+ LIBRARY_MANAGE = Action( "manage library permissions", "Role members can manage roles associated with this library item", "grant" )
+ )
+ def get_action( self, name, default=None ):
+ """Get a permitted action by its dict key or action name"""
+ for k, v in self.permitted_actions.items():
+ if k == name or v.action == name:
+ return v
+ return default
+ def get_actions( self ):
+ """Get all permitted actions as a list of Action objects"""
+ return self.permitted_actions.__dict__.values()
+ def get_item_actions( self, action, item ):
+ raise 'No valid method of retrieving action (%s) for item %s.' % ( action, item )
+ def guess_derived_permissions_for_datasets( self, datasets = [] ):
+ raise "Unimplemented Method"
+ def can_access_dataset( self, roles, dataset ):
+ raise "Unimplemented Method"
+ def can_manage_dataset( self, roles, dataset ):
+ raise "Unimplemented Method"
+ def can_add_library_item( self, user, roles, item ):
+ raise "Unimplemented Method"
+ def can_modify_library_item( self, user, roles, item ):
+ raise "Unimplemented Method"
+ def can_manage_library_item( self, user, roles, item ):
+ raise "Unimplemented Method"
+ def associate_components( self, **kwd ):
+ raise 'No valid method of associating provided components: %s' % kwd
+ def create_private_user_role( self, user ):
+ raise "Unimplemented Method"
+ def get_private_user_role( self, user ):
+ raise "Unimplemented Method"
+ def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False ):
+ raise "Unimplemented Method"
+ def history_set_default_permissions( self, history, permissions=None, dataset=False, bypass_manage_permission=False ):
+ raise "Unimplemented Method"
+ def set_all_dataset_permissions( self, dataset, permissions ):
+ raise "Unimplemented Method"
+ def set_dataset_permission( self, dataset, permission ):
+ raise "Unimplemented Method"
+ def set_all_library_permissions( self, dataset, permissions ):
+ raise "Unimplemented Method"
+ def dataset_is_public( self, dataset ):
+ raise "Unimplemented Method"
+ def make_dataset_public( self, dataset ):
+ raise "Unimplemented Method"
+ def get_component_associations( self, **kwd ):
+ raise "Unimplemented Method"
+ def components_are_associated( self, **kwd ):
+ return bool( self.get_component_associations( **kwd ) )
+ def convert_permitted_action_strings( self, permitted_action_strings ):
+ """
+ When getting permitted actions from an untrusted source like a
+ form, ensure that they match our actual permitted actions.
+ """
+ return filter( lambda x: x is not None, [ self.permitted_actions.get( action_string ) for action_string in permitted_action_strings ] )
+
+class GalaxyRBACAgent( RBACAgent ):
+ def __init__( self, model, permitted_actions=None ):
+ self.model = model
+ if permitted_actions:
+ self.permitted_actions = permitted_actions
+ # List of "library_item" objects and their associated permissions and info template objects
+ self.library_item_assocs = (
+ ( self.model.Library, self.model.LibraryPermissions ),
+ ( self.model.LibraryFolder, self.model.LibraryFolderPermissions ),
+ ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions ),
+ ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions ) )
+ @property
+ def sa_session( self ):
+ """
+ Returns a SQLAlchemy session -- currently just gets the current
+ session from the threadlocal session context, but this is provided
+ to allow migration toward a more SQLAlchemy 0.4 style of use.
+ """
+ return self.model.context.current
+ def allow_dataset_action( self, roles, action, dataset ):
+ """
+ Returns true when user has permission to perform an action on an
+ instance of Dataset.
+ """
+ dataset_actions = self.get_item_actions( action, dataset )
+ if not dataset_actions:
+ return action.model == 'restrict'
+ ret_val = False
+ for dataset_action in dataset_actions:
+ if dataset_action.role in roles:
+ ret_val = True
+ break
+ return ret_val
+ def can_access_dataset( self, roles, dataset ):
+ return self.allow_dataset_action( roles, self.permitted_actions.DATASET_ACCESS, dataset )
+ def can_manage_dataset( self, roles, dataset ):
+ return self.allow_dataset_action( roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset )
+ def allow_library_item_action( self, user, roles, action, item ):
+ """
+ Method for checking a permission for the current user to perform a
+ specific library action on a library item, which must be one of:
+ Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+ """
+ if user is None:
+ # All permissions are granted, so non-users cannot have permissions
+ return False
+ # Check to see if user has access to any of the roles associated with action
+ item_actions = self.get_item_actions( action, item )
+ if not item_actions:
+ # All permissions are granted, so item must have action
+ return False
+ ret_val = False
+ for item_action in item_actions:
+ if item_action.role in roles:
+ ret_val = True
+ break
+ return ret_val
+ def can_add_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_ADD, item )
+ def can_modify_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MODIFY, item )
+ def can_manage_library_item( self, user, roles, item ):
+ return self.allow_library_item_action( user, roles, self.permitted_actions.LIBRARY_MANAGE, item )
+ def get_item_actions( self, action, item ):
+ # item must be one of: Dataset, Library, LibraryFolder, LibraryDataset, LibraryDatasetDatasetAssociation
+ return [ permission for permission in item.actions if permission.action == action.action ]
+ def guess_derived_permissions_for_datasets( self, datasets=[] ):
+ """Returns a dict of { action : [ role, role, ... ] } for the output dataset based upon provided datasets"""
+ perms = {}
+ for dataset in datasets:
+ if not isinstance( dataset, self.model.Dataset ):
+ dataset = dataset.dataset
+ these_perms = {}
+ # initialize blank perms
+ for action in self.get_actions():
+ these_perms[ action ] = []
+ # collect this dataset's perms
+ these_perms = self.get_dataset_permissions( dataset )
+ # join or intersect this dataset's permissions with others
+ for action, roles in these_perms.items():
+ if action not in perms.keys():
+ perms[ action ] = roles
+ else:
+ if action.model == 'grant':
+ # intersect existing roles with new roles
+ perms[ action ] = filter( lambda x: x in perms[ action ], roles )
+ elif action.model == 'restrict':
+ # join existing roles with new roles
+ perms[ action ].extend( filter( lambda x: x not in perms[ action ], roles ) )
+ return perms
+ def associate_components( self, **kwd ):
+ if 'user' in kwd:
+ if 'group' in kwd:
+ return self.associate_user_group( kwd['user'], kwd['group'] )
+ elif 'role' in kwd:
+ return self.associate_user_role( kwd['user'], kwd['role'] )
+ elif 'role' in kwd:
+ if 'group' in kwd:
+ return self.associate_group_role( kwd['group'], kwd['role'] )
+ if 'action' in kwd:
+ if 'dataset' in kwd and 'role' in kwd:
+ return self.associate_action_dataset_role( kwd['action'], kwd['dataset'], kwd['role'] )
+ raise 'No valid method of associating provided components: %s' % kwd
+ def associate_user_group( self, user, group ):
+ assoc = self.model.UserGroupAssociation( user, group )
+ assoc.flush()
+ return assoc
+ def associate_user_role( self, user, role ):
+ assoc = self.model.UserRoleAssociation( user, role )
+ assoc.flush()
+ return assoc
+ def associate_group_role( self, group, role ):
+ assoc = self.model.GroupRoleAssociation( group, role )
+ assoc.flush()
+ return assoc
+ def associate_action_dataset_role( self, action, dataset, role ):
+ assoc = self.model.DatasetPermissions( action, dataset, role )
+ assoc.flush()
+ return assoc
+ def create_private_user_role( self, user ):
+ # Create private role
+ role = self.model.Role( name=user.email, description='Private Role for ' + user.email, type=self.model.Role.types.PRIVATE )
+ role.flush()
+ # Add user to role
+ self.associate_components( role=role, user=user )
+ return role
+ def get_private_user_role( self, user, auto_create=False ):
+ role = self.sa_session.query( self.model.Role ) \
+ .filter( and_( self.model.Role.table.c.name == user.email,
+ self.model.Role.table.c.type == self.model.Role.types.PRIVATE ) ) \
+ .first()
+ if not role:
+ if auto_create:
+ return self.create_private_user_role( user )
+ else:
+ return None
+ return role
+ def user_set_default_permissions( self, user, permissions={}, history=False, dataset=False, bypass_manage_permission=False, default_access_private = False ):
+ # bypass_manage_permission is used to change permissions of datasets in a userless history when logging in
+ if user is None:
+ return None
+ if not permissions:
             #default permissions
             permissions = { self.permitted_actions.DATASET_MANAGE_PERMISSIONS : [ self.get_private_user_role( user, auto_create=True ) ] }
             #new_user_dataset_access_role_default_private is set as True in config file
             if default_access_private:
- permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ]
- # Delete all of the current default permissions for the user
- for dup in user.default_permissions:
- self.sa_session.delete( dup )
- dup.flush()
- # Add the new default permissions for the user
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for dup in [ self.model.DefaultUserPermissions( user, action, role ) for role in roles ]:
- dup.flush()
- if history:
- for history in user.active_histories:
- self.history_set_default_permissions( history, permissions=permissions, dataset=dataset, bypass_manage_permission=bypass_manage_permission )
- def user_get_default_permissions( self, user ):
- permissions = {}
- for dup in user.default_permissions:
- action = self.get_action( dup.action )
- if action in permissions:
- permissions[ action ].append( dup.role )
- else:
- permissions[ action ] = [ dup.role ]
- return permissions
- def history_set_default_permissions( self, history, permissions={}, dataset=False, bypass_manage_permission=False ):
- # bypass_manage_permission is used to change permissions of datasets in a user-less history when logging in
- user = history.user
- if not user:
- # default permissions on a user-less history are None
- return None
- if not permissions:
- permissions = self.user_get_default_permissions( user )
- # Delete all of the current default permission for the history
- for dhp in history.default_permissions:
- self.sa_session.delete( dhp )
- dhp.flush()
- # Add the new default permissions for the history
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for dhp in [ self.model.DefaultHistoryPermissions( history, action, role ) for role in roles ]:
- dhp.flush()
- if dataset:
- # Only deal with datasets that are not purged
- for hda in history.activatable_datasets:
- dataset = hda.dataset
- if dataset.library_associations:
- # Don't change permissions on a dataset associated with a library
- continue
- if [ assoc for assoc in dataset.history_associations if assoc.history not in user.histories ]:
- # Don't change permissions on a dataset associated with a history not owned by the user
- continue
- if bypass_manage_permission or self.can_manage_dataset( user.all_roles(), dataset ):
- self.set_all_dataset_permissions( dataset, permissions )
- def history_get_default_permissions( self, history ):
- permissions = {}
- for dhp in history.default_permissions:
- action = self.get_action( dhp.action )
- if action in permissions:
- permissions[ action ].append( dhp.role )
- else:
- permissions[ action ] = [ dhp.role ]
- return permissions
- def set_all_dataset_permissions( self, dataset, permissions={} ):
- """
- Set new permissions on a dataset, eliminating all current permissions
- permissions looks like: { Action : [ Role, Role ] }
- """
- # Delete all of the current permissions on the dataset
- # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
- # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
- for dp in dataset.actions:
- self.sa_session.delete( dp )
- dp.flush()
- # Add the new permissions on the dataset
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
- dp.flush()
- def set_dataset_permission( self, dataset, permission={} ):
- """
- Set a specific permission on a dataset, leaving all other current permissions on the dataset alone
- permissions looks like: { Action : [ Role, Role ] }
- """
- # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
- # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
- for action, roles in permission.items():
- if isinstance( action, Action ):
- action = action.action
- # Delete the current specific permission on the dataset if one exists
- for dp in dataset.actions:
- if dp.action == action:
- self.sa_session.delete( dp )
- dp.flush()
- # Add the new specific permission on the dataset
- for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
- dp.flush()
- def dataset_is_public( self, dataset ):
- # A dataset is considered public if there are no "access" actions associated with it. Any
- # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
- return self.permitted_actions.DATASET_ACCESS.action not in [ a.action for a in dataset.actions ]
- def make_dataset_public( self, dataset ):
- # A dataset is considered public if there are no "access" actions associated with it. Any
- # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
- for dp in dataset.actions:
- if dp.action == self.permitted_actions.DATASET_ACCESS.action:
- self.sa_session.delete( dp )
- dp.flush()
- def get_dataset_permissions( self, dataset ):
- """
- Return a dictionary containing the actions and associated roles on dataset.
- The dictionary looks like: { Action : [ Role, Role ] }
- dataset must be an instance of Dataset()
- """
- permissions = {}
- for dp in dataset.actions:
- action = self.get_action( dp.action )
- if action in permissions:
- permissions[ action ].append( dp.role )
- else:
- permissions[ action ] = [ dp.role ]
- return permissions
- def copy_dataset_permissions( self, src, dst ):
- if not isinstance( src, self.model.Dataset ):
- src = src.dataset
- if not isinstance( dst, self.model.Dataset ):
- dst = dst.dataset
- self.set_all_dataset_permissions( dst, self.get_dataset_permissions( src ) )
- def privately_share_dataset( self, dataset, users = [] ):
- intersect = None
- for user in users:
- roles = [ ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING ]
- if intersect is None:
- intersect = roles
- else:
- new_intersect = []
- for role in roles:
- if role in intersect:
- new_intersect.append( role )
- intersect = new_intersect
- sharing_role = None
- if intersect:
- for role in intersect:
- if not filter( lambda x: x not in users, [ ura.user for ura in role.users ] ):
- # only use a role if it contains ONLY the users we're sharing with
- sharing_role = role
- break
- if sharing_role is None:
- sharing_role = self.model.Role( name = "Sharing role for: " + ", ".join( [ u.email for u in users ] ),
- type = self.model.Role.types.SHARING )
- sharing_role.flush()
- for user in users:
- self.associate_components( user=user, role=sharing_role )
- self.set_dataset_permission( dataset, { self.permitted_actions.DATASET_ACCESS : [ sharing_role ] } )
- def set_all_library_permissions( self, library_item, permissions={} ):
- # Set new permissions on library_item, eliminating all current permissions
- for role_assoc in library_item.actions:
- self.sa_session.delete( role_assoc )
- role_assoc.flush()
- # Add the new permissions on library_item
- for item_class, permission_class in self.library_item_assocs:
- if isinstance( library_item, item_class ):
- for action, roles in permissions.items():
- if isinstance( action, Action ):
- action = action.action
- for role_assoc in [ permission_class( action, library_item, role ) for role in roles ]:
- role_assoc.flush()
- def get_library_dataset_permissions( self, library_dataset ):
- # Permissions will always be the same for LibraryDatasets and associated
- # LibraryDatasetDatasetAssociations
- if isinstance( library_dataset, self.model.LibraryDatasetDatasetAssociation ):
- library_dataset = library_dataset.library_dataset
- permissions = {}
- for library_dataset_permission in library_dataset.actions:
- action = self.get_action( library_dataset_permission.action )
- if action in permissions:
- permissions[ action ].append( library_dataset_permission.role )
- else:
- permissions[ action ] = [ library_dataset_permission.role ]
- return permissions
- def copy_library_permissions( self, source_library_item, target_library_item, user=None ):
- # Copy all permissions from source
- permissions = {}
- for role_assoc in source_library_item.actions:
- if role_assoc.action in permissions:
- permissions[role_assoc.action].append( role_assoc.role )
- else:
- permissions[role_assoc.action] = [ role_assoc.role ]
- self.set_all_library_permissions( target_library_item, permissions )
- if user:
- item_class = None
- for item_class, permission_class in self.library_item_assocs:
- if isinstance( target_library_item, item_class ):
- break
- if item_class:
- # Make sure user's private role is included
- private_role = self.model.security_agent.get_private_user_role( user )
- for name, action in self.permitted_actions.items():
- if not permission_class.filter_by( role_id = private_role.id, action = action.action ).first():
- lp = permission_class( action.action, target_library_item, private_role )
- lp.flush()
- else:
- raise 'Invalid class (%s) specified for target_library_item (%s)' % \
- ( target_library_item.__class__, target_library_item.__class__.__name__ )
- def show_library_item( self, user, roles, library_item, actions_to_check, hidden_folder_ids='' ):
- """
- This method must be sent an instance of Library() or LibraryFolder(). Recursive execution produces a
- comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along with
- the string, True is returned if the current user has permission to perform any 1 of actions_to_check
- on library_item. Otherwise, cycle through all sub-folders in library_item until one is found that meets
- this criteria, if it exists. This method does not necessarily scan the entire library as it returns
- when it finds the first library_item that allows user to perform any one action in actions_to_check.
- """
- for action in actions_to_check:
- if self.allow_library_item_action( user, roles, action, library_item ):
- return True, hidden_folder_ids
- if isinstance( library_item, self.model.Library ):
- return self.show_library_item( user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids='' )
- if isinstance( library_item, self.model.LibraryFolder ):
- for folder in library_item.active_folders:
- can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
- if can_show:
- return True, hidden_folder_ids
- if hidden_folder_ids:
- hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, folder.id )
- else:
- hidden_folder_ids = '%d' % folder.id
- return False, hidden_folder_ids
- def get_showable_folders( self, user, roles, library_item, actions_to_check, hidden_folder_ids=[], showable_folders=[] ):
- """
- This method must be sent an instance of Library(), all the folders of which are scanned to determine if
- user is allowed to perform any action in actions_to_check. The param hidden_folder_ids, if passed, should
- contain a list of folder IDs which was generated when the library was previously scanned
- using the same actions_to_check. A list of showable folders is generated. This method scans the entire library.
- """
- if isinstance( library_item, self.model.Library ):
- return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] )
- if isinstance( library_item, self.model.LibraryFolder ):
- if library_item.id not in hidden_folder_ids:
- for action in actions_to_check:
- if self.allow_library_item_action( user, roles, action, library_item ):
- showable_folders.append( library_item )
- break
- for folder in library_item.active_folders:
- self.get_showable_folders( user, roles, folder, actions_to_check, showable_folders=showable_folders )
- return showable_folders
- def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ):
- for user in users:
- if delete_existing_assocs:
- for a in user.non_private_roles + user.groups:
- self.sa_session.delete( a )
- a.flush()
- self.sa_session.refresh( user )
- for role in roles:
- # Make sure we are not creating an additional association with a PRIVATE role
- if role not in user.roles:
- self.associate_components( user=user, role=role )
- for group in groups:
- self.associate_components( user=user, group=group )
- def set_entity_group_associations( self, groups=[], users=[], roles=[], delete_existing_assocs=True ):
- for group in groups:
- if delete_existing_assocs:
- for a in group.roles + group.users:
- self.sa_session.delete( a )
- a.flush()
- for role in roles:
- self.associate_components( group=group, role=role )
- for user in users:
- self.associate_components( group=group, user=user )
- def set_entity_role_associations( self, roles=[], users=[], groups=[], delete_existing_assocs=True ):
- for role in roles:
- if delete_existing_assocs:
- for a in role.users + role.groups:
- self.sa_session.delete( a )
- a.flush()
- for user in users:
- self.associate_components( user=user, role=role )
- for group in groups:
- self.associate_components( group=group, role=role )
- def get_component_associations( self, **kwd ):
- assert len( kwd ) == 2, 'You must specify exactly 2 Galaxy security components to check for associations.'
- if 'dataset' in kwd:
- if 'action' in kwd:
- return self.sa_session.query( self.model.DatasetPermissions ).filter_by( action = kwd['action'].action, dataset_id = kwd['dataset'].id ).first()
- elif 'user' in kwd:
- if 'group' in kwd:
- return self.sa_session.query( self.model.UserGroupAssociation ).filter_by( group_id = kwd['group'].id, user_id = kwd['user'].id ).first()
- elif 'role' in kwd:
- return self.sa_session.query( self.model.UserRoleAssociation ).filter_by( role_id = kwd['role'].id, user_id = kwd['user'].id ).first()
- elif 'group' in kwd:
- if 'role' in kwd:
- return self.sa_session.query( self.model.GroupRoleAssociation ).filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first()
- raise 'No valid method of associating provided components: %s' % kwd
- def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ):
- """
- This method must always be sent an instance of LibraryFolder(). Recursive execution produces a
- comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along
- with the string, True is returned if the current user has permission to access folder. Otherwise,
- cycle through all sub-folders in folder until one is found that meets this criteria, if it exists.
- This method does not necessarily scan the entire library as it returns when it finds the first
- folder that is accessible to user.
- """
- action = self.permitted_actions.DATASET_ACCESS
- lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \
- .join( "library_dataset" ) \
- .filter( self.model.LibraryDataset.folder == folder ) \
- .join( "dataset" ) \
- .options( eagerload_all( "dataset.actions" ) ) \
- .all()
- for ldda in lddas:
- ldda_access_permissions = self.get_item_actions( action, ldda.dataset )
- if not ldda_access_permissions:
- # Dataset is public
- return True, hidden_folder_ids
- for ldda_access_permission in ldda_access_permissions:
- if ldda_access_permission.role in roles:
- # The current user has access permission on the dataset
- return True, hidden_folder_ids
- for sub_folder in folder.active_folders:
- can_access, hidden_folder_ids = self.check_folder_contents( user, roles, sub_folder, hidden_folder_ids=hidden_folder_ids )
- if can_access:
- return True, hidden_folder_ids
- if hidden_folder_ids:
- hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, sub_folder.id )
- else:
- hidden_folder_ids = '%d' % sub_folder.id
- return False, hidden_folder_ids
-
-class HostAgent( RBACAgent ):
- """
- A simple security agent which allows access to datasets based on host.
- This exists so that externals sites such as UCSC can gain access to
- datasets which have permissions which would normally prevent such access.
- """
- # TODO: Make sites user configurable
- sites = Bunch(
- ucsc_main = ( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
- 'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
- ucsc_test = ( 'hgwdev.cse.ucsc.edu', ),
- ucsc_archaea = ( 'lowepub.cse.ucsc.edu', )
- )
- def __init__( self, model, permitted_actions=None ):
- self.model = model
- if permitted_actions:
- self.permitted_actions = permitted_actions
- def allow_action( self, addr, action, **kwd ):
- if 'dataset' in kwd and action == self.permitted_actions.DATASET_ACCESS:
- hda = kwd['dataset']
- if action == self.permitted_actions.DATASET_ACCESS and action.action not in [ dp.action for dp in hda.dataset.actions ]:
- log.debug( 'Allowing access to public dataset with hda: %i.' % hda.id )
- return True # dataset has no roles associated with the access permission, thus is already public
- hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
- .filter_by( history_dataset_association_id = hda.id ).first()
- if not hdadaa:
- log.debug( 'Denying access to private dataset with hda: %i. No hdadaa record for this dataset.' % hda.id )
- return False # no auth
- # We could just look up the reverse of addr, but then we'd also
- # have to verify it with the forward address and special case any
- # IPs (instead of hosts) in the server list.
- #
- # This would be improved by caching, but that's what the OS's name
- # service cache daemon is for (you ARE running nscd, right?).
- for server in HostAgent.sites.get( hdadaa.site, [] ):
- # We're going to search in order, but if the remote site is load
- # balancing their connections (as UCSC does), this is okay.
- try:
- if socket.gethostbyname( server ) == addr:
- break # remote host is in the server list
- except ( socket.error, socket.gaierror ):
- pass # can't resolve, try next
- else:
- log.debug( 'Denying access to private dataset with hda: %i. Remote addr is not a valid server for site: %s.' % ( hda.id, hdadaa.site ) )
- return False # remote addr is not in the server list
- if ( datetime.utcnow() - hdadaa.update_time ) > timedelta( seconds=60 ):
- log.debug( 'Denying access to private dataset with hda: %i. Authorization was granted, but has expired.' % hda.id )
- return False # not authz'd in the last 60 seconds
- log.debug( 'Allowing access to private dataset with hda: %i. Remote server is: %s.' % ( hda.id, server ) )
- return True
- else:
- raise 'The dataset access permission is the only valid permission in the host security agent.'
- def set_dataset_permissions( self, hda, user, site ):
- hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
- .filter_by( history_dataset_association_id = hda.id ).first()
- if hdadaa:
- hdadaa.update_time = datetime.utcnow()
- else:
- hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization( hda=hda, user=user, site=site )
- hdadaa.flush()
-
-def get_permitted_actions( filter=None ):
- '''Utility method to return a subset of RBACAgent's permitted actions'''
- if filter is None:
- return RBACAgent.permitted_actions
- tmp_bunch = Bunch()
- [ tmp_bunch.__dict__.__setitem__(k, v) for k, v in RBACAgent.permitted_actions.items() if k.startswith( filter ) ]
- return tmp_bunch
+ permissions[ self.permitted_actions.DATASET_ACCESS ] = permissions.values()[ 0 ]
+ # Delete all of the current default permissions for the user
+ for dup in user.default_permissions:
+ self.sa_session.delete( dup )
+ dup.flush()
+ # Add the new default permissions for the user
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for dup in [ self.model.DefaultUserPermissions( user, action, role ) for role in roles ]:
+ dup.flush()
+ if history:
+ for history in user.active_histories:
+ self.history_set_default_permissions( history, permissions=permissions, dataset=dataset, bypass_manage_permission=bypass_manage_permission )
+ def user_get_default_permissions( self, user ):
+ permissions = {}
+ for dup in user.default_permissions:
+ action = self.get_action( dup.action )
+ if action in permissions:
+ permissions[ action ].append( dup.role )
+ else:
+ permissions[ action ] = [ dup.role ]
+ return permissions
+ def history_set_default_permissions( self, history, permissions={}, dataset=False, bypass_manage_permission=False ):
+ # bypass_manage_permission is used to change permissions of datasets in a user-less history when logging in
+ user = history.user
+ if not user:
+ # default permissions on a user-less history are None
+ return None
+ if not permissions:
+ permissions = self.user_get_default_permissions( user )
+ # Delete all of the current default permission for the history
+ for dhp in history.default_permissions:
+ self.sa_session.delete( dhp )
+ dhp.flush()
+ # Add the new default permissions for the history
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for dhp in [ self.model.DefaultHistoryPermissions( history, action, role ) for role in roles ]:
+ dhp.flush()
+ if dataset:
+ # Only deal with datasets that are not purged
+ for hda in history.activatable_datasets:
+ dataset = hda.dataset
+ if dataset.library_associations:
+ # Don't change permissions on a dataset associated with a library
+ continue
+ if [ assoc for assoc in dataset.history_associations if assoc.history not in user.histories ]:
+ # Don't change permissions on a dataset associated with a history not owned by the user
+ continue
+ if bypass_manage_permission or self.can_manage_dataset( user.all_roles(), dataset ):
+ self.set_all_dataset_permissions( dataset, permissions )
+ def history_get_default_permissions( self, history ):
+ permissions = {}
+ for dhp in history.default_permissions:
+ action = self.get_action( dhp.action )
+ if action in permissions:
+ permissions[ action ].append( dhp.role )
+ else:
+ permissions[ action ] = [ dhp.role ]
+ return permissions
+ def set_all_dataset_permissions( self, dataset, permissions={} ):
+ """
+ Set new permissions on a dataset, eliminating all current permissions
+ permissions looks like: { Action : [ Role, Role ] }
+ """
+ # Delete all of the current permissions on the dataset
+ # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
+ # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
+ for dp in dataset.actions:
+ self.sa_session.delete( dp )
+ dp.flush()
+ # Add the new permissions on the dataset
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
+ dp.flush()
+ def set_dataset_permission( self, dataset, permission={} ):
+ """
+ Set a specific permission on a dataset, leaving all other current permissions on the dataset alone
+ permissions looks like: { Action : [ Role, Role ] }
+ """
+ # TODO: If setting ACCESS permission, at least 1 user must have every role associated with this dataset,
+ # or the dataset is inaccessible. See admin/library_dataset_dataset_association()
+ for action, roles in permission.items():
+ if isinstance( action, Action ):
+ action = action.action
+ # Delete the current specific permission on the dataset if one exists
+ for dp in dataset.actions:
+ if dp.action == action:
+ self.sa_session.delete( dp )
+ dp.flush()
+ # Add the new specific permission on the dataset
+ for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
+ dp.flush()
+ def dataset_is_public( self, dataset ):
+ # A dataset is considered public if there are no "access" actions associated with it. Any
+ # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+ return self.permitted_actions.DATASET_ACCESS.action not in [ a.action for a in dataset.actions ]
+ def make_dataset_public( self, dataset ):
+ # A dataset is considered public if there are no "access" actions associated with it. Any
+ # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+ for dp in dataset.actions:
+ if dp.action == self.permitted_actions.DATASET_ACCESS.action:
+ self.sa_session.delete( dp )
+ dp.flush()
+ def get_dataset_permissions( self, dataset ):
+ """
+ Return a dictionary containing the actions and associated roles on dataset.
+ The dictionary looks like: { Action : [ Role, Role ] }
+ dataset must be an instance of Dataset()
+ """
+ permissions = {}
+ for dp in dataset.actions:
+ action = self.get_action( dp.action )
+ if action in permissions:
+ permissions[ action ].append( dp.role )
+ else:
+ permissions[ action ] = [ dp.role ]
+ return permissions
+ def copy_dataset_permissions( self, src, dst ):
+ if not isinstance( src, self.model.Dataset ):
+ src = src.dataset
+ if not isinstance( dst, self.model.Dataset ):
+ dst = dst.dataset
+ self.set_all_dataset_permissions( dst, self.get_dataset_permissions( src ) )
+ def privately_share_dataset( self, dataset, users = [] ):
+ intersect = None
+ for user in users:
+ roles = [ ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING ]
+ if intersect is None:
+ intersect = roles
+ else:
+ new_intersect = []
+ for role in roles:
+ if role in intersect:
+ new_intersect.append( role )
+ intersect = new_intersect
+ sharing_role = None
+ if intersect:
+ for role in intersect:
+ if not filter( lambda x: x not in users, [ ura.user for ura in role.users ] ):
+ # only use a role if it contains ONLY the users we're sharing with
+ sharing_role = role
+ break
+ if sharing_role is None:
+ sharing_role = self.model.Role( name = "Sharing role for: " + ", ".join( [ u.email for u in users ] ),
+ type = self.model.Role.types.SHARING )
+ sharing_role.flush()
+ for user in users:
+ self.associate_components( user=user, role=sharing_role )
+ self.set_dataset_permission( dataset, { self.permitted_actions.DATASET_ACCESS : [ sharing_role ] } )
+ def set_all_library_permissions( self, library_item, permissions={} ):
+ # Set new permissions on library_item, eliminating all current permissions
+ for role_assoc in library_item.actions:
+ self.sa_session.delete( role_assoc )
+ role_assoc.flush()
+ # Add the new permissions on library_item
+ for item_class, permission_class in self.library_item_assocs:
+ if isinstance( library_item, item_class ):
+ for action, roles in permissions.items():
+ if isinstance( action, Action ):
+ action = action.action
+ for role_assoc in [ permission_class( action, library_item, role ) for role in roles ]:
+ role_assoc.flush()
+ def get_library_dataset_permissions( self, library_dataset ):
+ # Permissions will always be the same for LibraryDatasets and associated
+ # LibraryDatasetDatasetAssociations
+ if isinstance( library_dataset, self.model.LibraryDatasetDatasetAssociation ):
+ library_dataset = library_dataset.library_dataset
+ permissions = {}
+ for library_dataset_permission in library_dataset.actions:
+ action = self.get_action( library_dataset_permission.action )
+ if action in permissions:
+ permissions[ action ].append( library_dataset_permission.role )
+ else:
+ permissions[ action ] = [ library_dataset_permission.role ]
+ return permissions
+ def copy_library_permissions( self, source_library_item, target_library_item, user=None ):
+ # Copy all permissions from source
+ permissions = {}
+ for role_assoc in source_library_item.actions:
+ if role_assoc.action in permissions:
+ permissions[role_assoc.action].append( role_assoc.role )
+ else:
+ permissions[role_assoc.action] = [ role_assoc.role ]
+ self.set_all_library_permissions( target_library_item, permissions )
+ if user:
+ item_class = None
+ for item_class, permission_class in self.library_item_assocs:
+ if isinstance( target_library_item, item_class ):
+ break
+ if item_class:
+ # Make sure user's private role is included
+ private_role = self.model.security_agent.get_private_user_role( user )
+ for name, action in self.permitted_actions.items():
+ if not permission_class.filter_by( role_id = private_role.id, action = action.action ).first():
+ lp = permission_class( action.action, target_library_item, private_role )
+ lp.flush()
+ else:
+ raise 'Invalid class (%s) specified for target_library_item (%s)' % \
+ ( target_library_item.__class__, target_library_item.__class__.__name__ )
+ def show_library_item( self, user, roles, library_item, actions_to_check, hidden_folder_ids='' ):
+ """
+ This method must be sent an instance of Library() or LibraryFolder(). Recursive execution produces a
+ comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along with
+ the string, True is returned if the current user has permission to perform any 1 of actions_to_check
+ on library_item. Otherwise, cycle through all sub-folders in library_item until one is found that meets
+ this criteria, if it exists. This method does not necessarily scan the entire library as it returns
+ when it finds the first library_item that allows user to perform any one action in actions_to_check.
+ """
+ for action in actions_to_check:
+ if self.allow_library_item_action( user, roles, action, library_item ):
+ return True, hidden_folder_ids
+ if isinstance( library_item, self.model.Library ):
+ return self.show_library_item( user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids='' )
+ if isinstance( library_item, self.model.LibraryFolder ):
+ for folder in library_item.active_folders:
+ can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids )
+ if can_show:
+ return True, hidden_folder_ids
+ if hidden_folder_ids:
+ hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, folder.id )
+ else:
+ hidden_folder_ids = '%d' % folder.id
+ return False, hidden_folder_ids
+ def get_showable_folders( self, user, roles, library_item, actions_to_check, hidden_folder_ids=[], showable_folders=[] ):
+ """
+ This method must be sent an instance of Library(), all the folders of which are scanned to determine if
+ user is allowed to perform any action in actions_to_check. The param hidden_folder_ids, if passed, should
+ contain a list of folder IDs which was generated when the library was previously scanned
+ using the same actions_to_check. A list of showable folders is generated. This method scans the entire library.
+ """
+ if isinstance( library_item, self.model.Library ):
+ return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] )
+ if isinstance( library_item, self.model.LibraryFolder ):
+ if library_item.id not in hidden_folder_ids:
+ for action in actions_to_check:
+ if self.allow_library_item_action( user, roles, action, library_item ):
+ showable_folders.append( library_item )
+ break
+ for folder in library_item.active_folders:
+ self.get_showable_folders( user, roles, folder, actions_to_check, showable_folders=showable_folders )
+ return showable_folders
+ def set_entity_user_associations( self, users=[], roles=[], groups=[], delete_existing_assocs=True ):
+ for user in users:
+ if delete_existing_assocs:
+ for a in user.non_private_roles + user.groups:
+ self.sa_session.delete( a )
+ a.flush()
+ self.sa_session.refresh( user )
+ for role in roles:
+ # Make sure we are not creating an additional association with a PRIVATE role
+ if role not in user.roles:
+ self.associate_components( user=user, role=role )
+ for group in groups:
+ self.associate_components( user=user, group=group )
+ def set_entity_group_associations( self, groups=[], users=[], roles=[], delete_existing_assocs=True ):
+ for group in groups:
+ if delete_existing_assocs:
+ for a in group.roles + group.users:
+ self.sa_session.delete( a )
+ a.flush()
+ for role in roles:
+ self.associate_components( group=group, role=role )
+ for user in users:
+ self.associate_components( group=group, user=user )
+ def set_entity_role_associations( self, roles=[], users=[], groups=[], delete_existing_assocs=True ):
+ for role in roles:
+ if delete_existing_assocs:
+ for a in role.users + role.groups:
+ self.sa_session.delete( a )
+ a.flush()
+ for user in users:
+ self.associate_components( user=user, role=role )
+ for group in groups:
+ self.associate_components( group=group, role=role )
+ def get_component_associations( self, **kwd ):
+ assert len( kwd ) == 2, 'You must specify exactly 2 Galaxy security components to check for associations.'
+ if 'dataset' in kwd:
+ if 'action' in kwd:
+ return self.sa_session.query( self.model.DatasetPermissions ).filter_by( action = kwd['action'].action, dataset_id = kwd['dataset'].id ).first()
+ elif 'user' in kwd:
+ if 'group' in kwd:
+ return self.sa_session.query( self.model.UserGroupAssociation ).filter_by( group_id = kwd['group'].id, user_id = kwd['user'].id ).first()
+ elif 'role' in kwd:
+ return self.sa_session.query( self.model.UserRoleAssociation ).filter_by( role_id = kwd['role'].id, user_id = kwd['user'].id ).first()
+ elif 'group' in kwd:
+ if 'role' in kwd:
+ return self.sa_session.query( self.model.GroupRoleAssociation ).filter_by( role_id = kwd['role'].id, group_id = kwd['group'].id ).first()
+ raise Exception( 'No valid method of associating provided components: %s' % kwd )
+ def check_folder_contents( self, user, roles, folder, hidden_folder_ids='' ):
+ """
+ This method must always be sent an instance of LibraryFolder(). Recursive execution produces a
+ comma-separated string of folder ids whose folders do NOT meet the criteria for showing. Along
+ with the string, True is returned if the current user has permission to access folder. Otherwise,
+ cycle through all sub-folders in folder until one is found that meets the criteria, if any exists.
+ This method does not necessarily scan the entire library as it returns when it finds the first
+ folder that is accessible to user.
+ """
+ action = self.permitted_actions.DATASET_ACCESS
+ lddas = self.sa_session.query( self.model.LibraryDatasetDatasetAssociation ) \
+ .join( "library_dataset" ) \
+ .filter( self.model.LibraryDataset.folder == folder ) \
+ .join( "dataset" ) \
+ .options( eagerload_all( "dataset.actions" ) ) \
+ .all()
+ for ldda in lddas:
+ ldda_access_permissions = self.get_item_actions( action, ldda.dataset )
+ if not ldda_access_permissions:
+ # Dataset is public
+ return True, hidden_folder_ids
+ for ldda_access_permission in ldda_access_permissions:
+ if ldda_access_permission.role in roles:
+ # The current user has access permission on the dataset
+ return True, hidden_folder_ids
+ for sub_folder in folder.active_folders:
+ can_access, hidden_folder_ids = self.check_folder_contents( user, roles, sub_folder, hidden_folder_ids=hidden_folder_ids )
+ if can_access:
+ return True, hidden_folder_ids
+ if hidden_folder_ids:
+ hidden_folder_ids = '%s,%d' % ( hidden_folder_ids, sub_folder.id )
+ else:
+ hidden_folder_ids = '%d' % sub_folder.id
+ return False, hidden_folder_ids
+
+class HostAgent( RBACAgent ):
+ """
+ A simple security agent which allows access to datasets based on host.
+ This exists so that external sites such as UCSC can gain access to
+ datasets whose permissions would normally prevent such access.
+ """
+ # TODO: Make sites user configurable
+ sites = Bunch(
+ ucsc_main = ( 'hgw1.cse.ucsc.edu', 'hgw2.cse.ucsc.edu', 'hgw3.cse.ucsc.edu', 'hgw4.cse.ucsc.edu',
+ 'hgw5.cse.ucsc.edu', 'hgw6.cse.ucsc.edu', 'hgw7.cse.ucsc.edu', 'hgw8.cse.ucsc.edu' ),
+ ucsc_test = ( 'hgwdev.cse.ucsc.edu', ),
+ ucsc_archaea = ( 'lowepub.cse.ucsc.edu', )
+ )
+ def __init__( self, model, permitted_actions=None ):
+ self.model = model
+ if permitted_actions:
+ self.permitted_actions = permitted_actions
+ def allow_action( self, addr, action, **kwd ):
+ if 'dataset' in kwd and action == self.permitted_actions.DATASET_ACCESS:
+ hda = kwd['dataset']
+ if action == self.permitted_actions.DATASET_ACCESS and action.action not in [ dp.action for dp in hda.dataset.actions ]:
+ log.debug( 'Allowing access to public dataset with hda: %i.' % hda.id )
+ return True # dataset has no roles associated with the access permission, thus is already public
+ hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
+ .filter_by( history_dataset_association_id = hda.id ).first()
+ if not hdadaa:
+ log.debug( 'Denying access to private dataset with hda: %i. No hdadaa record for this dataset.' % hda.id )
+ return False # no auth
+ # We could just look up the reverse of addr, but then we'd also
+ # have to verify it with the forward address and special case any
+ # IPs (instead of hosts) in the server list.
+ #
+ # This would be improved by caching, but that's what the OS's name
+ # service cache daemon is for (you ARE running nscd, right?).
+ for server in HostAgent.sites.get( hdadaa.site, [] ):
+ # We're going to search in order, but if the remote site is load
+ # balancing their connections (as UCSC does), this is okay.
+ try:
+ if socket.gethostbyname( server ) == addr:
+ break # remote host is in the server list
+ except ( socket.error, socket.gaierror ):
+ pass # can't resolve, try next
+ else:
+ log.debug( 'Denying access to private dataset with hda: %i. Remote addr is not a valid server for site: %s.' % ( hda.id, hdadaa.site ) )
+ return False # remote addr is not in the server list
+ if ( datetime.utcnow() - hdadaa.update_time ) > timedelta( seconds=60 ):
+ log.debug( 'Denying access to private dataset with hda: %i. Authorization was granted, but has expired.' % hda.id )
+ return False # not authz'd in the last 60 seconds
+ log.debug( 'Allowing access to private dataset with hda: %i. Remote server is: %s.' % ( hda.id, server ) )
+ return True
+ else:
+ raise Exception( 'The dataset access permission is the only valid permission in the host security agent.' )
+ def set_dataset_permissions( self, hda, user, site ):
+ hdadaa = self.sa_session.query( self.model.HistoryDatasetAssociationDisplayAtAuthorization ) \
+ .filter_by( history_dataset_association_id = hda.id ).first()
+ if hdadaa:
+ hdadaa.update_time = datetime.utcnow()
+ else:
+ hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization( hda=hda, user=user, site=site )
+ hdadaa.flush()
+
+def get_permitted_actions( filter=None ):
+ '''Utility method to return a subset of RBACAgent's permitted actions'''
+ if filter is None:
+ return RBACAgent.permitted_actions
+ tmp_bunch = Bunch()
+ for k, v in RBACAgent.permitted_actions.items():
+ if k.startswith( filter ):
+ tmp_bunch.__dict__[ k ] = v
+ return tmp_bunch
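For reference, here is a minimal, self-contained sketch of the filter semantics of get_permitted_actions() above. This is not Galaxy code: Bunch is stubbed in place of galaxy.util.bunch.Bunch, and the real permitted_actions holds Action objects rather than plain strings.

class Bunch( dict ):
    # stand-in for galaxy.util.bunch.Bunch: attribute-style access over a dict
    __getattr__ = dict.__getitem__

permitted_actions = Bunch( DATASET_ACCESS='access',
                           DATASET_MANAGE_PERMISSIONS='manage permissions',
                           LIBRARY_ADD='add library item' )

def get_permitted_actions( filter=None ):
    # no filter returns everything; otherwise keep only keys with the given prefix
    if filter is None:
        return permitted_actions
    return Bunch( ( k, v ) for k, v in permitted_actions.items() if k.startswith( filter ) )

print( get_permitted_actions( filter='DATASET' ) )  # only the DATASET_* actions remain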
details: http://www.bx.psu.edu/hg/galaxy/rev/f9bd28601cba
changeset: 3007:f9bd28601cba
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Tue Nov 10 17:09:02 2009 -0500
description:
Changes the system call method from subprocess.check_call to os.system in Bam set_meta in images.py. The change to sam_to_bam_out1.bam should fix the sam-to-bam functional test.
diffstat:
lib/galaxy/datatypes/images.py | 12 ++++++------
test-data/sam_to_bam_out1.bam |
2 files changed, 6 insertions(+), 6 deletions(-)
diffs (24 lines):
diff -r 4fdf952e413e -r f9bd28601cba lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py Tue Nov 10 16:49:05 2009 -0500
+++ b/lib/galaxy/datatypes/images.py Tue Nov 10 17:09:02 2009 -0500
@@ -254,12 +254,12 @@
tmpf1 = tempfile.NamedTemporaryFile(dir=tmp_dir)
tmpf1bai = '%s.bai' % tmpf1.name
try:
- subprocess.check_call(['cd', tmp_dir], shell=True)
- subprocess.check_call('cp %s %s' % (dataset.file_name, tmpf1.name), shell=True)
- subprocess.check_call('samtools index %s' % tmpf1.name, shell=True)
- subprocess.check_call('cp %s %s' % (tmpf1bai, index_file.file_name), shell=True)
- except subprocess.CalledProcessError:
- sys.stderr.write('There was a problem creating the index for the BAM file\n')
+ os.system('cd %s' % tmp_dir)
+ os.system('cp %s %s' % (dataset.file_name, tmpf1.name))
+ os.system('samtools index %s' % tmpf1.name)
+ os.system('cp %s %s' % (tmpf1bai, index_file.file_name))
+ except Exception, ex:
+ sys.stderr.write('There was a problem creating the index for the BAM file\n%s\n' % str(ex))
tmpf1.close()
if os.path.exists(tmpf1bai):
os.remove(tmpf1bai)
diff -r 4fdf952e413e -r f9bd28601cba test-data/sam_to_bam_out1.bam
Binary file test-data/sam_to_bam_out1.bam has changed
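Worth noting about the replaced calls: with shell=True, a list passed to subprocess.check_call() treats only its first element as the shell command, so the original ['cd', tmp_dir] invocation never received its directory argument; likewise, each os.system() call runs in its own shell, so the new 'cd' has no effect on the commands that follow it. A minimal sketch of the same steps with an explicit working directory instead (not the committed code; 'samtools' on PATH and the path arguments are assumed):

import shutil, subprocess

def build_bam_index( dataset_file, tmp_bam, index_file, tmp_dir ):
    shutil.copy( dataset_file, tmp_bam )                            # stage the BAM into tmp_dir
    subprocess.check_call( [ 'samtools', 'index', tmp_bam ], cwd=tmp_dir )
    shutil.copy( '%s.bai' % tmp_bam, index_file )                   # keep the .bai as the metadata file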
details: http://www.bx.psu.edu/hg/galaxy/rev/d36d08cc4abb
changeset: 3011:d36d08cc4abb
user: Kelly Vincent <kpvincent(a)bx.psu.edu>
date: Wed Nov 11 13:53:48 2009 -0500
description:
Changed sam-to-bam test by removing nonexistent dbkey from test input specification.
diffstat:
tools/samtools/sam_to_bam.xml | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 68c3b25ea133 -r d36d08cc4abb tools/samtools/sam_to_bam.xml
--- a/tools/samtools/sam_to_bam.xml Wed Nov 11 13:08:11 2009 -0500
+++ b/tools/samtools/sam_to_bam.xml Wed Nov 11 13:53:48 2009 -0500
@@ -36,7 +36,7 @@
<tests>
<test>
<param name="indexSource" value="history" />
- <param name="input1" value="sam_to_bam_in1.sam" ftype="sam" dbkey="chrM" />
+ <param name="input1" value="sam_to_bam_in1.sam" ftype="sam" />
<param name="ref_file" value="chrM.fa" ftype="fasta" />
<output name="output1" file="sam_to_bam_out1.bam" />
</test>
details: http://www.bx.psu.edu/hg/galaxy/rev/68c3b25ea133
changeset: 3010:68c3b25ea133
user: Nate Coraor <nate(a)bx.psu.edu>
date: Wed Nov 11 13:08:11 2009 -0500
description:
Properly set the PBS job name when not using staging
diffstat:
lib/galaxy/jobs/runners/pbs.py | 4 +++-
1 files changed, 3 insertions(+), 1 deletions(-)
diffs (18 lines):
diff -r 6b23bba26001 -r 68c3b25ea133 lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 11:49:08 2009 -0500
+++ b/lib/galaxy/jobs/runners/pbs.py Wed Nov 11 13:08:11 2009 -0500
@@ -208,11 +208,13 @@
exec_dir = os.path.abspath( os.getcwd() )
# If not, we're using NFS
else:
- job_attrs = pbs.new_attropl(2)
+ job_attrs = pbs.new_attropl(3)
job_attrs[0].name = pbs.ATTR_o
job_attrs[0].value = ofile
job_attrs[1].name = pbs.ATTR_e
job_attrs[1].value = efile
+ job_attrs[2].name = pbs.ATTR_N
+ job_attrs[2].value = "%s" % job_wrapper.job_id
exec_dir = os.getcwd()
# write the job script
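For context, a sketch of the attribute list the runner now builds in the non-staging (NFS) branch. The pbs_python names (new_attropl, ATTR_o, ATTR_e, ATTR_N) come from the diff above; the wrapper function itself is illustrative:

import pbs

def build_job_attrs( ofile, efile, job_id ):
    job_attrs = pbs.new_attropl( 3 )    # stdout path, stderr path, job name
    job_attrs[0].name, job_attrs[0].value = pbs.ATTR_o, ofile
    job_attrs[1].name, job_attrs[1].value = pbs.ATTR_e, efile
    job_attrs[2].name, job_attrs[2].value = pbs.ATTR_N, str( job_id )  # the name shown by qstat
    return job_attrs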
details: http://www.bx.psu.edu/hg/galaxy/rev/6eddc13b1d3b
changeset: 3005:6eddc13b1d3b
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 10 16:02:49 2009 -0500
description:
Fix for the broken test I just committed
diffstat:
test/base/twilltestcase.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 4f923033e9d0 -r 6eddc13b1d3b test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Nov 10 15:10:43 2009 -0500
+++ b/test/base/twilltestcase.py Tue Nov 10 16:02:49 2009 -0500
@@ -1651,7 +1651,7 @@
t.extract( n, tmpd )
t.close()
elif zipfile.is_zipfile( archive ):
- z = zipfile.open( archive )
+ z = zipfile.ZipFile( archive, 'r' )
for n in z.namelist():
mkdir( n )
open( os.path.join( tmpd, n ), 'wb' ).write( z.read( n ) )
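The underlying issue: the zipfile module has no module-level open() function, so archives must be opened with zipfile.ZipFile. A minimal sketch of the extraction pattern the fixed test uses (paths are illustrative):

import os, zipfile

def extract_zip( archive, dest ):
    z = zipfile.ZipFile( archive, 'r' )
    for name in z.namelist():
        target = os.path.join( dest, name )
        d = os.path.dirname( target )
        if d and not os.path.exists( d ):
            os.makedirs( d )                      # create intermediate directories
        open( target, 'wb' ).write( z.read( name ) )
    z.close()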
details: http://www.bx.psu.edu/hg/galaxy/rev/4f923033e9d0
changeset: 3004:4f923033e9d0
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 10 15:10:43 2009 -0500
description:
Add test for library tarball downloads
diffstat:
test/base/twilltestcase.py | 60 ++++++++++++++++++++++++++++-
test/functional/test_security_and_libraries.py | 9 ++++
2 files changed, 66 insertions(+), 3 deletions(-)
diffs (110 lines):
diff -r d5fd27771019 -r 4f923033e9d0 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Tue Nov 10 15:04:03 2009 -0500
+++ b/test/base/twilltestcase.py Tue Nov 10 15:10:43 2009 -0500
@@ -1,7 +1,7 @@
import pkg_resources
pkg_resources.require( "twill==0.9" )
-import StringIO, os, sys, random, filecmp, time, unittest, urllib, logging, difflib, zipfile, tempfile, re
+import StringIO, os, sys, random, filecmp, time, unittest, urllib, logging, difflib, tarfile, zipfile, tempfile, re, shutil
from itertools import *
import twill
@@ -736,8 +736,8 @@
errmsg = "no match to '%s'\npage content written to '%s'" % ( patt, fname )
raise AssertionError( errmsg )
- def write_temp_file( self, content ):
- fd, fname = tempfile.mkstemp( suffix='.html', prefix='twilltestcase-' )
+ def write_temp_file( self, content, suffix='.html' ):
+ fd, fname = tempfile.mkstemp( suffix=suffix, prefix='twilltestcase-' )
f = os.fdopen( fd, "w" )
f.write( content )
f.close()
@@ -1616,6 +1616,60 @@
self.check_page_for_string( check_str_after_submit )
self.library_wait( library_id, controller='library' )
self.home()
+ def download_archive_of_library_files( self, library_id, ldda_ids, format ):
+ self.home()
+ self.visit_url( "%s/library/browse_library?obj_id=%s" % ( self.url, library_id ) )
+ for ldda_id in ldda_ids:
+ tc.fv( "1", "ldda_ids", ldda_id )
+ tc.fv( "1", "do_action", format )
+ tc.submit( "action_on_datasets_button" )
+ tc.code( 200 )
+ archive = self.write_temp_file( self.last_page(), suffix=format )
+ self.home()
+ return archive
+ def check_archive_contents( self, archive, lddas ):
+ def get_ldda_path( ldda ):
+ path = ""
+ parent_folder = ldda.library_dataset.folder
+ while parent_folder is not None:
+ if parent_folder.parent is None:
+ path = os.path.join( parent_folder.library_root[0].name, path )
+ break
+ path = os.path.join( parent_folder.name, path )
+ parent_folder = parent_folder.parent
+ path += ldda.name
+ return path
+ def mkdir( file ):
+ dir = os.path.join( tmpd, os.path.dirname( file ) )
+ if not os.path.exists( dir ):
+ os.makedirs( dir )
+ tmpd = tempfile.mkdtemp()
+ if tarfile.is_tarfile( archive ):
+ t = tarfile.open( archive )
+ for n in t.getnames():
+ mkdir( n )
+ t.extract( n, tmpd )
+ t.close()
+ elif zipfile.is_zipfile( archive ):
+ z = zipfile.open( archive )
+ for n in z.namelist():
+ mkdir( n )
+ open( os.path.join( tmpd, n ), 'wb' ).write( z.read( n ) )
+ z.close()
+ else:
+ raise Exception( 'Unable to read archive: %s' % archive )
+ for ldda in lddas:
+ orig_file = self.get_filename( ldda.name )
+ downloaded_file = os.path.join( tmpd, get_ldda_path( ldda ) )
+ assert os.path.exists( downloaded_file )
+ try:
+ self.files_diff( orig_file, downloaded_file )
+ except AssertionError, err:
+ errmsg = 'Library item %s different than expected, difference:\n' % ldda.name
+ errmsg += str( err )
+ errmsg += 'Unpacked archive remains in: %s\n' % tmpd
+ raise AssertionError( errmsg )
+ shutil.rmtree( tmpd )
def delete_library_item( self, library_id, library_item_id, library_item_name, library_item_type='library_dataset' ):
"""Mark a library item as deleted"""
self.home()
diff -r d5fd27771019 -r 4f923033e9d0 test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Tue Nov 10 15:04:03 2009 -0500
+++ b/test/functional/test_security_and_libraries.py Tue Nov 10 15:10:43 2009 -0500
@@ -6,6 +6,7 @@
not_logged_in_security_msg = 'You must be logged in as an administrator to access this feature.'
logged_in_security_msg = 'You must be an administrator to access this feature.'
+import sys
class TestSecurityAndLibraries( TwillTestCase ):
def test_000_admin_features_when_not_logged_in( self ):
"""Testing admin_features when not logged in"""
@@ -1385,6 +1386,14 @@
self.home()
self.logout()
self.login( email=admin_user.email )
+ def test_167_download_archive_of_library_files( self ):
+ """Testing downloading an archive of files from the library"""
+ for format in ( 'tbz', 'tgz', 'zip' ):
+ archive = self.download_archive_of_library_files( str( library_one.id ),
+ ( str( ldda_one.id ), str( ldda_two.id ) ),
+ format )
+ self.check_archive_contents( archive, ( ldda_one, ldda_two ) )
+ os.remove( archive )
def test_170_mark_group_deleted( self ):
"""Testing marking a group as deleted"""
# Logged in as admin_user
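The get_ldda_path() helper above rebuilds a library-relative path by walking folder parents up to the root, where the library name (library_root[0].name) replaces the root folder's own name. A stripped-down sketch of the same walk, with plain objects standing in for the model classes:

import os

class Folder( object ):
    def __init__( self, name, parent=None, library_name=None ):
        self.name, self.parent, self.library_name = name, parent, library_name

def ldda_path( dataset_name, folder ):
    path = ''
    while folder is not None:
        if folder.parent is None:
            path = os.path.join( folder.library_name, path )  # library name at the top
            break
        path = os.path.join( folder.name, path )
        folder = folder.parent
    return path + dataset_name

root = Folder( 'root', library_name='Library One' )
sub = Folder( 'Folder A', parent=root )
print( ldda_path( 'dataset.bed', sub ) )  # Library One/Folder A/dataset.bed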
details: http://www.bx.psu.edu/hg/galaxy/rev/c146726d274f
changeset: 3001:c146726d274f
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 10 13:47:34 2009 -0500
description:
Upgrade to Cheetah 2.2.2
diffstat:
eggs.ini | 4 +-
lib/galaxy/eggs/__init__.py | 1 +
scripts/scramble/lib/get_platform.py | 18 ++++++
scripts/scramble/scripts/Cheetah-py2.5.py | 55 ------------------
scripts/scramble/scripts/Cheetah.py | 62 ++++++++++++++++++++
scripts/scramble/scripts/DRMAA_python.py | 1 +
scripts/scramble/scripts/MySQL_python.py | 1 +
scripts/scramble/scripts/generic.py | 1 +
scripts/scramble/scripts/pbs_python.py | 1 +
scripts/scramble/scripts/psycopg2.py | 1 +
scripts/scramble/scripts/pysqlite.py | 1 +
scripts/scramble/scripts/python_lzo.py | 1 +
tools/filters/joiner.xml | 35 ++++++-----
tools/maf/genebed_maf_to_fasta.xml | 7 +-
tools/maf/interval2maf.xml | 10 +-
tools/maf/interval_maf_to_merged_fasta.xml | 7 +-
tools/maf/maf_to_fasta.xml | 6 +-
tools/metag_tools/blat_wrapper.xml | 7 +-
tools/metag_tools/shrimp_color_wrapper.xml | 6 +-
tools/metag_tools/shrimp_wrapper.xml | 10 +-
tools/samtools/pileup_parser.xml | 8 +-
tools/sr_mapping/lastz_wrapper.xml | 18 +++---
tools/stats/aggregate_binned_scores_in_intervals.xml | 6 +-
23 files changed, 151 insertions(+), 116 deletions(-)
diffs (485 lines):
diff -r facf315d9e0e -r c146726d274f eggs.ini
--- a/eggs.ini Tue Nov 10 13:46:53 2009 -0500
+++ b/eggs.ini Tue Nov 10 13:47:34 2009 -0500
@@ -13,7 +13,7 @@
[eggs:platform]
bx_python = 0.5.0
-Cheetah = 1.0
+Cheetah = 2.2.2
DRMAA_python = 0.2
MySQL_python = 1.2.2
pbs_python = 2.9.4
@@ -66,7 +66,7 @@
; source location, necessary for scrambling
[source]
bx_python = http://bitbucket.org/james_taylor/bx-python/get/4bf1f32e6b76.bz2
-Cheetah = http://voxel.dl.sourceforge.net/sourceforge/cheetahtemplate/Cheetah-1.0.tar…
+Cheetah = http://pypi.python.org/packages/source/C/Cheetah/Cheetah-2.2.2.tar.gz
DRMAA_python = http://gridengine.sunsource.net/files/documents/7/36/DRMAA-python-0.2.tar.gz
MySQL_python = http://superb-west.dl.sourceforge.net/sourceforge/mysql-python/MySQL-python… http://downloads.mysql.com/archives/mysql-5.0/mysql-5.0.67.tar.gz
pbs_python = http://ftp.sara.nl/pub/outgoing/pbs_python-2.9.4.tar.gz
diff -r facf315d9e0e -r c146726d274f lib/galaxy/eggs/__init__.py
--- a/lib/galaxy/eggs/__init__.py Tue Nov 10 13:46:53 2009 -0500
+++ b/lib/galaxy/eggs/__init__.py Tue Nov 10 13:47:34 2009 -0500
@@ -140,6 +140,7 @@
cmd = "ssh %s 'cd %s; %s -ES %s'" % ( self.build_host, self.buildpath, self.python, "scramble.py" )
else:
cmd = "%s -ES %s" % ( self.python, "scramble.py" )
+ log.debug( 'Executing: %s' % cmd )
p = subprocess.Popen( args = cmd, shell = True, cwd = self.buildpath )
r = p.wait()
if r != 0:
diff -r facf315d9e0e -r c146726d274f scripts/scramble/lib/get_platform.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/lib/get_platform.py Tue Nov 10 13:47:34 2009 -0500
@@ -0,0 +1,18 @@
+"""
+Monkeypatch get_platform since it's broken on OS X versions of Python 2.5
+"""
+import os, sys
+from distutils.sysconfig import get_config_vars
+if sys.platform == 'darwin' and get_config_vars().get('UNIVERSALSDK', '').strip():
+ # Has to be before anything imports pkg_resources
+ def _get_platform_monkeypatch():
+ plat = distutils.util._get_platform()
+ if plat.startswith( 'macosx-' ):
+ plat = 'macosx-10.3-fat'
+ return plat
+ import distutils.util
+ try:
+ assert distutils.util._get_platform
+ except:
+ distutils.util._get_platform = distutils.util.get_platform
+ distutils.util.get_platform = _get_platform_monkeypatch
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/Cheetah-py2.5.py
--- a/scripts/scramble/scripts/Cheetah-py2.5.py Tue Nov 10 13:46:53 2009 -0500
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,55 +0,0 @@
-import os, sys, shutil
-
-# change back to the build dir
-if os.path.dirname( sys.argv[0] ) != "":
- os.chdir( os.path.dirname( sys.argv[0] ) )
-
-# find setuptools
-scramble_lib = os.path.join( "..", "..", "..", "lib" )
-sys.path.append( scramble_lib )
-from ez_setup import use_setuptools
-use_setuptools( download_delay=8, to_dir=scramble_lib )
-from setuptools import *
-
-# get the tag
-if os.access( ".galaxy_tag", os.F_OK ):
- tagfile = open( ".galaxy_tag", "r" )
- tag = tagfile.readline().strip()
-else:
- tag = None
-
-# in case you're running this by hand from a dirty module source dir
-for dir in [ "build", "dist" ]:
- if os.access( dir, os.F_OK ):
- print "scramble_it.py: removing dir:", dir
- shutil.rmtree( dir )
-
-# patch
-for file in [ "src/NameMapper.py", "src/Tests/NameMapper.py" ]:
- if not os.access( "%s.orig" %file, os.F_OK ):
- print "scramble_it(): Patching", file
- shutil.copyfile( file, "%s.orig" %file )
- i = open( "%s.orig" %file, "r" )
- o = open( file, "w" )
- for line in i.readlines():
- if line.startswith("__author__ ="):
- print >>o, "from __future__ import generators"
- elif line == "from __future__ import generators\n":
- continue
- print >>o, line,
- i.close()
- o.close()
-
-# reset args for distutils
-me = sys.argv[0]
-sys.argv = [ me ]
-sys.argv.append( "egg_info" )
-if tag is not None:
- #sys.argv.append( "egg_info" )
- sys.argv.append( "--tag-build=%s" %tag )
-# svn revision (if any) is handled directly in tag-build
-sys.argv.append( "--no-svn-revision" )
-sys.argv.append( "bdist_egg" )
-
-# do it
-execfile( "setup.py", globals(), locals() )
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/Cheetah.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scripts/scramble/scripts/Cheetah.py Tue Nov 10 13:47:34 2009 -0500
@@ -0,0 +1,62 @@
+import os, sys, shutil
+
+# change back to the build dir
+if os.path.dirname( sys.argv[0] ) != "":
+ os.chdir( os.path.dirname( sys.argv[0] ) )
+
+# find setuptools
+scramble_lib = os.path.join( "..", "..", "..", "lib" )
+sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
+from ez_setup import use_setuptools
+use_setuptools( download_delay=8, to_dir=scramble_lib )
+from setuptools import *
+
+# get the tag
+if os.access( ".galaxy_tag", os.F_OK ):
+ tagfile = open( ".galaxy_tag", "r" )
+ tag = tagfile.readline().strip()
+else:
+ tag = None
+
+# in case you're running this by hand from a dirty module source dir
+for dir in [ "build", "dist" ]:
+ if os.access( dir, os.F_OK ):
+ print "scramble_it.py: removing dir:", dir
+ shutil.rmtree( dir )
+
+# patch
+file = "SetupConfig.py"
+if not os.access( "%s.orig" %file, os.F_OK ):
+ print "scramble.py(): Patching", file
+ shutil.copyfile( file, "%s.orig" %file )
+ i = open( "%s.orig" %file, "r" )
+ o = open( file, "w" )
+ comment = False
+ for line in i.readlines():
+ if line == " install_requires = [\n":
+ comment = True
+ print >>o, "#" + line,
+ elif comment and line == " ]\n":
+ comment = False
+ print >>o, "#" + line,
+ elif comment:
+ print >>o, "#" + line,
+ else:
+ print >>o, line,
+ i.close()
+ o.close()
+
+# reset args for distutils
+me = sys.argv[0]
+sys.argv = [ me ]
+sys.argv.append( "egg_info" )
+if tag is not None:
+ #sys.argv.append( "egg_info" )
+ sys.argv.append( "--tag-build=%s" %tag )
+# svn revision (if any) is handled directly in tag-build
+sys.argv.append( "--no-svn-revision" )
+sys.argv.append( "bdist_egg" )
+
+# do it
+execfile( "setup.py", globals(), locals() )
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/DRMAA_python.py
--- a/scripts/scramble/scripts/DRMAA_python.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/DRMAA_python.py Tue Nov 10 13:47:34 2009 -0500
@@ -12,6 +12,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
try:
from setuptools import *
import pkg_resources
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/MySQL_python.py
--- a/scripts/scramble/scripts/MySQL_python.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/MySQL_python.py Tue Nov 10 13:47:34 2009 -0500
@@ -57,6 +57,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
from ez_setup import use_setuptools
use_setuptools( download_delay=8, to_dir=scramble_lib )
from setuptools import *
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/generic.py
--- a/scripts/scramble/scripts/generic.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/generic.py Tue Nov 10 13:47:34 2009 -0500
@@ -7,6 +7,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
from ez_setup import use_setuptools
use_setuptools( download_delay=8, to_dir=scramble_lib )
from setuptools import *
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/pbs_python.py
--- a/scripts/scramble/scripts/pbs_python.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/pbs_python.py Tue Nov 10 13:47:34 2009 -0500
@@ -12,6 +12,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
try:
from setuptools import *
import pkg_resources
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/psycopg2.py
--- a/scripts/scramble/scripts/psycopg2.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/psycopg2.py Tue Nov 10 13:47:34 2009 -0500
@@ -59,6 +59,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
try:
from setuptools import *
import pkg_resources
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/pysqlite.py
--- a/scripts/scramble/scripts/pysqlite.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/pysqlite.py Tue Nov 10 13:47:34 2009 -0500
@@ -21,6 +21,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
try:
from setuptools import *
import pkg_resources
diff -r facf315d9e0e -r c146726d274f scripts/scramble/scripts/python_lzo.py
--- a/scripts/scramble/scripts/python_lzo.py Tue Nov 10 13:46:53 2009 -0500
+++ b/scripts/scramble/scripts/python_lzo.py Tue Nov 10 13:47:34 2009 -0500
@@ -54,6 +54,7 @@
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
+import get_platform # fixes fat python 2.5
try:
from setuptools import *
import pkg_resources
diff -r facf315d9e0e -r c146726d274f tools/filters/joiner.xml
--- a/tools/filters/joiner.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/filters/joiner.xml Tue Nov 10 13:47:34 2009 -0500
@@ -52,24 +52,25 @@
<configfiles>
<configfile name="fill_options_file"><%
import simplejson
-%>#set $__fill_options = {}
+%>
+#set $__fill_options = {}
#if $fill_empty_columns['fill_empty_columns_switch'] == 'fill_empty':
-#set $__fill_options['fill_unjoined_only'] = $fill_empty_columns['fill_columns_by'].value == 'fill_unjoined_only'
-#if $fill_empty_columns['do_fill_empty_columns']['column_fill_type'] == 'single_fill_value':
-#set $__start_fill = $fill_empty_columns['do_fill_empty_columns']['fill_value'].value
-#else:
-#set $__start_fill = ""
-#end if
-#set $__fill_options['file1_columns'] = [ $__start_fill for i in range( int( $input1.metadata.columns ) ) ]
-#set $__fill_options['file2_columns'] = [ $__start_fill for i in range( int( $input2.metadata.columns ) ) ]
-#if $fill_empty_columns['do_fill_empty_columns']['column_fill_type'] == 'fill_value_by_column':
-#for column_fill1 in $fill_empty_columns['do_fill_empty_columns']['column_fill1']:
-#set $__fill_options['file1_columns'][ int( column_fill1['column_number1'].value ) - 1 ] = column_fill1['fill_value1'].value
-#end for
-#for column_fill2 in $fill_empty_columns['do_fill_empty_columns']['column_fill2']:
-#set $__fill_options['file2_columns'][ int( column_fill2['column_number2'].value ) - 1 ] = column_fill2['fill_value2'].value
-#end for
-#end if
+ #set $__fill_options['fill_unjoined_only'] = $fill_empty_columns['fill_columns_by'].value == 'fill_unjoined_only'
+ #if $fill_empty_columns['do_fill_empty_columns']['column_fill_type'] == 'single_fill_value':
+ #set $__start_fill = $fill_empty_columns['do_fill_empty_columns']['fill_value'].value
+ #else:
+ #set $__start_fill = ""
+ #end if
+ #set $__fill_options['file1_columns'] = [ __start_fill for i in range( int( $input1.metadata.columns ) ) ]
+ #set $__fill_options['file2_columns'] = [ __start_fill for i in range( int( $input2.metadata.columns ) ) ]
+ #if $fill_empty_columns['do_fill_empty_columns']['column_fill_type'] == 'fill_value_by_column':
+ #for column_fill1 in $fill_empty_columns['do_fill_empty_columns']['column_fill1']:
+ #set $__fill_options['file1_columns'][ int( column_fill1['column_number1'].value ) - 1 ] = column_fill1['fill_value1'].value
+ #end for
+ #for column_fill2 in $fill_empty_columns['do_fill_empty_columns']['column_fill2']:
+ #set $__fill_options['file2_columns'][ int( column_fill2['column_number2'].value ) - 1 ] = column_fill2['fill_value2'].value
+ #end for
+ #end if
#end if
${simplejson.dumps( __fill_options )}
</configfile>
diff -r facf315d9e0e -r c146726d274f tools/maf/genebed_maf_to_fasta.xml
--- a/tools/maf/genebed_maf_to_fasta.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/maf/genebed_maf_to_fasta.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,8 +1,9 @@
<tool id="GeneBed_Maf_Fasta2" name="Stitch Gene blocks" version="1.0.1">
<description>given a set of coding exon intervals</description>
- <command interpreter="python">#if $maf_source_type.maf_source == "user":#interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_file --mafIndex=$maf_source_type.maf_file.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --mafSourceType=$maf_source_type.maf_source --geneBED --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
-#else:#interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_identifier --interval_file=$input1 --output_file=$out_file1 --mafSourceType=$maf_source_type.maf_source --geneBED --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
-#end if# --overwrite_with_gaps=$overwrite_with_gaps
+ <command interpreter="python">
+ #if $maf_source_type.maf_source == "user" #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_file --mafIndex=$maf_source_type.maf_file.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --mafSourceType=$maf_source_type.maf_source --geneBED --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+ #else #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_identifier --interval_file=$input1 --output_file=$out_file1 --mafSourceType=$maf_source_type.maf_source --geneBED --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+ #end if# --overwrite_with_gaps=$overwrite_with_gaps
</command>
<inputs>
<param name="input1" type="data" format="bed" label="Gene BED File">
diff -r facf315d9e0e -r c146726d274f tools/maf/interval2maf.xml
--- a/tools/maf/interval2maf.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/maf/interval2maf.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,11 +1,11 @@
<tool id="Interval2Maf1" name="Extract MAF blocks" version="1.0.1">
<description>given a set of genomic intervals</description>
<command interpreter="python">
- #if $maf_source_type.maf_source == "user":#interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafFile=$maf_source_type.mafFile --mafIndex=$maf_source_type.mafFile.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --mafIndexFile=${GALAXY_DATA_INDEX_DIR}/maf_index.loc --species=$maf_source_type.species
- #else:#interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafType=$maf_source_type.mafType --interval_file=$input1 --output_file=$out_file1 --mafIndexFile=${GALAXY_DATA_INDEX_DIR}/maf_index.loc --species=$maf_source_type.species
- #end if
- --split_blocks_by_species=$split_blocks_by_species_selector.split_blocks_by_species
- #if $split_blocks_by_species_selector.split_blocks_by_species == "split_blocks_by_species":# --remove_all_gap_columns=$split_blocks_by_species_selector.remove_all_gap_columns
+ #if $maf_source_type.maf_source == "user" #interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafFile=$maf_source_type.mafFile --mafIndex=$maf_source_type.mafFile.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --mafIndexFile=${GALAXY_DATA_INDEX_DIR}/maf_index.loc --species=$maf_source_type.species
+ #else #interval2maf.py --dbkey=${input1.dbkey} --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafType=$maf_source_type.mafType --interval_file=$input1 --output_file=$out_file1 --mafIndexFile=${GALAXY_DATA_INDEX_DIR}/maf_index.loc --species=$maf_source_type.species
+ #end if# --split_blocks_by_species=$split_blocks_by_species_selector.split_blocks_by_species
+ #if $split_blocks_by_species_selector.split_blocks_by_species == "split_blocks_by_species"#
+ --remove_all_gap_columns=$split_blocks_by_species_selector.remove_all_gap_columns
#end if
</command>
<inputs>
diff -r facf315d9e0e -r c146726d274f tools/maf/interval_maf_to_merged_fasta.xml
--- a/tools/maf/interval_maf_to_merged_fasta.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/maf/interval_maf_to_merged_fasta.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,8 +1,9 @@
<tool id="Interval_Maf_Merged_Fasta2" name="Stitch MAF blocks" version="1.0.1">
<description>given a set of genomic intervals</description>
- <command interpreter="python">#if $maf_source_type.maf_source == "user":#interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_file --mafIndex=$maf_source_type.maf_file.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafSourceType=$maf_source_type.maf_source --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
-#else:#interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_identifier --interval_file=$input1 --output_file=$out_file1 --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafSourceType=$maf_source_type.maf_source --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
-#end if# --overwrite_with_gaps=$overwrite_with_gaps
+ <command interpreter="python">
+ #if $maf_source_type.maf_source == "user" #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_file --mafIndex=$maf_source_type.maf_file.metadata.maf_index --interval_file=$input1 --output_file=$out_file1 --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafSourceType=$maf_source_type.maf_source --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+ #else #interval_maf_to_merged_fasta.py --dbkey=$dbkey --species=$maf_source_type.species --mafSource=$maf_source_type.maf_identifier --interval_file=$input1 --output_file=$out_file1 --chromCol=${input1.metadata.chromCol} --startCol=${input1.metadata.startCol} --endCol=${input1.metadata.endCol} --strandCol=${input1.metadata.strandCol} --mafSourceType=$maf_source_type.maf_source --mafIndexFileDir=${GALAXY_DATA_INDEX_DIR}
+ #end if# --overwrite_with_gaps=$overwrite_with_gaps
</command>
<inputs>
<page>
diff -r facf315d9e0e -r c146726d274f tools/maf/maf_to_fasta.xml
--- a/tools/maf/maf_to_fasta.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/maf/maf_to_fasta.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,9 +1,9 @@
<tool id="MAF_To_Fasta1" name="MAF to FASTA" version="1.0.1">
<description>Converts a MAF formated file to FASTA format</description>
<command interpreter="python">
- #if $fasta_target_type.fasta_type == "multiple":#maf_to_fasta_multiple_sets.py $input1 $out_file1 $fasta_target_type.species $fasta_target_type.complete_blocks
- #else:#maf_to_fasta_concat.py $fasta_target_type.species $input1 $out_file1
- #end if
+ #if $fasta_target_type.fasta_type == "multiple" #maf_to_fasta_multiple_sets.py $input1 $out_file1 $fasta_target_type.species $fasta_target_type.complete_blocks
+ #else #maf_to_fasta_concat.py $fasta_target_type.species $input1 $out_file1
+ #end if#
</command>
<inputs>
<param format="maf" name="input1" type="data" label="MAF file to convert"/>
diff -r facf315d9e0e -r c146726d274f tools/metag_tools/blat_wrapper.xml
--- a/tools/metag_tools/blat_wrapper.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/metag_tools/blat_wrapper.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,10 +1,9 @@
<tool id="blat_wrapper" name="BLAT" version="1.0.0">
<description> compare sequencing reads against UCSC genome builds</description>
<command interpreter="python">
- #if $source.source_select=="database":#blat_wrapper.py 0 $source.dbkey $input_query $output1 $iden $tile_size $one_off
- #else:#blat_wrapper.py 1 $source.input_target $input_query $output1 $iden $tile_size $one_off
- #end if
- ${GALAXY_DATA_INDEX_DIR}
+ #if $source.source_select=="database" #blat_wrapper.py 0 $source.dbkey $input_query $output1 $iden $tile_size $one_off
+ #else #blat_wrapper.py 1 $source.input_target $input_query $output1 $iden $tile_size $one_off
+ #end if# ${GALAXY_DATA_INDEX_DIR}
</command>
<inputs>
<conditional name="source">
diff -r facf315d9e0e -r c146726d274f tools/metag_tools/shrimp_color_wrapper.xml
--- a/tools/metag_tools/shrimp_color_wrapper.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/metag_tools/shrimp_color_wrapper.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,9 +1,9 @@
<tool id="shrimp_color_wrapper" name="SHRiMP for Color-space" version="1.0.0">
<description>reads mapping against reference sequence </description>
<command interpreter="python">
- #if $param.skip_or_full=="skip":#shrimp_color_wrapper.py $input_target $input_query $output1
- #else #shrimp_color_wrapper.py $input_target $input_query $output1 $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_crossover_penalty $param.sw_full_hit_threshold $param.sw_vector_hit_threshold
- #end if
+ #if $param.skip_or_full=="skip" #shrimp_color_wrapper.py $input_target $input_query $output1
+ #else #shrimp_color_wrapper.py $input_target $input_query $output1 $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_crossover_penalty $param.sw_full_hit_threshold $param.sw_vector_hit_threshold
+ #end if#
</command>
<inputs>
<page>
diff -r facf315d9e0e -r c146726d274f tools/metag_tools/shrimp_wrapper.xml
--- a/tools/metag_tools/shrimp_wrapper.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/metag_tools/shrimp_wrapper.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,11 +1,11 @@
<tool id="shrimp_wrapper" name="SHRiMP for Letter-space" version="1.0.0">
<description>reads mapping against reference sequence </description>
<command interpreter="python">
- #if ($type_of_reads.single_or_paired=="single" and $param.skip_or_full=="skip"):#shrimp_wrapper.py $input_target $output1 $output2 $input_query
- #elif ($type_of_reads.single_or_paired=="paired" and $param.skip_or_full=="skip"):#shrimp_wrapper.py $input_target $output1 $output2 $type_of_reads.input1,$type_of_reads.input2,$type_of_reads.insertion_size
- #elif ($type_of_reads.single_or_paired=="single" and $param.skip_or_full=="full"):#shrimp_wrapper.py $input_target $output1 $output2 $input_query $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_hit_threshold
- #elif ($type_of_reads.single_or_paired=="paired" and $param.skip_or_full=="full"):#shrimp_wrapper.py $input_target $output1 $output2 $type_of_reads.input1,$type_of_reads.input2,$type_of_reads.insertion_size $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_hit_threshold
- #end if
+ #if ($type_of_reads.single_or_paired=="single" and $param.skip_or_full=="skip") #shrimp_wrapper.py $input_target $output1 $output2 $input_query
+ #elif ($type_of_reads.single_or_paired=="paired" and $param.skip_or_full=="skip") #shrimp_wrapper.py $input_target $output1 $output2 $type_of_reads.input1,$type_of_reads.input2,$type_of_reads.insertion_size
+ #elif ($type_of_reads.single_or_paired=="single" and $param.skip_or_full=="full") #shrimp_wrapper.py $input_target $output1 $output2 $input_query $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_hit_threshold
+ #elif ($type_of_reads.single_or_paired=="paired" and $param.skip_or_full=="full") #shrimp_wrapper.py $input_target $output1 $output2 $type_of_reads.input1,$type_of_reads.input2,$type_of_reads.insertion_size $param.spaced_seed $param.seed_matches_per_window $param.seed_hit_taboo_length $param.seed_generation_taboo_length $param.seed_window_length $param.max_hits_per_read $param.max_read_length $param.kmer $param.sw_match_value $param.sw_mismatch_value $param.sw_gap_open_ref $param.sw_gap_open_query $param.sw_gap_ext_ref $param.sw_gap_ext_query $param.sw_hit_threshold
+ #end if#
</command>
<inputs>
<page>
diff -r facf315d9e0e -r c146726d274f tools/samtools/pileup_parser.xml
--- a/tools/samtools/pileup_parser.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/samtools/pileup_parser.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,10 +1,10 @@
<tool id="pileup_parser" name="Filter pileup">
<description>on coverage and SNPs</description>
<command interpreter="perl">
- #if $pileup_type.type_select == "six": #pileup_parser.pl $input "3" "5" "6" "4" $qv_cutoff $cvrg_cutoff $snps_only $interval "2" $out_file1
- #elif $pileup_type.type_select == "ten": #pileup_parser.pl $input "3" "9" "10" "8" $qv_cutoff $cvrg_cutoff $snps_only $interval "2" $out_file1
- #elif $pileup_type.type_select == "manual": #pileup_parser.pl $input $pileup_type.ref_base_column $pileup_type.read_bases_column $pileup_type.read_qv_column $pileup_type.cvrg_column $qv_cutoff $cvrg_cutoff $snps_only $interval $pileup_type.coord_column $out_file1
- #end if
+ #if $pileup_type.type_select == "six" #pileup_parser.pl $input "3" "5" "6" "4" $qv_cutoff $cvrg_cutoff $snps_only $interval "2" $out_file1
+ #elif $pileup_type.type_select == "ten" #pileup_parser.pl $input "3" "9" "10" "8" $qv_cutoff $cvrg_cutoff $snps_only $interval "2" $out_file1
+ #elif $pileup_type.type_select == "manual" #pileup_parser.pl $input $pileup_type.ref_base_column $pileup_type.read_bases_column $pileup_type.read_qv_column $pileup_type.cvrg_column $qv_cutoff $cvrg_cutoff $snps_only $interval $pileup_type.coord_column $out_file1
+ #end if#
</command>
<inputs>
<param name="input" type="data" format="tabular" label="Select dataset"/>
diff -r facf315d9e0e -r c146726d274f tools/sr_mapping/lastz_wrapper.xml
--- a/tools/sr_mapping/lastz_wrapper.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/sr_mapping/lastz_wrapper.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,15 +1,15 @@
<tool id="lastz_wrapper_1" name="Lastz" version="1.0.0">
<description> map short reads against reference sequence</description>
<command>
- #if ($params.source_select=="pre_set" and $seq_name.how_to_name=="No" and $out_format.value=="diffs"):#lastz $input1 ${input2}[fullnames] --${params.pre_set_options} --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="pre_set" and $seq_name.how_to_name=="Yes" and $out_format.value=="diffs"):#lastz $seq_name.ref_name::$input1 ${input2}[fullnames] --${params.pre_set_options} --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="full" and $seq_name.how_to_name=="No" and $out_format.value=="diffs"):#lastz $input1 ${input2}[fullnames] $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="full" and $seq_name.how_to_name=="Yes" and $out_format.value=="diffs"):#lastz $seq_name.ref_name::$input1 ${input2}[fullnames] $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="pre_set" and $seq_name.how_to_name=="No" and $out_format.value=="maf"):#lastz $input1 read::${input2} --${params.pre_set_options} --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="pre_set" and $seq_name.how_to_name=="Yes" and $out_format.value=="maf"):#lastz $seq_name.ref_name::$input1 read::${input2} --${params.pre_set_options} --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="full" and $seq_name.how_to_name=="No" and $out_format.value=="maf"):#lastz $input1 read::${input2} $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #elif ($params.source_select=="full" and $seq_name.how_to_name=="Yes" and $out_format.value=="maf"):#lastz $seq_name.ref_name::$input1 read::${input2} $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
- #end if
+ #if ($params.source_select=="pre_set" and $seq_name.how_to_name=="No" and $out_format.value=="diffs") #lastz $input1 ${input2}[fullnames] --${params.pre_set_options} --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="pre_set" and $seq_name.how_to_name=="Yes" and $out_format.value=="diffs") #lastz $seq_name.ref_name::$input1 ${input2}[fullnames] --${params.pre_set_options} --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="full" and $seq_name.how_to_name=="No" and $out_format.value=="diffs") #lastz $input1 ${input2}[fullnames] $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="full" and $seq_name.how_to_name=="Yes" and $out_format.value=="diffs") #lastz $seq_name.ref_name::$input1 ${input2}[fullnames] $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --nolaj --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="pre_set" and $seq_name.how_to_name=="No" and $out_format.value=="maf") #lastz $input1 read::${input2} --${params.pre_set_options} --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="pre_set" and $seq_name.how_to_name=="Yes" and $out_format.value=="maf") #lastz $seq_name.ref_name::$input1 read::${input2} --${params.pre_set_options} --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="full" and $seq_name.how_to_name=="No" and $out_format.value=="maf") #lastz $input1 read::${input2} $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #elif ($params.source_select=="full" and $seq_name.how_to_name=="Yes" and $out_format.value=="maf") #lastz $seq_name.ref_name::$input1 read::${input2} $params.strand $params.seed $params.transition O=$params.O E=$params.E X=$params.X Y=$params.Y K=$params.K L=$params.L $params.entropy --ambiguousn --identity=${min_ident}..${max_ident} --census32=$output2 --coverage=$min_cvrg --format=$out_format > $output1
+ #end if#
</command>
<inputs>
<param name="input2" format="fasta" type="data" label="Align sequencing reads" />
diff -r facf315d9e0e -r c146726d274f tools/stats/aggregate_binned_scores_in_intervals.xml
--- a/tools/stats/aggregate_binned_scores_in_intervals.xml Tue Nov 10 13:46:53 2009 -0500
+++ b/tools/stats/aggregate_binned_scores_in_intervals.xml Tue Nov 10 13:47:34 2009 -0500
@@ -1,9 +1,9 @@
<tool id="aggregate_scores_in_intervals2" description="such as phastCons, GERP, binCons, and others for a set of genomic intervals" name="Aggregate datapoints" version="1.1.2">
<description>Appends the average, min, max of datapoints per interval</description>
<command interpreter="python">
- #if $score_source_type.score_source == "user":#aggregate_scores_in_intervals.py $score_source_type.input2 $input1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $out_file1 --chrom_buffer=3
- #else:#aggregate_scores_in_intervals.py $score_source_type.datasets $input1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $out_file1 -b
- #end if
+ #if $score_source_type.score_source == "user" #aggregate_scores_in_intervals.py $score_source_type.input2 $input1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $out_file1 --chrom_buffer=3
+ #else #aggregate_scores_in_intervals.py $score_source_type.datasets $input1 ${input1.metadata.chromCol} ${input1.metadata.startCol} ${input1.metadata.endCol} $out_file1 -b
+ #end if#
</command>
<inputs>
<param format="interval" name="input1" type="data" label="Interval file">
details: http://www.bx.psu.edu/hg/galaxy/rev/d5fd27771019
changeset: 3003:d5fd27771019
user: Nate Coraor <nate(a)bx.psu.edu>
date: Tue Nov 10 15:04:03 2009 -0500
description:
Remove the "unknown extension in data factory" messages
diffstat:
lib/galaxy/datatypes/registry.py | 2 --
1 files changed, 0 insertions(+), 2 deletions(-)
diffs (12 lines):
diff -r 5a9b6c39d173 -r d5fd27771019 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Tue Nov 10 13:49:34 2009 -0500
+++ b/lib/galaxy/datatypes/registry.py Tue Nov 10 15:04:03 2009 -0500
@@ -219,8 +219,6 @@
builder = self.datatypes_by_extension[ext]
except KeyError:
builder = data.Text()
- if ext is not None:
- self.log.warning('unknown extension in data factory %s', ext)
return builder
def change_datatype(self, data, ext ):