galaxy-commits
August 2011
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/f018b1450140/
changeset: f018b1450140
user: natefoo
date: 2011-08-04 16:39:16
summary: User/group disk quotas.
affected #: 26 files (42.9 KB)
--- a/lib/galaxy/app.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/app.py Thu Aug 04 10:39:16 2011 -0400
@@ -7,6 +7,7 @@
import galaxy.model
import galaxy.datatypes.registry
import galaxy.security
+import galaxy.quota
from galaxy.tags.tag_handler import GalaxyTagHandler
from galaxy.tools.imp_exp import load_history_imp_exp_tools
from galaxy.sample_tracking import external_service_types
@@ -57,6 +58,11 @@
#Load security policy
self.security_agent = self.model.security_agent
self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions )
+ # Load quota management
+ if self.config.enable_quotas:
+ self.quota_agent = galaxy.quota.QuotaAgent( self.model )
+ else:
+ self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
# Heartbeat and memdump for thread / heap profiling
self.heartbeat = None
self.memdump = None
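
The galaxy.quota module that defines QuotaAgent and NoQuotaAgent is added by this changeset but is not included in the excerpt above. Going only by the calls made elsewhere in this diff (get_quota, get_usage, get_percent), a rough, hypothetical sketch of the interface the application wires up here might look like:

# Hypothetical sketch only -- the real lib/galaxy/quota.py is part of this changeset
# but not shown in this notification. Method names follow their call sites in the diff.
class NoQuotaAgent( object ):
    """Used when enable_quotas is False: reports usage but never enforces a limit."""
    def __init__( self, model ):
        self.model = model
    def get_quota( self, user, nice_size=False ):
        return None                                   # no quota applies
    def get_usage( self, trans=None, user=None, history=None ):
        if user:
            return user.get_disk_usage()              # User.get_disk_usage() already exists
        return trans.history.get_disk_size()          # anonymous user: current history only
    def get_percent( self, trans=None, usage=False, quota=False ):
        return None                                   # no quota, so no percentage

class QuotaAgent( NoQuotaAgent ):
    """Resolves the effective quota from user, group and default associations."""
    def get_percent( self, trans=None, usage=False, quota=False ):
        if quota is False:
            quota = self.get_quota( trans.user )
        if quota is None:
            return None
        if usage is False:
            usage = self.get_usage( trans=trans )
        return int( float( usage ) / quota * 100 )
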
--- a/lib/galaxy/config.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/config.py Thu Aug 04 10:39:16 2011 -0400
@@ -45,6 +45,7 @@
# web API
self.enable_api = string_as_bool( kwargs.get( 'enable_api', False ) )
self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
+ self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
self.len_file_path = kwargs.get( "len_file_path", resolve_path(os.path.join(self.tool_data_path, 'shared','ucsc','chrom'), self.root) )
--- a/lib/galaxy/jobs/__init__.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 04 10:39:16 2011 -0400
@@ -203,7 +203,7 @@
elif job_state == JOB_DELETED:
log.info( "job %d deleted by user while still queued" % job.id )
elif job_state == JOB_ADMIN_DELETED:
- job.info( "job %d deleted by admin while still queued" % job.id )
+ log.info( "job %d deleted by admin while still queued" % job.id )
else:
log.error( "unknown job state '%s' for job %d" % ( job_state, job.id ) )
if not self.track_jobs_in_database:
@@ -229,6 +229,15 @@
return JOB_DELETED
elif job.state == model.Job.states.ERROR:
return JOB_ADMIN_DELETED
+ elif self.app.config.enable_quotas:
+ quota = self.app.quota_agent.get_quota( job.user )
+ if quota is not None:
+ try:
+ usage = self.app.quota_agent.get_usage( user=job.user, history=job.history )
+ if usage > quota:
+ return JOB_WAIT
+ except AssertionError, e:
+ pass # No history, should not happen with an anon user
for dataset_assoc in job.input_datasets + job.input_library_datasets:
idata = dataset_assoc.dataset
if not idata:
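
The effect of the new branch above is that a job whose owner is over quota is left in the queue (JOB_WAIT) rather than failed, and is re-evaluated on the next monitor pass. Restated as a standalone helper (the helper name is ours, not part of the changeset):

def quota_permits_dispatch( app, job ):
    # Mirrors the check added in the hunk above; returning False corresponds to
    # returning JOB_WAIT, so the job simply stays queued until usage drops.
    if not app.config.enable_quotas:
        return True
    quota = app.quota_agent.get_quota( job.user )
    if quota is None:                                  # unlimited
        return True
    try:
        usage = app.quota_agent.get_usage( user=job.user, history=job.history )
    except AssertionError:
        return True                                    # no history; should not happen for an anonymous user
    return usage <= quota
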
--- a/lib/galaxy/model/__init__.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/model/__init__.py Thu Aug 04 10:39:16 2011 -0400
@@ -483,6 +483,53 @@
self.type = type
self.deleted = deleted
+class UserQuotaAssociation( object ):
+ def __init__( self, user, quota ):
+ self.user = user
+ self.quota = quota
+
+class GroupQuotaAssociation( object ):
+ def __init__( self, group, quota ):
+ self.group = group
+ self.quota = quota
+
+class Quota( object ):
+ valid_operations = ( '+', '-', '=' )
+ def __init__( self, name="", description="", amount=0, operation="=" ):
+ self.name = name
+ self.description = description
+ if amount is None:
+ self.bytes = -1
+ else:
+ self.bytes = amount
+ self.operation = operation
+ def get_amount( self ):
+ if self.bytes == -1:
+ return None
+ return self.bytes
+ def set_amount( self, amount ):
+ if amount is None:
+ self.bytes = -1
+ else:
+ self.bytes = amount
+ amount = property( get_amount, set_amount )
+ @property
+ def display_amount( self ):
+ if self.bytes == -1:
+ return "unlimited"
+ else:
+ return util.nice_size( self.bytes )
+
+class DefaultQuotaAssociation( Quota ):
+ types = Bunch(
+ UNREGISTERED = 'unregistered',
+ REGISTERED = 'registered'
+ )
+ def __init__( self, type, quota ):
+ assert type in self.types.__dict__.values(), 'Invalid type'
+ self.type = type
+ self.quota = quota
+
class DatasetPermissions( object ):
def __init__( self, action, dataset, role ):
self.action = action
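
A small illustration of how the new model objects behave (session handling omitted; some_user and some_group stand in for existing rows). The limit is stored in Quota.bytes, with -1 meaning unlimited, and is exposed through the amount property:

q = Quota( name="Registered users", description="Default 50 GB quota", amount=None, operation="=" )
q.amount                     # None -- stored internally as bytes == -1
q.display_amount             # "unlimited"
q.amount = 50 * 1024**3      # raise the limit to 50 GB
q.display_amount             # human-readable via util.nice_size(), roughly "50.0 Gb"
uqa = UserQuotaAssociation( some_user, q )        # attach the quota to a user
gqa = GroupQuotaAssociation( some_group, q )      # or to a group
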
--- a/lib/galaxy/model/mapping.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/model/mapping.py Thu Aug 04 10:39:16 2011 -0400
@@ -197,6 +197,37 @@
Column( "type", String( 40 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
+UserQuotaAssociation.table = Table( "user_quota_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+ Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+GroupQuotaAssociation.table = Table( "group_quota_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "group_id", Integer, ForeignKey( "galaxy_group.id" ), index=True ),
+ Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ) )
+
+Quota.table = Table( "quota", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "name", String( 255 ), index=True, unique=True ),
+ Column( "description", TEXT ),
+ Column( "bytes", Integer ),
+ Column( "operation", String( 8 ) ),
+ Column( "deleted", Boolean, index=True, default=False ) )
+
+DefaultQuotaAssociation.table = Table( "default_quota_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "type", String( 32 ), index=True, unique=True ),
+ Column( "quota_id", Integer, ForeignKey( "quota.id" ), index=True ) )
+
DatasetPermissions.table = Table( "dataset_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -1251,6 +1282,21 @@
)
)
+assign_mapper( context, Quota, Quota.table,
+ properties=dict( users=relation( UserQuotaAssociation ),
+ groups=relation( GroupQuotaAssociation ) ) )
+
+assign_mapper( context, UserQuotaAssociation, UserQuotaAssociation.table,
+ properties=dict( user=relation( User, backref="quotas" ),
+ quota=relation( Quota ) ) )
+
+assign_mapper( context, GroupQuotaAssociation, GroupQuotaAssociation.table,
+ properties=dict( group=relation( Group, backref="quotas" ),
+ quota=relation( Quota ) ) )
+
+assign_mapper( context, DefaultQuotaAssociation, DefaultQuotaAssociation.table,
+ properties=dict( quota=relation( Quota, backref="default" ) ) )
+
assign_mapper( context, DatasetPermissions, DatasetPermissions.table,
properties=dict(
dataset=relation( Dataset, backref="actions" ),
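
With these mappers in place the associations can be walked from either side, for example (read-only, assuming an SQLAlchemy session named sa_session):

quota = sa_session.query( Quota ).filter( Quota.table.c.name == "Registered users" ).first()
users  = [ uqa.user.email for uqa in quota.users ]    # UserQuotaAssociation -> User
groups = [ gqa.group.name for gqa in quota.groups ]   # GroupQuotaAssociation -> Group
quota.default                                         # DefaultQuotaAssociation rows, via the "default" backref
some_user.quotas                                      # UserQuotaAssociation rows, via the "quotas" backref
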
--- a/lib/galaxy/util/__init__.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/util/__init__.py Thu Aug 04 10:39:16 2011 -0400
@@ -542,6 +542,32 @@
return "%.1f %s" % (size, word)
return '??? bytes'
+def size_to_bytes( size ):
+ """
+ Returns a number of bytes if given a reasably formatted string with the size
+ """
+ # Assume input in bytes if we can convert directly to an int
+ try:
+ return int( size )
+ except:
+ pass
+ # Otherwise it must have non-numeric characters
+ size_re = re.compile( '([\d\.]+)\s*([tgmk]b?|b|bytes?)$' )
+ size_match = re.match( size_re, size.lower() )
+ assert size_match is not None
+ size = float( size_match.group(1) )
+ multiple = size_match.group(2)
+ if multiple.startswith( 't' ):
+ return int( size * 1024**4 )
+ elif multiple.startswith( 'g' ):
+ return int( size * 1024**3 )
+ elif multiple.startswith( 'm' ):
+ return int( size * 1024**2 )
+ elif multiple.startswith( 'k' ):
+ return int( size * 1024 )
+ elif multiple.startswith( 'b' ):
+ return int( size )
+
def send_mail( frm, to, subject, body, config ):
"""
Sends an email.
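
The admin quota forms below feed their "amount" field through the new size_to_bytes() helper; the return values follow from the regex and multipliers above:

size_to_bytes( '1024' )      # 1024          -- a bare integer is taken as bytes
size_to_bytes( '1.5 KB' )    # 1536          -- case-insensitive, decimals allowed
size_to_bytes( '250 GB' )    # 268435456000  -- 250 * 1024**3
size_to_bytes( 'no limit' )  # raises AssertionError; create_quota()/edit_quota() catch it,
                             # though 'unlimited', 'none' and 'no limit' are special-cased first
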
--- a/lib/galaxy/web/base/controller.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/web/base/controller.py Thu Aug 04 10:39:16 2011 -0400
@@ -1111,6 +1111,7 @@
user_list_grid = None
role_list_grid = None
group_list_grid = None
+ quota_list_grid = None
@web.expose
@web.require_admin
@@ -1518,6 +1519,481 @@
message=util.sanitize_text( message ),
status='done' ) )
+ # Galaxy Quota Stuff
+ @web.expose
+ @web.require_admin
+ def quotas( self, trans, **kwargs ):
+ if 'operation' in kwargs:
+ operation = kwargs['operation'].lower()
+ if operation == "quotas":
+ return self.quota( trans, **kwargs )
+ if operation == "create":
+ return self.create_quota( trans, **kwargs )
+ if operation == "delete":
+ return self.mark_quota_deleted( trans, **kwargs )
+ if operation == "undelete":
+ return self.undelete_quota( trans, **kwargs )
+ if operation == "purge":
+ return self.purge_quota( trans, **kwargs )
+ if operation == "change amount":
+ return self.edit_quota( trans, **kwargs )
+ if operation == "manage users and groups":
+ return self.manage_users_and_groups_for_quota( trans, **kwargs )
+ if operation == "rename":
+ return self.rename_quota( trans, **kwargs )
+ if operation == "edit":
+ return self.edit_quota( trans, **kwargs )
+ # Render the list view
+ return self.quota_list_grid( trans, **kwargs )
+
+ @web.expose
+ @web.require_admin
+ def create_quota( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ name = util.restore_text( params.get( 'name', '' ) )
+ description = util.restore_text( params.get( 'description', '' ) )
+ amount = util.restore_text( params.get( 'amount', '' ).strip() )
+ if amount.lower() in ( 'unlimited', 'none', 'no limit' ):
+ create_amount = None
+ else:
+ try:
+ create_amount = util.size_to_bytes( amount )
+ except AssertionError:
+ create_amount = False
+ operation = params.get( 'operation', '' )
+ default = params.get( 'default', 'no' )
+ in_users = util.listify( params.get( 'in_users', [] ) )
+ out_users = util.listify( params.get( 'out_users', [] ) )
+ in_groups = util.listify( params.get( 'in_groups', [] ) )
+ out_groups = util.listify( params.get( 'out_groups', [] ) )
+ if params.get( 'create_quota_button', False ):
+ if not name or not description:
+ message = "Enter a valid name and a description."
+ status = 'error'
+ elif trans.sa_session.query( trans.app.model.Quota ).filter( trans.app.model.Quota.table.c.name==name ).first():
+ message = "Quota names must be unique and a quota with that name already exists, so choose another name."
+ status = 'error'
+ elif not params.get( 'amount', None ):
+ message = "Enter a valid quota amount."
+ status = 'error'
+ elif create_amount is False:
+ message = "Unable to parse the provided amount."
+ status = 'error'
+ elif operation not in trans.app.model.Quota.valid_operations:
+ message = "Enter a valid operation."
+ status = 'error'
+ elif default != 'no' and default not in trans.app.model.DefaultQuotaAssociation.types.__dict__.values():
+ message = "Enter a valid default type."
+ status = 'error'
+ elif default != 'no' and operation != '=':
+ message = "Operation for a default quota must be '='."
+ status = 'error'
+ operation = '='
+ else:
+ # Create the quota
+ quota = trans.app.model.Quota( name=name, description=description, amount=create_amount, operation=operation )
+ trans.sa_session.add( quota )
+ # If this is a default quota, create the DefaultQuotaAssociation
+ if default != 'no':
+ trans.app.quota_agent.set_default_quota( default, quota )
+ else:
+ # Create the UserQuotaAssociations
+ for user in [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in in_users ]:
+ uqa = trans.app.model.UserQuotaAssociation( user, quota )
+ trans.sa_session.add( uqa )
+ # Create the GroupQuotaAssociations
+ for group in [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in in_groups ]:
+ gqa = trans.app.model.GroupQuotaAssociation( group, quota )
+ trans.sa_session.add( gqa )
+ trans.sa_session.flush()
+ message = "Quota '%s' has been created with %d associated users and %d associated groups." % ( quota.name, len( in_users ), len( in_groups ) )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ in_users = map( int, in_users )
+ in_groups = map( int, in_groups )
+ new_in_users = []
+ new_in_groups = []
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted==False ) \
+ .order_by( trans.app.model.User.table.c.email ):
+ if user.id in in_users:
+ new_in_users.append( ( user.id, user.email ) )
+ else:
+ out_users.append( ( user.id, user.email ) )
+ for group in trans.sa_session.query( trans.app.model.Group ) \
+ .filter( trans.app.model.Group.table.c.deleted==False ) \
+ .order_by( trans.app.model.Group.table.c.name ):
+ if group.id in in_groups:
+ new_in_groups.append( ( group.id, group.name ) )
+ else:
+ out_groups.append( ( group.id, group.name ) )
+ return trans.fill_template( '/admin/quota/quota_create.mako',
+ webapp=webapp,
+ name=name,
+ description=description,
+ amount=amount,
+ operation=operation,
+ default=default,
+ in_users=new_in_users,
+ out_users=out_users,
+ in_groups=new_in_groups,
+ out_groups=out_groups,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def rename_quota( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ error = True
+ try:
+ assert id, 'No quota ids received for renaming'
+ quota = get_quota( trans, id )
+ assert quota, 'Quota id (%s) is invalid' % id
+ assert quota.id != 1, 'The default quota cannot be renamed'
+ error = False
+ except AssertionError, e:
+ message = str( e )
+ if error:
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ if params.get( 'rename_quota_button', False ):
+ old_name = quota.name
+ new_name = util.restore_text( params.name )
+ new_description = util.restore_text( params.description )
+ if not new_name:
+ message = 'Enter a valid name'
+ status='error'
+ elif trans.sa_session.query( trans.app.model.Quota ).filter( trans.app.model.Quota.table.c.name==new_name ).first():
+ message = 'A quota with that name already exists'
+ status = 'error'
+ else:
+ quota.name = new_name
+ quota.description = new_description
+ trans.sa_session.add( quota )
+ trans.sa_session.flush()
+ message = "Quota '%s' has been renamed to '%s'" % ( old_name, new_name )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ return trans.fill_template( '/admin/quota/quota_rename.mako',
+ quota=quota,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def manage_users_and_groups_for_quota( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ error = True
+ try:
+ assert id, 'No quota ids received for managing users and groups'
+ quota = get_quota( trans, id )
+ assert quota, 'Quota id (%s) is invalid' % id
+ assert not quota.default, 'Default quotas cannot be associated with specific users and groups'
+ error = False
+ except AssertionError, e:
+ message = str( e )
+ if error:
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ if params.get( 'quota_members_edit_button', False ):
+ in_users = [ trans.sa_session.query( trans.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
+ in_groups = [ trans.sa_session.query( trans.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
+ trans.app.quota_agent.set_entity_quota_associations( quotas=[ quota ], users=in_users, groups=in_groups )
+ trans.sa_session.refresh( quota )
+ message = "Quota '%s' has been updated with %d associated users and %d associated groups" % ( quota.name, len( in_users ), len( in_groups ) )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status=status ) )
+ in_users = []
+ out_users = []
+ in_groups = []
+ out_groups = []
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted==False ) \
+ .order_by( trans.app.model.User.table.c.email ):
+ if user in [ x.user for x in quota.users ]:
+ in_users.append( ( user.id, user.email ) )
+ else:
+ out_users.append( ( user.id, user.email ) )
+ for group in trans.sa_session.query( trans.app.model.Group ) \
+ .filter( trans.app.model.Group.table.c.deleted==False ) \
+ .order_by( trans.app.model.Group.table.c.name ):
+ if group in [ x.group for x in quota.groups ]:
+ in_groups.append( ( group.id, group.name ) )
+ else:
+ out_groups.append( ( group.id, group.name ) )
+ return trans.fill_template( '/admin/quota/quota.mako',
+ quota=quota,
+ in_users=in_users,
+ out_users=out_users,
+ in_groups=in_groups,
+ out_groups=out_groups,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def edit_quota( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No quota ids received for editing"
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ quota = get_quota( trans, id )
+ if params.get( 'edit_quota_button', False ):
+ amount = util.restore_text( params.get( 'amount', '' ).strip() )
+ if amount.lower() in ( 'unlimited', 'none', 'no limit' ):
+ new_amount = None
+ else:
+ try:
+ new_amount = util.size_to_bytes( amount )
+ except AssertionError:
+ new_amount = False
+ operation = params.get( 'operation', None )
+ if not params.get( 'amount', None ):
+ message = 'Enter a valid amount'
+ status='error'
+ elif new_amount is False:
+ message = 'Unable to parse the provided amount'
+ status = 'error'
+ elif operation not in trans.app.model.Quota.valid_operations:
+ message = 'Enter a valid operation'
+ status = 'error'
+ else:
+ quota.amount = new_amount
+ quota.operation = operation
+ trans.sa_session.add( quota )
+ trans.sa_session.flush()
+ message = "Quota '%s' is now '%s'" % ( quota.name, quota.operation + quota.display_amount )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+
+ return trans.fill_template( '/admin/quota/quota_edit.mako',
+ quota=quota,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def set_quota_default( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ default = params.get( 'default', '' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No quota ids received for managing defaults"
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ quota = get_quota( trans, id )
+ if params.get( 'set_default_quota_button', False ):
+ if default != 'no' and default not in trans.app.model.DefaultQuotaAssociation.types.__dict__.values():
+ message = "Enter a valid default type."
+ status = 'error'
+ else:
+ if default != 'no':
+ trans.app.quota_agent.set_default_quota( default, quota )
+ message = "Quota '%s' is now the default for %s users" % ( quota.name, default )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ if not default:
+ default = 'no'
+ return trans.fill_template( '/admin/quota/quota_set_default.mako',
+ quota=quota,
+ webapp=webapp,
+ default=default,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
+ def unset_quota_default( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ default = params.get( 'default', '' )
+ id = params.get( 'id', None )
+ if not id:
+ message = "No quota ids received for managing defaults"
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ quota = get_quota( trans, id )
+ if not quota.default:
+ message = "Quota '%s' is not a default." % quota.name
+ status = 'error'
+ else:
+ message = "Quota '%s' is no longer the default for %s users." % ( quota.name, quota.default[0].type )
+ status = 'done'
+ for dqa in quota.default:
+ trans.sa_session.delete( dqa )
+ trans.sa_session.flush()
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status=status ) )
+
+ @web.expose
+ @web.require_admin
+ def mark_quota_deleted( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ ids = util.listify( id )
+ error = True
+ quotas = []
+ try:
+ assert id, 'No quota ids received for deleting'
+ for quota_id in ids:
+ quota = get_quota( trans, quota_id )
+ assert quota, 'Quota id (%s) is invalid' % id
+ assert not quota.default, "Quota '%s' is a default, please unset it as a default before deleting it" % ( quota.name )
+ quotas.append( quota )
+ error = False
+ except AssertionError, e:
+ message = str( e )
+ if error:
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ message = "Deleted %d quotas: " % len( ids )
+ for quota in quotas:
+ quota.deleted = True
+ trans.sa_session.add( quota )
+ message += " %s " % quota.name
+ trans.sa_session.flush()
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def undelete_quota( self, trans, **kwd ):
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ ids = util.listify( id )
+ error = True
+ quotas = []
+ try:
+ assert id, 'No quota ids received for undeleting'
+ for quota_id in ids:
+ quota = get_quota( trans, quota_id )
+ assert quota, 'Quota id (%s) is invalid' % id
+ assert quota.deleted, "Quota '%s' has not been deleted, so it cannot be undeleted." % quota.name
+ quotas.append( quota )
+ error = False
+ except AssertionError, e:
+ message = str( e )
+ if error:
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ message = "Undeleted %d quotas: " % len( ids )
+ for quota in quotas:
+ quota.deleted = False
+ trans.sa_session.add( quota )
+ trans.sa_session.flush()
+ message += " %s " % quota.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
+ @web.expose
+ @web.require_admin
+ def purge_quota( self, trans, **kwd ):
+ # This method should only be called for a Quota that has previously been deleted.
+ # Purging a deleted Quota deletes all of the following from the database:
+ # - UserQuotaAssociations where quota_id == Quota.id
+ # - GroupQuotaAssociations where quota_id == Quota.id
+ params = util.Params( kwd )
+ webapp = params.get( 'webapp', 'galaxy' )
+ id = kwd.get( 'id', None )
+ ids = util.listify( id )
+ error = True
+ quotas = []
+ try:
+ assert id, 'No quota ids received for undeleting'
+ for quota_id in ids:
+ quota = get_quota( trans, quota_id )
+ assert quota, 'Quota id (%s) is invalid' % id
+ assert quota.deleted, "Quota '%s' has not been deleted, so it cannot be purged." % quota.name
+ quotas.append( quota )
+ error = False
+ except AssertionError, e:
+ message = str( e )
+ if error:
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=message,
+ status='error' ) )
+ message = "Purged %d quotas: " % len( ids )
+ for quota in quotas:
+ # Delete UserQuotaAssociations
+ for uqa in quota.users:
+ trans.sa_session.delete( uqa )
+ # Delete GroupQuotaAssociations
+ for gqa in quota.groups:
+ trans.sa_session.delete( gqa )
+ trans.sa_session.flush()
+ message += " %s " % quota.name
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action='quotas',
+ webapp=webapp,
+ message=util.sanitize_text( message ),
+ status='done' ) )
# Galaxy Group Stuff
@web.expose
@web.require_admin
@@ -2235,3 +2711,9 @@
if not group:
return trans.show_error_message( "Group not found for id (%s)" % str( id ) )
return group
+def get_quota( trans, id ):
+ """Get a Quota from the database by id."""
+ # Load user from database
+ id = trans.security.decode_id( id )
+ quota = trans.sa_session.query( trans.model.Quota ).get( id )
+ return quota
--- a/lib/galaxy/web/controllers/admin.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/web/controllers/admin.py Thu Aug 04 10:39:16 2011 -0400
@@ -268,8 +268,121 @@
preserve_state = False
use_paging = True
+class QuotaListGrid( grids.Grid ):
+ class NameColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, quota ):
+ return quota.name
+ class DescriptionColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, quota ):
+ if quota.description:
+ return quota.description
+ return ''
+ class AmountColumn( grids.TextColumn ):
+ def get_value( self, trans, grid, quota ):
+ return quota.operation + quota.display_amount
+ class StatusColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, quota ):
+ if quota.deleted:
+ return "deleted"
+ elif quota.default:
+ return "<strong>default for %s users</strong>" % quota.default[0].type
+ return ""
+ class UsersColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, quota ):
+ if quota.users:
+ return len( quota.users )
+ return 0
+ class GroupsColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, quota ):
+ if quota.groups:
+ return len( quota.groups )
+ return 0
+
+ # Grid definition
+ webapp = "galaxy"
+ title = "Quotas"
+ model_class = model.Quota
+ template='/admin/quota/grid.mako'
+ default_sort_key = "name"
+ columns = [
+ NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="Manage users and groups", id=item.id, webapp="galaxy" ) if not item.default else dict( operation="Change amount", id=item.id, webapp="galaxy" ) ),
+ model_class=model.Quota,
+ attach_popup=True,
+ filterable="advanced" ),
+ DescriptionColumn( "Description",
+ key='description',
+ model_class=model.Quota,
+ attach_popup=False,
+ filterable="advanced" ),
+ AmountColumn( "Amount",
+ key='amount',
+ model_class=model.Quota,
+ attach_popup=False,
+ filterable="advanced" ),
+ UsersColumn( "Users", attach_popup=False ),
+ GroupsColumn( "Groups", attach_popup=False ),
+ StatusColumn( "Status", attach_popup=False ),
+ # Columns that are valid for filtering but are not visible.
+ grids.DeletedColumn( "Deleted", key="deleted", visible=False, filterable="advanced" )
+ ]
+ columns.append( grids.MulticolFilterColumn( "Search",
+ cols_to_filter=[ columns[0], columns[1], columns[2] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
+ global_actions = [
+ grids.GridAction( "Add new quota", dict( controller='admin', action='quotas', operation='create' ) )
+ ]
+ operations = [ grids.GridOperation( "Rename",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="rename_quota" ) ),
+ grids.GridOperation( "Change amount",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="edit_quota" ) ),
+ grids.GridOperation( "Manage users and groups",
+ condition=( lambda item: not item.default ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="manage_users_and_groups_for_quota" ) ),
+ grids.GridOperation( "Set as different type of default",
+ condition=( lambda item: item.default ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="set_quota_default" ) ),
+ grids.GridOperation( "Set as default",
+ condition=( lambda item: not item.default ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="set_quota_default" ) ),
+ grids.GridOperation( "Unset as default",
+ condition=( lambda item: item.default ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="unset_quota_default" ) ),
+ grids.GridOperation( "Delete",
+ condition=( lambda item: not item.deleted and not item.default ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="mark_quota_deleted" ) ),
+ grids.GridOperation( "Undelete",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="undelete_quota" ) ),
+ grids.GridOperation( "Purge",
+ condition=( lambda item: item.deleted ),
+ allow_multiple=True,
+ url_args=dict( webapp="galaxy", action="purge_quota" ) ) ]
+ standard_filters = [
+ grids.GridColumnFilter( "Active", args=dict( deleted=False ) ),
+ grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
+ grids.GridColumnFilter( "All", args=dict( deleted='All' ) )
+ ]
+ num_rows_per_page = 50
+ preserve_state = False
+ use_paging = True
+
class AdminGalaxy( BaseController, Admin ):
user_list_grid = UserListGrid()
role_list_grid = RoleListGrid()
group_list_grid = GroupListGrid()
+ quota_list_grid = QuotaListGrid()
--- a/lib/galaxy/web/controllers/dataset.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/web/controllers/dataset.py Thu Aug 04 10:39:16 2011 -0400
@@ -936,7 +936,7 @@
assert topmost_parent in history.datasets, "Data does not belong to current history"
# If the user is anonymous, make sure the HDA is owned by the current session.
if not user:
- assert trans.galaxy_session.id in [ s.id for s in hda.history.galaxy_sessions ], 'Invalid history dataset ID'
+ assert trans.galaxy_session.current_history_id == trans.history.id, 'Invalid history dataset ID'
# If the user is known, make sure the HDA is owned by the current user.
else:
assert topmost_parent.history.user == trans.user, 'Invalid history dataset ID'
--- a/lib/galaxy/web/controllers/root.py Thu Aug 04 16:55:13 2011 +1000
+++ b/lib/galaxy/web/controllers/root.py Thu Aug 04 10:39:16 2011 -0400
@@ -126,6 +126,7 @@
hda_id = hda_id,
show_deleted = show_deleted,
show_hidden=show_hidden,
+ over_quota=trans.app.quota_agent.get_percent( trans=trans ) >= 100,
message=message,
status=status )
@@ -192,7 +193,25 @@
@web.json
def history_get_disk_size( self, trans ):
- return trans.history.get_disk_size( nice_size=True )
+ rval = { 'history' : trans.history.get_disk_size( nice_size=True ) }
+ for k, v in self.__user_get_usage( trans ).items():
+ rval['global_' + k] = v
+ return rval
+
+ @web.json
+ def user_get_usage( self, trans ):
+ return self.__user_get_usage( trans )
+
+ def __user_get_usage( self, trans ):
+ usage = trans.app.quota_agent.get_usage( trans )
+ percent = trans.app.quota_agent.get_percent( trans=trans, usage=usage )
+ rval = {}
+ if percent is None:
+ rval['usage'] = util.nice_size( usage )
+ else:
+ rval['percent'] = percent
+ return rval
+
## ---- Dataset display / editing ----------------------------------------
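
For reference, the JSON returned by the two endpoints above takes one of these shapes (values are illustrative; the "global_*" keys are what templates/root/history.mako consumes further down):

# history_get_disk_size() -> { 'history': '1.1 Gb', 'global_percent': 37 }      # a quota applies
#                         or { 'history': '1.1 Gb', 'global_usage': '1.1 Gb' }  # no quota applies
# user_get_usage()        -> { 'percent': 37 }  or  { 'usage': '1.1 Gb' }
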
--- a/static/june_2007_style/blue/panel_layout.css Thu Aug 04 16:55:13 2011 +1000
+++ b/static/june_2007_style/blue/panel_layout.css Thu Aug 04 10:39:16 2011 -0400
@@ -36,6 +36,12 @@
#masthead a{color:#eeeeee;text-decoration:none;}
#masthead .title{font-family:verdana;padding:3px 10px;font-size:175%;font-weight:bold;z-index:-1;}
#masthead a:hover{text-decoration:underline;}
+.quota-meter-container{position:absolute;top:0;right:0;height:32px;}
+.quota-meter{position:absolute;top:8px;right:8px;height:16px;width:100px;background-color:#C1C9E5;;}
+.quota-meter-bar{position:absolute;top:0;left:0;height:16px;background-color:#969DB3;;}
+.quota-meter-bar-warn{background-color:#FFB400;;}
+.quota-meter-bar-error{background-color:#FF4343;;}
+.quota-meter-text{position:absolute;top:50%;left:0;width:100px;height:16px;margin-top:-6px;text-align:center;z-index:9001;color:#000;;}
.tab-group{margin:0;padding:0 10px;height:100%;white-space:nowrap;cursor:default;background:transparent;}
.tab-group .tab{background:#2C3143;position:relative;float:left;margin:0;padding:0 1em;height:32px;line-height:32px;text-align:left;}
.tab-group .tab .submenu{display:none;position:absolute;z-index:16000;left:0;top:32px;padding:1em;margin:-1em;padding-top:0;margin-top:0;background-color:rgba(0,0,0,0.5);-moz-border-radius:0 0 1em 1em;-webkit-border-bottom-right-radius:1em;-webkit-border-bottom-left-radius:1em;}
--- a/static/june_2007_style/blue_colors.ini Thu Aug 04 16:55:13 2011 +1000
+++ b/static/june_2007_style/blue_colors.ini Thu Aug 04 10:39:16 2011 -0400
@@ -59,6 +59,12 @@
masthead_bg_hatch=-
masthead_link=#eeeeee
masthead_active_tab_bg=#222532
+# Quota meter
+quota_meter_bg=#C1C9E5;
+quota_meter_bar=#969DB3;
+quota_meter_warn_bar=#FFB400;
+quota_meter_error_bar=#FF4343;
+quota_meter_text=#000;
# ---- Layout -----------------------------------------------------------------
# Overall background color (including space between panels)
layout_bg=#eee
--- a/static/june_2007_style/masthead.css.tmpl Thu Aug 04 16:55:13 2011 +1000
+++ b/static/june_2007_style/masthead.css.tmpl Thu Aug 04 10:39:16 2011 -0400
@@ -59,4 +59,4 @@
margin-left: -3px; margin-right: -3px;
padding-bottom: 10px;
margin-bottom: -10px;
-}
\ No newline at end of file
+}
--- a/static/june_2007_style/panel_layout.css.tmpl Thu Aug 04 16:55:13 2011 +1000
+++ b/static/june_2007_style/panel_layout.css.tmpl Thu Aug 04 10:39:16 2011 -0400
@@ -259,6 +259,56 @@
text-decoration: underline;
}
+.quota-meter-container
+{
+ position: absolute;
+ top: 0;
+ right: 0;
+ height: 32px;
+}
+
+.quota-meter
+{
+ position: absolute;
+ top: 8px;
+ right: 8px;
+ height: 16px;
+ width: 100px;
+ background-color: $quota_meter_bg;
+}
+
+.quota-meter-bar
+{
+ position: absolute;
+ top: 0;
+ left: 0;
+ height: 16px;
+ background-color: $quota_meter_bar;
+}
+
+.quota-meter-bar-warn
+{
+ background-color: $quota_meter_warn_bar;
+}
+
+.quota-meter-bar-error
+{
+ background-color: $quota_meter_error_bar;
+}
+
+.quota-meter-text
+{
+ position: absolute;
+ top: 50%;
+ left: 0;
+ width: 100px;
+ height: 16px;
+ margin-top: -6px;
+ text-align: center;
+ z-index: 9001;
+ color: $quota_meter_text;
+}
+
## Tabs
.tab-group {
--- a/templates/root/history.mako Thu Aug 04 16:55:13 2011 +1000
+++ b/templates/root/history.mako Thu Aug 04 10:39:16 2011 -0400
@@ -272,8 +272,41 @@
%if hda_id:
self.location = "#${hda_id}";
%endif
+
+ // Update the Quota Meter
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='root', action='user_get_usage' )}",
+ dataType: "json",
+ success : function ( data ) {
+ $.each( data, function( type, val ) {
+ quota_meter_updater( type, val );
+ });
+ }
+ });
});
+// Updates the Quota Meter
+var quota_meter_updater = function ( type, val ) {
+ if ( type == "usage" ) {
+ $("#quota-meter-bar", window.top.document).css( "width", "0" );
+ $("#quota-meter-text", window.top.document).text( "Using " + val );
+ } else if ( type == "percent" ) {
+ $("#quota-meter-bar", window.top.document).removeClass("quota-meter-bar-warn quota-meter-bar-error");
+ if ( val >= 100 ) {
+ $("#quota-meter-bar", window.top.document).addClass("quota-meter-bar-error");
+ $("#quota-message-container").slideDown();
+ } else if ( val >= 85 ) {
+ $("#quota-meter-bar", window.top.document).addClass("quota-meter-bar-warn");
+ $("#quota-message-container").slideUp();
+ } else {
+ $("#quota-message-container").slideUp();
+ }
+ $("#quota-meter-bar", window.top.document).css( "width", val + "px" );
+ $("#quota-meter-text", window.top.document).text( "Using " + val + "%" );
+ }
+}
+
// Looks for changes in dataset state using an async request. Keeps
// calling itself (via setTimeout) until all datasets are in a terminal
// state.
@@ -334,7 +367,15 @@
url: "${h.url_for( controller='root', action='history_get_disk_size' )}",
dataType: "json",
success: function( data ) {
- $("#history-size").text( data );
+ $.each( data, function( type, val ) {
+ if ( type == "history" ) {
+ $("#history-size").text( val );
+ } else if ( type == "global_usage" ) {
+ quota_meter_updater( "usage", val );
+ } else if ( type == "global_percent" ) {
+ quota_meter_updater( "percent", val );
+ }
+ });
}
});
check_history_size = false;
@@ -471,6 +512,17 @@
%endif
</div>
+%if over_quota:
+<div id="quota-message-container">
+%else:
+<div id="quota-message-container" style="display: none;">
+%endif
+ <div id="quota-message" class="errormessage">
+ You are over your disk quota. Tool execution is on hold until your disk usage drops below your allocated quota.
+ </div>
+ <br/>
+</div>
+
%if not datasets:
<div class="infomessagesmall" id="emptyHistoryMessage">
--- a/templates/user/index.mako Thu Aug 04 16:55:13 2011 +1000
+++ b/templates/user/index.mako Thu Aug 04 10:39:16 2011 -0400
@@ -22,7 +22,12 @@
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, webapp='community' )}">${_('Manage your information')}</a></li>
%endif
</ul>
- <p>You are currently using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.</p>
+ <p>
+ You are using <strong>${trans.user.get_disk_usage( nice_size=True )}</strong> of disk space in this Galaxy instance.
+ %if trans.app.config.enable_quotas:
+ Your disk quota is: <strong>${trans.app.quota_agent.get_quota( trans.user, nice_size=True )}</strong>.
+ %endif
+ </p>
%else:
%if not message:
<p>${n_('You are currently not logged in.')}</p>
--- a/templates/webapps/galaxy/admin/index.mako Thu Aug 04 16:55:13 2011 +1000
+++ b/templates/webapps/galaxy/admin/index.mako Thu Aug 04 10:39:16 2011 -0400
@@ -55,6 +55,7 @@
</div>
<div class="toolSectionBody">
<div class="toolSectionBg">
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='quotas', webapp=webapp )}" target="galaxy_main">Manage quotas</a></div>
<div class="toolTitle"><a href="${h.url_for( controller='library_admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
</div>
</div>
--- a/templates/webapps/galaxy/base_panels.mako Thu Aug 04 16:55:13 2011 +1000
+++ b/templates/webapps/galaxy/base_panels.mako Thu Aug 04 10:39:16 2011 -0400
@@ -3,6 +3,11 @@
## Default title
<%def name="title()">Galaxy</%def>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js( "jquery.tipsy" )}
+</%def>
+
## Masthead
<%def name="masthead()">
@@ -172,5 +177,45 @@
%endif
</a></div>
+
+ ## Quota meter
+ <%
+ bar_style = "quota-meter-bar"
+ usage = 0
+ percent = 0
+ quota = None
+ try:
+ usage = trans.app.quota_agent.get_usage( trans=trans )
+ quota = trans.app.quota_agent.get_quota( trans.user )
+ percent = trans.app.quota_agent.get_percent( usage=usage, quota=quota )
+ if percent is not None:
+ if percent >= 100:
+ bar_style += " quota-meter-bar-error"
+ elif percent >= 85:
+ bar_style += " quota-meter-bar-warn"
+ else:
+ percent = 0
+ except AssertionError:
+ pass # Probably no history yet
+ tooltip = None
+ if not trans.user and quota and trans.app.config.allow_user_creation:
+ if trans.app.quota_agent.default_registered_quota is None or trans.app.quota_agent.default_unregistered_quota < trans.app.quota_agent.default_registered_quota:
+ tooltip = "Your disk quota is %s. You can increase your quota by registering a Galaxy account." % util.nice_size( quota )
+ %>
+
+ <div class="quota-meter-container">
+ %if tooltip:
+ <div id="quota-meter" class="quota-meter tooltip" title="${tooltip}">
+ %else:
+ <div id="quota-meter" class="quota-meter">
+ %endif
+ <div id="quota-meter-bar" class="${bar_style}" style="width: ${percent}px;"></div>
+ %if quota is not None:
+ <div id="quota-meter-text" class="quota-meter-text">Using ${percent}%</div>
+ %else:
+ <div id="quota-meter-text" class="quota-meter-text">Using ${util.nice_size( usage )}</div>
+ %endif
+ </div>
+ </div></%def>
--- a/universe_wsgi.ini.sample Thu Aug 04 16:55:13 2011 +1000
+++ b/universe_wsgi.ini.sample Thu Aug 04 10:39:16 2011 -0400
@@ -454,6 +454,9 @@
# users in the help text.
#ftp_upload_site = None
+# Enable enforcement of quotas. Quotas can be set from the Admin interface.
+#enable_quotas = False
+
# -- Job Execution
# If running multiple Galaxy processes, one can be designated as the job
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/8cb402826d49/
changeset: 8cb402826d49
user: fubar
date: 2011-08-04 08:50:22
summary: sam_merge.xml tweaks: changed call to MergeSamFiles to use minimal command line flags and finally got LENIENT validation stringency to actually work
affected #: 3 files (9 bytes)
--- a/test-data/sam_merge_out1.log Thu Aug 04 12:43:30 2011 +1000
+++ b/test-data/sam_merge_out1.log Thu Aug 04 16:50:22 2011 +1000
@@ -1,5 +1,5 @@
-[Thu Aug 04 12:17:33 EST 2011] net.sf.picard.sam.MergeSamFiles INPUT=[/data/tmp/tmp6eiFcc/database/files/000/dataset_1.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_2.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_2.dat] OUTPUT=/data/tmp/tmp6eiFcc/database/files/000/dataset_3.dat MERGE_SEQUENCE_DICTIONARIES=true SORT_ORDER=coordinate ASSUME_SORTED=false USE_THREADING=false TMP_DIR=/tmp/rlazarus VERBOSITY=INFO QUIET=false VALIDATION_STRINGENCY=STRICT COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
-INFO 2011-08-04 12:17:33 MergeSamFiles Sorting input files using temp directory /tmp/rlazarus
-INFO 2011-08-04 12:17:33 MergeSamFiles Finished reading inputs.
-[Thu Aug 04 12:17:33 EST 2011] net.sf.picard.sam.MergeSamFiles done.
+[Thu Aug 04 13:48:49 EST 2011] net.sf.picard.sam.MergeSamFiles INPUT=[/data/tmp/tmpjnE9mC/database/files/000/dataset_1.dat, /data/tmp/tmpjnE9mC/database/files/000/dataset_2.dat, /data/tmp/tmpjnE9mC/database/files/000/dataset_2.dat] OUTPUT=/data/tmp/tmpjnE9mC/database/files/000/dataset_3.dat MERGE_SEQUENCE_DICTIONARIES=true VALIDATION_STRINGENCY=LENIENT SORT_ORDER=coordinate ASSUME_SORTED=false USE_THREADING=false TMP_DIR=/tmp/rlazarus VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
+INFO 2011-08-04 13:48:49 MergeSamFiles Sorting input files using temp directory /tmp/rlazarus
+INFO 2011-08-04 13:48:49 MergeSamFiles Finished reading inputs.
+[Thu Aug 04 13:48:49 EST 2011] net.sf.picard.sam.MergeSamFiles done.
Runtime.totalMemory()=2028732416
--- a/test-data/sam_merge_out2.log Thu Aug 04 12:43:30 2011 +1000
+++ b/test-data/sam_merge_out2.log Thu Aug 04 16:50:22 2011 +1000
@@ -1,5 +1,5 @@
-[Thu Aug 04 12:18:15 EST 2011] net.sf.picard.sam.MergeSamFiles INPUT=[/data/tmp/tmp6eiFcc/database/files/000/dataset_5.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_6.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_7.dat] OUTPUT=/data/tmp/tmp6eiFcc/database/files/000/dataset_8.dat MERGE_SEQUENCE_DICTIONARIES=true SORT_ORDER=coordinate ASSUME_SORTED=false USE_THREADING=false TMP_DIR=/tmp/rlazarus VERBOSITY=INFO QUIET=false VALIDATION_STRINGENCY=STRICT COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
-INFO 2011-08-04 12:18:15 MergeSamFiles Sorting input files using temp directory /tmp/rlazarus
-INFO 2011-08-04 12:18:15 MergeSamFiles Finished reading inputs.
-[Thu Aug 04 12:18:15 EST 2011] net.sf.picard.sam.MergeSamFiles done.
+[Thu Aug 04 13:49:53 EST 2011] net.sf.picard.sam.MergeSamFiles INPUT=[/data/tmp/tmpjnE9mC/database/files/000/dataset_5.dat, /data/tmp/tmpjnE9mC/database/files/000/dataset_6.dat, /data/tmp/tmpjnE9mC/database/files/000/dataset_7.dat] OUTPUT=/data/tmp/tmpjnE9mC/database/files/000/dataset_8.dat MERGE_SEQUENCE_DICTIONARIES=true VALIDATION_STRINGENCY=LENIENT SORT_ORDER=coordinate ASSUME_SORTED=false USE_THREADING=false TMP_DIR=/tmp/rlazarus VERBOSITY=INFO QUIET=false COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
+INFO 2011-08-04 13:49:54 MergeSamFiles Sorting input files using temp directory /tmp/rlazarus
+INFO 2011-08-04 13:49:54 MergeSamFiles Finished reading inputs.
+[Thu Aug 04 13:49:54 EST 2011] net.sf.picard.sam.MergeSamFiles done.
Runtime.totalMemory()=2028732416
--- a/tools/samtools/sam_merge.xml Thu Aug 04 12:43:30 2011 +1000
+++ b/tools/samtools/sam_merge.xml Thu Aug 04 16:50:22 2011 +1000
@@ -4,9 +4,9 @@
<requirement type="package">picard</requirement></requirements><command>
- java -jar ${GALAXY_DATA_INDEX_DIR}/shared/jars/MergeSamFiles.jar MERGE_SEQUENCE_DICTIONARIES=$mergeSD OUTPUT=$output1 INPUT=$input1 INPUT=$input2
+java -Xmx2G -jar ${GALAXY_DATA_INDEX_DIR}/shared/jars/MergeSamFiles.jar MSD=$mergeSD VALIDATION_STRINGENCY=LENIENT O=$output1 I=$input1 I=$input2
#for $i in $inputs
- INPUT=${i.input}
+ I=${i.input}
#end for
2> $outlog
</command>
http://bitbucket.org/galaxy/galaxy-central/changeset/95e1527499e0/
changeset: 95e1527499e0
user: fubar
date: 2011-08-04 08:55:13
summary: Increase lines_diff so sam_merge tests might pass on buildbot
affected #: 1 file (2 bytes)
--- a/tools/samtools/sam_merge.xml Thu Aug 04 16:50:22 2011 +1000
+++ b/tools/samtools/sam_merge.xml Thu Aug 04 16:55:13 2011 +1000
@@ -36,7 +36,7 @@
<param name="input1" value="sam_merge_in1.bam" ftype="bam" /><param name="input2" value="sam_merge_in2.bam" ftype="bam" /><output name="output1" file="sam_merge_out1.bam" ftype="bam" />
- <output name="outlog" file="sam_merge_out1.log" ftype="txt" lines_diff="8"/>
+ <output name="outlog" file="sam_merge_out1.log" ftype="txt" lines_diff="10"/></test><test><param name="title" value="test2" />
@@ -45,7 +45,7 @@
<param name="input2" value="sam_merge_in2.bam" ftype="bam" /><param name="input" value="sam_merge_in3.bam" ftype="bam" /><output name="output1" file="sam_merge_out2.bam" ftype="bam" />
- <output name="outlog" file="sam_merge_out2.log" ftype="txt" lines_diff="8"/>
+ <output name="outlog" file="sam_merge_out2.log" ftype="txt" lines_diff="10"/></test></tests><help>
2 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/3479cddf6bfa/
changeset: 3479cddf6bfa
user: fubar
date: 2011-08-04 02:57:03
summary: fixes for rgManQQ plots to create a compressed PDF using GS as the downloadable artifact - much more useful for publication
affected #: 5 files (3.6 KB)
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Wed Aug 03 13:54:28 2011 -0400
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Thu Aug 04 10:57:03 2011 +1000
@@ -13,8 +13,10 @@
<h1>rgManQQtest1</h1>
<table>
-<tr><td><a href="Allelep_manhattan.png"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png hspace="10" width="400"><br>(Click to download image Allelep_manhattan.png)</a></td></tr>
-<tr><td><a href="Allelep_qqplot.png"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png hspace="10" width="400"><br>(Click to download image Allelep_qqplot.png)</a></td></tr>
+<tr><td><a href="Allelep_manhattan.pdf">Allelep_manhattan.pdf</a></td></tr>
+<tr><td><a href="Allelep_manhattan.pdf"><img src="Allelep_manhattan.png" title="Allelep_manhattan.png" hspace="10" width="800"></a></td></tr>
+<tr><td><a href="Allelep_qqplot.pdf">Allelep_qqplot.pdf</a></td></tr>
+<tr><td><a href="Allelep_qqplot.pdf"><img src="Allelep_qqplot.png" title="Allelep_qqplot.png" hspace="10" width="800"></a></td></tr>
<tr><td><a href="rgManQQtest1.R">rgManQQtest1.R</a></td></tr>
<tr><td><a href="rgManQQtest1.R.log">rgManQQtest1.R.log</a></td></tr>
</table>
@@ -43,7 +45,7 @@
Loading required package: proto
-[1] "### 101 values read from /data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpB3Kfsc/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
@@ -53,7 +55,6 @@
## R script=
-# license not stated so I'm assuming LGPL is ok for my derived work?
# generalised so 3 core fields passed as parameters ross lazarus March 24 2010 for rgenetics
# Originally created as qqman with the following
# attribution:
@@ -79,77 +80,96 @@
size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
+ n = length(pvals)
if (genomewide) { # use bonferroni since might be only a small region?
- genomewideline = -log10(0.05/length(pvals)) }
+ genomewideline = -log10(0.05/n) }
offset = as.integer(offset)
+ if (n > 1000000) { offset = offset/10000 }
+ else if (n > 10000) { offset = offset/1000}
+ chro = as.integer(chrom) # already dealt with X and friends?
pvals = as.double(pvals)
- chro = as.integer(chrom) # already dealt with X and friends?
d=data.frame(CHR=chro,BP=offset,P=pvals)
- #limit to only chrs 1-22, x=23,y=24,Mt=25?
- d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- #d=na.omit(d)
+ d=d[!is.na(d$P), ]
+ d=d[!is.na(d$BP), ]
+ d=d[!is.na(d$CHR), ]
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
d=d[d$P>0 & d$P<=1, ]
d$logp = as.double(-log10(d$P))
+ dlen = length(d$P)
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = as.integer(chrlist)
chrlist = sort(chrlist) # returns lexical ordering
+ if (max.y=="max") { maxy = ceiling(max(d$logp)) }
+ else { maxy = max.y }
nchr = length(chrlist) # may be any number?
+ maxy = max(maxy,1.1*genomewideline)
if (nchr >= 2) {
- for (x in c(1:nchr)) {
+ for (x in c(1:nchr)) {
i = chrlist[x] # need the chrom number - may not == index
if (x == 1) { # first time
- d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP
- tks = d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]
- } else {
- lastchr = chrlist[x-1] # previous whatever the list
- lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
- d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
- if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
- cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
- }
- tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
-
+ d[d$CHR==i, ]$pos = d[d$CHR==i, ]$BP # initialize to first BP of chr1
+ dsub = subset(d,CHR==i)
+ dlen = length(dsub$P)
+ lastbase = max(dsub$pos) # last one
+ tks = d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]
+ lastchr = i
+ } else {
+ d[d$CHR==i, ]$pos = d[d$CHR==i, ]$BP+lastbase # one humongous contig
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
+ tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+ lastchr = i
+ dsub = subset(d,CHR==i)
+ lastbase = max(dsub$pos) # last one
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
}
} else { # nchr is 1
nticks = 10
- last = max(offset)
- first = min(offset)
- tks = c()
+ last = max(d$BP)
+ first = min(d$BP)
+ tks = c(first)
t = (last-first)/nticks # units per tick
- for (x in c(1:nticks)) tks = c(tks,round(x*t))
- xlabs = tks
+ for (x in c(1:(nticks))) {
+ tks = c(tks,round(x*t)+first) }
ticklim = c(first,last)
} # else
if (grey) {mycols=rep(c("gray10","gray60"),max(d$CHR))
} else {
mycols=rep(coloursTouse,max(d$CHR))
}
-
- if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
- maxy = max(maxy,1.1*genomewideline)
+ dlen = length(d$P)
+ d$pranks = rank(d$P)/dlen
+ d$centiles = 100*d$pranks # small are interesting
+ d$sizes = ifelse((d$centile < 1),2,1)
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
- manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
- manplot=manplot+scale_x_continuous(name="Chromosome", breaks=tks, labels=xlabs) }
+ manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR),size=factor(sizes))
+ manplot=manplot+scale_x_continuous(name="Chromosome", breaks=tks, labels=xlabs,limits=ticklim)
+ manplot=manplot+scale_size_manual(values = c(0.5,1.5)) # requires discreet scale - eg factor
+ #manplot=manplot+scale_size(values=c(0.5,2)) # requires continuous
+ }
else {
manplot=qplot(BP,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
- manplot=manplot+scale_x_continuous("BP") }
+ manplot=manplot+scale_x_continuous(name=paste("Chromosome",chrlist[1]), breaks=tks, labels=tks,limits=ticklim)
+ }
manplot=manplot+scale_y_continuous(limits=c(0,maxy), breaks=1:maxy, labels=1:maxy)
manplot=manplot+scale_colour_manual(value=mycols)
if (annotate) { manplot=manplot + geom_point(data=d.annotate, colour=I("green3")) }
manplot=manplot + opts(legend.position = "none")
manplot=manplot + opts(title=title)
manplot=manplot+opts(
- panel.background=theme_blank(),
- axis.text.x=theme_text(size=size.x.labels, colour="grey50"),
- axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
- axis.ticks=theme_segment(colour=NA)
+ panel.background=theme_blank(),
+ axis.text.x=theme_text(size=size.x.labels, colour="grey50"),
+ axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
+ axis.ticks=theme_segment(colour=NA)
)
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
@@ -178,7 +198,7 @@
qq
}
-rgqqMan = function(infile="/data/tmp/tmpM8NZ50/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
+rgqqMan = function(infile="/data/tmp/tmpB3Kfsc/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
@@ -203,13 +223,15 @@
mytitle = paste('p=',cname,', ',title,sep='')
myfname = chartr(' ','_',cname)
myqqplot = qq(rawd[,pvalscolumn],title=mytitle)
- ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
+ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=8,height=6,dpi=96)
+ ggsave(filename=paste(myfname,"qqplot.pdf",sep='_'),myqqplot,width=8,height=6,dpi=96)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=8,height=6,dpi=96)
+ ggsave(filename=paste(myfname,"manhattan.pdf",sep='_'),mymanplot,width=8,height=6,dpi=96)
print(paste('## manhattan plot on',cname,'done'))
- ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
else {
print(paste('chrom column =',chromcolumn,'offset column = ',offsetcolumn,
@@ -228,6 +250,6 @@
</pre>
-<b><a href="http://rgenetics.org">Galaxy Rgenetics</a> tool output rgManQQ.py run at 20/07/2011 13:29:43</b><br/>
+<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 04/08/2011 10:51:34</h3></div></body></html>
Binary file test-data/sam_merge_out2.bam has changed
--- a/tool_conf.xml.sample Wed Aug 03 13:54:28 2011 -0400
+++ b/tool_conf.xml.sample Thu Aug 04 10:57:03 2011 +1000
@@ -343,7 +343,7 @@
<tool file="samtools/sam2interval.xml" /><tool file="samtools/sam_to_bam.xml" /><tool file="samtools/bam_to_sam.xml" />
- <tool file="samtools/sam_merge.xml" />
+ <tool file="picard/sam_merge.xml" /><tool file="samtools/sam_pileup.xml" /><tool file="samtools/pileup_parser.xml" /><tool file="samtools/pileup_interval.xml" />
--- a/tools/rgenetics/rgManQQ.py Wed Aug 03 13:54:28 2011 -0400
+++ b/tools/rgenetics/rgManQQ.py Thu Aug 04 10:57:03 2011 +1000
@@ -1,4 +1,10 @@
#!/usr/local/bin/python
+# This is a truly ghastly hack
+# all of the heavy data cleaning lifting is done in R which is a really dumb place IMHO
+# Making a new file seems a waste but it would be far easier to set everything up in python
+# seems to work so I'm leaving it alone
+# sigh. Should really move this gig to rpy - writing a robust R script is hard.
+# updated to compress pdf using gs since millions of points = horsechoker pdfs and pdfs are good
# updated july 20 to fix sort order - R unique() sorts into strict collating order
# so need to sort after unique to revert to lexicographic order for x axis on Manhattan
# rgmanqq updated july 19 to deal with x,y and mt
@@ -13,7 +19,6 @@
debug = False
rcode="""
-# license not stated so I'm assuming LGPL is ok for my derived work?
# generalised so 3 core fields passed as parameters ross lazarus March 24 2010 for rgenetics
# Originally created as qqman with the following
# attribution:
@@ -39,77 +44,96 @@
size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
+ n = length(pvals)
if (genomewide) { # use bonferroni since might be only a small region?
- genomewideline = -log10(0.05/length(pvals)) }
+ genomewideline = -log10(0.05/n) }
offset = as.integer(offset)
+ if (n > 1000000) { offset = offset/10000 }
+ else if (n > 10000) { offset = offset/1000}
+ chro = as.integer(chrom) # already dealt with X and friends?
pvals = as.double(pvals)
- chro = as.integer(chrom) # already dealt with X and friends?
d=data.frame(CHR=chro,BP=offset,P=pvals)
- #limit to only chrs 1-22, x=23,y=24,Mt=25?
- d=d[d$CHR %in% 1:25, ]
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- #d=na.omit(d)
+ d=d[!is.na(d$P), ]
+ d=d[!is.na(d$BP), ]
+ d=d[!is.na(d$CHR), ]
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
d=d[d$P>0 & d$P<=1, ]
d$logp = as.double(-log10(d$P))
+ dlen = length(d$P)
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = as.integer(chrlist)
chrlist = sort(chrlist) # returns lexical ordering
+ if (max.y=="max") { maxy = ceiling(max(d$logp)) }
+ else { maxy = max.y }
nchr = length(chrlist) # may be any number?
+ maxy = max(maxy,1.1*genomewideline)
if (nchr >= 2) {
- for (x in c(1:nchr)) {
+ for (x in c(1:nchr)) {
i = chrlist[x] # need the chrom number - may not == index
if (x == 1) { # first time
- d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP
- tks = d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]
- } else {
- lastchr = chrlist[x-1] # previous whatever the list
- lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
- d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
- if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
- cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
- }
- tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
-
+ d[d$CHR==i, ]$pos = d[d$CHR==i, ]$BP # initialize to first BP of chr1
+ dsub = subset(d,CHR==i)
+ dlen = length(dsub$P)
+ lastbase = max(dsub$pos) # last one
+ tks = d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]
+ lastchr = i
+ } else {
+ d[d$CHR==i, ]$pos = d[d$CHR==i, ]$BP+lastbase # one humongous contig
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
+ tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+ lastchr = i
+ dsub = subset(d,CHR==i)
+ lastbase = max(dsub$pos) # last one
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
}
} else { # nchr is 1
nticks = 10
- last = max(offset)
- first = min(offset)
- tks = c()
+ last = max(d$BP)
+ first = min(d$BP)
+ tks = c(first)
t = (last-first)/nticks # units per tick
- for (x in c(1:nticks)) tks = c(tks,round(x*t))
- xlabs = tks
+ for (x in c(1:(nticks))) {
+ tks = c(tks,round(x*t)+first) }
ticklim = c(first,last)
} # else
if (grey) {mycols=rep(c("gray10","gray60"),max(d$CHR))
} else {
mycols=rep(coloursTouse,max(d$CHR))
}
-
- if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
- maxy = max(maxy,1.1*genomewideline)
+ dlen = length(d$P)
+ d$pranks = rank(d$P)/dlen
+ d$centiles = 100*d$pranks # small are interesting
+ d$sizes = ifelse((d$centile < 1),2,1)
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
- manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
- manplot=manplot+scale_x_continuous(name="Chromosome", breaks=tks, labels=xlabs) }
+ manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR),size=factor(sizes))
+ manplot=manplot+scale_x_continuous(name="Chromosome", breaks=tks, labels=xlabs,limits=ticklim)
+ manplot=manplot+scale_size_manual(values = c(0.5,1.5)) # requires discrete scale - eg factor
+ #manplot=manplot+scale_size(values=c(0.5,2)) # requires continuous
+ }
else {
manplot=qplot(BP,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
- manplot=manplot+scale_x_continuous("BP") }
+ manplot=manplot+scale_x_continuous(name=paste("Chromosome",chrlist[1]), breaks=tks, labels=tks,limits=ticklim)
+ }
manplot=manplot+scale_y_continuous(limits=c(0,maxy), breaks=1:maxy, labels=1:maxy)
manplot=manplot+scale_colour_manual(value=mycols)
if (annotate) { manplot=manplot + geom_point(data=d.annotate, colour=I("green3")) }
manplot=manplot + opts(legend.position = "none")
manplot=manplot + opts(title=title)
manplot=manplot+opts(
- panel.background=theme_blank(),
- axis.text.x=theme_text(size=size.x.labels, colour="grey50"),
- axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
- axis.ticks=theme_segment(colour=NA)
+ panel.background=theme_blank(),
+ axis.text.x=theme_text(size=size.x.labels, colour="grey50"),
+ axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
+ axis.ticks=theme_segment(colour=NA)
)
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
@@ -168,13 +192,15 @@
mytitle = paste('p=',cname,', ',title,sep='')
myfname = chartr(' ','_',cname)
myqqplot = qq(rawd[,pvalscolumn],title=mytitle)
- ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
+ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=8,height=6,dpi=96)
+ ggsave(filename=paste(myfname,"qqplot.pdf",sep='_'),myqqplot,width=8,height=6,dpi=96)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=8,height=6,dpi=96)
+ ggsave(filename=paste(myfname,"manhattan.pdf",sep='_'),mymanplot,width=8,height=6,dpi=96)
print(paste('## manhattan plot on',cname,'done'))
- ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
else {
print(paste('chrom column =',chromcolumn,'offset column = ',offsetcolumn,
@@ -212,6 +238,17 @@
rlog.append(rcmd)
return rlog,flist
+def compressPDF(inpdf=None):
+ """need absolute path to pdf
+ """
+ assert os.path.isfile(inpdf), "## Input %s supplied to compressPDF not found" % inpdf
+ outpdf = '%s_compressed' % inpdf
+ cl = ["gs", "-sDEVICE=pdfwrite", "-dNOPAUSE", "-dBATCH", "-sOutputFile=%s" % outpdf,inpdf]
+ retval = subprocess.call(cl)
+ if retval == 0:
+ os.unlink(inpdf)
+ shutil.move(outpdf,inpdf)
+ return retval
def main():
u = """<command interpreter="python">
@@ -261,11 +298,19 @@
html.append('<table>\n')
for row in flist:
fname,expl = row # RRun returns pairs of filenames fiddled for the log and R script
- e = os.path.splitext(fname)[-1]
+ n,e = os.path.splitext(fname)
if e in ['.png','.jpg']:
- s= '<tr><td><a href="%s"><img src="%s" title="%s hspace="10" width="400"><br>(Click to download image %s)</a></td></tr>' \
- % (fname,fname,expl,expl )
- html.append(s)
+ pdf = '%s.pdf' % n
+ pdff = os.path.join(outdir,pdf)
+ if os.path.exists(pdff):
+ rval = compressPDF(inpdf=pdff)
+ if rval <> 0:
+ pdf = '%s(not_compressed)' % pdf
+ else:
+ pdf = '%s(not_found)' % pdf
+ s= '<tr><td><a href="%s"><img src="%s" title="%s" hspace="10" width="800"></a></td></tr>' \
+ % (pdf,fname,expl)
+ html.append(s)
else:
html.append('<tr><td><a href="%s">%s</a></td></tr>' % (fname,expl))
html.append('</table>\n')
--- a/tools/rgenetics/rgManQQ.xml Wed Aug 03 13:54:28 2011 -0400
+++ b/tools/rgenetics/rgManQQ.xml Thu Aug 04 10:57:03 2011 +1000
@@ -1,4 +1,4 @@
-<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.2">
+<tool id="rgManQQ1" name="Manhattan/QQ:" version="1.0.3"><code file="rgManQQ_code.py"/><description>Plots for WGA P values</description>
http://bitbucket.org/galaxy/galaxy-central/changeset/e79d064e4880/
changeset: e79d064e4880
user: fubar
date: 2011-08-04 04:43:30
summary: Fixed sam_merge.xml to call Picard MergeSamFiles.jar so metadata can be propagated through to the new merged BAM from all the individual files - the samtools version of merge would require this to be done separately and passed in with the -h option, whereas Picard does it automatically. Added one more test. Interestingly, the test for that tool had been failing to correctly pass metadata while still passing the buildbot. Thanks to Camille Stephan for pointing out the bug.
Changes to rgManQQ so the user can obtain a decent PDF image. When millions of points are plotted these are humongous, so GS is called to compress the resulting PDF to a reasonable size. PDFs are now linked from the thumbnails. Some minor fiddling with point size on the Manhattan plots so the interesting ones are a little more obvious.
Minor tweak to twilltestcase.py so composite file components are copied correctly to the directory specified by GALAXY_TEST_SAVE. This makes updating test artefacts much simpler because running tests with GALAXY_TEST_SAVE pointing somewhere will now save every tested output file.
affected #: 10 files (36.1 KB)
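For readers skimming past the diffs, here is a minimal sketch of the Ghostscript-based PDF compression described in the summary above; it mirrors the compressPDF helper added to tools/rgenetics/rgManQQ.py earlier in this thread, and the gs flags are the ones used there, while the function name and wrapper are otherwise illustrative:

    import os
    import shutil
    import subprocess

    def compress_pdf(inpdf):
        # Rewrite a huge scatter-plot PDF in place via Ghostscript's pdfwrite device.
        outpdf = '%s_compressed' % inpdf
        cl = ['gs', '-sDEVICE=pdfwrite', '-dNOPAUSE', '-dBATCH',
              '-sOutputFile=%s' % outpdf, inpdf]
        retval = subprocess.call(cl)   # 0 means gs finished cleanly
        if retval == 0:
            os.unlink(inpdf)           # swap the compressed file in over the original
            shutil.move(outpdf, inpdf)
        return retval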
Binary file test-data/rgtestouts/rgManQQ/Allelep_manhattan.png has changed
Binary file test-data/rgtestouts/rgManQQ/Allelep_qqplot.png has changed
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.R Thu Aug 04 10:57:03 2011 +1000
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.R Thu Aug 04 12:43:30 2011 +1000
@@ -1,5 +1,4 @@
-# license not stated so I'm assuming LGPL is ok for my derived work?
# generalised so 3 core fields passed as parameters ross lazarus March 24 2010 for rgenetics
# Originally created as qqman with the following
# attribution:
@@ -8,7 +7,7 @@
# http://StephenTurner.us/
# http://GettingGeneticsDone.blogspot.com/
-# Last updated: Tuesday, December 22, 2009
+# Last updated: 19 July 2011 by Ross Lazarus
# R code for making manhattan plots and QQ plots from plink output files.
# With GWAS data this can take a lot of memory. Recommended for use on
# 64bit machines only, for now.
@@ -18,86 +17,104 @@
library(ggplot2)
coloursTouse = c('firebrick','darkblue','goldenrod','darkgreen')
-# not too fugly but need a colour expert please...
+# not too ugly but need a colour expert please...
-manhattan = function(chrom=NULL,offset=NULL,pvals=NULL, title=NULL, max.y="max",
- suggestiveline=0, genomewide=T, size.x.labels=9, size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
-
+DrawManhattan = function(pvals=Null,chrom=Null,offset=Null,title=NULL, max.y="max",suggestiveline=0, genomewide=T, size.x.labels=9,
+ size.y.labels=10, annotate=F, SNPlist=NULL,grey=0) {
if (annotate & is.null(SNPlist)) stop("You requested annotation but provided no SNPlist!")
genomewideline=NULL # was genomewideline=-log10(5e-8)
+ n = length(pvals)
if (genomewide) { # use bonferroni since might be only a small region?
- genomewideline = -log10(0.05/length(pvals)) }
- d=data.frame(CHR=chrom,BP=offset,P=pvals)
-
- #limit to only chrs 1-23?
- d=d[d$CHR %in% 1:23, ]
-
+ genomewideline = -log10(0.05/n) }
+ offset = as.integer(offset)
+ if (n > 1000000) { offset = offset/10000 }
+ else if (n > 10000) { offset = offset/1000}
+ chro = as.integer(chrom) # already dealt with X and friends?
+ pvals = as.double(pvals)
+ d=data.frame(CHR=chro,BP=offset,P=pvals)
if ("CHR" %in% names(d) & "BP" %in% names(d) & "P" %in% names(d) ) {
- d=na.omit(d)
+ d=d[!is.na(d$P), ]
+ d=d[!is.na(d$BP), ]
+ d=d[!is.na(d$CHR), ]
+ #limit to only chrs 1-22, x=23,y=24,Mt=25?
+ d=d[d$CHR %in% 1:25, ]
d=d[d$P>0 & d$P<=1, ]
- d$logp = -log10(d$P)
-
+ d$logp = as.double(-log10(d$P))
+ dlen = length(d$P)
d$pos=NA
ticks=NULL
lastbase=0
chrlist = unique(d$CHR)
+ chrlist = as.integer(chrlist)
+ chrlist = sort(chrlist) # returns lexical ordering
+ if (max.y=="max") { maxy = ceiling(max(d$logp)) }
+ else { maxy = max.y }
nchr = length(chrlist) # may be any number?
+ maxy = max(maxy,1.1*genomewideline)
if (nchr >= 2) {
- for (x in c(1:nchr)) {
+ for (x in c(1:nchr)) {
i = chrlist[x] # need the chrom number - may not == index
if (x == 1) { # first time
- d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP
- tks = d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]
- } else {
- lastchr = chrlist[x-1] # previous whatever the list
- lastbase=lastbase+tail(subset(d,CHR==lastchr)$BP, 1)
- d[d$CHR==i, ]$pos=d[d$CHR==i, ]$BP+lastbase
- tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+ d[d$CHR==i, ]$pos = d[d$CHR==i, ]$BP # initialize to first BP of chr1
+ dsub = subset(d,CHR==i)
+ dlen = length(dsub$P)
+ lastbase = max(dsub$pos) # last one
+ tks = d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1]
+ lastchr = i
+ } else {
+ d[d$CHR==i, ]$pos = d[d$CHR==i, ]$BP+lastbase # one humongous contig
+ if (sum(is.na(lastchr),is.na(lastbase),is.na(d[d$CHR==i, ]$pos))) {
+ cat(paste('manhattan: For',title,'chrlistx=',i,'lastchr=',lastchr,'lastbase=',lastbase,'pos=',d[d$CHR==i,]$pos))
+ }
+ tks=c(tks, d[d$CHR==i, ]$pos[floor(length(d[d$CHR==i, ]$pos)/2)+1])
+ lastchr = i
+ dsub = subset(d,CHR==i)
+ lastbase = max(dsub$pos) # last one
}
ticklim=c(min(d$pos),max(d$pos))
xlabs = chrlist
}
} else { # nchr is 1
nticks = 10
- last = max(offset)
- first = min(offset)
- tks = c()
+ last = max(d$BP)
+ first = min(d$BP)
+ tks = c(first)
t = (last-first)/nticks # units per tick
- for (x in c(1:nticks)) tks = c(tks,round(x*t))
- xlabs = tks
+ for (x in c(1:(nticks))) {
+ tks = c(tks,round(x*t)+first) }
ticklim = c(first,last)
} # else
if (grey) {mycols=rep(c("gray10","gray60"),max(d$CHR))
} else {
mycols=rep(coloursTouse,max(d$CHR))
}
-
- if (max.y=="max") maxy=ceiling(max(d$logp)) else maxy=max.y
- maxy = max(maxy,1.1*genomewideline)
- # if (maxy<8) maxy=8
- # only makes sense if genome wide is assumed - we could have a fine mapping region?
+ dlen = length(d$P)
+ d$pranks = rank(d$P)/dlen
+ d$centiles = 100*d$pranks # small are interesting
+ d$sizes = ifelse((d$centile < 1),2,1)
if (annotate) d.annotate=d[as.numeric(substr(d$SNP,3,100)) %in% SNPlist, ]
if (nchr >= 2) {
- manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
- manplot=manplot+scale_x_continuous(name="Chromosome", breaks=tks, labels=xlabs) }
+ manplot=qplot(pos,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR),size=factor(sizes))
+ manplot=manplot+scale_x_continuous(name="Chromosome", breaks=tks, labels=xlabs,limits=ticklim)
+ manplot=manplot+scale_size_manual(values = c(0.5,1.5)) # requires discrete scale - eg factor
+ #manplot=manplot+scale_size(values=c(0.5,2)) # requires continuous
+ }
else {
manplot=qplot(BP,logp,data=d, ylab=expression(-log[10](italic(p))) , colour=factor(CHR))
- manplot=manplot+scale_x_continuous("BP") }
+ manplot=manplot+scale_x_continuous(name=paste("Chromosome",chrlist[1]), breaks=tks, labels=tks,limits=ticklim)
+ }
manplot=manplot+scale_y_continuous(limits=c(0,maxy), breaks=1:maxy, labels=1:maxy)
manplot=manplot+scale_colour_manual(value=mycols)
if (annotate) { manplot=manplot + geom_point(data=d.annotate, colour=I("green3")) }
manplot=manplot + opts(legend.position = "none")
manplot=manplot + opts(title=title)
manplot=manplot+opts(
- panel.background=theme_blank(),
- axis.text.x=theme_text(size=size.x.labels, colour="grey50"),
- axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
- axis.ticks=theme_segment(colour=NA)
+ panel.background=theme_blank(),
+ axis.text.x=theme_text(size=size.x.labels, colour="grey50"),
+ axis.text.y=theme_text(size=size.y.labels, colour="grey50"),
+ axis.ticks=theme_segment(colour=NA)
)
- #manplot = manplot + opts(panel.grid.y.minor=theme_blank(),panel.grid.y.major=theme_blank())
- #manplot = manplot + opts(panel.grid.major=theme_blank())
-
if (suggestiveline) manplot=manplot+geom_hline(yintercept=suggestiveline,colour="blue", alpha=I(1/3))
if (genomewideline) manplot=manplot+geom_hline(yintercept=genomewideline,colour="red")
manplot
@@ -124,16 +141,24 @@
if (spartan) plot=plot+opts(panel.background=theme_rect(col="grey50"), panel.grid.minor=theme_blank())
qq
}
-rgqqMan = function(infile="/tmp/rgManQQtemplYC5wa",chromcolumn=1, offsetcolumn=2, pvalscolumns=c(3),
+
+rgqqMan = function(infile="/data/tmp/tmpNaxDwH/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
cc = dn[chromcolumn]
oc = dn[offsetcolumn]
-nams = c(cc,oc)
+rawd[,cc] = sub('chr','',rawd[,cc],ignore.case = T) # just in case
+rawd[,cc] = sub(':','',rawd[,cc],ignore.case = T) # ugh
+rawd[,cc] = sub('X',23,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Y',24,rawd[,cc],ignore.case = T)
+rawd[,cc] = sub('Mt',25,rawd[,cc], ignore.case = T)
+nams = c(cc,oc) # for sorting
plen = length(rawd[,1])
-doreorder=1
print(paste('###',plen,'values read from',infile,'read - now running plots',sep=' '))
+rawd = rawd[do.call(order,rawd[nams]),]
+# mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
+# in case not yet ordered
if (plen > 0) {
for (pvalscolumn in pvalscolumns) {
if (pvalscolumn > 0)
@@ -142,19 +167,15 @@
mytitle = paste('p=',cname,', ',title,sep='')
myfname = chartr(' ','_',cname)
myqqplot = qq(rawd[,pvalscolumn],title=mytitle)
- ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=6,height=4,dpi=100)
+ ggsave(filename=paste(myfname,"qqplot.png",sep='_'),myqqplot,width=8,height=6,dpi=96)
+ ggsave(filename=paste(myfname,"qqplot.pdf",sep='_'),myqqplot,width=8,height=6,dpi=96)
print(paste('## qqplot on',cname,'done'))
if ((chromcolumn > 0) & (offsetcolumn > 0)) {
- if (doreorder) {
- rawd = rawd[do.call(order,rawd[nams]),]
- # mmmf - suggested by http://onertipaday.blogspot.com/2007/08/sortingordering-dataframe-according…
- # in case not yet ordered
- doreorder = 0
- }
print(paste('## manhattan on',cname,'starting',chromcolumn,offsetcolumn,pvalscolumn))
- mymanplot= manhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ mymanplot= DrawManhattan(chrom=rawd[,chromcolumn],offset=rawd[,offsetcolumn],pvals=rawd[,pvalscolumn],title=mytitle,grey=grey)
+ ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=8,height=6,dpi=96)
+ ggsave(filename=paste(myfname,"manhattan.pdf",sep='_'),mymanplot,width=8,height=6,dpi=96)
print(paste('## manhattan plot on',cname,'done'))
- ggsave(filename=paste(myfname,"manhattan.png",sep='_'),mymanplot,width=6,height=4,dpi=100)
}
else {
print(paste('chrom column =',chromcolumn,'offset column = ',offsetcolumn,
@@ -171,4 +192,4 @@
rgqqMan()
# execute with defaults as substituted
-#R script autogenerated by rgenetics/rgutils.py on 07/11/2010 20:03:37
+#R script autogenerated by rgenetics/rgutils.py on 04/08/2011 11:46:16
--- a/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Thu Aug 04 10:57:03 2011 +1000
+++ b/test-data/rgtestouts/rgManQQ/rgManQQtest1.html Thu Aug 04 12:43:30 2011 +1000
@@ -45,7 +45,7 @@
Loading required package: proto
-[1] "### 101 values read from /data/tmp/tmpB3Kfsc/database/files/000/dataset_1.dat read - now running plots"
+[1] "### 101 values read from /data/tmp/tmpNaxDwH/database/files/000/dataset_1.dat read - now running plots"
[1] "## qqplot on Allelep done"
@@ -198,7 +198,7 @@
qq
}
-rgqqMan = function(infile="/data/tmp/tmpB3Kfsc/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
+rgqqMan = function(infile="/data/tmp/tmpNaxDwH/database/files/000/dataset_1.dat",chromcolumn=2, offsetcolumn=3, pvalscolumns=c(8),
title="rgManQQtest1",grey=0) {
rawd = read.table(infile,head=T,sep='\t')
dn = names(rawd)
@@ -250,6 +250,6 @@
</pre>
-<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 04/08/2011 10:51:34</h3>
+<h3><a href="http://rgenetics.org">Rgenetics</a> tool rgManQQ.py run at 04/08/2011 11:46:22</h3></div></body></html>
Binary file test-data/sam_merge_out1.bam has changed
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sam_merge_out1.log Thu Aug 04 12:43:30 2011 +1000
@@ -0,0 +1,5 @@
+[Thu Aug 04 12:17:33 EST 2011] net.sf.picard.sam.MergeSamFiles INPUT=[/data/tmp/tmp6eiFcc/database/files/000/dataset_1.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_2.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_2.dat] OUTPUT=/data/tmp/tmp6eiFcc/database/files/000/dataset_3.dat MERGE_SEQUENCE_DICTIONARIES=true SORT_ORDER=coordinate ASSUME_SORTED=false USE_THREADING=false TMP_DIR=/tmp/rlazarus VERBOSITY=INFO QUIET=false VALIDATION_STRINGENCY=STRICT COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
+INFO 2011-08-04 12:17:33 MergeSamFiles Sorting input files using temp directory /tmp/rlazarus
+INFO 2011-08-04 12:17:33 MergeSamFiles Finished reading inputs.
+[Thu Aug 04 12:17:33 EST 2011] net.sf.picard.sam.MergeSamFiles done.
+Runtime.totalMemory()=2028732416
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/sam_merge_out2.log Thu Aug 04 12:43:30 2011 +1000
@@ -0,0 +1,5 @@
+[Thu Aug 04 12:18:15 EST 2011] net.sf.picard.sam.MergeSamFiles INPUT=[/data/tmp/tmp6eiFcc/database/files/000/dataset_5.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_6.dat, /data/tmp/tmp6eiFcc/database/files/000/dataset_7.dat] OUTPUT=/data/tmp/tmp6eiFcc/database/files/000/dataset_8.dat MERGE_SEQUENCE_DICTIONARIES=true SORT_ORDER=coordinate ASSUME_SORTED=false USE_THREADING=false TMP_DIR=/tmp/rlazarus VERBOSITY=INFO QUIET=false VALIDATION_STRINGENCY=STRICT COMPRESSION_LEVEL=5 MAX_RECORDS_IN_RAM=500000 CREATE_INDEX=false CREATE_MD5_FILE=false
+INFO 2011-08-04 12:18:15 MergeSamFiles Sorting input files using temp directory /tmp/rlazarus
+INFO 2011-08-04 12:18:15 MergeSamFiles Finished reading inputs.
+[Thu Aug 04 12:18:15 EST 2011] net.sf.picard.sam.MergeSamFiles done.
+Runtime.totalMemory()=2028732416
--- a/test/base/twilltestcase.py Thu Aug 04 10:57:03 2011 +1000
+++ b/test/base/twilltestcase.py Thu Aug 04 12:43:30 2011 +1000
@@ -728,6 +728,10 @@
self.visit_url( "%s/datasets/%s/display/%s" % ( self.url, self.security.encode_id( hda_id ), base_name ) )
data = self.last_page()
file( temp_name, 'wb' ).write( data )
+ if self.keepOutdir > '':
+ ofn = os.path.join(self.keepOutdir,base_name)
+ shutil.copy(temp_name,ofn)
+ log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % (self.keepOutdir,ofn))
try:
# have to nest try-except in try-finally to handle 2.4
try:
--- a/tool_conf.xml.sample Thu Aug 04 10:57:03 2011 +1000
+++ b/tool_conf.xml.sample Thu Aug 04 12:43:30 2011 +1000
@@ -343,7 +343,7 @@
<tool file="samtools/sam2interval.xml" /><tool file="samtools/sam_to_bam.xml" /><tool file="samtools/bam_to_sam.xml" />
- <tool file="picard/sam_merge.xml" />
+ <tool file="samtools/sam_merge.xml" /><tool file="samtools/sam_pileup.xml" /><tool file="samtools/pileup_parser.xml" /><tool file="samtools/pileup_interval.xml" />
--- a/tools/samtools/sam_merge.xml Thu Aug 04 10:57:03 2011 +1000
+++ b/tools/samtools/sam_merge.xml Thu Aug 04 12:43:30 2011 +1000
@@ -1,18 +1,21 @@
-<tool id="sam_merge" name="Merge BAM Files" version="1.1.1">
+<tool id="sam_merge2" name="Merge BAM Files" version="1.1.2"><description>merges BAM files together</description><requirements>
- <requirement type="package">samtools</requirement>
+ <requirement type="package">picard</requirement></requirements>
- <command interpreter="python">
- sam_merge.py
- $input1
- $output1
- $input2
+ <command>
+ java -jar ${GALAXY_DATA_INDEX_DIR}/shared/jars/MergeSamFiles.jar MERGE_SEQUENCE_DICTIONARIES=$mergeSD OUTPUT=$output1 INPUT=$input1 INPUT=$input2
#for $i in $inputs
- ${i.input}
- #end for
+ INPUT=${i.input}
+ #end for
+ 2> $outlog
</command><inputs>
+ <param name="title" label="Name for the output merged bam file" type="text" default="Merged.bam"
+ help="This name will appear in your history so use it to remember what the new file in your history contains" />
+ <param name="mergeSD" value="true" type="boolean" label="Merge all component bam file headers into the merged bam file"
+ truevalue="true" falsevalue="false" checked="yes"
+ help="Control the MERGE_SEQUENCE_DICTIONARIES flag for Picard MergeSamFiles. Default (true) correctly propagates read groups and other important metadata" /><param name="input1" label="First file" type="data" format="bam" /><param name="input2" label="with file" type="data" format="bam" help="Need to add more files? Use controls below." /><repeat name="inputs" title="Input Files">
@@ -20,57 +23,39 @@
</repeat></inputs><outputs>
- <data format="bam" name="output1" label="${tool.name} on ${on_string}: merged BAM" />
+ <data format="bam" name="output1" label="${title}.bam" />
+ <data format="txt" name="outlog" label="${title}_${tool.name}.log" /></outputs><tests><!-- TODO: add ability to test framework to test without at least
one repeat element value
+ --><test>
- -->
- <!--
- Bam merge command:
- samtools merge test-data/sam_merge_out1.bam test-data/sam_merge_in1.bam test-data/sam_merge_in2.bam
- -->
- <!--
+ <param name="title" value="test1" />
+ <param name="mergeSD" value="true" /><param name="input1" value="sam_merge_in1.bam" ftype="bam" /><param name="input2" value="sam_merge_in2.bam" ftype="bam" /><output name="output1" file="sam_merge_out1.bam" ftype="bam" />
+ <output name="outlog" file="sam_merge_out1.log" ftype="txt" lines_diff="8"/></test>
- --><test>
- <!--
- Bam merge command:
- samtools merge sam_merge_out2.bam test-data/sam_merge_in1.bam test-data/sam_merge_in2.bam test-data/sam_merge_in3.bam
- -->
+ <param name="title" value="test2" />
+ <param name="mergeSD" value="true" /><param name="input1" value="sam_merge_in1.bam" ftype="bam" /><param name="input2" value="sam_merge_in2.bam" ftype="bam" /><param name="input" value="sam_merge_in3.bam" ftype="bam" /><output name="output1" file="sam_merge_out2.bam" ftype="bam" />
+ <output name="outlog" file="sam_merge_out2.log" ftype="txt" lines_diff="8"/></test>
- <!-- TODO: add ability to test code to be able to test with multiple
- inputs (parameters with same value)
- <test>
- -->
- <!--
- Bam merge command:
- samtools merge test-data/sam_merge_out3.bam test-data/sam_merge_in1.bam test-data/sam_merge_in2.bam test-data/sam_merge_in3.bam test-data/sam_merge_in4.bam
- -->
- <!--
- <param name="input1" value="sam_merge_in1.bam" ftype="bam" />
- <param name="input2" value="sam_merge_in2.bam" ftype="bam" />
- <param name="input" value="sam_merge_in3.bam" ftype="bam" />
- <param name="input" value="sam_merge_in4.bam" ftype="bam" />
- <output name="output1" file="sam_merge_out3.bam" ftype="bam" />
- </test>
- --></tests><help>
**What it does**
-This tool uses SAMTools_' merge command to merge any number of BAM files together into one BAM file.
+This tool uses the Picard_ merge command to merge any number of BAM files together into one BAM file while preserving the BAM
+metadata such as read groups
-.. _SAMTools: http://samtools.sourceforge.net/samtools.shtml
+.. _Picard: http://picard.sourceforge.net/command-line-overview.shtml#MergeSamFiles
</help></tool>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: kanwei: Add simple display option to data libraries that omits certain columns... unclutters LibraryDataset parameter UI
by Bitbucket 03 Aug '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/da65b1695245/
changeset: da65b1695245
user: kanwei
date: 2011-08-03 19:54:28
summary: Add simple display option to data libraries that omits certain columns... unclutters LibraryDataset parameter UI
affected #: 3 files (799 bytes)
--- a/templates/library/common/browse_library.mako Wed Aug 03 11:39:23 2011 -0400
+++ b/templates/library/common/browse_library.mako Wed Aug 03 13:54:28 2011 -0400
@@ -82,7 +82,6 @@
};
$("#library-grid").each(function() {
-
var child_of_parent_cache = {};
// Recursively fill in children and descendents of each row
var process_row = function(q, parents) {
@@ -207,17 +206,17 @@
</script></%def>
-<%def name="render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, parent, row_counter, tracked_datasets, show_deleted=False )">
+<%def name="render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, parent, row_counter, tracked_datasets, show_deleted=False, simple=False )"><%
## The received ldda must always be a LibraryDatasetDatasetAssociation object. The object id passed to methods
## from the drop down menu should be the ldda id to prevent id collision ( which could happen when displaying
## children, which are always lddas ). We also need to make sure we're displaying the latest version of this
## library_dataset, so we display the attributes from the ldda.
-
+
from galaxy.web.controllers.library_common import branch_deleted
-
+
is_admin = trans.user_is_admin() and cntrller == 'library_admin'
-
+
if ldda.user:
uploaded_by = ldda.user.email
else:
@@ -245,60 +244,66 @@
%endif
id="libraryItem-${ldda.id}"><td style="padding-left: ${pad+20}px;">
- <input style="float: left;" type="checkbox" name="ldda_ids" value="${trans.security.encode_id( ldda.id )}"
+ <input style="float: left;" type="checkbox" name="ldda_ids" id="${trans.security.encode_id( ldda.id )}" value="${trans.security.encode_id( ldda.id )}"
%if selected:
checked="checked"
%endif
/>
- <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${ldda.id}-popup">
- <a class="view-info" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">
- %if ldda.library_dataset.deleted:
- <div class="libraryItem-error">${ldda.name}</div>
- %else:
- ${ldda.name}
- %endif
- </a>
- </div>
- %if not library.deleted:
- <div popupmenu="dataset-${ldda.id}-popup">
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
- <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type='ldda', item_id=trans.security.encode_id( ldda.id ), source_library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Move this dataset</a>
- %else:
- <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and not info_association:
- <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and info_association:
- <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_manage:
- %if not trans.app.security_agent.dataset_is_public( ldda.dataset ):
- <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='ldda', id=trans.security.encode_id( ldda.dataset.id ), use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+ %if simple:
+ <label for="${trans.security.encode_id( ldda.id )}">${ldda.name}</label>
+ %else:
+ <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-${ldda.id}-popup">
+ <a class="view-info" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">
+ %if ldda.library_dataset.deleted:
+ <div class="libraryItem-error">${ldda.name}</div>
+ %else:
+ ${ldda.name}
+ %endif
+ </a>
+ </div>
+ %if not library.deleted:
+ <div popupmenu="dataset-${ldda.id}-popup">
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_edit_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit information</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='move_library_item', cntrller=cntrller, item_type='ldda', item_id=trans.security.encode_id( ldda.id ), source_library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}">Move this dataset</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_info', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">View information</a>
%endif
- <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
- <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
- %endif
- %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
- <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into selected histories</a>
- <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
- %endif
- %if can_modify:
- %if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
- <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
- %elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
- <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and not info_association:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='add_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Use template</a>
%endif
- %endif
- </div>
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify and info_association:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='edit_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit template</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='delete_template', cntrller=cntrller, item_type='ldda', form_type=form_type, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), ldda_id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Unuse template</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_manage:
+ %if not trans.app.security_agent.dataset_is_public( ldda.dataset ):
+ <a class="action-button" href="${h.url_for( controller='library_common', action='make_library_item_public', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_type='ldda', id=trans.security.encode_id( ldda.dataset.id ), use_panels=use_panels, show_deleted=show_deleted )}">Make public</a>
+ %endif
+ <a class="action-button" href="${h.url_for( controller='library_common', action='ldda_permissions', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), id=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Edit permissions</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and can_modify:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( folder.id ), replace_id=trans.security.encode_id( library_dataset.id ), show_deleted=show_deleted )}">Upload a new version of this dataset</a>
+ %endif
+ %if not branch_deleted( folder ) and not ldda.library_dataset.deleted and ldda.has_data:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='import_datasets_to_histories', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), ldda_ids=trans.security.encode_id( ldda.id ), use_panels=use_panels, show_deleted=show_deleted )}">Import this dataset into selected histories</a>
+ <a class="action-button" href="${h.url_for( controller='library_common', action='download_dataset_from_folder', cntrller=cntrller, id=trans.security.encode_id( ldda.id ), library_id=trans.security.encode_id( library.id ), use_panels=use_panels )}">Download this dataset</a>
+ %endif
+ %if can_modify:
+ %if not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.deleted:
+ <a class="action-button" confirm="Click OK to delete dataset '${ldda.name}'." href="${h.url_for( controller='library_common', action='delete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Delete this dataset</a>
+ %elif not library.deleted and not branch_deleted( folder ) and not ldda.library_dataset.purged and ldda.library_dataset.deleted:
+ <a class="action-button" href="${h.url_for( controller='library_common', action='undelete_library_item', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), item_id=trans.security.encode_id( library_dataset.id ), item_type='library_dataset', show_deleted=show_deleted )}">Undelete this dataset</a>
+ %endif
+ %endif
+ </div>
+ %endif
%endif
</td>
- <td id="libraryItemInfo">${render_library_item_info( ldda )}</td>
- <td>${uploaded_by}</td>
+ % if not simple:
+ <td id="libraryItemInfo">${render_library_item_info( ldda )}</td>
+ <td>${uploaded_by}</td>
+ % endif
<td>${ldda.create_time.strftime( "%Y-%m-%d" )}</td><td>${ldda.get_size( nice_size=True )}</td></tr>
@@ -309,7 +314,7 @@
%endif
</%def>
-<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False )">
+<%def name="render_folder( cntrller, folder, folder_pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=False, parent=None, row_counter=None, root_folder=False, simple=False )"><%
from galaxy.web.controllers.library_common import active_folders, active_folders_and_library_datasets, activatable_folders_and_library_datasets, branch_deleted
@@ -348,7 +353,7 @@
can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, folder )
else:
can_add = can_modify = can_manage = False
-
+
form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
info_association, inherited = folder.get_info_association( restrict=True )
%>
@@ -448,7 +453,7 @@
%endfor
%else:
%for sub_folder in sub_folders:
- ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False )}
+ ${render_folder( cntrller, sub_folder, pad, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=my_row, row_counter=row_counter, root_folder=False, simple=simple )}
%endfor
%for library_dataset in library_datasets:
<%
@@ -462,20 +467,20 @@
can_access = False
%>
%if can_access:
- ${render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, my_row, row_counter, tracked_datasets, show_deleted=show_deleted )}
+ ${render_dataset( cntrller, ldda, library_dataset, selected, library, folder, pad, my_row, row_counter, tracked_datasets, show_deleted=show_deleted, simple=simple )}
%endif
%endfor
%endif
</%def>
-<%def name="render_content()">
+<%def name="render_content(simple=False)"><%
from galaxy import util
from galaxy.web.controllers.library_common import branch_deleted
from time import strftime
-
+
is_admin = trans.user_is_admin() and cntrller == 'library_admin'
-
+
if is_admin:
can_add = can_modify = can_manage = True
elif cntrller in [ 'library', 'requests' ]:
@@ -484,16 +489,16 @@
can_manage = trans.app.security_agent.can_manage_library_item( current_user_roles, library )
else:
can_add = can_modify = can_manage = False
-
+
info_association, inherited = library.get_info_association()
form_type = trans.model.FormDefinition.types.LIBRARY_INFO_TEMPLATE
-
+
self.has_accessible_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, library.root_folder, trans.user, current_user_roles )
root_folder_has_accessible_library_datasets = trans.app.security_agent.has_accessible_library_datasets( trans, library.root_folder, trans.user, current_user_roles, search_downward=False )
has_accessible_folders = is_admin or trans.app.security_agent.has_accessible_folders( trans, library.root_folder, trans.user, current_user_roles )
tracked_datasets = {}
-
+
class RowCounter( object ):
def __init__( self ):
self.count = 0
@@ -502,9 +507,9 @@
def __str__( self ):
return str( self.count )
%>
-
+
<h2>Data Library “${library.name}”</h2>
-
+
<ul class="manage-table-actions">
%if not library.deleted and ( is_admin or can_add ):
<li><a class="action-button" href="${h.url_for( controller='library_common', action='upload_library_dataset', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), folder_id=trans.security.encode_id( library.root_folder.id ), use_panels=use_panels, show_deleted=show_deleted )}">Add datasets</a></li>
@@ -547,18 +552,17 @@
</div>
%endif
</ul>
-
+
%if message:
${render_msg( message, status )}
%endif
-
- %if library.synopsis not in [ 'None', None ]:
+
+ %if library.synopsis not in [ '', 'None', None ]:
<div class="libraryItemBody">
${library.synopsis}
</div>
- <br/>
%endif
-
+
%if self.has_accessible_datasets:
<form name="act_on_multiple_datasets" action="${h.url_for( controller='library_common', action='act_on_multiple_datasets', cntrller=cntrller, library_id=trans.security.encode_id( library.id ), use_panels=use_panels, show_deleted=show_deleted )}" onSubmit="javascript:return checkForm();" method="post">
%endif
@@ -571,17 +575,19 @@
<input type="checkbox" id="checkAll" name=select_all_datasets_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_datasets_checkbox value="true"/>
%endif
Name
- </th>
- <th>Message</th>
- <th>Uploaded By</th>
+ </th>
+ % if not simple:
+ <th>Message</th>
+ <th>Uploaded By</th>
+ % endif
<th>Date</th><th>File Size</th></tr></thead><% row_counter = RowCounter() %>
%if cntrller in [ 'library', 'requests' ]:
- ${self.render_folder( 'library', library.root_folder, 0, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True )}
- %if not library.deleted and self.has_accessible_datasets:
+ ${self.render_folder( 'library', library.root_folder, 0, created_ldda_ids, library, hidden_folder_ids, tracked_datasets, show_deleted=show_deleted, parent=None, row_counter=row_counter, root_folder=True, simple=simple )}
+ %if not library.deleted and self.has_accessible_datasets and not simple:
${render_actions_on_multiple_items()}
%endif
%elif ( trans.user_is_admin() and cntrller in [ 'library_admin', 'requests_admin' ] ):
@@ -603,8 +609,8 @@
</script><!-- running: do not change this comment, used by TwillTestCase.library_wait -->
%endif
-
- %if self.has_accessible_datasets:
+
+ %if self.has_accessible_datasets and not simple:
${render_compression_types_help( comptypes )}
%endif
%if not has_accessible_folders:
--- a/templates/tool_form.mako Wed Aug 03 11:39:23 2011 -0400
+++ b/templates/tool_form.mako Wed Aug 03 13:54:28 2011 -0400
@@ -83,7 +83,7 @@
var ids = [];
counter = 1;
$('input[name=ldda_ids]:checked').each(function() {
- var name = $.trim( $(this).siblings("div").find("a").text() );
+ var name = $.trim( $(this).siblings("label").text() );
var id = $(this).val();
names.push( counter + ". " + name );
counter += 1;
--- a/templates/tracks/library_datasets_select_grid.mako Wed Aug 03 11:39:23 2011 -0400
+++ b/templates/tracks/library_datasets_select_grid.mako Wed Aug 03 13:54:28 2011 -0400
@@ -1,15 +1,13 @@
<%namespace file="/tracks/history_select_grid.mako" import="select_header" />
-##<%namespace file='/library/common/browse_library.mako' import="render_folder" /><%namespace file='/library/common/browse_library.mako' import="render_content, grid_javascripts" /><%def name="title()"><h2>History '${grid.get_current_item( trans, **kwargs ).name}'</h2></%def>
-
${select_header()}
${grid_javascripts()}
-${render_content()}
+${render_content(simple=True)}
<script type="text/javascript">
make_popup_menus();
</script>
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Remove unspecified build validators from Cuffdiff wrapper; fixes #631
by Bitbucket 03 Aug '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/4212f675f95b/
changeset: 4212f675f95b
user: jgoecks
date: 2011-08-03 17:39:23
summary: Remove unspecified build validators from Cuffdiff wrapper; fixes #631
affected #: 1 file (265 bytes)
--- a/tools/ngs_rna/cuffdiff_wrapper.xml Wed Aug 03 10:13:01 2011 -0400
+++ b/tools/ngs_rna/cuffdiff_wrapper.xml Wed Aug 03 11:39:23 2011 -0400
@@ -78,19 +78,13 @@
<repeat name="groups" title="Group"><param name="group" title="Group name" type="text" label="Group name (no spaces or commas)"/><repeat name="files" title="Replicate">
- <param name="file" label="Add file" type="data" format="sam,bam">
- <validator type="unspecified_build" />
- </param>
+ <param name="file" label="Add file" type="data" format="sam,bam"/></repeat></repeat></when><when value="No">
- <param format="sam,bam" name="aligned_reads1" type="data" label="SAM or BAM file of aligned RNA-Seq reads" help="">
- <validator type="unspecified_build" />
- </param>
- <param format="sam,bam" name="aligned_reads2" type="data" label="SAM or BAM file of aligned RNA-Seq reads" help="">
- <validator type="unspecified_build" />
- </param>
+ <param format="sam,bam" name="aligned_reads1" type="data" label="SAM or BAM file of aligned RNA-Seq reads" help=""/>
+ <param format="sam,bam" name="aligned_reads2" type="data" label="SAM or BAM file of aligned RNA-Seq reads" help=""/></when></conditional>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Bugfix for the offline egg packaging script.
by Bitbucket 03 Aug '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/d2433ed56951/
changeset: d2433ed56951
user: natefoo
date: 2011-08-03 16:13:01
summary: Bugfix for the offline egg packaging script.
affected #: 1 file (608 bytes)
--- a/scripts/make_egg_packager.py Wed Aug 03 02:09:42 2011 -0400
+++ b/scripts/make_egg_packager.py Wed Aug 03 10:13:01 2011 -0400
@@ -1,6 +1,16 @@
#!/usr/bin/env python
import os, sys, logging, shutil
+from optparse import OptionParser
+
+parser = OptionParser()
+parser.add_option( '-c', '--config', dest='config', help='Path to Galaxy config file (universe_wsgi.ini)', default='universe_wsgi.ini' )
+parser.add_option( '-p', '--platform', dest='platform', help='Fetch for a specific platform (by default, eggs are fetched for *this* platform' )
+( options, args ) = parser.parse_args()
+
+if not os.path.exists( options.config ):
+ print "Config file does not exist (see 'python %s --help'): %s" % ( sys.argv[0], options.config )
+ sys.exit( 1 )
root = logging.getLogger()
root.setLevel( 10 )
@@ -13,12 +23,13 @@
import pkg_resources
try:
- platform = sys.argv[1]
- c = Crate( platform = platform )
+ assert options.platform
+ platform = options.platform
+ c = Crate( options.config, platform = platform )
print "Platform forced to '%s'" % platform
except:
platform = '-'.join( ( py, pkg_resources.get_platform() ) )
- c = Crate()
+ c = Crate( options.config )
print "Using Python interpreter at %s, Version %s" % ( sys.executable, sys.version )
print "This platform is '%s'" % platform
print "Override with:"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dannon: Workflows: Same fix as previous, but for rename_async...
by Bitbucket 03 Aug '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/f516ee9ce331/
changeset: f516ee9ce331
user: dannon
date: 2011-08-03 08:09:42
summary: Workflows: Same fix as previous, but for rename_async...
affected #: 1 file (112 bytes)
--- a/lib/galaxy/web/controllers/workflow.py Wed Aug 03 02:02:54 2011 -0400
+++ b/lib/galaxy/web/controllers/workflow.py Wed Aug 03 02:09:42 2011 -0400
@@ -400,7 +400,9 @@
def rename_async( self, trans, id, new_name=None, **kwargs ):
stored = self.get_stored_workflow( trans, id )
if new_name:
- stored.name = new_name
+ san_new_name = sanitize_html( new_name )
+ stored.name = san_new_name
+ stored.latest_workflow.name = san_new_name
trans.sa_session.flush()
return stored.name
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

03 Aug '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/70acce0591a9/
changeset: 70acce0591a9
user: dannon
date: 2011-08-03 08:02:54
summary: workflows: Fix for rename not updating latest_workflow in addition to the stored workflow. This fixes the error where workflow exports always had the *original* workflow name, regardless of renames.
affected #: 1 file (95 bytes)
--- a/lib/galaxy/web/controllers/workflow.py Wed Aug 03 00:34:27 2011 -0400
+++ b/lib/galaxy/web/controllers/workflow.py Wed Aug 03 02:02:54 2011 -0400
@@ -380,7 +380,9 @@
def rename( self, trans, id, new_name=None, **kwargs ):
stored = self.get_stored_workflow( trans, id )
if new_name is not None:
- stored.name = sanitize_html( new_name )
+ san_new_name = sanitize_html( new_name )
+ stored.name = san_new_name
+ stored.latest_workflow.name = san_new_name
trans.sa_session.flush()
# For current workflows grid:
trans.set_message ( "Workflow renamed to '%s'." % new_name )
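To illustrate why both names have to change, here is a small stand-alone sketch (the classes are simplified stand-ins for Galaxy's models, not the real ones; the attribute names follow the diff):
# Illustrative stand-ins only, not Galaxy's actual model classes.
class Workflow( object ):
    def __init__( self, name ):
        self.name = name
class StoredWorkflow( object ):
    def __init__( self, name ):
        self.name = name
        self.latest_workflow = Workflow( name )
stored = StoredWorkflow( 'Original name' )
stored.name = 'Renamed'                                 # what the old code did
assert stored.latest_workflow.name == 'Original name'   # an export would still see the stale name
stored.latest_workflow.name = 'Renamed'                 # the fix keeps both in sync
assert stored.latest_workflow.name == 'Renamed'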
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

3 new changesets in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/8024a7996c55/
changeset: 8024a7996c55
user: John Duddy
date: 2011-06-28 01:18:33
summary: Add ability for API to do things on another user's behalf
affected #: 2 files (1.1 KB)
--- a/lib/galaxy/web/framework/__init__.py Mon Jun 27 17:25:32 2011 -0400
+++ b/lib/galaxy/web/framework/__init__.py Mon Jun 27 16:18:33 2011 -0700
@@ -117,6 +117,23 @@
return error
trans.response.set_content_type( "application/json" )
trans.set_user( provided_key.user )
+# Perform api_run_as processing, possibly changing identity
+ if 'run_as' in kwargs:
+ if not trans.user_can_do_run_as():
+ error_message = 'User does not have permissions to run jobs as another user'
+ return error
+ try:
+ decoded_user_id = trans.security.decode_id( kwargs['run_as'] )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['run_as'] )
+ try:
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.set_user(user)
+ except:
+ trans.response.status = 400
+ return "That user does not exist."
+
if trans.debug:
return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
else:
@@ -590,6 +607,9 @@
def user_is_admin( self ):
admin_users = self.app.config.get( "admin_users", "" ).split( "," )
return self.user and admin_users and self.user.email in admin_users
+ def user_can_do_run_as( self ):
+ run_as_users = self.app.config.get( "api_allow_run_as", "" ).split( "," )
+ return self.user and run_as_users and self.user.email in run_as_users
def get_toolbox(self):
"""Returns the application toolbox"""
return self.app.toolbox
--- a/universe_wsgi.ini.sample Mon Jun 27 17:25:32 2011 -0400
+++ b/universe_wsgi.ini.sample Mon Jun 27 16:18:33 2011 -0700
@@ -433,6 +433,10 @@
# Enable the (experimental! beta!) Web API. Documentation forthcoming.
#enable_api = False
+# Optional list of email addresses of API users who can make calls on behalf of
+# other users
+#api_allow_run_as = None
+
# Enable tool tags (associating tools with tags). This has its own option
# since its implementation has a few performance implications on startup for
# large servers.
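A hypothetical client-side sketch of how the new option might be exercised (the endpoint, key, and encoded id below are placeholders, not values from the commit): an API user listed in api_allow_run_as passes the target user's encoded id as the run_as parameter and the request is then processed under that identity.
# Placeholder values throughout; this only shows the shape of a run_as request.
import urllib, urllib2
base_url = 'http://localhost:8080/api/histories'   # assumed Galaxy instance and endpoint
params = urllib.urlencode( { 'key': 'API_KEY_OF_RUN_AS_USER', 'run_as': 'ENCODED_TARGET_USER_ID' } )
print urllib2.urlopen( '%s?%s' % ( base_url, params ) ).read()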
http://bitbucket.org/galaxy/galaxy-central/changeset/2c7ddd0f8972/
changeset: 2c7ddd0f8972
user: John Duddy
date: 2011-07-29 21:39:29
summary: Promote run_as API users to admin if the API key user is an admin
affected #: 1 file (277 bytes)
--- a/lib/galaxy/web/framework/__init__.py Mon Jun 27 16:18:33 2011 -0700
+++ b/lib/galaxy/web/framework/__init__.py Fri Jul 29 12:39:29 2011 -0700
@@ -129,6 +129,7 @@
return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['run_as'] )
try:
user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.api_inherit_admin = trans.user_is_admin()
trans.set_user(user)
except:
trans.response.status = 400
@@ -145,13 +146,13 @@
def require_admin( func ):
def decorator( self, trans, *args, **kwargs ):
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- if not admin_users:
- return trans.show_error_message( "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration." )
- user = trans.get_user()
- if not user:
- return trans.show_error_message( "You must be logged in as an administrator to access this feature." )
- if not user.email in admin_users:
+ if not trans.user_is_admin():
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ if not admin_users:
+ return trans.show_error_message( "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration." )
+ user = trans.get_user()
+ if not user:
+ return trans.show_error_message( "You must be logged in as an administrator to access this feature." )
return trans.show_error_message( "You must be an administrator to access this feature." )
return func( self, trans, *args, **kwargs )
return decorator
@@ -214,6 +215,8 @@
# that the current history should not be used for parameter values
# and such).
self.workflow_building_mode = False
+ # Flag indicating whether this is an API call and the API key user is an administrator
+ self.api_inherit_admin = False
def setup_i18n( self ):
locales = []
if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
@@ -605,6 +608,8 @@
roles = []
return roles
def user_is_admin( self ):
+ if self.api_inherit_admin:
+ return True
admin_users = self.app.config.get( "admin_users", "" ).split( "," )
return self.user and admin_users and self.user.email in admin_users
def user_can_do_run_as( self ):
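The important detail is the ordering: the key user's admin status is captured before set_user() switches the identity, so an admin's key keeps admin rights while acting as a non-admin user. A simplified stand-alone illustration (these classes are stand-ins, not GalaxyWebTransaction):
# Stand-ins only; the real logic lives in lib/galaxy/web/framework/__init__.py.
class FakeTrans( object ):
    def __init__( self, key_user_is_admin ):
        self.api_inherit_admin = False
        self._admin_by_config = key_user_is_admin
    def user_is_admin( self ):
        return self.api_inherit_admin or self._admin_by_config
    def set_user( self, user ):
        self._admin_by_config = False   # pretend the run_as target is not in admin_users
trans = FakeTrans( key_user_is_admin=True )
trans.api_inherit_admin = trans.user_is_admin()   # remember the key user's status first
trans.set_user( 'target_user' )                   # identity switches to a non-admin
assert trans.user_is_admin()                      # still admin via api_inherit_admin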
http://bitbucket.org/galaxy/galaxy-central/changeset/cde2d0c27d96/
changeset: cde2d0c27d96
user: dannon
date: 2011-08-03 06:34:27
summary: Merge API run_as changes from John Duddy.
affected #: 2 files (1.4 KB)
--- a/lib/galaxy/web/framework/__init__.py Tue Aug 02 16:05:55 2011 -0400
+++ b/lib/galaxy/web/framework/__init__.py Wed Aug 03 00:34:27 2011 -0400
@@ -117,6 +117,24 @@
return error
trans.response.set_content_type( "application/json" )
trans.set_user( provided_key.user )
+# Perform api_run_as processing, possibly changing identity
+ if 'run_as' in kwargs:
+ if not trans.user_can_do_run_as():
+ error_message = 'User does not have permissions to run jobs as another user'
+ return error
+ try:
+ decoded_user_id = trans.security.decode_id( kwargs['run_as'] )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['run_as'] )
+ try:
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.api_inherit_admin = trans.user_is_admin()
+ trans.set_user(user)
+ except:
+ trans.response.status = 400
+ return "That user does not exist."
+
if trans.debug:
return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
else:
@@ -128,13 +146,13 @@
def require_admin( func ):
def decorator( self, trans, *args, **kwargs ):
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- if not admin_users:
- return trans.show_error_message( "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration." )
- user = trans.get_user()
- if not user:
- return trans.show_error_message( "You must be logged in as an administrator to access this feature." )
- if not user.email in admin_users:
+ if not trans.user_is_admin():
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ if not admin_users:
+ return trans.show_error_message( "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration." )
+ user = trans.get_user()
+ if not user:
+ return trans.show_error_message( "You must be logged in as an administrator to access this feature." )
return trans.show_error_message( "You must be an administrator to access this feature." )
return func( self, trans, *args, **kwargs )
return decorator
@@ -197,6 +215,8 @@
# that the current history should not be used for parameter values
# and such).
self.workflow_building_mode = False
+ # Flag indicating whether this is an API call and the API key user is an administrator
+ self.api_inherit_admin = False
def setup_i18n( self ):
locales = []
if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
@@ -593,8 +613,13 @@
roles = []
return roles
def user_is_admin( self ):
+ if self.api_inherit_admin:
+ return True
admin_users = self.app.config.get( "admin_users", "" ).split( "," )
return self.user and admin_users and self.user.email in admin_users
+ def user_can_do_run_as( self ):
+ run_as_users = self.app.config.get( "api_allow_run_as", "" ).split( "," )
+ return self.user and run_as_users and self.user.email in run_as_users
def get_toolbox(self):
"""Returns the application toolbox"""
return self.app.toolbox
--- a/universe_wsgi.ini.sample Tue Aug 02 16:05:55 2011 -0400
+++ b/universe_wsgi.ini.sample Wed Aug 03 00:34:27 2011 -0400
@@ -433,6 +433,10 @@
# Enable the (experimental! beta!) Web API. Documentation forthcoming.
#enable_api = False
+# Optional list of email addresses of API users who can make calls on behalf of
+# other users
+#api_allow_run_as = None
+
# Enable tool tags (associating tools with tags). This has its own option
# since its implementation has a few performance implications on startup for
# large servers.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: dan: Update GMAJ: fixes a ClassCastException that was encountered on Macs running Java 1.6.
by Bitbucket 02 Aug '11
1 new changeset in galaxy-central:
http://bitbucket.org/galaxy/galaxy-central/changeset/b4b6bfe0032c/
changeset: b4b6bfe0032c
user: dan
date: 2011-08-02 22:05:55
summary: Update GMAJ: fixes a ClassCastException that was encountered on Macs running Java 1.6.
affected #: 1 file (59.3 KB)
Binary file static/gmaj/gmaj.jar has changed
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.