galaxy-commits
Threads by month
- ----- 2025 -----
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions

commit/galaxy-central: dan: Fix two more places where filenames in content-dispositions were not being surrounded by quotes.
by Bitbucket 12 Apr '12
by Bitbucket 12 Apr '12
12 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/de28eda68d0c/
changeset: de28eda68d0c
user: dan
date: 2012-04-12 20:14:45
summary: Fix two more places where filenames in content-dispositions were not being surrounded by quotes.
affected #: 2 files
diff -r 84d49e39069d965a45097ade1b71a57cdc0a0386 -r de28eda68d0c40f1a71c5ebf95e06cb816dc890b lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py
+++ b/lib/galaxy/web/controllers/root.py
@@ -250,7 +250,7 @@
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
fname = data.name
fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
- trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, toext)
+ trans.response.headers["Content-Disposition"] = 'attachment; filename="GalaxyHistoryItem-%s-[%s]%s"' % (data.hid, fname, toext)
trans.log_event( "Display dataset id: %s" % str(id) )
try:
return open( data.file_name )
diff -r 84d49e39069d965a45097ade1b71a57cdc0a0386 -r de28eda68d0c40f1a71c5ebf95e06cb816dc890b lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -1109,7 +1109,7 @@
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
sname = stored.name
sname = ''.join(c in valid_chars and c or '_' for c in sname)[0:150]
- trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy-Workflow-%s.ga" % ( sname )
+ trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy-Workflow-%s.ga"' % ( sname )
trans.response.set_content_type( 'application/galaxy-archive' )
return stored_dict
@web.expose
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dan: Minor fix for clicking an OpenID provider to refresh credentials.
by Bitbucket 12 Apr '12
by Bitbucket 12 Apr '12
12 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/84d49e39069d/
changeset: 84d49e39069d
user: dan
date: 2012-04-12 18:16:24
summary: Minor fix for clicking an OpenID provider to refresh credentials.
affected #: 1 file
diff -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 -r 84d49e39069d965a45097ade1b71a57cdc0a0386 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -29,7 +29,7 @@
default_filter = { "openid" : "All" }
default_sort_key = "-create_time"
columns = [
- grids.TextColumn( "OpenID URL", key="openid", link=( lambda x: dict( operation='openid_auth', login_button="Login", openid_url=x.openid if not x.provider else '', openid_provider=x.provider, auto_associate=True ) ) ),
+ grids.TextColumn( "OpenID URL", key="openid", link=( lambda x: dict( action='openid_auth', login_button="Login", openid_url=x.openid if not x.provider else '', openid_provider=x.provider, auto_associate=True ) ) ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
]
operations = [
@@ -395,8 +395,6 @@
action='openid_disassociate',
use_panels=use_panels,
id=kwd['id'] ) )
- elif operation == 'openid_auth':
- return trans.response.send_redirect( url_for( controller='user', action='openid_auth', **kwd ) )
kwd['redirect'] = kwd.get( 'redirect', url_for( controller='user', action='openid_manage', use_panels=True ) )
kwd['openid_providers'] = trans.app.openid_providers
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d88a9fa7041c/
changeset: d88a9fa7041c
user: dan
date: 2012-04-12 17:32:01
summary: Rework OpenID process.
Add a never_associate_with_user flag to OpenID providers that will prevent an OpenID Provider from being able to be used for logging in to Galaxy. Post authentication actions will still be performed. This is now used for the GenomeSpace OpenID Provider until it is working correctly.
affected #: 10 files
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -76,6 +76,7 @@
Column( "session_id", Integer, ForeignKey( "galaxy_session.id" ), index=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
Column( "openid", TEXT, index=True, unique=True ),
+ Column( "provider", TrimmedString( 255 ) ),
)
History.table = Table( "history", metadata,
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 lib/galaxy/model/migrate/versions/0096_openid_provider.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0096_openid_provider.py
@@ -0,0 +1,45 @@
+"""
+Migration script to add column to openid table for provider.
+Remove any OpenID entries with nonunique GenomeSpace Identifier
+"""
+
+BAD_IDENTIFIER = 'https://identity.genomespace.org/identityServer/xrd.jsp'
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+from galaxy.model.custom_types import TrimmedString
+
+import logging
+log = logging.getLogger( __name__ )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+
+ try:
+ OpenID_table = Table( "galaxy_user_openid", metadata, autoload=True )
+ c = Column( "provider", TrimmedString( 255 ) )
+ c.create( OpenID_table )
+ assert c is OpenID_table.c.provider
+ except Exception, e:
+ print "Adding provider column to galaxy_user_openid table failed: %s" % str( e )
+ log.debug( "Adding provider column to galaxy_user_openid table failed: %s" % str( e ) )
+
+ try:
+ cmd = "DELETE FROM galaxy_user_openid WHERE openid='%s'" % ( BAD_IDENTIFIER )
+ db_session.execute( cmd )
+ except Exception, e:
+ log.debug( "Deleting bad Identifiers from galaxy_user_openid failed: %s" % str( e ) )
+
+def downgrade():
+ metadata.reflect()
+ try:
+ OpenID_table = Table( "galaxy_user_openid", metadata, autoload=True )
+ OpenID_table.c.provider.drop()
+ except Exception, e:
+ print "Dropping provider column from galaxy_user_openid table failed: %s" % str( e )
+ log.debug( "Dropping provider column from galaxy_user_openid table failed: %s" % str( e ) )
\ No newline at end of file
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 lib/galaxy/openid/providers.py
--- a/lib/galaxy/openid/providers.py
+++ b/lib/galaxy/openid/providers.py
@@ -9,6 +9,9 @@
log = logging.getLogger( __name__ )
+NO_PROVIDER_ID = 'None'
+RESERVED_PROVIDER_IDS = [ NO_PROVIDER_ID ]
+
class OpenIDProvider( object ):
'''An OpenID Provider object.'''
@classmethod
@@ -22,7 +25,9 @@
op_endpoint_url = provider_elem.find( 'op_endpoint_url' )
if op_endpoint_url is not None:
op_endpoint_url = op_endpoint_url.text
+ never_associate_with_user = string_as_bool( provider_elem.get( 'never_associate_with_user', 'False' ) )
assert (provider_id and provider_name and op_endpoint_url), Exception( "OpenID Provider improperly configured" )
+ assert provider_id not in RESERVED_PROVIDER_IDS, Exception( 'Specified OpenID Provider uses a reserved id: %s' % ( provider_id ) )
sreg_required = []
sreg_optional = []
use_for = {}
@@ -45,8 +50,8 @@
sreg_required = None
sreg_optional = None
use_for = None
- return cls( provider_id, provider_name, op_endpoint_url, sreg_required, sreg_optional, use_for, store_user_preference )
- def __init__( self, id, name, op_endpoint_url, sreg_required=None, sreg_optional=None, use_for=None, store_user_preference=None ):
+ return cls( provider_id, provider_name, op_endpoint_url, sreg_required=sreg_required, sreg_optional=sreg_optional, use_for=use_for, store_user_preference=store_user_preference, never_associate_with_user=never_associate_with_user )
+ def __init__( self, id, name, op_endpoint_url, sreg_required=None, sreg_optional=None, use_for=None, store_user_preference=None, never_associate_with_user=None ):
'''When sreg options are not specified, defaults are used.'''
self.id = id
self.name = name
@@ -71,6 +76,10 @@
self.store_user_preference = store_user_preference
else:
self.store_user_preference = {}
+ if never_associate_with_user:
+ self.never_associate_with_user = True
+ else:
+ self.never_associate_with_user = False
def post_authentication( self, trans, openid_manager, info ):
sreg_attributes = openid_manager.get_sreg( info )
for store_pref_name, store_pref_value_name in self.store_user_preference.iteritems():
@@ -80,9 +89,12 @@
raise Exception( 'Only sreg is currently supported.' )
trans.sa_session.add( trans.user )
trans.sa_session.flush()
+ def has_post_authentication_actions( self ):
+ return bool( self.store_user_preference )
class OpenIDProviders( object ):
'''Collection of OpenID Providers'''
+ NO_PROVIDER_ID = NO_PROVIDER_ID
@classmethod
def from_file( cls, filename ):
try:
@@ -107,6 +119,7 @@
self.providers = providers
else:
self.providers = odict()
+ self._banned_identifiers = [ provider.op_endpoint_url for provider in self.providers.itervalues() if provider.never_associate_with_user ]
def __iter__( self ):
for provider in self.providers.itervalues():
yield provider
@@ -115,3 +128,5 @@
return self.providers[ name ]
else:
return default
+ def new_provider_from_identifier( self, identifier ):
+ return OpenIDProvider( None, identifier, identifier, never_associate_with_user = identifier in self._banned_identifiers )
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py
+++ b/lib/galaxy/web/controllers/tool_runner.py
@@ -59,7 +59,7 @@
trans.log_event( "Tool id '%s' does not exist" % tool_id )
return "Tool '%s' does not exist, kwd=%s " % (tool_id, kwd)
if tool.require_login and not trans.user:
- return trans.response.send_redirect( url_for( controller='user', action='login', cntrller='user', message="You must be logged in to use this tool.", status="info", referer=url_for( controller='/tool_runner', action='index', tool_id=tool_id, **kwd ) ) )
+ return trans.response.send_redirect( url_for( controller='user', action='login', cntrller='user', message="You must be logged in to use this tool.", status="info", redirect=url_for( controller='/tool_runner', action='index', tool_id=tool_id, **kwd ) ) )
params = util.Params( kwd, sanitize = False ) #Sanitize parameters when substituting into command line via input wrappers
#do param translation here, used by datasource tools
if tool.input_translator:
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py
+++ b/lib/galaxy/web/controllers/user.py
@@ -11,7 +11,6 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.web.framework.helpers import iff
from galaxy.security.validate_user_input import validate_email, validate_publicname, validate_password
-from galaxy.openid.providers import OpenIDProvider
log = logging.getLogger( __name__ )
@@ -30,7 +29,7 @@
default_filter = { "openid" : "All" }
default_sort_key = "-create_time"
columns = [
- grids.TextColumn( "OpenID URL", key="openid" ),
+ grids.TextColumn( "OpenID URL", key="openid", link=( lambda x: dict( operation='openid_auth', login_button="Login", openid_url=x.openid if not x.provider else '', openid_provider=x.provider, auto_associate=True ) ) ),
grids.GridColumn( "Created", key="create_time", format=time_ago ),
]
operations = [
@@ -48,32 +47,30 @@
return trans.fill_template( '/user/index.mako', cntrller=cntrller, webapp=webapp )
@web.expose
def openid_auth( self, trans, webapp='galaxy', **kwd ):
+ '''Handles user request to access an OpenID provider'''
if not trans.app.config.enable_openid:
return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
message = 'Unspecified failure authenticating via OpenID'
status = kwd.get( 'status', 'done' )
openid_url = kwd.get( 'openid_url', '' )
openid_provider = kwd.get( 'openid_provider', '' )
- referer = kwd.get( 'referer', trans.request.referer )
+ if not openid_provider or openid_url:
+ openid_provider = trans.app.openid_providers.NO_PROVIDER_ID #empty fields cause validation errors
+ redirect = kwd.get( 'redirect', '' )
auto_associate = util.string_as_bool( kwd.get( 'auto_associate', False ) )
use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
action = 'login'
- if auto_associate:
- action = 'openid_manage'
- if not referer:
- referer = url_for( '/' )
+ if not redirect:
+ redirect = url_for( '/' )
consumer = trans.app.openid_manager.get_consumer( trans )
- openid_provider_obj = None
- if not openid_url and openid_provider and trans.app.openid_providers.get( openid_provider ):
+ if openid_url:
+ openid_provider_obj = trans.app.openid_providers.new_provider_from_identifier( openid_url )
+ else:
openid_provider_obj = trans.app.openid_providers.get( openid_provider )
- elif openid_url:
- openid_provider_obj = OpenIDProvider( openid_url, openid_url, openid_url ) #for manually entered links use the link for id, name and url
- elif openid_provider:
- message = 'Invalid OpenID provider specified: %s' % ( openid_provider )
- else:
+ if not openid_url and openid_provider == trans.app.openid_providers.NO_PROVIDER_ID:
message = 'An OpenID provider was not specified'
- process_url = trans.request.base.rstrip( '/' ) + url_for( controller='user', action='openid_process', referer=referer, auto_associate=auto_associate, openid_provider=openid_provider )
- if openid_provider_obj is not None:
+ elif openid_provider_obj:
+ process_url = trans.request.base.rstrip( '/' ) + url_for( controller='user', action='openid_process', redirect=redirect, openid_provider=openid_provider, auto_associate=auto_associate )
request = None
try:
request = consumer.begin( openid_provider_obj.op_endpoint_url )
@@ -87,84 +84,96 @@
redirect_url = request.redirectURL(
trans.request.base, process_url )
trans.app.openid_manager.persist_session( trans, consumer )
- trans.response.send_redirect( redirect_url )
- return
+ return trans.response.send_redirect( redirect_url )
else:
form = request.htmlMarkup( trans.request.base, process_url, form_tag_attrs={'id':'openid_message','target':'_top'} )
trans.app.openid_manager.persist_session( trans, consumer )
return form
return trans.response.send_redirect( url_for( controller='user',
action=action,
+ redirect=redirect,
use_panels=use_panels,
message=message,
status='error' ) )
@web.expose
def openid_process( self, trans, webapp='galaxy', **kwd ):
+ '''Handle's response from OpenID Providers'''
if not trans.app.config.enable_openid:
return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
auto_associate = util.string_as_bool( kwd.get( 'auto_associate', False ) )
action = 'login'
- if auto_associate:
+ if trans.user:
action = 'openid_manage'
if trans.app.config.support_url is not None:
contact = '<a href="%s">support</a>' % trans.app.config.support_url
else:
contact = 'support'
- message = 'Verification failed for an unknown reason. Please contact support for assistance.'
+ message = 'Verification failed for an unknown reason. Please contact %s for assistance.' % ( contact )
status = 'error'
consumer = trans.app.openid_manager.get_consumer( trans )
info = consumer.complete( kwd, trans.request.url )
display_identifier = info.getDisplayIdentifier()
- redirect_url = kwd.get( 'referer', url_for( '/' ) )
- openid_provider = kwd.get( 'openid_provider', '' )
+ redirect = kwd.get( 'redirect', url_for( '/' ) )
+ openid_provider = kwd.get( 'openid_provider', None )
if info.status == trans.app.openid_manager.FAILURE and display_identifier:
message = "Login via OpenID failed. The technical reason for this follows, please include this message in your email if you need to %s to resolve this problem: %s" % ( contact, info.message )
return trans.response.send_redirect( url_for( controller='user',
action=action,
use_panels=True,
+ redirect=redirect,
message=message,
status='error' ) )
elif info.status == trans.app.openid_manager.SUCCESS:
if info.endpoint.canonicalID:
display_identifier = info.endpoint.canonicalID
+ openid_provider_obj = trans.app.openid_providers.get( openid_provider )
user_openid = trans.sa_session.query( trans.app.model.UserOpenID ).filter( trans.app.model.UserOpenID.table.c.openid == display_identifier ).first()
- openid_provider_obj = trans.app.openid_providers.get( openid_provider )
+ if not openid_provider_obj and user_openid and user_openid.provider:
+ openid_provider_obj = trans.app.openid_providers.get( user_openid.provider )
if not openid_provider_obj:
- openid_provider_obj = OpenIDProvider( display_identifier, display_identifier, display_identifier )
+ openid_provider_obj = trans.app.openid_providers.new_provider_from_identifier( display_identifier )
if not user_openid:
user_openid = trans.app.model.UserOpenID( session=trans.galaxy_session, openid=display_identifier )
- elif not user_openid.user and user_openid.session.id != trans.galaxy_session.id:
+ if not user_openid.user:
user_openid.session = trans.galaxy_session
- elif user_openid.user and not auto_associate:
+ if not user_openid.provider and openid_provider:
+ user_openid.provider = openid_provider
+ if trans.user:
+ if user_openid.user and user_openid.user.id != trans.user.id:
+ message = "The OpenID <strong>%s</strong> is already associated with another Galaxy account, <strong>%s</strong>. Please disassociate it from that account before attempting to associate it with a new account." % ( display_identifier, user_openid.user.email )
+ status = "error"
+ elif not user_openid.user or user_openid.user == trans.user:
+ if openid_provider_obj.id:
+ user_openid.provider = openid_provider_obj.id
+ user_openid.session = trans.galaxy_session
+ if not openid_provider_obj.never_associate_with_user:
+ if not auto_associate and ( user_openid.user and user_openid.user.id == trans.user.id ):
+ message = "The OpenID <strong>%s</strong> is already associated with your Galaxy account, <strong>%s</strong>." % ( display_identifier, trans.user.email )
+ status = "warning"
+ else:
+ message = "The OpenID <strong>%s</strong> has been associated with your Galaxy account, <strong>%s</strong>." % ( display_identifier, trans.user.email )
+ status = "done"
+ user_openid.user = trans.user
+ trans.sa_session.add( user_openid )
+ trans.sa_session.flush()
+ trans.log_event( "User associated OpenID: %s" % display_identifier )
+ else:
+ message = "The OpenID <strong>%s</strong> cannot be used to log into your Galaxy account, but any post authentication actions have been performed." % ( openid_provider_obj.name )
+ status ="info"
+ openid_provider_obj.post_authentication( trans, trans.app.openid_manager, info )
+ if redirect:
+ message = '%s<br>Click <a href="%s"><strong>here</strong></a> to return to the page you were previously viewing.' % ( message, redirect )
+ return trans.response.send_redirect( url_for( controller='user',
+ action='openid_manage',
+ use_panels=True,
+ redirect=redirect,
+ message=message,
+ status=status ) )
+ elif user_openid.user:
trans.handle_user_login( user_openid.user, webapp )
trans.log_event( "User logged in via OpenID: %s" % display_identifier )
openid_provider_obj.post_authentication( trans, trans.app.openid_manager, info )
- trans.response.send_redirect( redirect_url )
- return
- if auto_associate and trans.user:
- # The user is already logged in and requested association from
- # the user prefs as opposed to using the OpenID form on the
- # login page.
- if user_openid.user and user_openid.user.id != trans.user.id:
- message = "The OpenID <strong>%s</strong> is already associated with another Galaxy account, <strong>%s</strong>. Please disassociate it from that account before attempting to associate it with a new account." % ( display_identifier, user_openid.user.email )
- status = "error"
- elif user_openid.user and user_openid.user.id == trans.user.id:
- message = "The OpenID <strong>%s</strong> is already associated with your Galaxy account, <strong>%s</strong>." % ( display_identifier, trans.user.email )
- status = "warning"
- else:
- user_openid.user_id = trans.user.id
- trans.sa_session.add( user_openid )
- trans.sa_session.flush()
- trans.log_event( "User associated OpenID: %s" % display_identifier )
- message = "The OpenID <strong>%s</strong> has been associated with your Galaxy account, <strong>%s</strong>." % ( display_identifier, trans.user.email )
- status = "done"
- openid_provider_obj.post_authentication( trans, trans.app.openid_manager, info )
- trans.response.send_redirect( url_for( controller='user',
- action='openid_manage',
- use_panels=True,
- message=message,
- status=status ) )
- return
+ return trans.response.send_redirect( redirect )
trans.sa_session.add( user_openid )
trans.sa_session.flush()
message = "OpenID authentication was successful, but you need to associate your OpenID with a Galaxy account."
@@ -179,10 +188,11 @@
email = sreg_resp.get( sreg_email_name, '' )
except AttributeError:
email = ''
- trans.response.send_redirect( url_for( controller='user',
+ #OpenID success, but user not logged in, and not previously associated
+ return trans.response.send_redirect( url_for( controller='user',
action='openid_associate',
- openid_provider=openid_provider,
use_panels=True,
+ redirect=redirect,
username=username,
email=email,
message=message,
@@ -198,10 +208,12 @@
return trans.response.send_redirect( url_for( controller='user',
action=action,
use_panels=True,
+ redirect=redirect,
message=message,
status=status ) )
@web.expose
def openid_associate( self, trans, cntrller='user', webapp='galaxy', **kwd ):
+ '''Associates a user with an OpenID log in'''
if not trans.app.config.enable_openid:
return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
@@ -209,9 +221,7 @@
status = kwd.get( 'status', 'done' )
email = kwd.get( 'email', '' )
username = kwd.get( 'username', '' )
- referer = kwd.get( 'referer', trans.request.referer )
- openid_provider = kwd.get( 'openid_provider', '' )
- openid_provider_obj = trans.app.openid_providers.get( openid_provider )
+ redirect = kwd.get( 'redirect', '' )
params = util.Params( kwd )
is_admin = cntrller == 'admin' and trans.user_is_admin()
openids = trans.galaxy_session.openids
@@ -223,18 +233,33 @@
if kwd.get( 'login_button', False ):
message, status, user, success = self.__validate_login( trans, webapp, **kwd )
if success:
+ openid_objs = []
for openid in openids:
- openid.user = user
- trans.sa_session.add( openid )
+ openid_provider_obj = trans.app.openid_providers.get( openid.provider )
+ if not openid_provider_obj or not openid_provider_obj.never_associate_with_user:
+ openid.user = user
+ trans.sa_session.add( openid )
+ trans.log_event( "User associated OpenID: %s" % openid.openid )
+ if openid_provider_obj and openid_provider_obj.has_post_authentication_actions():
+ openid_objs.append( openid_provider_obj )
trans.sa_session.flush()
- for openid in openids:
- trans.log_event( "User associated OpenID: %s" % openid.openid )
- redirect_url = referer
- if not redirect_url:
- redirect_url = url_for( '/' )
- if openid_provider_obj:
- return trans.response.send_redirect( url_for( controller='user', action='openid_auth', openid_provider=openid_provider, referer=redirect_url ) )
- return trans.response.send_redirect( redirect_url )
+ if len( openid_objs ) == 1:
+ return trans.response.send_redirect( url_for( controller='user', action='openid_auth', openid_provider=openid_objs[0].id, redirect=redirect, auto_associate=True ) )
+ elif openid_objs:
+ message = 'You have authenticated with several OpenID providers, please click the following links to execute the post authentication actions. '
+ message = "%s<br/><ul>" % ( message )
+ for openid in openid_objs:
+ message = '%s<li><a href="%s" target="_blank">%s</a></li>' % ( message, url_for( controller='user', action='openid_auth', openid_provider=openid.id, redirect=redirect, auto_associate=True ), openid.name )
+ message = "%s</ul>" % ( message )
+ return trans.response.send_redirect( url_for( controller='user',
+ action='openid_manage',
+ use_panels=True,
+ redirect=redirect,
+ message=message,
+ status='info' ) )
+ if not redirect:
+ redirect = url_for( '/' )
+ return trans.response.send_redirect( redirect )
if kwd.get( 'create_user_button', False ):
password = kwd.get( 'password', '' )
confirm = kwd.get( 'confirm', '' )
@@ -253,21 +278,35 @@
subscribe_checked,
**kwd )
if success:
- trans.handle_user_login( user, webapp )
- trans.log_event( "User created a new account" )
- trans.log_event( "User logged in" )
+ openid_objs = []
for openid in openids:
- openid.user = user
- trans.sa_session.add( openid )
+ openid_provider_obj = trans.app.openid_providers.get( openid.provider )
+ if not openid_provider_obj:
+ openid_provider_obj = trans.app.openid_providers.new_provider_from_identifier( openid.identifier )
+ if not openid_provider_obj.never_associate_with_user:
+ openid.user = user
+ trans.sa_session.add( openid )
+ trans.log_event( "User associated OpenID: %s" % openid.openid )
+ if openid_provider_obj.has_post_authentication_actions():
+ openid_objs.append( openid_provider_obj )
trans.sa_session.flush()
- for openid in openids:
- trans.log_event( "User associated OpenID: %s" % openid.openid )
- redirect_url = referer
- if not redirect_url:
- redirect_url = url_for( '/' )
- if openid_provider_obj:
- return trans.response.send_redirect( url_for( controller='user', action='openid_auth', openid_provider=openid_provider, referer=redirect_url ) )
- return trans.response.send_redirect( redirect_url )
+ if len( openid_objs ) == 1:
+ return trans.response.send_redirect( url_for( controller='user', action='openid_auth', openid_provider=openid_objs[0].id, redirect=redirect, auto_associate=True ) )
+ elif openid_objs:
+ message = 'You have authenticated with several OpenID providers, please click the following links to execute the post authentication actions. '
+ message = "%s<br/><ul>" % ( message )
+ for openid in openid_objs:
+ message = '%s<li><a href="%s" target="_blank">%s</a></li>' % ( message, url_for( controller='user', action='openid_auth', openid_provider=openid.id, redirect=redirect, auto_associate=True ), openid.name )
+ message = "%s</ul>" % ( message )
+ return trans.response.send_redirect( url_for( controller='user',
+ action='openid_manage',
+ use_panels=True,
+ redirect=redirect,
+ message=message,
+ status='info' ) )
+ if not redirect:
+ redirect = url_for( '/' )
+ return trans.response.send_redirect( redirect )
else:
message = error
status = 'error'
@@ -291,8 +330,7 @@
username=username,
header='',
use_panels=use_panels,
- redirect_url='',
- referer='',
+ redirect=redirect,
refresh_frames=[],
message=message,
status=status,
@@ -301,11 +339,11 @@
user_type_fd_id_select_field=user_type_fd_id_select_field,
user_type_form_definition=user_type_form_definition,
widgets=widgets,
- openids=openids,
- openid_provider=openid_provider )
+ openids=openids )
@web.expose
@web.require_login( 'manage OpenIDs' )
def openid_disassociate( self, trans, webapp='galaxy', **kwd ):
+ '''Disassociates a user with an OpenID'''
if not trans.app.config.enable_openid:
return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
params = util.Params( kwd )
@@ -338,7 +376,7 @@
trans.log_event( "User disassociated OpenID: %s" % deleted_url )
message = '%s OpenIDs were disassociated from your Galaxy account.' % len( ids )
status = 'done'
- trans.response.send_redirect( url_for( controller='user',
+ return trans.response.send_redirect( url_for( controller='user',
action='openid_manage',
use_panels=use_panels,
message=message,
@@ -346,29 +384,33 @@
@web.expose
@web.require_login( 'manage OpenIDs' )
def openid_manage( self, trans, webapp='galaxy', **kwd ):
+ '''Manage OpenIDs for user'''
if not trans.app.config.enable_openid:
return trans.show_error_message( 'OpenID authentication is not enabled in this instance of Galaxy' )
use_panels = kwd.get( 'use_panels', False )
if 'operation' in kwd:
operation = kwd['operation'].lower()
if operation == "delete":
- trans.response.send_redirect( url_for( controller='user',
+ return trans.response.send_redirect( url_for( controller='user',
action='openid_disassociate',
use_panels=use_panels,
id=kwd['id'] ) )
- kwd['referer'] = url_for( controller='user', action='openid_manage', use_panels=True )
+ elif operation == 'openid_auth':
+ return trans.response.send_redirect( url_for( controller='user', action='openid_auth', **kwd ) )
+
+ kwd['redirect'] = kwd.get( 'redirect', url_for( controller='user', action='openid_manage', use_panels=True ) )
kwd['openid_providers'] = trans.app.openid_providers
return self.user_openid_grid( trans, **kwd )
@web.expose
def login( self, trans, webapp='galaxy', redirect_url='', refresh_frames=[], **kwd ):
- referer = kwd.get( 'referer', trans.request.referer )
+ '''Handle Galaxy Log in'''
+ redirect = kwd.get( 'redirect', trans.request.referer )
use_panels = util.string_as_bool( kwd.get( 'use_panels', False ) )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
header = ''
user = None
email = kwd.get( 'email', '' )
- openid_provider = kwd.get( 'openid_provider', '' )
if kwd.get( 'login_button', False ):
if webapp == 'galaxy' and not refresh_frames:
if trans.app.config.require_login:
@@ -376,8 +418,8 @@
else:
refresh_frames = [ 'masthead', 'history' ]
message, status, user, success = self.__validate_login( trans, webapp, **kwd )
- if success and referer and not referer.startswith( trans.request.base + url_for( controller='user', action='logout' ) ):
- redirect_url = referer
+ if success and redirect and not redirect.startswith( trans.request.base + url_for( controller='user', action='logout' ) ):
+ redirect_url = redirect
elif success:
redirect_url = url_for( '/' )
if not user and trans.app.config.require_login:
@@ -399,7 +441,7 @@
header=header,
use_panels=use_panels,
redirect_url=redirect_url,
- referer=referer,
+ redirect=redirect,
refresh_frames=refresh_frames,
message=message,
status=status,
@@ -410,7 +452,7 @@
status = kwd.get( 'status', 'done' )
email = kwd.get( 'email', '' )
password = kwd.get( 'password', '' )
- referer = kwd.get( 'referer', trans.request.referer )
+ redirect = kwd.get( 'redirect', trans.request.referer )
success = False
user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first()
if not user:
@@ -430,7 +472,7 @@
if webapp == 'galaxy':
trans.log_event( "User logged in" )
message = 'You are now logged in as %s.<br>You can <a target="_top" href="%s">go back to the page you were visiting</a> or <a target="_top" href="%s">go to the home page</a>.' % \
- ( user.email, referer, url_for( '/' ) )
+ ( user.email, redirect, url_for( '/' ) )
if trans.app.config.require_login:
message += ' <a target="_top" href="%s">Click here</a> to continue to the home page.' % web.url_for( '/static/welcome.html' )
success = True
@@ -470,7 +512,7 @@
username = util.restore_text( params.get( 'username', '' ) )
subscribe = params.get( 'subscribe', '' )
subscribe_checked = CheckboxField.is_checked( subscribe )
- referer = kwd.get( 'referer', trans.request.referer )
+ redirect = kwd.get( 'redirect', trans.request.referer )
is_admin = cntrller == 'admin' and trans.user_is_admin
if not trans.app.config.allow_user_creation and not trans.user_is_admin():
message = 'User registration is disabled. Please contact your Galaxy administrator for an account.'
@@ -534,7 +576,7 @@
widgets=widgets,
webapp=webapp,
use_panels=use_panels,
- referer=referer,
+ redirect=redirect,
redirect_url=redirect_url,
refresh_frames=refresh_frames,
message=message,
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 openid/genomespace.xml
--- a/openid/genomespace.xml
+++ b/openid/genomespace.xml
@@ -1,5 +1,5 @@
<?xml version="1.0"?>
-<provider id="genomespace" name="GenomeSpace">
+<provider id="genomespace" name="GenomeSpace" never_associate_with_user="True"><op_endpoint_url>https://identity.genomespace.org/identityServer/xrd.jsp</op_endpoint_url><sreg><field name="nickname" required="True">
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 templates/user/login.mako
--- a/templates/user/login.mako
+++ b/templates/user/login.mako
@@ -50,7 +50,7 @@
%if trans.app.config.enable_openid:
<br/>
- ${render_openid_form( referer, False, openid_providers )}
+ ${render_openid_form( redirect, False, openid_providers )}
%endif
%endif
@@ -59,7 +59,7 @@
</%def>
-<%def name="render_login_form( form_action=None, openid_provider='' )">
+<%def name="render_login_form( form_action=None )"><%
if form_action is None:
@@ -76,8 +76,7 @@
<label>Email address:</label><input type="text" name="email" value="${email}" size="40"/><input type="hidden" name="webapp" value="${webapp}" size="40"/>
- <input type="hidden" name="referer" value="${referer}" size="40"/>
- <input type="hidden" name="openid_provider" value="${openid_provider}" />
+ <input type="hidden" name="redirect" value="${redirect}" size="40"/></div><div class="form-row"><label>Password:</label>
@@ -94,7 +93,7 @@
</%def>
-<%def name="render_openid_form( referer, auto_associate, openid_providers )">
+<%def name="render_openid_form( redirect, auto_associate, openid_providers )"><div class="toolForm"><div class="toolFormTitle">OpenID Login</div><form name="openid" id="openid" action="${h.url_for( controller='user', action='openid_auth' )}" method="post" target="_parent" >
@@ -102,8 +101,7 @@
<label>OpenID URL:</label><input type="text" name="openid_url" size="60" style="background-image:url('${h.url_for( '/static/images/openid-16x16.gif' )}' ); background-repeat: no-repeat; padding-right: 20px; background-position: 99% 50%;"/><input type="hidden" name="webapp" value="${webapp}" size="40"/>
- <input type="hidden" name="referer" value="${referer}" size="40"/>
- <input type="hidden" name="auto_associate" value="${auto_associate}" size="40"/>
+ <input type="hidden" name="redirect" value="${redirect}" size="40"/></div><div class="form-row">
Or, authenticate with your <select name="openid_provider">
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 templates/user/openid_associate.mako
--- a/templates/user/openid_associate.mako
+++ b/templates/user/openid_associate.mako
@@ -65,11 +65,11 @@
<% form_action = h.url_for( cntrller=cntrller, use_panels=use_panels ) %>
- ${render_login_form( form_action=form_action, openid_provider=openid_provider )}
+ ${render_login_form( form_action=form_action )}
<br/>
- ${render_registration_form( form_action=form_action, openid_provider=openid_provider )}
+ ${render_registration_form( form_action=form_action )}
</div></div>
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 templates/user/openid_manage.mako
--- a/templates/user/openid_manage.mako
+++ b/templates/user/openid_manage.mako
@@ -7,7 +7,7 @@
<%def name="grid_body( grid )">
${make_grid( grid )}
<h2>Associate more OpenIDs</h2>
- ${render_openid_form( kwargs['referer'], True, kwargs['openid_providers'] )}
+ ${render_openid_form( kwargs['redirect'], True, kwargs['openid_providers'] )}
</%def><%def name="center_panel()">
diff -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf -r d88a9fa7041c02f8c448eecb2a86c93b5c47d6a0 templates/user/register.mako
--- a/templates/user/register.mako
+++ b/templates/user/register.mako
@@ -21,7 +21,7 @@
${render_registration_form()}
%endif
-<%def name="render_registration_form( form_action=None, openid_provider='' )">
+<%def name="render_registration_form( form_action=None )"><%
if form_action is None:
@@ -37,8 +37,7 @@
<label>Email address:</label><input type="text" name="email" value="${email}" size="40"/><input type="hidden" name="webapp" value="${webapp}" size="40"/>
- <input type="hidden" name="referer" value="${referer}" size="40"/>
- <input type="hidden" name="openid_provider" value="${openid_provider}" />
+ <input type="hidden" name="redirect" value="${redirect}" size="40"/></div><div class="form-row"><label>Password:</label>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Remove enable_api flag; API is now enabled by default and cannot be disabled.
by Bitbucket 11 Apr '12
by Bitbucket 11 Apr '12
11 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8376ad08ae41/
changeset: 8376ad08ae41
user: jgoecks
date: 2012-04-11 20:45:44
summary: Remove enable_api flag; API is now enabled by default and cannot be disabled.
affected #: 9 files
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -42,8 +42,6 @@
tempfile.tempdir = self.new_file_path
self.openid_consumer_cache_path = resolve_path( kwargs.get( "openid_consumer_cache_path", "database/openid_consumer_cache" ), self.root )
self.cookie_path = kwargs.get( "cookie_path", "/" )
- # web API
- self.enable_api = string_as_bool( kwargs.get( 'enable_api', False ) )
# Galaxy OpenID settings
self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
self.openid_config = kwargs.get( 'openid_config_file', 'openid_conf.xml' )
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -103,37 +103,37 @@
webapp.add_route( '/u/:username/h/:slug', controller='history', action='display_by_username_and_slug' )
webapp.add_route( '/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug' )
webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' )
- # If enabled, add the web API
- if asbool( kwargs.get( 'enable_api', False ) ):
- add_api_controllers( webapp, app )
- webapp.api_mapper.resource( 'content',
- 'contents',
- controller='library_contents',
- name_prefix='library_',
- path_prefix='/api/libraries/:library_id',
- parent_resources=dict( member_name='library', collection_name='libraries' ) )
- webapp.api_mapper.resource( 'content',
- 'contents',
- controller='history_contents',
- name_prefix='history_',
- path_prefix='/api/histories/:history_id',
- parent_resources=dict( member_name='history', collection_name='histories' ) )
- webapp.api_mapper.resource( 'permission',
- 'permissions',
- path_prefix='/api/libraries/:library_id',
- parent_resources=dict( member_name='library', collection_name='libraries' ) )
- webapp.api_mapper.resource( 'library', 'libraries', path_prefix='/api' )
- webapp.api_mapper.resource( 'sample', 'samples', path_prefix='/api' )
- webapp.api_mapper.resource( 'request', 'requests', path_prefix='/api' )
- webapp.api_mapper.resource( 'form', 'forms', path_prefix='/api' )
- webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
- webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' )
- webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
- webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
- webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
- webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
- webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
- #webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
+
+ # Add the web API
+ add_api_controllers( webapp, app )
+ webapp.api_mapper.resource( 'content',
+ 'contents',
+ controller='library_contents',
+ name_prefix='library_',
+ path_prefix='/api/libraries/:library_id',
+ parent_resources=dict( member_name='library', collection_name='libraries' ) )
+ webapp.api_mapper.resource( 'content',
+ 'contents',
+ controller='history_contents',
+ name_prefix='history_',
+ path_prefix='/api/histories/:history_id',
+ parent_resources=dict( member_name='history', collection_name='histories' ) )
+ webapp.api_mapper.resource( 'permission',
+ 'permissions',
+ path_prefix='/api/libraries/:library_id',
+ parent_resources=dict( member_name='library', collection_name='libraries' ) )
+ webapp.api_mapper.resource( 'library', 'libraries', path_prefix='/api' )
+ webapp.api_mapper.resource( 'sample', 'samples', path_prefix='/api' )
+ webapp.api_mapper.resource( 'request', 'requests', path_prefix='/api' )
+ webapp.api_mapper.resource( 'form', 'forms', path_prefix='/api' )
+ webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
+ webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' )
+ webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
+ webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
+ webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
+ webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
+ webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
+ #webapp.api_mapper.connect( 'run_workflow', '/api/workflow/{workflow_id}/library/{library_id}', controller='workflows', action='run', workflow_id=None, library_id=None, conditions=dict(method=["GET"]) )
webapp.finalize_config()
# Wrap the webapp in some useful middleware
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py
+++ b/lib/galaxy/web/controllers/requests_admin.py
@@ -625,9 +625,6 @@
or not scp_configs.get( 'user_name', '' ) \
or not scp_configs.get( 'password', '' ):
err_msg += "Error in external service login information. "
- # Make sure web API is enabled and API key exists
- if not trans.app.config.enable_api:
- err_msg += "The 'enable_api = True' setting is not correctly set in the Galaxy config file. "
if not trans.user.api_keys:
err_msg += "Set your API Key in your User Preferences to transfer datasets. "
# Check if library_import_dir is set
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -39,8 +39,6 @@
self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
self.cookie_path = kwargs.get( "cookie_path", "/" )
- # web API
- self.enable_api = string_as_bool( kwargs.get( 'enable_api', False ) )
self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
self.datatypes_config = kwargs.get( 'datatypes_config_file', 'datatypes_conf.xml' )
self.test_conf = resolve_path( kwargs.get( "test_conf", "" ), self.root )
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf scripts/api/README
--- a/scripts/api/README
+++ b/scripts/api/README
@@ -3,7 +3,6 @@
Set these options in universe_wsgi.ini and start the server:
-enable_api = True
admin_users = you(a)example.org
library_import_dir = /path/to/some/directory
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf templates/user/index.mako
--- a/templates/user/index.mako
+++ b/templates/user/index.mako
@@ -12,9 +12,7 @@
%if webapp == 'galaxy':
<li><a href="${h.url_for( controller='user', action='manage_user_info', cntrller=cntrller, webapp=webapp )}">${_('Manage your information')}</a></li><li><a href="${h.url_for( controller='user', action='set_default_permissions', cntrller=cntrller, webapp=webapp )}">${_('Change default permissions')}</a> for new histories</li>
- %if trans.app.config.enable_api:
- <li><a href="${h.url_for( controller='user', action='api_keys', cntrller=cntrller, webapp=webapp )}">${_('Manage your API keys')}</a></li>
- %endif
+ <li><a href="${h.url_for( controller='user', action='api_keys', cntrller=cntrller, webapp=webapp )}">${_('Manage your API keys')}</a></li>
%if trans.app.config.enable_openid:
<li><a href="${h.url_for( controller='user', action='openid_manage', cntrller=cntrller, webapp=webapp )}">${_('Manage OpenIDs')}</a> linked to your account</li>
%endif
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf templates/webapps/galaxy/base_panels.mako
--- a/templates/webapps/galaxy/base_panels.mako
+++ b/templates/webapps/galaxy/base_panels.mako
@@ -166,8 +166,7 @@
menu_options.append( [ _('Saved Datasets'), h.url_for( controller='/dataset', action='list' ), "galaxy_main" ] )
if app.config.get_bool( 'enable_pages', False ):
menu_options.append( [ _('Saved Pages'), h.url_for( controller='/page', action='list' ), "_top" ] )
- if app.config.enable_api:
- menu_options.append( [ _('API Keys'), h.url_for( controller='/user', action='api_keys', cntrller='user', webapp='galaxy' ), "galaxy_main" ] )
+ menu_options.append( [ _('API Keys'), h.url_for( controller='/user', action='api_keys', cntrller='user', webapp='galaxy' ), "galaxy_main" ] )
if app.config.use_remote_user:
menu_options.append( [ _('Public Name'), h.url_for( controller='/user', action='edit_username', cntrller='user', webapp='galaxy' ), "galaxy_main" ] )
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf test/functional/test_sample_tracking.py
--- a/test/functional/test_sample_tracking.py
+++ b/test/functional/test_sample_tracking.py
@@ -853,7 +853,7 @@
sample_dataset_ids = [ self.security.encode_id( dataset.id ) for dataset in request1_sample1.datasets ]
strings_displayed = [ 'Manage "%s" datasets' % request1_sample1.name ]
strings_displayed_count = [ ( galaxy.model.SampleDataset.transfer_status.NOT_STARTED, len( request1_sample1.datasets ) ) ]
- strings_displayed_after_submit = [ "Error in sequencer login information. The 'enable_api = True' setting is not correctly set in the Galaxy config file. Set your API Key in your User Preferences to transfer datasets." ]
+ strings_displayed_after_submit = [ "Error in sequencer login information. Please set your API Key in your User Preferences to transfer datasets." ]
strings_not_displayed = [ galaxy.model.SampleDataset.transfer_status.IN_QUEUE,
galaxy.model.SampleDataset.transfer_status.TRANSFERRING,
galaxy.model.SampleDataset.transfer_status.ADD_TO_LIBRARY,
diff -r bb7f51fb545d0517eeceb902fa83dc5a659c64df -r 8376ad08ae41b9b7efa06622b031cb731e2c0bcf universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -504,9 +504,6 @@
#enable_openid = False
#openid_config_file = openid_conf.xml
-# Enable the (experimental! beta!) Web API. Documentation forthcoming.
-#enable_api = False
-
# Optional list of email addresses of API users who can make calls on behalf of
# other users
#api_allow_run_as = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dannon: Tabular dataset display - Remove logs, revert scroll detection to base.
by Bitbucket 11 Apr '12
by Bitbucket 11 Apr '12
11 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bb7f51fb545d/
changeset: bb7f51fb545d
user: dannon
date: 2012-04-11 18:03:51
summary: Tabular dataset display - Remove logs, revert scroll detection to base.
affected #: 1 file
diff -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d -r bb7f51fb545d0517eeceb902fa83dc5a659c64df templates/dataset/tabular_chunked.mako
--- a/templates/dataset/tabular_chunked.mako
+++ b/templates/dataset/tabular_chunked.mako
@@ -39,11 +39,7 @@
$(document).ready(function(){
fillTable();
$(window).scroll(function(){
- console.log($(window).scrollTop());
- console.log($(document).height());
- console.log($(window).height());
- // if ($(window).scrollTop() == $(document).height() - $(window).height()){
- if ($(document).height() - $(window).scrollTop() <= $(window).height()){
+ if ($(window).scrollTop() == $(document).height() - $(window).height()){
fillTable();
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

11 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ff62ddc66b1a/
changeset: ff62ddc66b1a
user: dannon
date: 2012-04-11 17:52:52
summary: Incremental display for tabular datatypes.
affected #: 9 files
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -34,6 +34,17 @@
"""Returns the mime type of the datatype"""
return 'application/octet-stream'
+ def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
+ trans.response.set_content_type(dataset.get_mime())
+ trans.log_event( "Display dataset id: %s" % str( dataset.id ) )
+ trans.response.headers['Content-Length'] = int( os.stat( dataset.file_name ).st_size )
+ to_ext = dataset.extension
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ fname = ''.join(c in valid_chars and c or '_' for c in dataset.name)[0:150]
+ trans.response.set_content_type( "application/octet-stream" ) #force octet-stream so Safari doesn't append mime extensions to filename
+ trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (dataset.hid, fname, to_ext)
+ return open( dataset.file_name )
+
class Ab1( Binary ):
"""Class describing an ab1 binary sequence file"""
file_ext = "ab1"
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -39,7 +39,7 @@
'test'
>>> type( DataTest.metadata_spec.test.param )
<class 'galaxy.datatypes.metadata.MetadataParameter'>
-
+
"""
__metaclass__ = DataMeta
# Add metadata elements
@@ -60,7 +60,7 @@
primary_file_name = 'index'
#A per datatype setting (inherited): max file size (in bytes) for setting optional metadata
_max_optional_metadata_filesize = None
-
+
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
@@ -118,7 +118,7 @@
to_check = dataset.metadata.items()
for key, value in to_check:
if key in skip or ( not check and dataset.metadata.spec[key].get( "optional" ) ):
- continue #we skip check for optional and nonrequested values here
+ continue #we skip check for optional and nonrequested values here
if not value:
return True
return False
@@ -142,6 +142,7 @@
else:
dataset.peek = 'file does not exist'
dataset.blurb = 'file purged from disk'
+
def display_peek(self, dataset ):
"""Create HTML table, used for displaying peek"""
out = ['<table cellspacing="0" cellpadding="3">']
@@ -163,6 +164,158 @@
except Exception, exc:
out = "Can't create peek %s" % str( exc )
return out
+
+ def _archive_composite_dataset( self, trans, data=None, **kwd ):
+ # save a composite object into a compressed archive for downloading
+ params = util.Params( kwd )
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ outfname = data.name[0:150]
+ outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
+ if (params.do_action == None):
+ params.do_action = 'zip' # default
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ if not data:
+ msg = "You must select at least one dataset"
+ messagetype = 'error'
+ else:
+ error = False
+ try:
+ if (params.do_action == 'zip'):
+ # Can't use mkstemp - the file must not exist first
+ tmpd = tempfile.mkdtemp()
+ tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
+ if ziptype == '64':
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
+ else:
+ archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED )
+ archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
+ elif params.do_action == 'tgz':
+ archive = util.streamball.StreamBall( 'w|gz' )
+ elif params.do_action == 'tbz':
+ archive = util.streamball.StreamBall( 'w|bz2' )
+ except (OSError, zipfile.BadZipFile):
+ error = True
+ log.exception( "Unable to create archive for download" )
+ msg = "Unable to create archive for %s for download, please report this error" % outfname
+ messagetype = 'error'
+ if not error:
+ current_user_roles = trans.get_current_user_roles()
+ ext = data.extension
+ path = data.file_name
+ fname = os.path.split(path)[-1]
+ efp = data.extra_files_path
+ htmlname = os.path.splitext(outfname)[0]
+ if not htmlname.endswith(ext):
+ htmlname = '%s_%s' % (htmlname,ext)
+ archname = '%s.html' % htmlname # fake the real nature of the html file
+ try:
+ archive.add(data.file_name,archname)
+ except IOError:
+ error = True
+ log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = 'error'
+ for root, dirs, files in os.walk(efp):
+ for fname in files:
+ fpath = os.path.join(root,fname)
+ rpath = os.path.relpath(fpath,efp)
+ try:
+ archive.add( fpath,rpath )
+ except IOError:
+ error = True
+ log.exception( "Unable to add %s to temporary library download archive" % rpath)
+ msg = "Unable to create archive for download, please report this error"
+ messagetype = 'error'
+ continue
+ if not error:
+ if params.do_action == 'zip':
+ archive.close()
+ tmpfh = open( tmpf )
+ # CANNOT clean up - unlink/rmdir was always failing because file handle retained to return - must rely on a cron job to clean up tmp
+ trans.response.set_content_type( "application/x-zip-compressed" )
+ trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.zip"' % outfname
+ return tmpfh
+ else:
+ trans.response.set_content_type( "application/x-tar" )
+ outext = 'tgz'
+ if params.do_action == 'tbz':
+ outext = 'tbz'
+ trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (outfname,outext)
+ archive.wsgi_status = trans.response.wsgi_status()
+ archive.wsgi_headeritems = trans.response.wsgi_headeritems()
+ return archive.stream
+ return trans.show_error_message( msg )
+
+ def _serve_raw(self, trans, dataset, to_ext):
+ trans.response.headers['Content-Length'] = int( os.stat( dataset.file_name ).st_size )
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ fname = ''.join(c in valid_chars and c or '_' for c in dataset.name)[0:150]
+ trans.response.set_content_type( "application/octet-stream" ) #force octet-stream so Safari doesn't append mime extensions to filename
+ trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (dataset.hid, fname, to_ext)
+ return open( dataset.file_name )
+
+ def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, size=None, offset=None, **kwd):
+ """ Old display method, for transition """
+ #Relocate all composite datatype display to a common location.
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+ composite_extensions.append('html') # for archiving composite datatypes
+ if isinstance( dataset, basestring ):
+ return dataset
+ if filename and filename != "index":
+ # For files in extra_files_path
+ file_path = trans.app.object_store.get_filename(dataset, extra_dir='dataset_%s_files' % dataset.id, alt_name=filename)
+ if os.path.exists( file_path ):
+ if os.path.isdir( file_path ):
+ return trans.show_error_message( "Directory listing is not allowed." ) #TODO: Reconsider allowing listing of directories?
+ mime, encoding = mimetypes.guess_type( file_path )
+ if not mime:
+ try:
+ mime = trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( file_path )[-1] )
+ except:
+ mime = "text/plain"
+ trans.response.set_content_type( mime )
+ return open( file_path )
+ else:
+ return trans.show_error_message( "Could not find '%s' on the extra files path %s." % ( filename, file_path ) )
+ trans.response.set_content_type(dataset.get_mime())
+ trans.log_event( "Display dataset id: %s" % str( dataset.id ) )
+ from galaxy import datatypes #DBTODO REMOVE THIS AT REFACTOR
+ if to_ext or isinstance(dataset.datatype, datatypes.binary.Binary): # Saving the file, or binary file
+ if dataset.extension in composite_extensions:
+ return self._archive_composite_dataset( trans, dataset, **kwd )
+ else:
+ trans.response.headers['Content-Length'] = int( os.stat( dataset.file_name ).st_size )
+ if not to_ext:
+ to_ext = dataset.extension
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ fname = ''.join(c in valid_chars and c or '_' for c in dataset.name)[0:150]
+ trans.response.set_content_type( "application/octet-stream" ) #force octet-stream so Safari doesn't append mime extensions to filename
+ trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (dataset.hid, fname, to_ext)
+ return open( dataset.file_name )
+ if not os.path.exists( dataset.file_name ):
+ raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % dataset.file_name )
+ max_peek_size = 1000000 # 1 MB
+ if isinstance(dataset.datatype, datatypes.images.Html):
+ max_peek_size = 10000000 # 10 MB for html
+ if not preview or isinstance(dataset.datatype, datatypes.images.Image) or os.stat( dataset.file_name ).st_size < max_peek_size:
+ if trans.app.config.sanitize_all_html and trans.response.get_content_type() == "text/html":
+ # Sanitize anytime we respond with plain text/html content.
+ return sanitize_html(open( dataset.file_name ).read())
+ return open( dataset.file_name )
+ else:
+ trans.response.set_content_type( "text/html" )
+ return trans.stream_template_mako( "/dataset/large_file.mako",
+ truncated_data = open( dataset.file_name ).read(max_peek_size),
+ data = dataset )
+ """Returns dataset contents for display.
+ This allows for customization of subtype displays"""
+ file_path = trans.app.object_store.get_filename(dataset, extra_dir='dataset_%s_files' % dataset.id, alt_name=filename)
+ if size:
+ return open(dataset.file_path).read(size)
+ else:
+ open(dataset.file_path)
+
def display_name(self, dataset):
"""Returns formatted html of dataset name"""
try:
@@ -183,11 +336,11 @@
info = info.replace( '\r', '<br/>' )
if info.find( '\n' ) >= 0:
info = info.replace( '\n', '<br/>' )
-
+
# Convert to unicode to display non-ascii characters.
if type( info ) is not unicode:
info = unicode( info, 'utf-8')
-
+
return info
except:
return "info unavailable"
@@ -272,7 +425,7 @@
def convert_dataset(self, trans, original_dataset, target_type, return_output=False, visible=True, deps=None, set_output_history=True):
"""This function adds a job to the queue to convert a dataset to another type. Returns a message about success/failure."""
converter = trans.app.datatypes_registry.get_converter_by_target_type( original_dataset.ext, target_type )
-
+
if converter is None:
raise Exception( "A converter does not exist for %s to %s." % ( original_dataset.ext, target_type ) )
#Generate parameter dictionary
@@ -284,7 +437,7 @@
params[value.name] = deps[value.name]
elif value.type == 'data':
input_name = key
-
+
params[input_name] = original_dataset
#Run converter, job is dispatched through Queue
converted_dataset = converter.execute( trans, incoming=params, set_output_hid=visible, set_output_history=set_output_history)[1]
@@ -351,18 +504,18 @@
@property
def has_resolution(self):
return False
-
-
- def merge( split_files, output_file):
+
+
+ def merge( split_files, output_file):
"""
TODO: Do we need to merge gzip files using gzjoin? cat seems to work,
but might be brittle. Need to revisit this.
"""
if len(split_files) == 1:
- cmd = 'mv -f %s %s' % ( split_files[0], output_file )
+ cmd = 'mv -f %s %s' % ( split_files[0], output_file )
else:
- cmd = 'cat %s > %s' % ( ' '.join(split_files), output_file )
+ cmd = 'cat %s > %s' % ( ' '.join(split_files), output_file )
result = os.system(cmd)
if result != 0:
raise Exception('Result %s from %s' % (result, cmd))
@@ -377,7 +530,7 @@
def write_from_stream(self, dataset, stream):
"""Writes data from a stream"""
- # write it twice for now
+ # write it twice for now
fd, temp_name = tempfile.mkstemp()
while 1:
chunk = stream.read(1048576)
@@ -468,11 +621,11 @@
"""
if split_params is None:
return
-
+
if len(input_datasets) > 1:
raise Exception("Text file splitting does not support multiple files")
input_files = [ds.file_name for ds in input_datasets]
-
+
lines_per_file = None
chunk_size = None
if split_params['split_mode'] == 'number_of_parts':
@@ -501,7 +654,7 @@
chunk_size = int(split_params['split_size'])
else:
raise Exception('Unsupported split mode %s' % split_params['split_mode'])
-
+
f = open(input_files[0], 'rt')
try:
chunk_idx = 0
@@ -562,7 +715,7 @@
def get_file_peek( file_name, is_multi_byte=False, WIDTH=256, LINE_COUNT=5, skipchars=[] ):
"""
Returns the first LINE_COUNT lines wrapped to WIDTH
-
+
## >>> fname = get_test_fname('4.bed')
## >>> get_file_peek(fname)
## 'chr22 30128507 31828507 uc003bnx.1_cds_2_0_chr22_29227_f 0 +\n'
@@ -601,11 +754,12 @@
lines.append( line )
count += 1
temp.close()
- if file_type in [ 'gzipped', 'binary' ]:
- text = "%s file" % file_type
+ if file_type in [ 'gzipped', 'binary' ]:
+ text = "%s file" % file_type
else:
try:
text = unicode( '\n'.join( lines ), 'utf-8' )
except UnicodeDecodeError:
text = "binary/unknown file"
return text
+
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -13,11 +13,13 @@
from galaxy.datatypes.metadata import MetadataElement
import galaxy_utils.sequence.vcf
from sniff import *
+from galaxy.util.json import to_json_string
log = logging.getLogger(__name__)
class Tabular( data.Text ):
"""Tab delimited data"""
+ CHUNK_SIZE = 20000
"""Add metadata elements"""
MetadataElement( name="comment_lines", default=0, desc="Number of comment lines", readonly=False, optional=True, no_value=0 )
@@ -33,15 +35,15 @@
that contain numerical values in the dataset. A skip parameter is
used because various tabular data types reuse this function, and
their data type classes are responsible to determine how many invalid
- comment lines should be skipped. Using None for skip will cause skip
- to be zero, but the first line will be processed as a header. A
- max_data_lines parameter is used because various tabular data types
- reuse this function, and their data type classes are responsible to
+ comment lines should be skipped. Using None for skip will cause skip
+ to be zero, but the first line will be processed as a header. A
+ max_data_lines parameter is used because various tabular data types
+ reuse this function, and their data type classes are responsible to
determine how many data lines should be processed to ensure that the
- non-optional metadata parameters are properly set; if used, optional
- metadata parameters will be set to None, unless the entire file has
- already been read. Using None (default) for max_data_lines will
- process all data lines.
+ non-optional metadata parameters are properly set; if used, optional
+ metadata parameters will be set to None, unless the entire file has
+ already been read. Using None (default) for max_data_lines will
+ process all data lines.
Items of interest:
1. We treat 'overwrite' as always True (we always want to set tabular metadata when called).
@@ -58,7 +60,7 @@
column_type_set_order = [ 'int', 'float', 'list', 'str' ] #Order to set column types in
default_column_type = column_type_set_order[-1] # Default column type is lowest in list
column_type_compare_order = list( column_type_set_order ) #Order to compare column types
- column_type_compare_order.reverse()
+ column_type_compare_order.reverse()
def type_overrules_type( column_type1, column_type2 ):
if column_type1 is None or column_type1 == column_type2:
return False
@@ -75,13 +77,13 @@
try:
int( column_text )
return True
- except:
+ except:
return False
def is_float( column_text ):
try:
float( column_text )
return True
- except:
+ except:
if column_text.strip().lower() == 'na':
return True #na is special cased to be a float
return False
@@ -126,7 +128,7 @@
if type_overrules_type( column_type, column_types[field_count] ):
column_types[field_count] = column_type
if i == 0 and requested_skip is None:
- # This is our first line, people seem to like to upload files that have a header line, but do not
+ # This is our first line, people seem to like to upload files that have a header line, but do not
# start with '#' (i.e. all column types would then most likely be detected as str). We will assume
# that the first line is always a header (this was previous behavior - it was always skipped). When
# the requested skip is None, we only use the data from the first line if we have no other data for
@@ -148,7 +150,7 @@
break
i += 1
dataset_fh.close()
-
+
#we error on the larger number of columns
#first we pad our column_types by using data from first line
if len( first_line_column_types ) > len( column_types ):
@@ -177,6 +179,7 @@
except Exception, exc:
out = "Can't create peek %s" % str( exc )
return out
+
def make_html_peek_header( self, dataset, skipchars=[], column_names=[], column_number_format='%s', column_parameter_alias={}, **kwargs ):
out = []
try:
@@ -212,6 +215,7 @@
except Exception, exc:
raise Exception, "Can't create peek header %s" % str( exc )
return "".join( out )
+
def make_html_peek_rows( self, dataset, skipchars=[], **kwargs ):
out = []
try:
@@ -233,6 +237,28 @@
except Exception, exc:
raise Exception, "Can't create peek rows %s" % str( exc )
return "".join( out )
+
+ def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None):
+ #TODO Prevent failure when displaying extremely long > 50kb lines.
+ if to_ext:
+ return self._serve_raw(trans, dataset, to_ext)
+ if chunk:
+ ck_index = int(chunk)
+ f = open(dataset.file_name)
+ f.seek(ck_index * self.CHUNK_SIZE)
+ # If we aren't at the start of the file, seek to next newline. Do this better eventually.
+ if f.tell() != 0:
+ cursor = f.read(1)
+ while cursor and cursor != '\n':
+ cursor = f.read(1)
+ ck_data = f.read(self.CHUNK_SIZE)
+ cursor = f.read(1)
+ while cursor and ck_data[-1] != '\n':
+ ck_data += cursor
+ cursor = f.read(1)
+ return to_json_string({'ck_data': ck_data, 'ck_index': ck_index+1})
+ return trans.fill_template( "/dataset/tabular_chunked.mako",dataset = dataset)
+
def set_peek( self, dataset, line_count=None, is_multi_byte=False):
super(Tabular, self).set_peek( dataset, line_count=line_count, is_multi_byte=is_multi_byte)
if dataset.metadata.comment_lines:
@@ -281,7 +307,7 @@
def sniff( self, filename ):
"""
Determines whether the file is in SAM format
-
+
A file in SAM format consists of lines of tab-separated data.
The following header line may be the first line:
@QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL
@@ -290,12 +316,12 @@
Data in the OPT column is optional and can consist of tab-separated data
For complete details see http://samtools.sourceforge.net/SAM1.pdf
-
+
Rules for sniffing as True:
There must be 11 or more columns of data on each line
Columns 2 (FLAG), 4(POS), 5 (MAPQ), 8 (MPOS), and 9 (ISIZE) must be numbers (9 can be negative)
We will only check that up to the first 5 alignments are correctly formatted.
-
+
>>> fname = get_test_fname( 'sequence.maf' )
>>> Sam().sniff( fname )
False
@@ -311,7 +337,7 @@
line = line.strip()
if not line:
break #EOF
- if line:
+ if line:
if line[0] != '@':
linePieces = line.split('\t')
if len(linePieces) < 11:
@@ -373,10 +399,10 @@
if result != 0:
raise Exception('Result %s from %s' % (result, cmd))
merge = staticmethod(merge)
-
+
def get_track_type( self ):
return "ReadTrack", {"data": "bam", "index": "summary_tree"}
-
+
class Pileup( Tabular ):
"""Tab delimited data in pileup (6- or 10-column) format"""
file_ext = "pileup"
@@ -402,7 +428,7 @@
"""
Checks for 'pileup-ness'
- There are two main types of pileup: 6-column and 10-column. For both,
+ There are two main types of pileup: 6-column and 10-column. For both,
the first three and last two columns are the same. We only check the
first three to allow for some personalization of the format.
@@ -436,27 +462,27 @@
class ElandMulti( Tabular ):
file_ext = 'elandmulti'
-
+
def sniff( self, filename ):
return False
-
+
class Vcf( Tabular ):
""" Variant Call Format for describing SNPs and other simple genome variations. """
-
+
file_ext = 'vcf'
column_names = [ 'Chrom', 'Pos', 'ID', 'Ref', 'Alt', 'Qual', 'Filter', 'Info', 'Format', 'data' ]
-
+
MetadataElement( name="columns", default=10, desc="Number of columns", readonly=True, visible=False )
MetadataElement( name="column_types", default=['str','int','str','str','str','int','str','list','str','str'], param=metadata.ColumnTypesParameter, desc="Column types", readonly=True, visible=False )
MetadataElement( name="viz_filter_cols", desc="Score column for visualization", default=[5], param=metadata.ColumnParameter, multiple=True )
-
+
def sniff( self, filename ):
headers = get_headers( filename, '\n', count=1 )
return headers[0][0].startswith("##fileformat=VCF")
def display_peek( self, dataset ):
"""Returns formated html of peek"""
return Tabular.make_html_table( self, dataset, column_names=self.column_names )
-
+
def get_track_type( self ):
return "VcfTrack", {"data": "tabix", "index": "summary_tree"}
@@ -500,10 +526,10 @@
def sniff( self, filename ):
"""
Determines whether the file is in ELAND export format
-
+
A file in ELAND export format consists of lines of tab-separated data.
There is no header.
-
+
Rules for sniffing as True:
There must be 22 columns on each line
LANE, TILEm X, Y, INDEX, READ_NO, SEQ, QUAL, POSITION, *STRAND, FILT must be correct
@@ -522,7 +548,7 @@
line = line.strip()
if not line:
break #EOF
- if line:
+ if line:
linePieces = line.split('\t')
if len(linePieces) != 22:
return False
@@ -568,7 +594,7 @@
#else:
# # Otherwise, read the whole thing and set num data lines.
for i, line in enumerate(dataset_fh):
- if line:
+ if line:
linePieces = line.split('\t')
if len(linePieces) != 22:
raise Exception('%s:%d:Corrupt line!' % (dataset.file_name,i))
@@ -586,5 +612,5 @@
dataset.metadata.tiles = ["%04d" % int(t) for t in tiles.keys()]
dataset.metadata.barcodes = filter(lambda x: x != '0', barcodes.keys()) + ['NoIndex' for x in barcodes.keys() if x == '0']
dataset.metadata.reads = reads.keys()
-
-
+
+
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -1,17 +1,22 @@
-import logging, os, string, shutil, re, socket, mimetypes, urllib, tempfile, zipfile, glob, sys
+import logging
+import mimetypes
+import os
+import string
+import sys
+import tempfile
+import urllib
+import zipfile
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
-from galaxy import util, datatypes, jobs, web, model
-from cgi import escape, FieldStorage
+from galaxy import util, datatypes, web, model
from galaxy.datatypes.display_applications.util import encode_dataset_user, decode_dataset_user
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util import inflector
from galaxy.model.item_attrs import *
-from galaxy.model import LibraryDatasetDatasetAssociation, HistoryDatasetAssociation
from galaxy.web.framework.helpers import to_unicode
-import pkg_resources;
+import pkg_resources;
pkg_resources.require( "Paste" )
import paste.httpexceptions
@@ -32,7 +37,7 @@
except RuntimeError:
log.exception( "Compression error when testing zip compression. This option will be disabled for library downloads." )
except (TypeError, zipfile.LargeZipFile): # ZIP64 is only in Python2.5+. Remove TypeError when 2.4 support is dropped
- log.warning( 'Max zip file size is 2GB, ZIP64 not supported' )
+ log.warning( 'Max zip file size is 2GB, ZIP64 not supported' )
comptypes.append( 'zip' )
try:
os.unlink( tmpf )
@@ -53,7 +58,7 @@
-----------------------------------------------------------------------------
You should be able to view the history containing the related history item
-${hid}: ${history_item_name}
+${hid}: ${history_item_name}
by logging in as a Galaxy admin user to the Galaxy instance referenced above
and pointing your browser to the following link.
@@ -90,7 +95,7 @@
class HistoryColumn( grids.GridColumn ):
def get_value( self, trans, grid, hda):
return hda.history.name
-
+
class StatusColumn( grids.GridColumn ):
def get_value( self, trans, grid, hda ):
if hda.deleted:
@@ -111,19 +116,19 @@
template='/dataset/grid.mako'
default_sort_key = "-update_time"
columns = [
- grids.TextColumn( "Name", key="name",
+ grids.TextColumn( "Name", key="name",
# Link name to dataset's history.
link=( lambda item: iff( item.history.deleted, None, dict( operation="switch", id=item.id ) ) ), filterable="advanced", attach_popup=True ),
- HistoryColumn( "History", key="history",
+ HistoryColumn( "History", key="history",
link=( lambda item: iff( item.history.deleted, None, dict( operation="switch_history", id=item.id ) ) ) ),
grids.IndividualTagsColumn( "Tags", key="tags", model_tag_association_class=model.HistoryDatasetAssociationTagAssociation, filterable="advanced", grid_name="HistoryDatasetAssocationListGrid" ),
StatusColumn( "Status", key="deleted", attach_popup=False ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search",
- cols_to_filter=[ columns[0], columns[2] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search",
+ cols_to_filter=[ columns[0], columns[2] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = [
@@ -136,17 +141,17 @@
num_rows_per_page = 50
def build_initial_query( self, trans, **kwargs ):
# Show user's datasets that are not deleted, not in deleted histories, and not hidden.
- # To filter HDAs by user, need to join model class/HDA and History table so that it is
- # possible to filter by user. However, for dictionary-based filtering to work, need a
+ # To filter HDAs by user, need to join model class/HDA and History table so that it is
+ # possible to filter by user. However, for dictionary-based filtering to work, need a
# primary table for the query.
return trans.sa_session.query( self.model_class ).select_from( self.model_class.table.join( model.History.table ) ) \
.filter( model.History.user == trans.user ) \
.filter( self.model_class.deleted==False ) \
.filter( model.History.deleted==False ) \
.filter( self.model_class.visible==True )
-
+
class DatasetInterface( BaseUIController, UsesAnnotations, UsesHistory, UsesHistoryDatasetAssociation, UsesItemRatings ):
-
+
stored_list_grid = HistoryDatasetAssociationListGrid()
@web.expose
@@ -202,7 +207,7 @@
job_stdout=job.stdout,
job_info=job.info,
job_traceback=job.traceback,
- email=email,
+ email=email,
message=message )
frm = to_address
# Check email a bit
@@ -219,130 +224,45 @@
return trans.show_ok_message( "Your error report has been sent" )
except Exception, e:
return trans.show_error_message( "An error occurred sending the report by email: %s" % str( e ) )
-
+
@web.expose
def default(self, trans, dataset_id=None, **kwd):
return 'This link may not be followed from within Galaxy.'
-
- @web.expose
- def archive_composite_dataset( self, trans, data=None, **kwd ):
- # save a composite object into a compressed archive for downloading
- params = util.Params( kwd )
- valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
- outfname = data.name[0:150]
- outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
- if (params.do_action == None):
- params.do_action = 'zip' # default
- msg = util.restore_text( params.get( 'msg', '' ) )
- messagetype = params.get( 'messagetype', 'done' )
- if not data:
- msg = "You must select at least one dataset"
- messagetype = 'error'
- else:
- error = False
- try:
- if (params.do_action == 'zip'):
- # Can't use mkstemp - the file must not exist first
- tmpd = tempfile.mkdtemp()
- tmpf = os.path.join( tmpd, 'library_download.' + params.do_action )
- if ziptype == '64':
- archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED, True )
- else:
- archive = zipfile.ZipFile( tmpf, 'w', zipfile.ZIP_DEFLATED )
- archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
- elif params.do_action == 'tgz':
- archive = util.streamball.StreamBall( 'w|gz' )
- elif params.do_action == 'tbz':
- archive = util.streamball.StreamBall( 'w|bz2' )
- except (OSError, zipfile.BadZipFile):
- error = True
- log.exception( "Unable to create archive for download" )
- msg = "Unable to create archive for %s for download, please report this error" % outfname
- messagetype = 'error'
- if not error:
- current_user_roles = trans.get_current_user_roles()
- ext = data.extension
- path = data.file_name
- fname = os.path.split(path)[-1]
- efp = data.extra_files_path
- htmlname = os.path.splitext(outfname)[0]
- if not htmlname.endswith(ext):
- htmlname = '%s_%s' % (htmlname,ext)
- archname = '%s.html' % htmlname # fake the real nature of the html file
- try:
- archive.add(data.file_name,archname)
- except IOError:
- error = True
- log.exception( "Unable to add composite parent %s to temporary library download archive" % data.file_name)
- msg = "Unable to create archive for download, please report this error"
- messagetype = 'error'
- for root, dirs, files in os.walk(efp):
- for fname in files:
- fpath = os.path.join(root,fname)
- rpath = os.path.relpath(fpath,efp)
- try:
- archive.add( fpath,rpath )
- except IOError:
- error = True
- log.exception( "Unable to add %s to temporary library download archive" % rpath)
- msg = "Unable to create archive for download, please report this error"
- messagetype = 'error'
- continue
- if not error:
- if params.do_action == 'zip':
- archive.close()
- tmpfh = open( tmpf )
- # CANNOT clean up - unlink/rmdir was always failing because file handle retained to return - must rely on a cron job to clean up tmp
- trans.response.set_content_type( "application/x-zip-compressed" )
- trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.zip"' % outfname
- return tmpfh
- else:
- trans.response.set_content_type( "application/x-tar" )
- outext = 'tgz'
- if params.do_action == 'tbz':
- outext = 'tbz'
- trans.response.headers[ "Content-Disposition" ] = 'attachment; filename="%s.%s"' % (outfname,outext)
- archive.wsgi_status = trans.response.wsgi_status()
- archive.wsgi_headeritems = trans.response.wsgi_headeritems()
- return archive.stream
- return trans.show_error_message( msg )
-
+
@web.expose
def get_metadata_file(self, trans, hda_id, metadata_name):
""" Allows the downloading of metadata files associated with datasets (eg. bai index for bam files) """
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( hda_id ) )
if not data or not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ):
return trans.show_error_message( "You are not allowed to access this dataset" )
-
+
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
fname = ''.join(c in valid_chars and c or '_' for c in data.name)[0:150]
-
+
file_ext = data.metadata.spec.get(metadata_name).get("file_ext", metadata_name)
trans.response.headers["Content-Type"] = "application/octet-stream"
trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (data.hid, fname, file_ext)
return open(data.metadata.get(metadata_name).file_name)
-
- def _check_dataset(self, trans, dataset_id):
+
+ def _check_dataset(self, trans, hda_id):
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
- data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
+ data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( hda_id) )
if data is None:
- raise ValueError( 'Invalid reference dataset id: %s.' % dataset_id )
+ raise ValueError( 'Invalid reference dataset id: %s.' % hda_id)
except:
try:
- data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( dataset_id ) )
+ data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( int( hda_id ) )
except:
data = None
if not data:
- raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
+ raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( hda_id ) )
if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ):
return trans.show_error_message( "You are not allowed to access this dataset" )
-
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to view it." )
-
return data
-
+
@web.expose
@web.json
def transfer_status(self, trans, dataset_id, filename=None):
@@ -352,7 +272,7 @@
if isinstance( data, basestring ):
return data
log.debug( "Checking transfer status for dataset %s..." % data.dataset.id )
-
+
# Pulling files in extra_files_path into cache is not handled via this
# method but that's primarily because those files are typically linked to
# through tool's output page anyhow so tying a JavaScript event that will
@@ -361,63 +281,11 @@
return True
else:
return trans.app.object_store.file_ready(data.dataset)
-
+
@web.expose
- def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
- """Catches the dataset id and displays file contents as directed"""
- composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
- composite_extensions.append('html') # for archiving composite datatypes
+ def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, chunk=None, **kwd):
data = self._check_dataset(trans, dataset_id)
- if isinstance( data, basestring ):
- return data
-
- if filename and filename != "index":
- # For files in extra_files_path
- file_path = trans.app.object_store.get_filename(data.dataset, extra_dir='dataset_%s_files' % data.dataset.id, alt_name=filename)
- if os.path.exists( file_path ):
- if os.path.isdir( file_path ):
- return trans.show_error_message( "Directory listing is not allowed." ) #TODO: Reconsider allowing listing of directories?
- mime, encoding = mimetypes.guess_type( file_path )
- if not mime:
- try:
- mime = trans.app.datatypes_registry.get_mimetype_by_extension( ".".split( file_path )[-1] )
- except:
- mime = "text/plain"
- trans.response.set_content_type( mime )
- return open( file_path )
- else:
- return trans.show_error_message( "Could not find '%s' on the extra files path %s." % ( filename, file_path ) )
-
- trans.response.set_content_type(data.get_mime())
- trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
-
- if to_ext or isinstance(data.datatype, datatypes.binary.Binary): # Saving the file, or binary file
- if data.extension in composite_extensions:
- return self.archive_composite_dataset( trans, data, **kwd )
- else:
- trans.response.headers['Content-Length'] = int( os.stat( data.file_name ).st_size )
- if not to_ext:
- to_ext = data.extension
- valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
- fname = ''.join(c in valid_chars and c or '_' for c in data.name)[0:150]
- trans.response.set_content_type( "application/octet-stream" ) #force octet-stream so Safari doesn't append mime extensions to filename
- trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % (data.hid, fname, to_ext)
- return open( data.file_name )
- if not os.path.exists( data.file_name ):
- raise paste.httpexceptions.HTTPNotFound( "File Not Found (%s)." % data.file_name )
- max_peek_size = 1000000 # 1 MB
- if isinstance(data.datatype, datatypes.images.Html):
- max_peek_size = 10000000 # 10 MB for html
- if not preview or isinstance(data.datatype, datatypes.images.Image) or os.stat( data.file_name ).st_size < max_peek_size:
- if trans.app.config.sanitize_all_html and trans.response.get_content_type() == "text/html":
- # Sanitize anytime we respond with plain text/html content.
- return sanitize_html(open( data.file_name ).read())
- return open( data.file_name )
- else:
- trans.response.set_content_type( "text/html" )
- return trans.stream_template_mako( "/dataset/large_file.mako",
- truncated_data = open( data.file_name ).read(max_peek_size),
- data = data )
+ return data.datatype.display_data(trans, data, preview, filename, to_ext, chunk, **kwd)
@web.expose
def edit(self, trans, dataset_id=None, filename=None, hid=None, **kwd):
@@ -443,7 +311,7 @@
# TODO: hid handling
data = history.datasets[ int( hid ) - 1 ]
id = None
- elif dataset_id is not None:
+ elif dataset_id is not None:
id = trans.app.security.decode_id( dataset_id )
data = trans.sa_session.query( self.app.model.HistoryDatasetAssociation ).get( id )
else:
@@ -463,7 +331,7 @@
# permission. In this case, we'll reset this permission to the hda user's private role.
manage_permissions_action = trans.app.security_agent.get_action( trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS.action )
permissions = { manage_permissions_action : [ trans.app.security_agent.get_private_user_role( data.history.user ) ] }
- trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
+ trans.app.security_agent.set_dataset_permission( data.dataset, permissions )
if trans.app.security_agent.can_access_dataset( current_user_roles, data.dataset ):
if data.state == trans.model.Dataset.states.UPLOAD:
return trans.show_error_message( "Please wait until this dataset finishes uploading before attempting to edit its metadata." )
@@ -600,7 +468,7 @@
refresh_frames=refresh_frames )
else:
return trans.show_error_message( "You do not have permission to edit this dataset's ( id: %s ) information." % str( dataset_id ) )
-
+
@web.expose
@web.require_login( "see all available datasets" )
def list( self, trans, **kwargs ):
@@ -610,7 +478,7 @@
if 'operation' in kwargs:
operation = kwargs['operation'].lower()
hda_ids = util.listify( kwargs.get( 'id', [] ) )
-
+
# Display no message by default
status, message = None, None
@@ -630,15 +498,15 @@
if hdas:
if operation == "switch" or operation == "switch_history":
# Switch to a history that the HDA resides in.
-
+
# Convert hda to histories.
histories = []
for hda in hdas:
histories.append( hda.history )
-
+
# Use history controller to switch the history. TODO: is this reasonable?
status, message = trans.webapp.controllers['history']._list_switch( trans, histories )
-
+
# Current history changed, refresh history frame; if switching to a dataset, set hda seek.
trans.template_context['refresh_frames'] = ['history']
if operation == "switch":
@@ -648,35 +516,35 @@
# Copy a dataset to the current history.
target_histories = [ trans.get_history() ]
status, message = self._copy_datasets( trans, hda_ids, target_histories )
-
+
# Current history changed, refresh history frame.
trans.template_context['refresh_frames'] = ['history']
# Render the list view
return self.stored_list_grid( trans, status=status, message=message, **kwargs )
-
+
@web.expose
def imp( self, trans, dataset_id=None, **kwd ):
""" Import another user's dataset via a shared URL; dataset is added to user's current history. """
msg = ""
-
+
# Set referer message.
referer = trans.request.referer
if referer is not "":
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Error checking.
if not dataset_id:
return trans.show_error_message( "You must specify a dataset to import. You can %s." % referer_message, use_panels=True )
-
+
# Do import.
cur_history = trans.get_history( create=True )
status, message = self._copy_datasets( trans, [ dataset_id ], [ cur_history ], imported=True )
message = "Dataset imported. <br>You can <a href='%s'>start using the dataset</a> or %s." % ( url_for('/'), referer_message )
return trans.show_message( message, type=status, use_panels=True )
-
+
@web.expose
@web.json
@web.require_login( "use Galaxy datasets" )
@@ -685,7 +553,7 @@
dataset = self.get_dataset( trans, id, False, True )
return_dict = { "name" : dataset.name, "link" : url_for( action="display_by_username_and_slug", username=dataset.history.user.username, slug=trans.security.encode_id( dataset.id ) ) }
return return_dict
-
+
@web.expose
def get_embed_html_async( self, trans, id ):
""" Returns HTML for embedding a dataset in a page. """
@@ -698,7 +566,7 @@
def set_accessible_async( self, trans, id=None, accessible=False ):
""" Does nothing because datasets do not have an importable/accessible attribute. This method could potentially set another attribute. """
return
-
+
@web.expose
@web.require_login( "rate items" )
@web.json
@@ -713,7 +581,7 @@
dataset_rating = self.rate_item( rate_item, trans.get_user(), dataset, rating )
return self.get_ave_item_rating_data( trans.sa_session, dataset )
-
+
@web.expose
def display_by_username_and_slug( self, trans, username, slug, filename=None, preview=True ):
""" Display dataset by username and slug; because datasets do not yet have slugs, the slug is the dataset's id. """
@@ -722,10 +590,10 @@
# Filename used for composite types.
if filename:
return self.display( trans, dataset_id=slug, filename=filename)
-
+
truncated, dataset_data = self.get_data( dataset, preview )
dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset )
-
+
# If data is binary or an image, stream without template; otherwise, use display template.
# TODO: figure out a way to display images in display template.
if isinstance(dataset.datatype, datatypes.binary.Binary) or isinstance(dataset.datatype, datatypes.images.Image) or isinstance(dataset.datatype, datatypes.images.Html):
@@ -741,12 +609,12 @@
else:
user_item_rating = 0
ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, dataset )
-
+
return trans.fill_template_mako( "/dataset/display.mako", item=dataset, item_data=dataset_data, truncated=truncated,
user_item_rating = user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings )
else:
raise web.httpexceptions.HTTPNotFound()
-
+
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
@@ -758,7 +626,7 @@
# Get annotation.
dataset.annotation = self.get_item_annotation_str( trans.sa_session, trans.user, dataset )
return trans.stream_template_mako( "/dataset/item_content.mako", item=dataset, item_data=dataset_data, truncated=truncated )
-
+
@web.expose
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
dataset = self.get_dataset( trans, id, False, True )
@@ -770,7 +638,7 @@
self.add_item_annotation( trans.sa_session, trans.get_user(), dataset, new_annotation )
trans.sa_session.flush()
return new_annotation
-
+
@web.expose
def get_annotation_async( self, trans, id ):
dataset = self.get_dataset( trans, id, False, True )
@@ -841,7 +709,7 @@
if app_action in [ 'data', 'param' ]:
assert action_param, "An action param must be provided for a data or param action"
#data is used for things with filenames that could be passed off to a proxy
- #in case some display app wants all files to be in the same 'directory',
+ #in case some display app wants all files to be in the same 'directory',
#data can be forced to param, but not the other way (no filename for other direction)
#get param name from url param name
try:
@@ -960,7 +828,7 @@
trans.log_event( "Dataset id %s has been unhidden" % str(id) )
return True
return False
-
+
def _purge( self, trans, dataset_id ):
message = None
status = 'done'
@@ -1037,7 +905,7 @@
return "OK"
else:
raise Exception( message )
-
+
@web.expose
def unhide( self, trans, dataset_id, filename ):
if self._unhide( trans, dataset_id ):
@@ -1070,7 +938,7 @@
"""
Show the parameters used for an HDA
"""
-
+
def source_dataset_chain( dataset, lst ):
try:
cp_from_ldda = dataset.copied_from_library_dataset_dataset_association
@@ -1084,13 +952,13 @@
except:
pass
return lst
-
+
hda = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( trans.security.decode_id( dataset_id ) )
if not hda:
raise paste.httpexceptions.HTTPRequestRangeNotSatisfiable( "Invalid reference dataset id: %s." % str( dataset_id ) )
if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), hda.dataset ):
return trans.show_error_message( "You are not allowed to access this dataset" )
-
+
# Get the associated job, if any. If this hda was copied from another,
# we need to find the job that created the original hda
params_objects = None
@@ -1102,7 +970,7 @@
job = None
for assoc in job_hda.creating_job_associations:
job = assoc.job
- break
+ break
if job:
# Get the tool object
try:
@@ -1113,10 +981,10 @@
params_objects = job.get_param_values( trans.app )
except:
pass
-
+
inherit_chain = source_dataset_chain(hda, [])
return trans.fill_template( "show_params.mako", inherit_chain=inherit_chain, history=trans.get_history(), hda=hda, tool=tool, params_objects=params_objects )
-
+
@web.expose
def copy_datasets( self, trans, source_history=None, source_dataset_ids="", target_history_id=None, target_history_ids="", new_history_name="", do_copy=False, **kwd ):
params = util.Params( kwd )
@@ -1175,7 +1043,7 @@
if history in target_histories:
refresh_frames = ['history']
trans.sa_session.flush()
- hist_names_str = ", ".join( ['<a href="%s" target="_top">%s</a>' %
+ hist_names_str = ", ".join( ['<a href="%s" target="_top">%s</a>' %
( url_for( controller="history", action="switch_to_history", \
hist_id=trans.security.encode_id( hist.id ) ), hist.name ) \
for hist in target_histories ] )
@@ -1186,7 +1054,7 @@
source_datasets = history.visible_datasets
target_histories = [history]
if user:
- target_histories = user.active_histories
+ target_histories = user.active_histories
return trans.fill_template( "/dataset/copy_view.mako",
source_history = history,
current_history = trans.get_history(),
@@ -1204,7 +1072,7 @@
""" Helper method for copying datasets. """
user = trans.get_user()
done_msg = error_msg = ""
-
+
invalid_datasets = 0
if not dataset_ids or not target_histories:
error_msg = "You must provide both source datasets and target histories."
@@ -1229,7 +1097,7 @@
done_msg = "%i dataset%s copied to %i histor%s." % \
( num_datasets_copied, iff( num_datasets_copied == 1, "", "s"), len( target_histories ), iff( len ( target_histories ) == 1, "y", "ies") )
trans.sa_session.refresh( history )
-
+
if error_msg != "":
status = ERROR
message = error_msg
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d static/june_2007_style/base.less
--- a/static/june_2007_style/base.less
+++ b/static/june_2007_style/base.less
@@ -1700,5 +1700,22 @@
position: relative;
}
+// Dataset Display Styles
+
+#loading_indicator{
+ position:fixed;
+ right:10px;
+ top:10px;
+ height:32px;
+ width:32px;
+ background:url(largespinner.gif);
+}
+
+#content_table td{
+ text-align:right;
+ white-space:nowrap;
+ padding:2px 10px;
+}
+
@import "base_sprites";
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -723,6 +723,8 @@
div.toolSectionBody div.toolPanelLabel{padding-top:5px;padding-bottom:5px;margin-left:16px;margin-right:10px;display:list-item;list-style:none outside;}
div.toolTitleNoSection{padding-bottom:5px;font-weight:bold;}
#tool-search{padding-top:5px;padding-bottom:10px;position:relative;}
+#loading_indicator{position:fixed;right:10px;top:10px;height:32px;width:32px;background:url(largespinner.gif);}
+#content_table td{text-align:right;white-space:nowrap;padding:2px 10px;}
.icon-button.display{background:url(history-buttons.png) no-repeat 0px 0px;}
.icon-button.display:hover{background:url(history-buttons.png) no-repeat 0px -26px;}
.icon-button.display_disabled{background:url(history-buttons.png) no-repeat 0px -52px;}
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d static/june_2007_style/blue/largespinner.gif
Binary file static/june_2007_style/blue/largespinner.gif has changed
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d templates/dataset/tabular_chunked.mako
--- /dev/null
+++ b/templates/dataset/tabular_chunked.mako
@@ -0,0 +1,65 @@
+<%inherit file="/base.mako"/>
+
+
+<%def name="title()">Dataset Display</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ var DATASET_URL = "${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( dataset.id ))}";
+ var DATASET_COLS = ${dataset.metadata.columns};
+ var current_chunk = 0;
+
+ function fillTable(){
+ if (current_chunk !== -1){
+ var table = $('#content_table');
+ $.getJSON(DATASET_URL, {chunk: current_chunk}, function (result) {
+ if (result.ck_data !== ""){
+ var lines = result.ck_data.split('\n');
+ $.each(lines, function(){
+ var line = this;
+ var cells = line.split('\t');
+ /* Check length of cells to ensure this is a complete row. */
+ if (cells.length == DATASET_COLS){
+ table.append('<tr><td>' + cells.join('</td><td>') + '</td></tr>');
+ }
+ else{
+ table.append('<tr><td colspan="'+ DATASET_COLS+ '">' + line + '</td></tr>');
+ }
+ });
+ current_chunk = result.ck_index;
+ }
+ else {
+ current_chunk = -1;
+ }
+ });
+ }
+ }
+
+ $(document).ready(function(){
+ fillTable();
+ $(window).scroll(function(){
+ console.log($(window).scrollTop());
+ console.log($(document).height());
+ console.log($(window).height());
+ // if ($(window).scrollTop() == $(document).height() - $(window).height()){
+ if ($(document).height() - $(window).scrollTop() <= $(window).height()){
+ fillTable();
+ }
+ });
+ $('#loading_indicator').ajaxStart(function(){
+ $(this).show();
+ }).ajaxStop(function(){
+ $(this).hide();
+ });
+ });
+ </script>
+</%def>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+</%def>
+
+<div id="loading_indicator" ></div>
+<table id="content_table" cellpadding="0">
+</table>
diff -r 60061d1a369635facf858766c7a992272b806750 -r ff62ddc66b1a454e9b3ba11297c0bd8e2a74dc8d templates/display_base.mako
--- a/templates/display_base.mako
+++ b/templates/display_base.mako
@@ -361,4 +361,4 @@
</div></div>
-</%def>
\ No newline at end of file
+</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: jgoecks: Move tools controller from UI controller to an API controller and add show method for tool.
by Bitbucket 11 Apr '12
by Bitbucket 11 Apr '12
11 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/60061d1a3696/
changeset: 60061d1a3696
user: jgoecks
date: 2012-04-11 17:37:08
summary: Move tools controller from UI controller to an API controller and add show method for tool.
affected #: 3 files
diff -r 423f4f2910ac90098e799d354a2936513242a21e -r 60061d1a369635facf858766c7a992272b806750 lib/galaxy/web/api/tools.py
--- /dev/null
+++ b/lib/galaxy/web/api/tools.py
@@ -0,0 +1,32 @@
+from galaxy import config, tools, web, util
+from galaxy.web.base.controller import BaseController, BaseAPIController
+
+class ToolsController( BaseAPIController ):
+ """
+ RESTful controller for interactions with tools.
+ """
+
+ @web.json
+ def index( self, trans, **kwds ):
+ """
+ GET /api/tools: returns a list of tools defined by parameters
+ parameters:
+ in_panel - if true, tools are returned in panel structure, including sections and labels
+ trackster - if true, only tools that are compatible with Trackster are returned
+ """
+
+ # Read params.
+ in_panel = util.string_as_bool( kwds.get( 'in_panel', 'True' ) )
+ trackster = util.string_as_bool( kwds.get( 'trackster', 'False' ) )
+
+ # Create return value.
+ return self.app.toolbox.to_dict( trans, in_panel=in_panel, trackster=trackster )
+
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/tools/{tool_id}
+ Returns tool information, including parameters and inputs.
+ """
+ return self.app.toolbox.tools_by_id[ id ].to_dict( trans, all=True )
+
\ No newline at end of file
diff -r 423f4f2910ac90098e799d354a2936513242a21e -r 60061d1a369635facf858766c7a992272b806750 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -129,6 +129,7 @@
webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
+ webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
diff -r 423f4f2910ac90098e799d354a2936513242a21e -r 60061d1a369635facf858766c7a992272b806750 lib/galaxy/web/controllers/tools.py
--- a/lib/galaxy/web/controllers/tools.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from galaxy import config, tools, web, util
-from galaxy.web.base.controller import BaseController, BaseUIController
-
-class ToolsController( BaseUIController ):
- """
- RESTful controller for interactions with tools. Once session-based
- authentication can be done with API controllers, this will be moved
- to be part of the API.
- """
-
- @web.json
- def index( self, trans, **kwds ):
- """
- GET /api/tools: returns a list of tools defined by parameters
- parameters:
- in_panel - if true, tools are returned in panel structure, including sections and labels
- trackster - if true, only tools that are compatible with Trackster are returned
- """
-
- # Read params.
- in_panel = util.string_as_bool( kwds.get( 'in_panel', 'True' ) )
- trackster = util.string_as_bool( kwds.get( 'trackster', 'False' ) )
-
- # Create return value.
- return self.app.toolbox.to_dict( trans, in_panel=in_panel, trackster=trackster )
-
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0a957e499a3c/
changeset: 0a957e499a3c
user: jgoecks
date: 2012-04-11 16:31:18
summary: Add Backbone-relational library; this is used for managing complex object/collection relationships.
affected #: 2 files
diff -r bf88ce609370a7829337dad758fafe388b3fd117 -r 0a957e499a3c9b7ba11f88d337e4878260d73d6d static/scripts/libs/backbone-relational.js
--- /dev/null
+++ b/static/scripts/libs/backbone-relational.js
@@ -0,0 +1,1406 @@
+/**
+ * Backbone-relational.js 0.5.0
+ * (c) 2011 Paul Uithol
+ *
+ * Backbone-relational may be freely distributed under the MIT license.
+ * For details and documentation: https://github.com/PaulUithol/Backbone-relational.
+ * Depends on Backbone: https://github.com/documentcloud/backbone.
+ */
+( function( undefined ) {
+ "use strict";
+
+ /**
+ * CommonJS shim
+ **/
+ var _, Backbone, exports;
+ if ( typeof window === 'undefined' ) {
+ _ = require( 'underscore' );
+ Backbone = require( 'backbone' );
+ exports = module.exports = Backbone;
+ }
+ else {
+ var _ = window._;
+ Backbone = window.Backbone;
+ exports = window;
+ }
+
+ Backbone.Relational = {
+ showWarnings: true
+ };
+
+ /**
+ * Semaphore mixin; can be used as both binary and counting.
+ **/
+ Backbone.Semaphore = {
+ _permitsAvailable: null,
+ _permitsUsed: 0,
+
+ acquire: function() {
+ if ( this._permitsAvailable && this._permitsUsed >= this._permitsAvailable ) {
+ throw new Error( 'Max permits acquired' );
+ }
+ else {
+ this._permitsUsed++;
+ }
+ },
+
+ release: function() {
+ if ( this._permitsUsed === 0 ) {
+ throw new Error( 'All permits released' );
+ }
+ else {
+ this._permitsUsed--;
+ }
+ },
+
+ isLocked: function() {
+ return this._permitsUsed > 0;
+ },
+
+ setAvailablePermits: function( amount ) {
+ if ( this._permitsUsed > amount ) {
+ throw new Error( 'Available permits cannot be less than used permits' );
+ }
+ this._permitsAvailable = amount;
+ }
+ };
+
+ /**
+ * A BlockingQueue that accumulates items while blocked (via 'block'),
+ * and processes them when unblocked (via 'unblock').
+ * Process can also be called manually (via 'process').
+ */
+ Backbone.BlockingQueue = function() {
+ this._queue = [];
+ };
+ _.extend( Backbone.BlockingQueue.prototype, Backbone.Semaphore, {
+ _queue: null,
+
+ add: function( func ) {
+ if ( this.isBlocked() ) {
+ this._queue.push( func );
+ }
+ else {
+ func();
+ }
+ },
+
+ process: function() {
+ while ( this._queue && this._queue.length ) {
+ this._queue.shift()();
+ }
+ },
+
+ block: function() {
+ this.acquire();
+ },
+
+ unblock: function() {
+ this.release();
+ if ( !this.isBlocked() ) {
+ this.process();
+ }
+ },
+
+ isBlocked: function() {
+ return this.isLocked();
+ }
+ });
+ /**
+ * Global event queue. Accumulates external events ('add:<key>', 'remove:<key>' and 'update:<key>')
+ * until the top-level object is fully initialized (see 'Backbone.RelationalModel').
+ */
+ Backbone.Relational.eventQueue = new Backbone.BlockingQueue();
+
+ /**
+ * Backbone.Store keeps track of all created (and destruction of) Backbone.RelationalModel.
+ * Handles lookup for relations.
+ */
+ Backbone.Store = function() {
+ this._collections = [];
+ this._reverseRelations = [];
+ };
+ _.extend( Backbone.Store.prototype, Backbone.Events, {
+ _collections: null,
+ _reverseRelations: null,
+
+ /**
+ * Add a reverse relation. Is added to the 'relations' property on model's prototype, and to
+ * existing instances of 'model' in the store as well.
+ * @param {object} relation
+ * @param {Backbone.RelationalModel} relation.model
+ * @param {String} relation.type
+ * @param {String} relation.key
+ * @param {String|object} relation.relatedModel
+ */
+ addReverseRelation: function( relation ) {
+ var exists = _.any( this._reverseRelations, function( rel ) {
+ return _.all( relation, function( val, key ) {
+ return val === rel[ key ];
+ });
+ });
+
+ if ( !exists && relation.model && relation.type ) {
+ this._reverseRelations.push( relation );
+
+ if ( !relation.model.prototype.relations ) {
+ relation.model.prototype.relations = [];
+ }
+ relation.model.prototype.relations.push( relation );
+
+ this.retroFitRelation( relation );
+ }
+ },
+
+ /**
+ * Add a 'relation' to all existing instances of 'relation.model' in the store
+ * @param {object} relation
+ */
+ retroFitRelation: function( relation ) {
+ var coll = this.getCollection( relation.model );
+ coll.each( function( model ) {
+ new relation.type( model, relation );
+ }, this);
+ },
+
+ /**
+ * Find the Store's collection for a certain type of model.
+ * @param {Backbone.RelationalModel} model
+ * @return {Backbone.Collection} A collection if found (or applicable for 'model'), or null
+ */
+ getCollection: function( model ) {
+ var coll = _.detect( this._collections, function( c ) {
+ // Check if model is the type itself (a ref to the constructor), or is of type c.model
+ return model === c.model || model.constructor === c.model;
+ });
+
+ if ( !coll ) {
+ coll = this._createCollection( model );
+ }
+
+ return coll;
+ },
+
+ /**
+ * Find a type on the global object by name. Splits name on dots.
+ * @param {String} name
+ * @return {Object}
+ */
+ getObjectByName: function( name ) {
+ var type = _.reduce( name.split( '.' ), function( memo, val ) {
+ return memo[ val ];
+ }, exports);
+ return type !== exports ? type: null;
+ },
+
+ _createCollection: function( type ) {
+ var coll;
+
+ // If 'type' is an instance, take its constructor
+ if ( type instanceof Backbone.RelationalModel ) {
+ type = type.constructor;
+ }
+
+ // Type should inherit from Backbone.RelationalModel.
+ if ( type.prototype instanceof Backbone.RelationalModel.prototype.constructor ) {
+ coll = new Backbone.Collection();
+ coll.model = type;
+
+ this._collections.push( coll );
+ }
+
+ return coll;
+ },
+
+ /**
+ * Find an id
+ * @param type
+ * @param {String|Number|Object|Backbone.RelationalModel} item
+ */
+ resolveIdForItem: function( type, item ) {
+ var id = _.isString( item ) || _.isNumber( item ) ? item : null;
+
+ if ( id == null ) {
+ if ( item instanceof Backbone.RelationalModel ) {
+ id = item.id;
+ }
+ else if ( _.isObject( item ) ) {
+ id = item[ type.prototype.idAttribute ];
+ }
+ }
+
+ return id;
+ },
+
+ /**
+ *
+ * @param type
+ * @param {String|Number|Object|Backbone.RelationalModel} item
+ */
+ find: function( type, item ) {
+ var id = this.resolveIdForItem( type, item );
+ var coll = this.getCollection( type );
+ return coll && coll.get( id );
+ },
+
+ /**
+ * Add a 'model' to its appropriate collection. Retain the original contents of 'model.collection'.
+ * @param {Backbone.RelationalModel} model
+ */
+ register: function( model ) {
+ var modelColl = model.collection;
+ var coll = this.getCollection( model );
+ coll && coll.add( model );
+ model.bind( 'destroy', this.unregister, this );
+ model.collection = modelColl;
+ },
+
+ /**
+ * Explicitly update a model's id in its store collection
+ * @param {Backbone.RelationalModel} model
+ */
+ update: function( model ) {
+ var coll = this.getCollection( model );
+ coll._onModelEvent( 'change:' + model.idAttribute, model, coll );
+ },
+
+ /**
+ * Remove a 'model' from the store.
+ * @param {Backbone.RelationalModel} model
+ */
+ unregister: function( model ) {
+ model.unbind( 'destroy', this.unregister );
+ var coll = this.getCollection( model );
+ coll && coll.remove( model );
+ }
+ });
+ Backbone.Relational.store = new Backbone.Store();
+
+ /**
+ * The main Relation class, from which 'HasOne' and 'HasMany' inherit. Internally, 'relational:<key>' events
+ * are used to regulate addition and removal of models from relations.
+ *
+ * @param {Backbone.RelationalModel} instance
+ * @param {object} options
+ * @param {string} options.key
+ * @param {Backbone.RelationalModel.constructor} options.relatedModel
+ * @param {Boolean|String} [options.includeInJSON=true] Serialize the given attribute for related model(s)' in toJSON, or just their ids.
+ * @param {Boolean} [options.createModels=true] Create objects from the contents of keys if the object is not found in Backbone.store.
+ * @param {object} [options.reverseRelation] Specify a bi-directional relation. If provided, Relation will reciprocate
+ * the relation to the 'relatedModel'. Required and optional properties match 'options', except that it also needs
+ * {Backbone.Relation|String} type ('HasOne' or 'HasMany').
+ */
+ Backbone.Relation = function( instance, options ) {
+ this.instance = instance;
+ // Make sure 'options' is sane, and fill with defaults from subclasses and this object's prototype
+ options = ( typeof options === 'object' && options ) || {};
+ this.reverseRelation = _.defaults( options.reverseRelation || {}, this.options.reverseRelation );
+ this.reverseRelation.type = !_.isString( this.reverseRelation.type ) ? this.reverseRelation.type :
+ Backbone[ this.reverseRelation.type ] || Backbone.Relational.store.getObjectByName( this.reverseRelation.type );
+ this.model = options.model || this.instance.constructor;
+ this.options = _.defaults( options, this.options, Backbone.Relation.prototype.options );
+
+ this.key = this.options.key;
+ this.keySource = this.options.keySource || this.key;
+ this.keyDestination = this.options.keyDestination || this.options.keySource || this.key;
+
+ // 'exports' should be the global object where 'relatedModel' can be found on if given as a string.
+ this.relatedModel = this.options.relatedModel;
+ if ( _.isString( this.relatedModel ) ) {
+ this.relatedModel = Backbone.Relational.store.getObjectByName( this.relatedModel );
+ }
+
+ if ( !this.checkPreconditions() ) {
+ return false;
+ }
+
+ if ( instance ) {
+ this.keyContents = this.instance.get( this.keySource );
+
+ // Explicitly clear 'keySource', to prevent a leaky abstraction if 'keySource' differs from 'key'.
+ if ( this.key !== this.keySource ) {
+ this.instance.unset( this.keySource, { silent: true } );
+ }
+
+ // Add this Relation to instance._relations
+ this.instance._relations.push( this );
+ }
+
+ // Add the reverse relation on 'relatedModel' to the store's reverseRelations
+ if ( !this.options.isAutoRelation && this.reverseRelation.type && this.reverseRelation.key ) {
+ Backbone.Relational.store.addReverseRelation( _.defaults( {
+ isAutoRelation: true,
+ model: this.relatedModel,
+ relatedModel: this.model,
+ reverseRelation: this.options // current relation is the 'reverseRelation' for its own reverseRelation
+ },
+ this.reverseRelation // Take further properties from this.reverseRelation (type, key, etc.)
+ ) );
+ }
+
+ _.bindAll( this, '_modelRemovedFromCollection', '_relatedModelAdded', '_relatedModelRemoved' );
+
+ if( instance ) {
+ this.initialize();
+
+ // When a model in the store is destroyed, check if it is 'this.instance'.
+ Backbone.Relational.store.getCollection( this.instance )
+ .bind( 'relational:remove', this._modelRemovedFromCollection );
+
+ // When 'relatedModel' are created or destroyed, check if it affects this relation.
+ Backbone.Relational.store.getCollection( this.relatedModel )
+ .bind( 'relational:add', this._relatedModelAdded )
+ .bind( 'relational:remove', this._relatedModelRemoved );
+ }
+ };
+ // Fix inheritance :\
+ Backbone.Relation.extend = Backbone.Model.extend;
+ // Set up all inheritable **Backbone.Relation** properties and methods.
+ _.extend( Backbone.Relation.prototype, Backbone.Events, Backbone.Semaphore, {
+ options: {
+ createModels: true,
+ includeInJSON: true,
+ isAutoRelation: false
+ },
+
+ instance: null,
+ key: null,
+ keyContents: null,
+ relatedModel: null,
+ reverseRelation: null,
+ related: null,
+
+ _relatedModelAdded: function( model, coll, options ) {
+ // Allow 'model' to set up its relations, before calling 'tryAddRelated'
+ // (which can result in a call to 'addRelated' on a relation of 'model')
+ var dit = this;
+ model.queue( function() {
+ dit.tryAddRelated( model, options );
+ });
+ },
+
+ _relatedModelRemoved: function( model, coll, options ) {
+ this.removeRelated( model, options );
+ },
+
+ _modelRemovedFromCollection: function( model ) {
+ if ( model === this.instance ) {
+ this.destroy();
+ }
+ },
+
+ /**
+ * Check several pre-conditions.
+ * @return {Boolean} True if pre-conditions are satisfied, false if they're not.
+ */
+ checkPreconditions: function() {
+ var i = this.instance,
+ k = this.key,
+ m = this.model,
+ rm = this.relatedModel,
+ warn = Backbone.Relational.showWarnings && typeof console !== 'undefined';
+
+ if ( !m || !k || !rm ) {
+ warn && console.warn( 'Relation=%o; no model, key or relatedModel (%o, %o, %o)', this, m, k, rm );
+ return false;
+ }
+ // Check if the type in 'relatedModel' inherits from Backbone.RelationalModel
+ if ( !( m.prototype instanceof Backbone.RelationalModel.prototype.constructor ) ) {
+ warn && console.warn( 'Relation=%o; model does not inherit from Backbone.RelationalModel (%o)', this, i );
+ return false;
+ }
+ // Check if the type in 'relatedModel' inherits from Backbone.RelationalModel
+ if ( !( rm.prototype instanceof Backbone.RelationalModel.prototype.constructor ) ) {
+ warn && console.warn( 'Relation=%o; relatedModel does not inherit from Backbone.RelationalModel (%o)', this, rm );
+ return false;
+ }
+ // Check if this is not a HasMany, and the reverse relation is HasMany as well
+ if ( this instanceof Backbone.HasMany && this.reverseRelation.type === Backbone.HasMany.prototype.constructor ) {
+ warn && console.warn( 'Relation=%o; relation is a HasMany, and the reverseRelation is HasMany as well.', this );
+ return false;
+ }
+
+ // Check if we're not attempting to create a duplicate relationship
+ if( i && i._relations.length ) {
+ var exists = _.any( i._relations, function( rel ) {
+ var hasReverseRelation = this.reverseRelation.key && rel.reverseRelation.key;
+ return rel.relatedModel === rm && rel.key === k &&
+ ( !hasReverseRelation || this.reverseRelation.key === rel.reverseRelation.key );
+ }, this );
+
+ if ( exists ) {
+ warn && console.warn( 'Relation=%o between instance=%o.%s and relatedModel=%o.%s already exists',
+ this, i, k, rm, this.reverseRelation.key );
+ return false;
+ }
+ }
+
+ return true;
+ },
+
+ /**
+ * Set the related model(s) for this relation
+ * @param {Backbone.Model|Backbone.Collection} related
+ * @param {Object} [options]
+ */
+ setRelated: function( related, options ) {
+ this.related = related;
+
+ this.instance.acquire();
+ this.instance.set( this.key, related, _.defaults( options || {}, { silent: true } ) );
+ this.instance.release();
+ },
+
+ createModel: function( item ) {
+ if ( this.options.createModels && typeof( item ) === 'object' ) {
+ return new this.relatedModel( item );
+ }
+ },
+
+ /**
+ * Determine if a relation (on a different RelationalModel) is the reverse
+ * relation of the current one.
+ * @param {Backbone.Relation} relation
+ * @return {Boolean}
+ */
+ _isReverseRelation: function( relation ) {
+ if ( relation.instance instanceof this.relatedModel && this.reverseRelation.key === relation.key &&
+ this.key === relation.reverseRelation.key ) {
+ return true;
+ }
+ return false;
+ },
+
+ /**
+ * Get the reverse relations (pointing back to 'this.key' on 'this.instance') for the currently related model(s).
+ * @param {Backbone.RelationalModel} [model] Get the reverse relations for a specific model.
+ * If not specified, 'this.related' is used.
+ * @return {Backbone.Relation[]}
+ */
+ getReverseRelations: function( model ) {
+ var reverseRelations = [];
+ // Iterate over 'model', 'this.related.models' (if this.related is a Backbone.Collection), or wrap 'this.related' in an array.
+ var models = !_.isUndefined( model ) ? [ model ] : this.related && ( this.related.models || [ this.related ] );
+ _.each( models , function( related ) {
+ _.each( related.getRelations(), function( relation ) {
+ if ( this._isReverseRelation( relation ) ) {
+ reverseRelations.push( relation );
+ }
+ }, this );
+ }, this );
+
+ return reverseRelations;
+ },
+
+ /**
+ * Rename options.silent to options.silentChange, so events propagate properly.
+ * (for example in HasMany, from 'addRelated'->'handleAddition')
+ * @param {Object} [options]
+ * @return {Object}
+ */
+ sanitizeOptions: function( options ) {
+ options = options ? _.clone( options ) : {};
+ if ( options.silent ) {
+ options = _.extend( {}, options, { silentChange: true } );
+ delete options.silent;
+ }
+ return options;
+ },
+
+ /**
+ * Rename options.silentChange to options.silent, so events are silenced as intended in Backbone's
+ * original functions.
+ * @param {Object} [options]
+ * @return {Object}
+ */
+ unsanitizeOptions: function( options ) {
+ options = options ? _.clone( options ) : {};
+ if ( options.silentChange ) {
+ options = _.extend( {}, options, { silent: true } );
+ delete options.silentChange;
+ }
+ return options;
+ },
+
+ // Cleanup. Get reverse relation, call removeRelated on each.
+ destroy: function() {
+ Backbone.Relational.store.getCollection( this.instance )
+ .unbind( 'relational:remove', this._modelRemovedFromCollection );
+
+ Backbone.Relational.store.getCollection( this.relatedModel )
+ .unbind( 'relational:add', this._relatedModelAdded )
+ .unbind( 'relational:remove', this._relatedModelRemoved );
+
+ _.each( this.getReverseRelations(), function( relation ) {
+ relation.removeRelated( this.instance );
+ }, this );
+ }
+ });
+
+ Backbone.HasOne = Backbone.Relation.extend({
+ options: {
+ reverseRelation: { type: 'HasMany' }
+ },
+
+ initialize: function() {
+ _.bindAll( this, 'onChange' );
+
+ this.instance.bind( 'relational:change:' + this.key, this.onChange );
+
+ var model = this.findRelated( { silent: true } );
+ this.setRelated( model );
+
+ // Notify new 'related' object of the new relation.
+ var dit = this;
+ _.each( dit.getReverseRelations(), function( relation ) {
+ relation.addRelated( dit.instance );
+ } );
+ },
+
+ findRelated: function( options ) {
+ var item = this.keyContents;
+ var model = null;
+
+ if ( item instanceof this.relatedModel ) {
+ model = item;
+ }
+ else if ( item ) {
+ // Try to find an instance of the appropriate 'relatedModel' in the store, or create it
+ model = Backbone.Relational.store.find( this.relatedModel, item );
+
+ if ( model && _.isObject( item ) ) {
+ model.set( item, options );
+ }
+ else if ( !model ) {
+ model = this.createModel( item );
+ }
+ }
+
+ return model;
+ },
+
+ /**
+ * If the key is changed, notify old & new reverse relations and initialize the new relation
+ */
+ onChange: function( model, attr, options ) {
+ // Don't accept recursive calls to onChange (like onChange->findRelated->createModel->initializeRelations->addRelated->onChange)
+ if ( this.isLocked() ) {
+ return;
+ }
+ this.acquire();
+ options = this.sanitizeOptions( options );
+
+ // 'options._related' is set by 'addRelated'/'removeRelated'. If it is set, the change
+ // is the result of a call from a relation. If it's not, the change is the result of
+ // a 'set' call on this.instance.
+ var changed = _.isUndefined( options._related );
+ var oldRelated = changed ? this.related : options._related;
+
+ if ( changed ) {
+ this.keyContents = attr;
+
+ // Set new 'related'
+ if ( attr instanceof this.relatedModel ) {
+ this.related = attr;
+ }
+ else if ( attr ) {
+ var related = this.findRelated( options );
+ this.setRelated( related );
+ }
+ else {
+ this.setRelated( null );
+ }
+ }
+
+ // Notify old 'related' object of the terminated relation
+ if ( oldRelated && this.related !== oldRelated ) {
+ _.each( this.getReverseRelations( oldRelated ), function( relation ) {
+ relation.removeRelated( this.instance, options );
+ }, this );
+ }
+
+ // Notify new 'related' object of the new relation. Note we do re-apply even if this.related is oldRelated;
+ // that can be necessary for bi-directional relations if 'this.instance' was created after 'this.related'.
+ // In that case, 'this.instance' will already know 'this.related', but the reverse might not exist yet.
+ _.each( this.getReverseRelations(), function( relation ) {
+ relation.addRelated( this.instance, options );
+ }, this);
+
+ // Fire the 'update:<key>' event if 'related' was updated
+ if ( !options.silentChange && this.related !== oldRelated ) {
+ var dit = this;
+ Backbone.Relational.eventQueue.add( function() {
+ dit.instance.trigger( 'update:' + dit.key, dit.instance, dit.related, options );
+ });
+ }
+ this.release();
+ },
+
+ /**
+ * If a new 'this.relatedModel' appears in the 'store', try to match it to the last set 'keyContents'
+ */
+ tryAddRelated: function( model, options ) {
+ if ( this.related ) {
+ return;
+ }
+ options = this.sanitizeOptions( options );
+
+ var item = this.keyContents;
+ if ( item ) {
+ var id = Backbone.Relational.store.resolveIdForItem( this.relatedModel, item );
+ if ( model.id === id ) {
+ this.addRelated( model, options );
+ }
+ }
+ },
+
+ addRelated: function( model, options ) {
+ if ( model !== this.related ) {
+ var oldRelated = this.related || null;
+ this.setRelated( model );
+ this.onChange( this.instance, model, { _related: oldRelated } );
+ }
+ },
+
+ removeRelated: function( model, options ) {
+ if ( !this.related ) {
+ return;
+ }
+
+ if ( model === this.related ) {
+ var oldRelated = this.related || null;
+ this.setRelated( null );
+ this.onChange( this.instance, model, { _related: oldRelated } );
+ }
+ }
+ });
+
+ Backbone.HasMany = Backbone.Relation.extend({
+ collectionType: null,
+
+ options: {
+ reverseRelation: { type: 'HasOne' },
+ collectionType: Backbone.Collection,
+ collectionKey: true,
+ collectionOptions: {}
+ },
+
+ initialize: function() {
+ _.bindAll( this, 'onChange', 'handleAddition', 'handleRemoval', 'handleReset' );
+ this.instance.bind( 'relational:change:' + this.key, this.onChange );
+
+ // Handle a custom 'collectionType'
+ this.collectionType = this.options.collectionType;
+ if ( _( this.collectionType ).isString() ) {
+ this.collectionType = Backbone.Relational.store.getObjectByName( this.collectionType );
+ }
+ if ( !this.collectionType.prototype instanceof Backbone.Collection.prototype.constructor ){
+ throw new Error( 'collectionType must inherit from Backbone.Collection' );
+ }
+
+ // Handle cases where a model/relation is created with a collection passed straight into 'attributes'
+ if ( this.keyContents instanceof Backbone.Collection ) {
+ this.setRelated( this._prepareCollection( this.keyContents ) );
+ }
+ else {
+ this.setRelated( this._prepareCollection() );
+ }
+
+ this.findRelated( { silent: true } );
+ },
+
+ _getCollectionOptions: function() {
+ return _.isFunction( this.options.collectionOptions ) ?
+ this.options.collectionOptions( this.instance ) :
+ this.options.collectionOptions;
+ },
+
+ /**
+ * Bind events and setup collectionKeys for a collection that is to be used as the backing store for a HasMany.
+ * If no 'collection' is supplied, a new collection will be created of the specified 'collectionType' option.
+ * @param {Backbone.Collection} [collection]
+ */
+ _prepareCollection: function( collection ) {
+ if ( this.related ) {
+ this.related
+ .unbind( 'relational:add', this.handleAddition )
+ .unbind( 'relational:remove', this.handleRemoval )
+ .unbind( 'relational:reset', this.handleReset )
+ }
+
+ if ( !collection || !( collection instanceof Backbone.Collection ) ) {
+ collection = new this.collectionType( [], this._getCollectionOptions() );
+ }
+
+ collection.model = this.relatedModel;
+
+ if ( this.options.collectionKey ) {
+ var key = this.options.collectionKey === true ? this.options.reverseRelation.key : this.options.collectionKey;
+
+ if (collection[ key ] && collection[ key ] !== this.instance ) {
+ if ( Backbone.Relational.showWarnings && typeof console !== 'undefined' ) {
+ console.warn( 'Relation=%o; collectionKey=%s already exists on collection=%o', this, key, this.options.collectionKey );
+ }
+ }
+ else if (key) {
+ collection[ key ] = this.instance;
+ }
+ }
+
+ collection
+ .bind( 'relational:add', this.handleAddition )
+ .bind( 'relational:remove', this.handleRemoval )
+ .bind( 'relational:reset', this.handleReset );
+
+ return collection;
+ },
+
+ findRelated: function( options ) {
+ if ( this.keyContents ) {
+ var models = [];
+
+ if ( this.keyContents instanceof Backbone.Collection ) {
+ models = this.keyContents.models;
+ }
+ else {
+ // Handle cases where an API/user supplies just an Object/id instead of an Array
+ this.keyContents = _.isArray( this.keyContents ) ? this.keyContents : [ this.keyContents ];
+
+ // Try to find instances of the appropriate 'relatedModel' in the store
+ _.each( this.keyContents, function( item ) {
+ var model = Backbone.Relational.store.find( this.relatedModel, item );
+
+ if ( model && _.isObject( item ) ) {
+ model.set( item, options );
+ }
+ else if ( !model ) {
+ model = this.createModel( item );
+ }
+
+ if ( model && !this.related.getByCid( model ) && !this.related.get( model ) ) {
+ models.push( model );
+ }
+ }, this );
+ }
+
+ // Add all found 'models' in one go, so 'add' will only be called once (and thus 'sort', etc.)
+ if ( models.length ) {
+ options = this.unsanitizeOptions( options );
+ this.related.add( models, options );
+ }
+ }
+ },
+
+ /**
+ * If the key is changed, notify old & new reverse relations and initialize the new relation
+ */
+ onChange: function( model, attr, options ) {
+ options = this.sanitizeOptions( options );
+ this.keyContents = attr;
+
+ // Notify old 'related' object of the terminated relation
+ _.each( this.getReverseRelations(), function( relation ) {
+ relation.removeRelated( this.instance, options );
+ }, this );
+
+ // Replace 'this.related' by 'attr' if it is a Backbone.Collection
+ if ( attr instanceof Backbone.Collection ) {
+ this._prepareCollection( attr );
+ this.related = attr;
+ }
+ // Otherwise, 'attr' should be an array of related object ids.
+ // Re-use the current 'this.related' if it is a Backbone.Collection, and remove any current entries.
+ // Otherwise, create a new collection.
+ else {
+ var coll;
+
+ if ( this.related instanceof Backbone.Collection ) {
+ coll = this.related;
+ coll.reset( [], { silent: true } );
+ }
+ else {
+ coll = this._prepareCollection();
+ }
+
+ this.setRelated( coll );
+ this.findRelated( options );
+ }
+
+ // Notify new 'related' object of the new relation
+ _.each( this.getReverseRelations(), function( relation ) {
+ relation.addRelated( this.instance, options );
+ }, this );
+
+ var dit = this;
+ Backbone.Relational.eventQueue.add( function() {
+ !options.silentChange && dit.instance.trigger( 'update:' + dit.key, dit.instance, dit.related, options );
+ });
+ },
+
+ tryAddRelated: function( model, options ) {
+ options = this.sanitizeOptions( options );
+ if ( !this.related.getByCid( model ) && !this.related.get( model ) ) {
+ // Check if this new model was specified in 'this.keyContents'
+ var item = _.any( this.keyContents, function( item ) {
+ var id = Backbone.Relational.store.resolveIdForItem( this.relatedModel, item );
+ return id && id === model.id;
+ }, this );
+
+ if ( item ) {
+ this.related.add( model, options );
+ }
+ }
+ },
+
+ /**
+ * When a model is added to a 'HasMany', trigger 'add' on 'this.instance' and notify reverse relations.
+ * (should be 'HasOne', must set 'this.instance' as their related).
+ */
+ handleAddition: function( model, coll, options ) {
+ //console.debug('handleAddition called; args=%o', arguments);
+ // Make sure the model is in fact a valid model before continuing.
+ // (it can be invalid as a result of failing validation in Backbone.Collection._prepareModel)
+ if( !( model instanceof Backbone.Model ) ) {
+ return;
+ }
+
+ options = this.sanitizeOptions( options );
+
+ _.each( this.getReverseRelations( model ), function( relation ) {
+ relation.addRelated( this.instance, options );
+ }, this );
+
+ // Only trigger 'add' once the newly added model is initialized (so, has its relations set up)
+ var dit = this;
+ Backbone.Relational.eventQueue.add( function() {
+ !options.silentChange && dit.instance.trigger( 'add:' + dit.key, model, dit.related, options );
+ });
+ },
+
+ /**
+ * When a model is removed from a 'HasMany', trigger 'remove' on 'this.instance' and notify reverse relations.
+ * (should be 'HasOne', which should be nullified)
+ */
+ handleRemoval: function( model, coll, options ) {
+ //console.debug('handleRemoval called; args=%o', arguments);
+ if( !( model instanceof Backbone.Model ) ) {
+ return;
+ }
+
+ options = this.sanitizeOptions( options );
+
+ _.each( this.getReverseRelations( model ), function( relation ) {
+ relation.removeRelated( this.instance, options );
+ }, this );
+
+ var dit = this;
+ Backbone.Relational.eventQueue.add( function() {
+ !options.silentChange && dit.instance.trigger( 'remove:' + dit.key, model, dit.related, options );
+ });
+ },
+
+ handleReset: function( coll, options ) {
+ options = this.sanitizeOptions( options );
+
+ var dit = this;
+ Backbone.Relational.eventQueue.add( function() {
+ !options.silentChange && dit.instance.trigger( 'reset:' + dit.key, dit.related, options );
+ });
+ },
+
+ addRelated: function( model, options ) {
+ var dit = this;
+ options = this.unsanitizeOptions( options );
+ model.queue( function() { // Queued to avoid errors for adding 'model' to the 'this.related' set twice
+ if ( dit.related && !dit.related.getByCid( model ) && !dit.related.get( model ) ) {
+ dit.related.add( model, options );
+ }
+ });
+ },
+
+ removeRelated: function( model, options ) {
+ options = this.unsanitizeOptions( options );
+ if ( this.related.getByCid( model ) || this.related.get( model ) ) {
+ this.related.remove( model, options );
+ }
+ }
+ });
+
+ /**
+ * A type of Backbone.Model that also maintains relations to other models and collections.
+ * New events when compared to the original:
+ * - 'add:<key>' (model, related collection, options)
+ * - 'remove:<key>' (model, related collection, options)
+ * - 'update:<key>' (model, related model or collection, options)
+ */
+ Backbone.RelationalModel = Backbone.Model.extend({
+ relations: null, // Relation descriptions on the prototype
+ _relations: null, // Relation instances
+ _isInitialized: false,
+ _deferProcessing: false,
+ _queue: null,
+
+ constructor: function( attributes, options ) {
+ // Nasty hack, for cases like 'model.get( <HasMany key> ).add( item )'.
+ // Defer 'processQueue', so that when 'Relation.createModels' is used we:
+ // a) Survive 'Backbone.Collection.add'; this takes care we won't error on "can't add model to a set twice"
+ // (by creating a model from properties, having the model add itself to the collection via one of
+ // its relations, then trying to add it to the collection).
+ // b) Trigger 'HasMany' collection events only after the model is really fully set up.
+ // Example that triggers both a and b: "p.get('jobs').add( { company: c, person: p } )".
+ var dit = this;
+ if ( options && options.collection ) {
+ this._deferProcessing = true;
+
+ var processQueue = function( model ) {
+ if ( model === dit ) {
+ dit._deferProcessing = false;
+ dit.processQueue();
+ options.collection.unbind( 'relational:add', processQueue );
+ }
+ };
+ options.collection.bind( 'relational:add', processQueue );
+
+ // So we do process the queue eventually, regardless of whether this model really gets added to 'options.collection'.
+ _.defer( function() {
+ processQueue( dit );
+ });
+ }
+
+ this._queue = new Backbone.BlockingQueue();
+ this._queue.block();
+ Backbone.Relational.eventQueue.block();
+
+ Backbone.Model.prototype.constructor.apply( this, arguments );
+
+ // Try to run the global queue holding external events
+ Backbone.Relational.eventQueue.unblock();
+ },
+
+ /**
+ * Override 'trigger' to queue 'change' and 'change:*' events
+ */
+ trigger: function( eventName ) {
+ if ( eventName.length > 5 && 'change' === eventName.substr( 0, 6 ) ) {
+ var dit = this, args = arguments;
+ Backbone.Relational.eventQueue.add( function() {
+ Backbone.Model.prototype.trigger.apply( dit, args );
+ });
+ }
+ else {
+ Backbone.Model.prototype.trigger.apply( this, arguments );
+ }
+
+ return this;
+ },
+
+ /**
+ * Initialize Relations present in this.relations; determine the type (HasOne/HasMany), then creates a new instance.
+ * Invoked on the first call to 'set' (which is made from the Backbone.Model constructor).
+ */
+ initializeRelations: function() {
+ this.acquire(); // Setting up relations often also involve calls to 'set', and we only want to enter this function once
+ this._relations = [];
+
+ _.each( this.relations, function( rel ) {
+ var type = !_.isString( rel.type ) ? rel.type : Backbone[ rel.type ] || Backbone.Relational.store.getObjectByName( rel.type );
+ if ( type && type.prototype instanceof Backbone.Relation.prototype.constructor ) {
+ new type( this, rel ); // Also pushes the new Relation into _relations
+ }
+ else {
+ Backbone.Relational.showWarnings && typeof console !== 'undefined' && console.warn( 'Relation=%o; missing or invalid type!', rel );
+ }
+ }, this );
+
+ this._isInitialized = true;
+ this.release();
+ this.processQueue();
+ },
+
+ /**
+ * When new values are set, notify this model's relations (also if options.silent is set).
+ * (Relation.setRelated locks this model before calling 'set' on it to prevent loops)
+ */
+ updateRelations: function( options ) {
+ if( this._isInitialized && !this.isLocked() ) {
+ _.each( this._relations, function( rel ) {
+ var val = this.attributes[ rel.key ];
+ if ( rel.related !== val ) {
+ this.trigger('relational:change:' + rel.key, this, val, options || {} );
+ }
+ }, this );
+ }
+ },
+
+ /**
+ * Either add to the queue (if we're not initialized yet), or execute right away.
+ */
+ queue: function( func ) {
+ this._queue.add( func );
+ },
+
+ /**
+ * Process _queue
+ */
+ processQueue: function() {
+ if ( this._isInitialized && !this._deferProcessing && this._queue.isBlocked() ) {
+ this._queue.unblock();
+ }
+ },
+
+ /**
+ * Get a specific relation.
+ * @param key {string} The relation key to look for.
+ * @return {Backbone.Relation} An instance of 'Backbone.Relation', if a relation was found for 'key', or null.
+ */
+ getRelation: function( key ) {
+ return _.detect( this._relations, function( rel ) {
+ if ( rel.key === key ) {
+ return true;
+ }
+ }, this );
+ },
+
+ /**
+ * Get all of the created relations.
+ * @return {Backbone.Relation[]}
+ */
+ getRelations: function() {
+ return this._relations;
+ },
+
+ /**
+ * Retrieve related objects.
+ * @param key {string} The relation key to fetch models for.
+ * @param options {object} Options for 'Backbone.Model.fetch' and 'Backbone.sync'.
+ * @return {jQuery.when[]} An array of request objects
+ */
+ fetchRelated: function( key, options ) {
+ options || ( options = {} );
+ var setUrl,
+ requests = [],
+ rel = this.getRelation( key ),
+ keyContents = rel && rel.keyContents,
+ toFetch = keyContents && _.select( _.isArray( keyContents ) ? keyContents : [ keyContents ], function( item ) {
+ var id = Backbone.Relational.store.resolveIdForItem( rel.relatedModel, item );
+ return id && !Backbone.Relational.store.find( rel.relatedModel, id );
+ }, this );
+
+ if ( toFetch && toFetch.length ) {
+ // Create a model for each entry in 'keyContents' that is to be fetched
+ var models = _.map( toFetch, function( item ) {
+ var model;
+
+ if ( typeof( item ) === 'object' ) {
+ model = new rel.relatedModel( item );
+ }
+ else {
+ var attrs = {};
+ attrs[ rel.relatedModel.prototype.idAttribute ] = item;
+ model = new rel.relatedModel( attrs );
+ }
+
+ return model;
+ }, this );
+
+ // Try if the 'collection' can provide a url to fetch a set of models in one request.
+ if ( rel.related instanceof Backbone.Collection && _.isFunction( rel.related.url ) ) {
+ setUrl = rel.related.url( models );
+ }
+
+ // An assumption is that when 'Backbone.Collection.url' is a function, it can handle building of set urls.
+ // To make sure it can, test if the url we got by supplying a list of models to fetch is different from
+ // the one supplied for the default fetch action (without args to 'url').
+ if ( setUrl && setUrl !== rel.related.url() ) {
+ var opts = _.defaults(
+ {
+ error: function() {
+ var args = arguments;
+ _.each( models, function( model ) {
+ model.trigger( 'destroy', model, model.collection, options );
+ options.error && options.error.apply( model, args );
+ });
+ },
+ url: setUrl
+ },
+ options,
+ { add: true }
+ );
+
+ requests = [ rel.related.fetch( opts ) ];
+ }
+ else {
+ requests = _.map( models, function( model ) {
+ var opts = _.defaults(
+ {
+ error: function() {
+ model.trigger( 'destroy', model, model.collection, options );
+ options.error && options.error.apply( model, arguments );
+ }
+ },
+ options
+ );
+ return model.fetch( opts );
+ }, this );
+ }
+ }
+
+ return requests;
+ },
+
+ set: function( key, value, options ) {
+ Backbone.Relational.eventQueue.block();
+
+ // Duplicate backbone's behavior to allow separate key/value parameters, instead of a single 'attributes' object
+ var attributes;
+ if (_.isObject( key ) || key == null) {
+ attributes = key;
+ options = value;
+ }
+ else {
+ attributes = {};
+ attributes[ key ] = value;
+ }
+
+ var result = Backbone.Model.prototype.set.apply( this, arguments );
+
+ // 'set' is called quite late in 'Backbone.Model.prototype.constructor', but before 'initialize'.
+ // Ideal place to set up relations :)
+ if ( !this._isInitialized && !this.isLocked() ) {
+ Backbone.Relational.store.register( this );
+ this.initializeRelations();
+ }
+ // Update the 'idAttribute' in Backbone.store; we don't want it to miss an 'id' update due to {silent:true}
+ else if ( attributes && this.idAttribute in attributes ) {
+ Backbone.Relational.store.update( this );
+ }
+
+ if ( attributes ) {
+ this.updateRelations( options );
+ }
+
+ // Try to run the global queue holding external events
+ Backbone.Relational.eventQueue.unblock();
+
+ return result;
+ },
+
+ unset: function( attribute, options ) {
+ Backbone.Relational.eventQueue.block();
+
+ var result = Backbone.Model.prototype.unset.apply( this, arguments );
+ this.updateRelations( options );
+
+ // Try to run the global queue holding external events
+ Backbone.Relational.eventQueue.unblock();
+
+ return result;
+ },
+
+ clear: function( options ) {
+ Backbone.Relational.eventQueue.block();
+
+ var result = Backbone.Model.prototype.clear.apply( this, arguments );
+ this.updateRelations( options );
+
+ // Try to run the global queue holding external events
+ Backbone.Relational.eventQueue.unblock();
+
+ return result;
+ },
+
+ /**
+ * Override 'change', so the change will only execute after 'set' has finished (relations are updated),
+ * and 'previousAttributes' will be available when the event is fired.
+ */
+ change: function( options ) {
+ var dit = this, args = arguments;
+ Backbone.Relational.eventQueue.add( function() {
+ Backbone.Model.prototype.change.apply( dit, args );
+ });
+ },
+
+ clone: function() {
+ var attributes = _.clone( this.attributes );
+ if ( !_.isUndefined( attributes[ this.idAttribute ] ) ) {
+ attributes[ this.idAttribute ] = null;
+ }
+
+ _.each( this.getRelations(), function( rel ) {
+ delete attributes[ rel.key ];
+ });
+
+ return new this.constructor( attributes );
+ },
+
+ /**
+ * Convert relations to JSON, omits them when required
+ */
+ toJSON: function() {
+ // If this Model has already been fully serialized in this branch once, return to avoid loops
+ if ( this.isLocked() ) {
+ return this.id;
+ }
+
+ this.acquire();
+ var json = Backbone.Model.prototype.toJSON.call( this );
+
+ _.each( this._relations, function( rel ) {
+ var value = json[ rel.key ];
+
+ if ( rel.options.includeInJSON === true && value && _.isFunction( value.toJSON ) ) {
+ json[ rel.keyDestination ] = value.toJSON();
+ }
+ else if ( _.isString( rel.options.includeInJSON ) ) {
+ if ( value instanceof Backbone.Collection ) {
+ json[ rel.keyDestination ] = value.pluck( rel.options.includeInJSON );
+ }
+ else if ( value instanceof Backbone.Model ) {
+ json[ rel.keyDestination ] = value.get( rel.options.includeInJSON );
+ }
+ }
+ else {
+ delete json[ rel.key ];
+ }
+
+ if ( rel.keyDestination !== rel.key ) {
+ delete json[ rel.key ];
+ }
+ }, this );
+
+ this.release();
+ return json;
+ }
+ });
+ _.extend( Backbone.RelationalModel.prototype, Backbone.Semaphore );
+
+ /**
+ * Override Backbone.Collection.add, so objects fetched from the server multiple times will
+ * update the existing Model. Also, trigger 'relational:add'.
+ */
+ var add = Backbone.Collection.prototype.__add = Backbone.Collection.prototype.add;
+ Backbone.Collection.prototype.add = function( models, options ) {
+ options || (options = {});
+ if ( !_.isArray( models ) ) {
+ models = [ models ];
+ }
+
+ var modelsToAdd = [];
+
+ //console.debug( 'calling add on coll=%o; model=%o, options=%o', this, models, options );
+ _.each( models, function( model ) {
+ if ( !( model instanceof Backbone.Model ) ) {
+ // Try to find 'model' in Backbone.store. If it already exists, set the new properties on it.
+ var existingModel = Backbone.Relational.store.find( this.model, model[ this.model.prototype.idAttribute ] );
+ if ( existingModel ) {
+ existingModel.set( existingModel.parse ? existingModel.parse( model ) : model, options );
+ model = existingModel;
+ }
+ else {
+ model = Backbone.Collection.prototype._prepareModel.call( this, model, options );
+ }
+ }
+
+ if ( model instanceof Backbone.Model && !this.get( model ) && !this.getByCid( model ) ) {
+ modelsToAdd.push( model );
+ }
+ }, this );
+
+
+ // Add 'models' in a single batch, so the original add will only be called once (and thus 'sort', etc).
+ if ( modelsToAdd.length ) {
+ add.call( this, modelsToAdd, options );
+
+ _.each( modelsToAdd, function( model ) {
+ this.trigger('relational:add', model, this, options);
+ }, this );
+ }
+
+ return this;
+ };
+
+ /**
+ * Override 'Backbone.Collection.remove' to trigger 'relational:remove'.
+ */
+ var remove = Backbone.Collection.prototype.__remove = Backbone.Collection.prototype.remove;
+ Backbone.Collection.prototype.remove = function( models, options ) {
+ options || (options = {});
+ if (!_.isArray( models ) ) {
+ models = [ models ];
+ }
+
+ //console.debug('calling remove on coll=%o; models=%o, options=%o', this, models, options );
+ _.each( models, function( model ) {
+ model = this.getByCid( model ) || this.get( model );
+
+ if ( model instanceof Backbone.Model ) {
+ remove.call( this, model, options );
+ this.trigger('relational:remove', model, this, options);
+ }
+ }, this );
+
+ return this;
+ };
+
+ /**
+ * Override 'Backbone.Collection.reset' to trigger 'relational:reset'.
+ */
+ var reset = Backbone.Collection.prototype.__reset = Backbone.Collection.prototype.reset;
+ Backbone.Collection.prototype.reset = function( models, options ) {
+ reset.call( this, models, options );
+ this.trigger( 'relational:reset', models, options );
+
+ return this;
+ };
+
+ /**
+ * Override 'Backbone.Collection.trigger' so 'add', 'remove' and 'reset' events are queued until relations
+ * are ready.
+ */
+ var trigger = Backbone.Collection.prototype.__trigger = Backbone.Collection.prototype.trigger;
+ Backbone.Collection.prototype.trigger = function( eventName ) {
+ if ( eventName === 'add' || eventName === 'remove' || eventName === 'reset' ) {
+ var dit = this, args = arguments;
+ Backbone.Relational.eventQueue.add( function() {
+ trigger.apply( dit, args );
+ });
+ }
+ else {
+ trigger.apply( this, arguments );
+ }
+
+ return this;
+ };
+
+ // Override .extend() to check for reverseRelations to initialize.
+ Backbone.RelationalModel.extend = function( protoProps, classProps ) {
+ var child = Backbone.Model.extend.apply( this, arguments );
+
+ var relations = ( protoProps && protoProps.relations ) || [];
+ _.each( relations, function( rel ) {
+ if( rel.reverseRelation ) {
+ rel.model = child;
+
+ var preInitialize = true;
+ if ( _.isString( rel.relatedModel ) ) {
+ /**
+ * The related model might not be defined for two reasons
+ * 1. it never gets defined, e.g. a typo
+ * 2. it is related to itself
+ * In neither of these cases do we need to pre-initialize reverse relations.
+ */
+ var relatedModel = Backbone.Relational.store.getObjectByName( rel.relatedModel );
+ preInitialize = relatedModel && ( relatedModel.prototype instanceof Backbone.RelationalModel.prototype.constructor );
+ }
+
+ var type = !_.isString( rel.type ) ? rel.type : Backbone[ rel.type ] || Backbone.Relational.store.getObjectByName( rel.type );
+ if ( preInitialize && type && type.prototype instanceof Backbone.Relation.prototype.constructor ) {
+ new type( null, rel );
+ }
+ }
+ });
+
+ return child;
+ };
+
+})();
diff -r bf88ce609370a7829337dad758fafe388b3fd117 -r 0a957e499a3c9b7ba11f88d337e4878260d73d6d static/scripts/packed/libs/backbone-relational.js
--- /dev/null
+++ b/static/scripts/packed/libs/backbone-relational.js
@@ -0,0 +1,1 @@
+(function(g){var d,h,b;if(typeof window==="undefined"){d=require("underscore");h=require("backbone");b=module.exports=h}else{var d=window._;h=window.Backbone;b=window}h.Relational={showWarnings:true};h.Semaphore={_permitsAvailable:null,_permitsUsed:0,acquire:function(){if(this._permitsAvailable&&this._permitsUsed>=this._permitsAvailable){throw new Error("Max permits acquired")}else{this._permitsUsed++}},release:function(){if(this._permitsUsed===0){throw new Error("All permits released")}else{this._permitsUsed--}},isLocked:function(){return this._permitsUsed>0},setAvailablePermits:function(i){if(this._permitsUsed>i){throw new Error("Available permits cannot be less than used permits")}this._permitsAvailable=i}};h.BlockingQueue=function(){this._queue=[]};d.extend(h.BlockingQueue.prototype,h.Semaphore,{_queue:null,add:function(i){if(this.isBlocked()){this._queue.push(i)}else{i()}},process:function(){while(this._queue&&this._queue.length){this._queue.shift()()}},block:function(){this.acquire()},unblock:function(){this.release();if(!this.isBlocked()){this.process()}},isBlocked:function(){return this.isLocked()}});h.Relational.eventQueue=new h.BlockingQueue();h.Store=function(){this._collections=[];this._reverseRelations=[]};d.extend(h.Store.prototype,h.Events,{_collections:null,_reverseRelations:null,addReverseRelation:function(j){var i=d.any(this._reverseRelations,function(k){return d.all(j,function(m,l){return m===k[l]})});if(!i&&j.model&&j.type){this._reverseRelations.push(j);if(!j.model.prototype.relations){j.model.prototype.relations=[]}j.model.prototype.relations.push(j);this.retroFitRelation(j)}},retroFitRelation:function(j){var i=this.getCollection(j.model);i.each(function(k){new j.type(k,j)},this)},getCollection:function(i){var j=d.detect(this._collections,function(k){return i===k.model||i.constructor===k.model});if(!j){j=this._createCollection(i)}return j},getObjectByName:function(i){var j=d.reduce(i.split("."),function(k,l){return k[l]},b);return 
j!==b?j:null},_createCollection:function(j){var i;if(j instanceof h.RelationalModel){j=j.constructor}if(j.prototype instanceof h.RelationalModel.prototype.constructor){i=new h.Collection();i.model=j;this._collections.push(i)}return i},resolveIdForItem:function(i,j){var k=d.isString(j)||d.isNumber(j)?j:null;if(k==null){if(j instanceof h.RelationalModel){k=j.id}else{if(d.isObject(j)){k=j[i.prototype.idAttribute]}}}return k},find:function(j,k){var l=this.resolveIdForItem(j,k);var i=this.getCollection(j);return i&&i.get(l)},register:function(j){var i=j.collection;var k=this.getCollection(j);k&&k.add(j);j.bind("destroy",this.unregister,this);j.collection=i},update:function(i){var j=this.getCollection(i);j._onModelEvent("change:"+i.idAttribute,i,j)},unregister:function(i){i.unbind("destroy",this.unregister);var j=this.getCollection(i);j&&j.remove(i)}});h.Relational.store=new h.Store();h.Relation=function(i,j){this.instance=i;j=(typeof j==="object"&&j)||{};this.reverseRelation=d.defaults(j.reverseRelation||{},this.options.reverseRelation);this.reverseRelation.type=!d.isString(this.reverseRelation.type)?this.reverseRelation.type:h[this.reverseRelation.type]||h.Relational.store.getObjectByName(this.reverseRelation.type);this.model=j.model||this.instance.constructor;this.options=d.defaults(j,this.options,h.Relation.prototype.options);this.key=this.options.key;this.keySource=this.options.keySource||this.key;this.keyDestination=this.options.keyDestination||this.options.keySource||this.key;this.relatedModel=this.options.relatedModel;if(d.isString(this.relatedModel)){this.relatedModel=h.Relational.store.getObjectByName(this.relatedModel)}if(!this.checkPreconditions()){return 
false}if(i){this.keyContents=this.instance.get(this.keySource);if(this.key!==this.keySource){this.instance.unset(this.keySource,{silent:true})}this.instance._relations.push(this)}if(!this.options.isAutoRelation&&this.reverseRelation.type&&this.reverseRelation.key){h.Relational.store.addReverseRelation(d.defaults({isAutoRelation:true,model:this.relatedModel,relatedModel:this.model,reverseRelation:this.options},this.reverseRelation))}d.bindAll(this,"_modelRemovedFromCollection","_relatedModelAdded","_relatedModelRemoved");if(i){this.initialize();h.Relational.store.getCollection(this.instance).bind("relational:remove",this._modelRemovedFromCollection);h.Relational.store.getCollection(this.relatedModel).bind("relational:add",this._relatedModelAdded).bind("relational:remove",this._relatedModelRemoved)}};h.Relation.extend=h.Model.extend;d.extend(h.Relation.prototype,h.Events,h.Semaphore,{options:{createModels:true,includeInJSON:true,isAutoRelation:false},instance:null,key:null,keyContents:null,relatedModel:null,reverseRelation:null,related:null,_relatedModelAdded:function(k,l,j){var i=this;k.queue(function(){i.tryAddRelated(k,j)})},_relatedModelRemoved:function(j,k,i){this.removeRelated(j,i)},_modelRemovedFromCollection:function(i){if(i===this.instance){this.destroy()}},checkPreconditions:function(){var n=this.instance,l=this.key,j=this.model,p=this.relatedModel,q=h.Relational.showWarnings&&typeof console!=="undefined";if(!j||!l||!p){q&&console.warn("Relation=%o; no model, key or relatedModel (%o, %o, %o)",this,j,l,p);return false}if(!(j.prototype instanceof h.RelationalModel.prototype.constructor)){q&&console.warn("Relation=%o; model does not inherit from Backbone.RelationalModel (%o)",this,n);return false}if(!(p.prototype instanceof h.RelationalModel.prototype.constructor)){q&&console.warn("Relation=%o; relatedModel does not inherit from Backbone.RelationalModel (%o)",this,p);return false}if(this instanceof 
h.HasMany&&this.reverseRelation.type===h.HasMany.prototype.constructor){q&&console.warn("Relation=%o; relation is a HasMany, and the reverseRelation is HasMany as well.",this);return false}if(n&&n._relations.length){var o=d.any(n._relations,function(i){var k=this.reverseRelation.key&&i.reverseRelation.key;return i.relatedModel===p&&i.key===l&&(!k||this.reverseRelation.key===i.reverseRelation.key)},this);if(o){q&&console.warn("Relation=%o between instance=%o.%s and relatedModel=%o.%s already exists",this,n,l,p,this.reverseRelation.key);return false}}return true},setRelated:function(j,i){this.related=j;this.instance.acquire();this.instance.set(this.key,j,d.defaults(i||{},{silent:true}));this.instance.release()},createModel:function(i){if(this.options.createModels&&typeof(i)==="object"){return new this.relatedModel(i)}},_isReverseRelation:function(i){if(i.instance instanceof this.relatedModel&&this.reverseRelation.key===i.key&&this.key===i.reverseRelation.key){return true}return false},getReverseRelations:function(i){var j=[];var k=!d.isUndefined(i)?[i]:this.related&&(this.related.models||[this.related]);d.each(k,function(l){d.each(l.getRelations(),function(m){if(this._isReverseRelation(m)){j.push(m)}},this)},this);return j},sanitizeOptions:function(i){i=i?d.clone(i):{};if(i.silent){i=d.extend({},i,{silentChange:true});delete i.silent}return i},unsanitizeOptions:function(i){i=i?d.clone(i):{};if(i.silentChange){i=d.extend({},i,{silent:true});delete i.silentChange}return 
i},destroy:function(){h.Relational.store.getCollection(this.instance).unbind("relational:remove",this._modelRemovedFromCollection);h.Relational.store.getCollection(this.relatedModel).unbind("relational:add",this._relatedModelAdded).unbind("relational:remove",this._relatedModelRemoved);d.each(this.getReverseRelations(),function(i){i.removeRelated(this.instance)},this)}});h.HasOne=h.Relation.extend({options:{reverseRelation:{type:"HasMany"}},initialize:function(){d.bindAll(this,"onChange");this.instance.bind("relational:change:"+this.key,this.onChange);var j=this.findRelated({silent:true});this.setRelated(j);var i=this;d.each(i.getReverseRelations(),function(k){k.addRelated(i.instance)})},findRelated:function(j){var k=this.keyContents;var i=null;if(k instanceof this.relatedModel){i=k}else{if(k){i=h.Relational.store.find(this.relatedModel,k);if(i&&d.isObject(k)){i.set(k,j)}else{if(!i){i=this.createModel(k)}}}}return i},onChange:function(l,i,k){if(this.isLocked()){return}this.acquire();k=this.sanitizeOptions(k);var o=d.isUndefined(k._related);var m=o?this.related:k._related;if(o){this.keyContents=i;if(i instanceof this.relatedModel){this.related=i}else{if(i){var n=this.findRelated(k);this.setRelated(n)}else{this.setRelated(null)}}}if(m&&this.related!==m){d.each(this.getReverseRelations(m),function(p){p.removeRelated(this.instance,k)},this)}d.each(this.getReverseRelations(),function(p){p.addRelated(this.instance,k)},this);if(!k.silentChange&&this.related!==m){var j=this;h.Relational.eventQueue.add(function(){j.instance.trigger("update:"+j.key,j.instance,j.related,k)})}this.release()},tryAddRelated:function(j,i){if(this.related){return}i=this.sanitizeOptions(i);var k=this.keyContents;if(k){var l=h.Relational.store.resolveIdForItem(this.relatedModel,k);if(j.id===l){this.addRelated(j,i)}}},addRelated:function(j,i){if(j!==this.related){var 
k=this.related||null;this.setRelated(j);this.onChange(this.instance,j,{_related:k})}},removeRelated:function(j,i){if(!this.related){return}if(j===this.related){var k=this.related||null;this.setRelated(null);this.onChange(this.instance,j,{_related:k})}}});h.HasMany=h.Relation.extend({collectionType:null,options:{reverseRelation:{type:"HasOne"},collectionType:h.Collection,collectionKey:true,collectionOptions:{}},initialize:function(){d.bindAll(this,"onChange","handleAddition","handleRemoval","handleReset");this.instance.bind("relational:change:"+this.key,this.onChange);this.collectionType=this.options.collectionType;if(d(this.collectionType).isString()){this.collectionType=h.Relational.store.getObjectByName(this.collectionType)}if(!this.collectionType.prototype instanceof h.Collection.prototype.constructor){throw new Error("collectionType must inherit from Backbone.Collection")}if(this.keyContents instanceof h.Collection){this.setRelated(this._prepareCollection(this.keyContents))}else{this.setRelated(this._prepareCollection())}this.findRelated({silent:true})},_getCollectionOptions:function(){return d.isFunction(this.options.collectionOptions)?this.options.collectionOptions(this.instance):this.options.collectionOptions},_prepareCollection:function(j){if(this.related){this.related.unbind("relational:add",this.handleAddition).unbind("relational:remove",this.handleRemoval).unbind("relational:reset",this.handleReset)}if(!j||!(j instanceof h.Collection)){j=new this.collectionType([],this._getCollectionOptions())}j.model=this.relatedModel;if(this.options.collectionKey){var i=this.options.collectionKey===true?this.options.reverseRelation.key:this.options.collectionKey;if(j[i]&&j[i]!==this.instance){if(h.Relational.showWarnings&&typeof console!=="undefined"){console.warn("Relation=%o; collectionKey=%s already exists on 
collection=%o",this,i,this.options.collectionKey)}}else{if(i){j[i]=this.instance}}}j.bind("relational:add",this.handleAddition).bind("relational:remove",this.handleRemoval).bind("relational:reset",this.handleReset);return j},findRelated:function(i){if(this.keyContents){var j=[];if(this.keyContents instanceof h.Collection){j=this.keyContents.models}else{this.keyContents=d.isArray(this.keyContents)?this.keyContents:[this.keyContents];d.each(this.keyContents,function(l){var k=h.Relational.store.find(this.relatedModel,l);if(k&&d.isObject(l)){k.set(l,i)}else{if(!k){k=this.createModel(l)}}if(k&&!this.related.getByCid(k)&&!this.related.get(k)){j.push(k)}},this)}if(j.length){i=this.unsanitizeOptions(i);this.related.add(j,i)}}},onChange:function(l,i,k){k=this.sanitizeOptions(k);this.keyContents=i;d.each(this.getReverseRelations(),function(n){n.removeRelated(this.instance,k)},this);if(i instanceof h.Collection){this._prepareCollection(i);this.related=i}else{var m;if(this.related instanceof h.Collection){m=this.related;m.reset([],{silent:true})}else{m=this._prepareCollection()}this.setRelated(m);this.findRelated(k)}d.each(this.getReverseRelations(),function(n){n.addRelated(this.instance,k)},this);var j=this;h.Relational.eventQueue.add(function(){!k.silentChange&&j.instance.trigger("update:"+j.key,j.instance,j.related,k)})},tryAddRelated:function(j,i){i=this.sanitizeOptions(i);if(!this.related.getByCid(j)&&!this.related.get(j)){var k=d.any(this.keyContents,function(l){var m=h.Relational.store.resolveIdForItem(this.relatedModel,l);return m&&m===j.id},this);if(k){this.related.add(j,i)}}},handleAddition:function(k,l,j){if(!(k instanceof h.Model)){return}j=this.sanitizeOptions(j);d.each(this.getReverseRelations(k),function(m){m.addRelated(this.instance,j)},this);var i=this;h.Relational.eventQueue.add(function(){!j.silentChange&&i.instance.trigger("add:"+i.key,k,i.related,j)})},handleRemoval:function(k,l,j){if(!(k instanceof 
h.Model)){return}j=this.sanitizeOptions(j);d.each(this.getReverseRelations(k),function(m){m.removeRelated(this.instance,j)},this);var i=this;h.Relational.eventQueue.add(function(){!j.silentChange&&i.instance.trigger("remove:"+i.key,k,i.related,j)})},handleReset:function(k,j){j=this.sanitizeOptions(j);var i=this;h.Relational.eventQueue.add(function(){!j.silentChange&&i.instance.trigger("reset:"+i.key,i.related,j)})},addRelated:function(k,j){var i=this;j=this.unsanitizeOptions(j);k.queue(function(){if(i.related&&!i.related.getByCid(k)&&!i.related.get(k)){i.related.add(k,j)}})},removeRelated:function(j,i){i=this.unsanitizeOptions(i);if(this.related.getByCid(j)||this.related.get(j)){this.related.remove(j,i)}}});h.RelationalModel=h.Model.extend({relations:null,_relations:null,_isInitialized:false,_deferProcessing:false,_queue:null,constructor:function(j,k){var i=this;if(k&&k.collection){this._deferProcessing=true;var l=function(m){if(m===i){i._deferProcessing=false;i.processQueue();k.collection.unbind("relational:add",l)}};k.collection.bind("relational:add",l);d.defer(function(){l(i)})}this._queue=new h.BlockingQueue();this._queue.block();h.Relational.eventQueue.block();h.Model.prototype.constructor.apply(this,arguments);h.Relational.eventQueue.unblock()},trigger:function(j){if(j.length>5&&"change"===j.substr(0,6)){var i=this,k=arguments;h.Relational.eventQueue.add(function(){h.Model.prototype.trigger.apply(i,k)})}else{h.Model.prototype.trigger.apply(this,arguments)}return this},initializeRelations:function(){this.acquire();this._relations=[];d.each(this.relations,function(i){var j=!d.isString(i.type)?i.type:h[i.type]||h.Relational.store.getObjectByName(i.type);if(j&&j.prototype instanceof h.Relation.prototype.constructor){new j(this,i)}else{h.Relational.showWarnings&&typeof console!=="undefined"&&console.warn("Relation=%o; missing or invalid 
type!",i)}},this);this._isInitialized=true;this.release();this.processQueue()},updateRelations:function(i){if(this._isInitialized&&!this.isLocked()){d.each(this._relations,function(j){var k=this.attributes[j.key];if(j.related!==k){this.trigger("relational:change:"+j.key,this,k,i||{})}},this)}},queue:function(i){this._queue.add(i)},processQueue:function(){if(this._isInitialized&&!this._deferProcessing&&this._queue.isBlocked()){this._queue.unblock()}},getRelation:function(i){return d.detect(this._relations,function(j){if(j.key===i){return true}},this)},getRelations:function(){return this._relations},fetchRelated:function(n,p){p||(p={});var l,j=[],o=this.getRelation(n),q=o&&o.keyContents,m=q&&d.select(d.isArray(q)?q:[q],function(r){var s=h.Relational.store.resolveIdForItem(o.relatedModel,r);return s&&!h.Relational.store.find(o.relatedModel,s)},this);if(m&&m.length){var k=d.map(m,function(t){var s;if(typeof(t)==="object"){s=new o.relatedModel(t)}else{var r={};r[o.relatedModel.prototype.idAttribute]=t;s=new o.relatedModel(r)}return s},this);if(o.related instanceof h.Collection&&d.isFunction(o.related.url)){l=o.related.url(k)}if(l&&l!==o.related.url()){var i=d.defaults({error:function(){var r=arguments;d.each(k,function(s){s.trigger("destroy",s,s.collection,p);p.error&&p.error.apply(s,r)})},url:l},p,{add:true});j=[o.related.fetch(i)]}else{j=d.map(k,function(r){var s=d.defaults({error:function(){r.trigger("destroy",r,r.collection,p);p.error&&p.error.apply(r,arguments)}},p);return r.fetch(s)},this)}}return j},set:function(l,m,k){h.Relational.eventQueue.block();var j;if(d.isObject(l)||l==null){j=l;k=m}else{j={};j[l]=m}var i=h.Model.prototype.set.apply(this,arguments);if(!this._isInitialized&&!this.isLocked()){h.Relational.store.register(this);this.initializeRelations()}else{if(j&&this.idAttribute in j){h.Relational.store.update(this)}}if(j){this.updateRelations(k)}h.Relational.eventQueue.unblock();return i},unset:function(k,j){h.Relational.eventQueue.block();var 
i=h.Model.prototype.unset.apply(this,arguments);this.updateRelations(j);h.Relational.eventQueue.unblock();return i},clear:function(j){h.Relational.eventQueue.block();var i=h.Model.prototype.clear.apply(this,arguments);this.updateRelations(j);h.Relational.eventQueue.unblock();return i},change:function(k){var i=this,j=arguments;h.Relational.eventQueue.add(function(){h.Model.prototype.change.apply(i,j)})},clone:function(){var i=d.clone(this.attributes);if(!d.isUndefined(i[this.idAttribute])){i[this.idAttribute]=null}d.each(this.getRelations(),function(j){delete i[j.key]});return new this.constructor(i)},toJSON:function(){if(this.isLocked()){return this.id}this.acquire();var i=h.Model.prototype.toJSON.call(this);d.each(this._relations,function(j){var k=i[j.key];if(j.options.includeInJSON===true&&k&&d.isFunction(k.toJSON)){i[j.keyDestination]=k.toJSON()}else{if(d.isString(j.options.includeInJSON)){if(k instanceof h.Collection){i[j.keyDestination]=k.pluck(j.options.includeInJSON)}else{if(k instanceof h.Model){i[j.keyDestination]=k.get(j.options.includeInJSON)}}}else{delete i[j.key]}}if(j.keyDestination!==j.key){delete i[j.key]}},this);this.release();return i}});d.extend(h.RelationalModel.prototype,h.Semaphore);var f=h.Collection.prototype.__add=h.Collection.prototype.add;h.Collection.prototype.add=function(k,i){i||(i={});if(!d.isArray(k)){k=[k]}var j=[];d.each(k,function(m){if(!(m instanceof h.Model)){var l=h.Relational.store.find(this.model,m[this.model.prototype.idAttribute]);if(l){l.set(l.parse?l.parse(m):m,i);m=l}else{m=h.Collection.prototype._prepareModel.call(this,m,i)}}if(m instanceof h.Model&&!this.get(m)&&!this.getByCid(m)){j.push(m)}},this);if(j.length){f.call(this,j,i);d.each(j,function(l){this.trigger("relational:add",l,this,i)},this)}return this};var a=h.Collection.prototype.__remove=h.Collection.prototype.remove;h.Collection.prototype.remove=function(j,i){i||(i={});if(!d.isArray(j)){j=[j]}d.each(j,function(k){k=this.getByCid(k)||this.get(k);if(k instanceof 
h.Model){a.call(this,k,i);this.trigger("relational:remove",k,this,i)}},this);return this};var e=h.Collection.prototype.__reset=h.Collection.prototype.reset;h.Collection.prototype.reset=function(j,i){e.call(this,j,i);this.trigger("relational:reset",j,i);return this};var c=h.Collection.prototype.__trigger=h.Collection.prototype.trigger;h.Collection.prototype.trigger=function(j){if(j==="add"||j==="remove"||j==="reset"){var i=this,k=arguments;h.Relational.eventQueue.add(function(){c.apply(i,k)})}else{c.apply(this,arguments)}return this};h.RelationalModel.extend=function(j,k){var l=h.Model.extend.apply(this,arguments);var i=(j&&j.relations)||[];d.each(i,function(m){if(m.reverseRelation){m.model=l;var o=true;if(d.isString(m.relatedModel)){var n=h.Relational.store.getObjectByName(m.relatedModel);o=n&&(n.prototype instanceof h.RelationalModel.prototype.constructor)}var p=!d.isString(m.type)?m.type:h[m.type]||h.Relational.store.getObjectByName(m.type);if(o&&p&&p.prototype instanceof h.Relation.prototype.constructor){new p(null,m)}}});return l}})();
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/changeset/423f4f2910ac/
changeset: 423f4f2910ac
user: jgoecks
date: 2012-04-11 16:32:02
summary: Add Backbone-relational to base templates
affected #: 2 files
diff -r 0a957e499a3c9b7ba11f88d337e4878260d73d6d -r 423f4f2910ac90098e799d354a2936513242a21e templates/base.mako
--- a/templates/base.mako
+++ b/templates/base.mako
@@ -26,7 +26,7 @@
## <!--[if lt IE 7]>
## <script type='text/javascript' src="/static/scripts/IE7.js"></script>
## <![endif]-->
- ${h.js( "jquery", "galaxy.base", "libs/underscore", "libs/backbone", "libs/handlebars.runtime", "backbone/ui" )}
+ ${h.js( "jquery", "galaxy.base", "libs/underscore", "libs/backbone", "libs/backbone-relational", "libs/handlebars.runtime", "backbone/ui" )}
<script type="text/javascript">
// Set up needed paths.
var galaxy_paths = new GalaxyPaths({
diff -r 0a957e499a3c9b7ba11f88d337e4878260d73d6d -r 423f4f2910ac90098e799d354a2936513242a21e templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -47,7 +47,7 @@
<!--[if lt IE 7]>
${h.js( 'IE7', 'ie7-recalc' )}
<![endif]-->
- ${h.js( 'jquery', 'libs/underscore', 'libs/backbone', 'libs/handlebars.runtime', 'backbone/ui' )}
+ ${h.js( 'jquery', 'libs/underscore', 'libs/backbone', 'libs/backbone-relational', 'libs/handlebars.runtime', 'backbone/ui' )}
<script type="text/javascript">
// Set up needed paths.
var galaxy_paths = new GalaxyPaths({
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the notification service enabled for the
repository addressed to the recipient of this email.
1
0

commit/galaxy-central: dannon: Basic functionality for accessing the API through session (if available) instead of via API key. Needs refactoring to combine _ensure_valid_session and transaction types.
by Bitbucket 11 Apr '12
by Bitbucket 11 Apr '12
11 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bf88ce609370/
changeset: bf88ce609370
user: dannon
date: 2012-04-11 16:00:16
summary: Basic functionality for accessing the API through session (if available) instead of via API key. Needs refactoring to combine _ensure_valid_session and transaction types.
affected #: 4 files
diff -r 3f12abbd23941683e44b34a21f0cf82d14bf02ad -r bf88ce609370a7829337dad758fafe388b3fd117 lib/galaxy/web/api/histories.py
--- a/lib/galaxy/web/api/histories.py
+++ b/lib/galaxy/web/api/histories.py
@@ -20,28 +20,27 @@
GET /api/histories
GET /api/histories/deleted
Displays a collection (list) of histories.
- """
+ """
rval = []
deleted = util.string_as_bool( deleted )
-
try:
- query = trans.sa_session.query( trans.app.model.History ).filter_by( user=trans.user, deleted=deleted ).order_by(
- desc(trans.app.model.History.table.c.update_time)).all()
+ if trans.user:
+ query = trans.sa_session.query(trans.app.model.History ).filter_by( user=trans.user, deleted=deleted ).order_by(
+ desc(trans.app.model.History.table.c.update_time)).all()
+ for history in query:
+ item = history.get_api_value(value_mapper={'id':trans.security.encode_id})
+ item['url'] = url_for( 'history', id=trans.security.encode_id( history.id ) )
+ rval.append( item )
+ elif trans.galaxy_session.current_history:
+ #No user, this must be session authentication with an anonymous user.
+ history = trans.galaxy_session.current_history
+ item = history.get_api_value(value_mapper={'id':trans.security.encode_id})
+ item['url'] = url_for( 'history', id=trans.security.encode_id( history.id ) )
+ rval.append(item)
except Exception, e:
rval = "Error in history API"
log.error( rval + ": %s" % str(e) )
trans.response.status = 500
-
- if not rval:
- try:
- for history in query:
- item = history.get_api_value(value_mapper={'id':trans.security.encode_id})
- item['url'] = url_for( 'history', id=trans.security.encode_id( history.id ) )
- rval.append( item )
- except Exception, e:
- rval = "Error in history API at constructing return list"
- log.error( rval + ": %s" % str(e) )
- trans.response.status = 500
return rval
@web.expose_api
@@ -54,7 +53,7 @@
history_id = id
params = util.Params( kwd )
deleted = util.string_as_bool( deleted )
-
+
def traverse( datasets ):
rval = {}
states = trans.app.model.Dataset.states
@@ -65,15 +64,13 @@
if not item['deleted']:
rval[item['state']] = rval[item['state']] + 1
return rval
-
try:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True, deleted=deleted )
except Exception, e:
return str( e )
-
try:
item = history.get_api_value(view='element', value_mapper={'id':trans.security.encode_id})
- num_sets = len( [hda.id for hda in history.datasets if not hda.deleted] )
+ num_sets = len( [hda.id for hda in history.datasets if not hda.deleted] )
states = trans.app.model.Dataset.states
state = states.ERROR
if num_sets == 0:
@@ -87,7 +84,7 @@
elif summary[states.QUEUED] > 0:
state = states.QUEUED
elif summary[states.OK] == num_sets:
- state = states.OK
+ state = states.OK
item['contents_url'] = url_for( 'history_contents', history_id=history_id )
item['state_details'] = summary
item['state'] = state
@@ -108,7 +105,7 @@
if payload.get( 'name', None ):
hist_name = util.restore_text( payload['name'] )
new_history = trans.app.model.History( user=trans.user, name=hist_name )
-
+
trans.sa_session.add( new_history )
trans.sa_session.flush()
item = new_history.get_api_value(view='element', value_mapper={'id':trans.security.encode_id})
@@ -124,8 +121,8 @@
# a request body is optional here
purge = False
if kwd.get( 'payload', None ):
- purge = util.string_as_bool( kwd['payload'].get( 'purge', False ) )
-
+ purge = util.string_as_bool( kwd['payload'].get( 'purge', False ) )
+
try:
history = self.get_history( trans, history_id, check_ownership=True, check_accessible=False, deleted=True )
except Exception, e:
diff -r 3f12abbd23941683e44b34a21f0cf82d14bf02ad -r bf88ce609370a7829337dad758fafe388b3fd117 lib/galaxy/web/api/requests.py
--- a/lib/galaxy/web/api/requests.py
+++ b/lib/galaxy/web/api/requests.py
@@ -1,19 +1,17 @@
"""
API operations on a sample tracking system.
"""
-import logging, os, string, shutil, urllib, re, socket
-from cgi import escape, FieldStorage
-from galaxy import util, datatypes, jobs, web, util
+import logging
+from galaxy import util, web
from galaxy.web.base.controller import *
-from galaxy.util.sanitize_html import sanitize_html
from galaxy.model.orm import *
from galaxy.util.bunch import Bunch
log = logging.getLogger( __name__ )
class RequestsAPIController( BaseAPIController ):
- update_types = Bunch( REQUEST = 'request_state' )
- update_type_values = [v[1] for v in update_types.items()]
+ _update_types = Bunch( REQUEST = 'request_state' )
+ _update_type_values = [v[1] for v in _update_types.items()]
@web.expose_api
def index( self, trans, **kwd ):
"""
@@ -77,7 +75,7 @@
return "Missing required 'update_type' parameter. Please consult the API documentation for help."
else:
update_type = payload.pop( 'update_type' )
- if update_type not in self.update_type_values:
+ if update_type not in self._update_type_values:
trans.response.status = 400
return "Invalid value for 'update_type' parameter ( %s ) specified. Please consult the API documentation for help." % update_type
try:
diff -r 3f12abbd23941683e44b34a21f0cf82d14bf02ad -r bf88ce609370a7829337dad758fafe388b3fd117 lib/galaxy/web/api/workflows.py
--- a/lib/galaxy/web/api/workflows.py
+++ b/lib/galaxy/web/api/workflows.py
@@ -18,9 +18,9 @@
def index(self, trans, **kwd):
"""
GET /api/workflows
- Displays a collection of workflows
+
+ Displays a collection of workflows.
"""
- # List parameters of a specific workflow
rval = []
for wf in trans.sa_session.query(trans.app.model.StoredWorkflow).filter_by(
user=trans.user, deleted=False).order_by(
@@ -38,10 +38,12 @@
item['url'] = url_for('workflow', id=encoded_id)
rval.append(item)
return rval
+
@web.expose_api
def show(self, trans, id, **kwd):
"""
GET /api/workflows/{encoded_workflow_id}
+
Displays information needed to run a workflow from the command line.
"""
workflow_id = id
@@ -72,28 +74,29 @@
# p = step.get_required_parameters()
item['inputs'] = inputs
return item
+
@web.expose_api
def create(self, trans, payload, **kwd):
"""
POST /api/workflows
+
We're not creating workflows from the api. Just execute for now.
+
However, we will import them if installed_repository_file is specified
"""
if 'workflow_id' not in payload:
# create new
if 'installed_repository_file' in payload:
workflow_controller = trans.webapp.controllers[ 'workflow' ]
- result = workflow_controller.import_workflow( trans=trans,
+ result = workflow_controller.import_workflow( trans=trans,
cntrller='api',
**payload)
return result
trans.response.status = 403
return "Either workflow_id or installed_repository_file must be specified"
-
if 'installed_repository_file' in payload:
trans.response.status = 403
return "installed_repository_file may not be specified with workflow_id"
-
stored_workflow = trans.sa_session.query(self.app.model.StoredWorkflow).get(
trans.security.decode_id(payload['workflow_id']))
if stored_workflow.user != trans.user and not trans.user_is_admin():
@@ -215,3 +218,4 @@
trans.sa_session.add( workflow_invocation )
trans.sa_session.flush()
return rval
+
diff -r 3f12abbd23941683e44b34a21f0cf82d14bf02ad -r bf88ce609370a7829337dad758fafe388b3fd117 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -9,6 +9,7 @@
from Cheetah.Template import Template
import base
import pickle
+from functools import wraps
from galaxy import util
from galaxy.exceptions import MessageException
from galaxy.util.json import to_json_string, from_json_string
@@ -62,6 +63,7 @@
return func
def json( func ):
+ @wraps(func)
def decorator( self, trans, *args, **kwargs ):
trans.response.set_content_type( "text/javascript" )
return simplejson.dumps( func( self, trans, *args, **kwargs ) )
@@ -71,6 +73,7 @@
return decorator
def json_pretty( func ):
+ @wraps(func)
def decorator( self, trans, *args, **kwargs ):
trans.response.set_content_type( "text/javascript" )
return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
@@ -81,37 +84,46 @@
def require_login( verb="perform this action", use_panels=False, webapp='galaxy' ):
def argcatcher( func ):
+ @wraps(func)
def decorator( self, trans, *args, **kwargs ):
if trans.get_user():
return func( self, trans, *args, **kwargs )
else:
return trans.show_error_message(
'You must be <a target="galaxy_main" href="%s">logged in</a> to %s.'
- % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
+ % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
return decorator
return argcatcher
-
+
def expose_api( func ):
+ @wraps(func)
def decorator( self, trans, *args, **kwargs ):
def error( environ, start_response ):
start_response( error_status, [('Content-type', 'text/plain')] )
return error_message
error_status = '403 Forbidden'
- if 'key' not in kwargs:
- error_message = 'No API key provided with request, please consult the API documentation.'
- return error
- try:
- provided_key = trans.sa_session.query( trans.app.model.APIKeys ).filter( trans.app.model.APIKeys.table.c.key == kwargs['key'] ).one()
- except NoResultFound:
- error_message = 'Provided API key is not valid.'
- return error
- if provided_key.user.deleted:
- error_message = 'User account is deactivated, please contact an administrator.'
- return error
- newest_key = provided_key.user.api_keys[0]
- if newest_key.key != provided_key.key:
- error_message = 'Provided API key has expired.'
- return error
+ ## If there is a user, we've authenticated a session.
+ if not trans.user and isinstance(trans.galaxy_session, Bunch):
+ # If trans.user is already set, don't check for a key.
+ # This happens when we're authenticating using session instead of an API key.
+ # The Bunch clause is used to prevent the case where there's no user, but there is a real session.
+ # DBTODO: This needs to be fixed when merging transaction types.
+ if 'key' not in kwargs:
+ error_message = 'No API key provided with request, please consult the API documentation.'
+ return error
+ try:
+ provided_key = trans.sa_session.query( trans.app.model.APIKeys ).filter( trans.app.model.APIKeys.table.c.key == kwargs['key'] ).one()
+ except NoResultFound:
+ error_message = 'Provided API key is not valid.'
+ return error
+ if provided_key.user.deleted:
+ error_message = 'User account is deactivated, please contact an administrator.'
+ return error
+ newest_key = provided_key.user.api_keys[0]
+ if newest_key.key != provided_key.key:
+ error_message = 'Provided API key has expired.'
+ return error
+ trans.set_user( provided_key.user )
if trans.request.body:
try:
payload = util.recursively_stringify_dictionary_keys( simplejson.loads( trans.request.body ) )
@@ -121,7 +133,6 @@
error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
return error
trans.response.set_content_type( "application/json" )
- trans.set_user( provided_key.user )
# Perform api_run_as processing, possibly changing identity
if 'run_as' in kwargs:
if not trans.user_can_do_run_as():
@@ -139,7 +150,6 @@
except:
trans.response.status = 400
return "That user does not exist."
-
try:
if trans.debug:
return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True )
@@ -156,6 +166,7 @@
return decorator
def require_admin( func ):
+ @wraps(func)
def decorator( self, trans, *args, **kwargs ):
if not trans.user_is_admin():
msg = "You must be an administrator to access this feature."
@@ -202,10 +213,10 @@
return base.WebApplication.make_body_iterable( self, trans, body )
def transaction_chooser( self, environ, galaxy_app, session_cookie ):
if 'is_api_request' in environ:
- return GalaxyWebAPITransaction( environ, galaxy_app, self )
+ return GalaxyWebAPITransaction( environ, galaxy_app, self, session_cookie )
else:
return GalaxyWebUITransaction( environ, galaxy_app, self, session_cookie )
-
+
class GalaxyWebTransaction( base.DefaultWebTransaction ):
"""
Encapsulates web transaction specific state for the Galaxy application
@@ -262,7 +273,7 @@
except:
action.user = None
try:
- action.session_id = self.galaxy_session.id
+ action.session_id = self.galaxy_session.id
except:
action.session_id = None
self.sa_session.add( action )
@@ -317,7 +328,7 @@
tstamp = time.localtime ( time.time() + 3600 * 24 * age )
self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
self.response.cookies[name]['version'] = version
- def _ensure_valid_session( self, session_cookie ):
+ def _ensure_valid_session( self, session_cookie, create=True):
"""
Ensure that a valid Galaxy session exists and is available as
trans.session (part of initialization)
@@ -822,7 +833,7 @@
def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label ) )
return self
-
+
class FormInput( object ):
"""
Simple class describing a form input element
@@ -837,15 +848,89 @@
self.use_label = use_label
class GalaxyWebAPITransaction( GalaxyWebTransaction ):
- def __init__( self, environ, app, webapp ):
+ """
+ TODO: Unify this with WebUITransaction, since we allow session auth now.
+ Enable functionality of 'create' parameter in parent _ensure_valid_session
+ """
+ def __init__( self, environ, app, webapp, session_cookie):
GalaxyWebTransaction.__init__( self, environ, app, webapp )
self.__user = None
- self._ensure_valid_session( None )
+ self._ensure_valid_session( session_cookie )
def _ensure_valid_session( self, session_cookie ):
- self.galaxy_session = Bunch()
- self.galaxy_session.history = self.galaxy_session.current_history = Bunch()
- self.galaxy_session.history.genome_build = None
- self.galaxy_session.is_api = True
+ # Check to see if there is an existing session. Never create a new one.
+ # Try to load an existing session
+ secure_id = self.get_cookie( name=session_cookie )
+ galaxy_session = None
+ prev_galaxy_session = None
+ user_for_new_session = None
+ invalidate_existing_session = False
+ # Track whether the session has changed so we can avoid calling flush
+ # in the most common case (session exists and is valid).
+ galaxy_session_requires_flush = False
+ if secure_id:
+ # Decode the cookie value to get the session_key
+ session_key = self.security.decode_guid( secure_id )
+ try:
+ # Make sure we have a valid UTF-8 string
+ session_key = session_key.encode( 'utf8' )
+ except UnicodeDecodeError:
+ # We'll end up creating a new galaxy_session
+ session_key = None
+ if session_key:
+ # Retrieve the galaxy_session id via the unique session_key
+ galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
+ .filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key,
+ self.app.model.GalaxySession.table.c.is_valid==True ) ) \
+ .first()
+ if galaxy_session:
+ # If remote user is in use it can invalidate the session, so we need to check some things now.
+ if self.app.config.use_remote_user:
+ assert "HTTP_REMOTE_USER" in self.environ, \
+ "use_remote_user is set but no HTTP_REMOTE_USER variable"
+ remote_user_email = self.environ[ 'HTTP_REMOTE_USER' ]
+ # An existing session, make sure correct association exists
+ if galaxy_session.user is None:
+ # No user, associate
+ galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
+ galaxy_session_requires_flush = True
+ elif galaxy_session.user.email != remote_user_email:
+ # Session exists but is not associated with the correct remote user
+ log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
+ remote_user_email, galaxy_session.user.email )
+ galaxy_session = None
+ else:
+ if galaxy_session.user and galaxy_session.user.external:
+ # Remote user support is not enabled, but there is an existing
+ # session with an external user, invalidate
+ invalidate_existing_session = True
+ log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
+ galaxy_session.user.email )
+ galaxy_session = None
+ elif galaxy_session.user is not None and galaxy_session.user.deleted:
+ invalidate_existing_session = True
+ log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
+ galaxy_session = None
+ # No relevant cookies, or couldn't find, or invalid, so create a new session
+ if galaxy_session:
+ self.galaxy_session = galaxy_session
+ self.user = galaxy_session.user
+ # Do we need to flush the session?
+ if galaxy_session_requires_flush:
+ self.sa_session.add( galaxy_session )
+ # FIXME: If prev_session is a proper relation this would not
+ # be needed.
+ if prev_galaxy_session:
+ self.sa_session.add( prev_galaxy_session )
+ self.sa_session.flush()
+ # If the old session was invalid, get a new history with our new session
+ if not galaxy_session:
+ # Failed to find a session. Set up fake stuff for API transaction
+ self.user = None
+ self.galaxy_session = Bunch()
+ self.galaxy_session.history = self.galaxy_session.current_history = Bunch()
+ self.galaxy_session.history.genome_build = None
+ self.galaxy_session.is_api = True
+
def get_user( self ):
"""Return the current user (the expose_api decorator ensures that it is set)."""
return self.__user
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0

commit/galaxy-central: dannon: Cloud Launch - Password Field for AWS Secret instead of plain text input
by Bitbucket 11 Apr '12
by Bitbucket 11 Apr '12
11 Apr '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3f12abbd2394/
changeset: 3f12abbd2394
user: dannon
date: 2012-04-11 15:16:20
summary: Cloud Launch - Password Field for AWS Secret instead of plain text input
affected #: 1 file
diff -r 37c43980034e744d05647c3aad3631f5a1dd9c1c -r 3f12abbd23941683e44b34a21f0cf82d14bf02ad templates/cloud/index.mako
--- a/templates/cloud/index.mako
+++ b/templates/cloud/index.mako
@@ -71,7 +71,7 @@
</div><div class="form-row"><label for="id_secret">Secret Key</label>
- <input type="text" size="120" name="secret" id="id_secret"/><br/>
+ <input type="password" size="120" name="secret" id="id_secret"/><br/></div><div class="form-row"><label for="id_instance_type">Instance Type</label>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0