galaxy-commits
September 2014: 2 participants, 236 discussions
commit/galaxy-central: dannon: Pep8 fixes in galaxy.util.json prior to work there.
by commits-noreply@bitbucket.org 04 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6a718b389bf1/
Changeset: 6a718b389bf1
User: dannon
Date: 2014-09-04 22:58:57
Summary: Pep8 fixes in galaxy.util.json prior to work there.
Affected #: 1 file
diff -r 83e45a9e4cc994daed44e406a334660d9db7ed30 -r 6a718b389bf1caa5461c5ff7a7dd644f1279cd69 lib/galaxy/util/json.py
--- a/lib/galaxy/util/json.py
+++ b/lib/galaxy/util/json.py
@@ -5,7 +5,6 @@
import json
import logging
import random
-import socket
import string
to_json_string = json.dumps
@@ -13,6 +12,7 @@
log = logging.getLogger( __name__ )
+
def json_fix( val ):
if isinstance( val, list ):
return [ json_fix( v ) for v in val ]
@@ -25,33 +25,44 @@
# Methods for handling JSON-RPC
+
def validate_jsonrpc_request( request, regular_methods, notification_methods ):
try:
request = from_json_string( request )
except Exception, e:
- return False, request, jsonrpc_response( id = None, error = dict( code = -32700, message = 'Parse error', data = str( e ) ) )
+ return False, request, jsonrpc_response( id=None,
+ error=dict( code=-32700,
+ message='Parse error',
+ data=str( e ) ) )
try:
assert 'jsonrpc' in request, \
'This server requires JSON-RPC 2.0 and no "jsonrpc" member was sent with the Request object as per the JSON-RPC 2.0 Specification.'
assert request['jsonrpc'] == '2.0', \
- 'Requested JSON-RPC version "%s" != required version "2.0".' % request['jsonrpc']
+ 'Requested JSON-RPC version "%s" != required version "2.0".' % request['jsonrpc']
assert 'method' in request, 'No "method" member was sent with the Request object'
except AssertionError, e:
- return False, request, jsonrpc_response( request = request, error = dict( code = -32600, message = 'Invalid Request', data = str( e ) ) )
+ return False, request, jsonrpc_response( request=request,
+ error=dict( code=-32600,
+ message='Invalid Request',
+ data=str( e ) ) )
try:
assert request['method'] in ( regular_methods + notification_methods )
except AssertionError, e:
- return False, request, jsonrpc_response( request = request,
- error = dict( code = -32601,
- message = 'Method not found',
- data = 'Valid methods are: %s' % ', '.join( regular_methods + notification_methods ) ) )
+ return False, request, jsonrpc_response( request=request,
+ error=dict( code=-32601,
+ message='Method not found',
+ data='Valid methods are: %s' % ', '.join( regular_methods + notification_methods ) ) )
try:
if request['method'] in regular_methods:
assert 'id' in request, 'No "id" member was sent with the Request object and the requested method "%s" is not a notification method' % request['method']
except AssertionError, e:
- return False, request, jsonrpc_response( request = request, error = dict( code = -32600, message = 'Invalid Request', data = str( e ) ) )
+ return False, request, jsonrpc_response( request=request,
+ error=dict( code=-32600,
+ message='Invalid Request',
+ data=str( e ) ) )
return True, request, None
+
def validate_jsonrpc_response( response, id=None ):
try:
response = from_json_string( response )
@@ -81,11 +92,12 @@
return False, response
return True, response
+
def jsonrpc_request( method, params=None, id=None, jsonrpc='2.0' ):
if method is None:
log.error( 'jsonrpc_request(): "method" parameter cannot be None' )
return None
- request = dict( jsonrpc = jsonrpc, method = method )
+ request = dict( jsonrpc=jsonrpc, method=method )
if params:
request['params'] = params
if id is not None and id is True:
@@ -94,15 +106,16 @@
request['id'] = id
return request
+
def jsonrpc_response( request=None, id=None, result=None, error=None, jsonrpc='2.0' ):
if result:
- rval = dict( jsonrpc = jsonrpc, result = result )
+ rval = dict( jsonrpc=jsonrpc, result=result )
elif error:
- rval = dict( jsonrpc = jsonrpc, error = error )
+ rval = dict( jsonrpc=jsonrpc, error=error )
else:
msg = 'jsonrpc_response() called with out a "result" or "error" parameter'
log.error( msg )
- rval = dict( jsonrpc = jsonrpc, error = msg )
+ rval = dict( jsonrpc=jsonrpc, error=msg )
if id is not None:
rval['id'] = id
elif request is not None and 'id' in request:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
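
For orientation, the functions touched in the commit above form a small JSON-RPC 2.0 helper layer in galaxy.util.json. The sketch below is not part of the commit; it only illustrates how the signatures shown in the diff fit together, assuming a Galaxy checkout (Python 2 era) on the path and a hypothetical 'echo' method.

import json

from galaxy.util.json import (
    jsonrpc_request,
    jsonrpc_response,
    validate_jsonrpc_request,
)

# Client side: build a JSON-RPC 2.0 request dict with an explicit id and
# serialize it for the wire.
wire_request = json.dumps( jsonrpc_request( method='echo', params={ 'msg': 'hi' }, id=1 ) )

# Server side: parse and validate before dispatching; on failure the third
# element returned is already a JSON-RPC error response dict.
ok, parsed, error_response = validate_jsonrpc_request( wire_request,
                                                       regular_methods=[ 'echo' ],
                                                       notification_methods=[] )
if ok:
    reply = jsonrpc_response( request=parsed, result=parsed[ 'params' ] )
else:
    reply = error_response
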
commit/galaxy-central: dannon: Close feature branch
by commits-noreply@bitbucket.org 04 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/901144086f2e/
Changeset: 901144086f2e
Branch: venv
User: dannon
Date: 2014-09-04 22:44:25
Summary: Close feature branch
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Merged in carlfeberhard/galaxy-central-fork0 (pull request #484)
by commits-noreply@bitbucket.org 04 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/83e45a9e4cc9/
Changeset: 83e45a9e4cc9
User: dannon
Date: 2014-09-04 22:30:50
Summary: Merged in carlfeberhard/galaxy-central-fork0 (pull request #484)
Break up web/framework/__init__ into more focused modules
Affected #: 20 files
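
The key compatibility point in this changeset is that lib/galaxy/web/__init__.py continues to re-export the decorators from their new home in framework/decorators.py (see the hunk for that file below), so controller code importing them from galaxy.web is unaffected. A minimal, hypothetical controller sketch (not part of the commit; the class and return value are invented) showing such an unchanged call site:

from galaxy import web
from galaxy.web.base.controller import BaseAPIController

class HelloAPIController( BaseAPIController ):

    @web.expose_api_anonymous
    def index( self, trans, **kwd ):
        # The decorator sets the JSON content type and serializes the
        # returned value, per the expose_api implementation shown below.
        return { 'message': 'hello' }
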
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -6,7 +6,6 @@
import copy
import cPickle
import json
-import logging
import os
import shutil
import sys
@@ -15,13 +14,19 @@
from os.path import abspath
-import galaxy.model
-from galaxy.util import listify, stringify_dictionary_keys, string_as_bool
-from galaxy.util.odict import odict
-from galaxy.util import in_directory
-from galaxy.web import form_builder
+from galaxy import eggs
+eggs.require( "SQLAlchemy >= 0.4" )
from sqlalchemy.orm import object_session
+import galaxy.model
+from galaxy.util import listify
+from galaxy.util import stringify_dictionary_keys
+from galaxy.util import string_as_bool
+from galaxy.util import in_directory
+from galaxy.util.odict import odict
+from galaxy.web import form_builder
+
+import logging
log = logging.getLogger(__name__)
STATEMENTS = "__galaxy_statements__" #this is the name of the property in a Datatype class where new metadata spec element Statements are stored
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/managers/context.py
--- /dev/null
+++ b/lib/galaxy/managers/context.py
@@ -0,0 +1,179 @@
+"""
+Mixins for transaction-like objects.
+"""
+
+import os
+
+from galaxy.util.json import to_json_string
+from galaxy.util import bunch
+
+class ProvidesAppContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ database and event handling.
+
+ Mixed in class must provide `app` property.
+ """
+
+ def log_action( self, user=None, action=None, context=None, params=None):
+ """
+ Application-level logging of user actions.
+ """
+ if self.app.config.log_actions:
+ action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
+ try:
+ if user:
+ action.user = user
+ else:
+ action.user = self.user
+ except:
+ action.user = None
+ try:
+ action.session_id = self.galaxy_session.id
+ except:
+ action.session_id = None
+ self.sa_session.add( action )
+ self.sa_session.flush()
+
+ def log_event( self, message, tool_id=None, **kwargs ):
+ """
+ Application level logging. Still needs fleshing out (log levels and such)
+ Logging events is a config setting - if False, do not log.
+ """
+ if self.app.config.log_events:
+ event = self.app.model.Event()
+ event.tool_id = tool_id
+ try:
+ event.message = message % kwargs
+ except:
+ event.message = message
+ try:
+ event.history = self.get_history()
+ except:
+ event.history = None
+ try:
+ event.history_id = self.history.id
+ except:
+ event.history_id = None
+ try:
+ event.user = self.user
+ except:
+ event.user = None
+ try:
+ event.session_id = self.galaxy_session.id
+ except:
+ event.session_id = None
+ self.sa_session.add( event )
+ self.sa_session.flush()
+
+ @property
+ def sa_session( self ):
+ """
+ Returns a SQLAlchemy session -- currently just gets the current
+ session from the threadlocal session context, but this is provided
+ to allow migration toward a more SQLAlchemy 0.4 style of use.
+ """
+ return self.app.model.context.current
+
+ def expunge_all( self ):
+ app = self.app
+ context = app.model.context
+ context.expunge_all()
+ # This is a bit hacky, should refctor this. Maybe refactor to app -> expunge_all()
+ if hasattr(app, 'install_model'):
+ install_model = app.install_model
+ if install_model != app.model:
+ install_model.context.expunge_all()
+
+ def get_toolbox(self):
+ """Returns the application toolbox"""
+ return self.app.toolbox
+
+ @property
+ def model( self ):
+ return self.app.model
+
+ @property
+ def install_model( self ):
+ return self.app.install_model
+
+ def request_types(self):
+ if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
+ return True
+ return False
+
+
+class ProvidesUserContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ reasoning about users.
+
+ Mixed in class must provide `user`, `api_inherit_admin`, and `app`
+ properties.
+ """
+
+ @property
+ def anonymous( self ):
+ return self.user is None and not self.api_inherit_admin
+
+ def get_current_user_roles( self ):
+ user = self.user
+ if user:
+ roles = user.all_roles()
+ else:
+ roles = []
+ return roles
+
+ def user_is_admin( self ):
+ if self.api_inherit_admin:
+ return True
+ return self.user and self.user.email in self.app.config.admin_users_list
+
+ def user_can_do_run_as( self ):
+ run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
+ if not run_as_users:
+ return False
+ user_in_run_as_users = self.user and self.user.email in run_as_users
+ # Can do if explicitly in list or master_api_key supplied.
+ can_do_run_as = user_in_run_as_users or self.api_inherit_admin
+ return can_do_run_as
+
+ @property
+ def user_ftp_dir( self ):
+ identifier = self.app.config.ftp_upload_dir_identifier
+ return os.path.join( self.app.config.ftp_upload_dir, getattr( self.user, identifier ) )
+
+
+class ProvidesHistoryContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ reasoning about histories.
+
+ Mixed in class must provide `user`, `history`, and `app`
+ properties.
+ """
+
+ def db_dataset_for( self, dbkey ):
+ """
+ Returns the db_file dataset associated/needed by `dataset`, or `None`.
+ """
+ # If no history, return None.
+ if self.history is None:
+ return None
+#TODO: when does this happen? is it Bunch or util.bunch.Bunch?
+ if isinstance( self.history, bunch.Bunch ):
+ # The API presents a Bunch for a history. Until the API is
+ # more fully featured for handling this, also return None.
+ return None
+ datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+ .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ # FIXME: This method should be removed
+ return self.app.genome_builds.get_genome_build_names( trans=self )
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -1,23 +1,25 @@
"""
The Galaxy web application framework
"""
-from framework import expose
-from framework import json
-from framework import json_pretty
-from framework import require_login
-from framework import require_admin
from framework import url_for
-from framework import error
-from framework import form
-from framework import FormBuilder
-from framework import expose_api
-from framework import expose_api_anonymous
-from framework import expose_api_raw
-from framework import expose_api_raw_anonymous
-from framework.base import httpexceptions
+from framework.decorators import error
+from framework.decorators import expose
+from framework.decorators import json
+from framework.decorators import json_pretty
+from framework.decorators import require_login
+from framework.decorators import require_admin
+from framework.decorators import expose_api
+from framework.decorators import expose_api_anonymous
+from framework.decorators import expose_api_raw
+from framework.decorators import expose_api_raw_anonymous
# TODO: Drop and make these the default.
-from framework import _future_expose_api
-from framework import _future_expose_api_anonymous
-from framework import _future_expose_api_raw
-from framework import _future_expose_api_raw_anonymous
+from framework.decorators import _future_expose_api
+from framework.decorators import _future_expose_api_anonymous
+from framework.decorators import _future_expose_api_raw
+from framework.decorators import _future_expose_api_raw_anonymous
+
+from framework.formbuilder import form
+from framework.formbuilder import FormBuilder
+
+from framework.base import httpexceptions
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -579,7 +579,7 @@
"""
Pass control over to trans and render ``template_filename``.
- :type trans: ``galaxy.web.framework.GalaxyWebTransaction``
+ :type trans: ``galaxy.web.framework.webapp.GalaxyWebTransaction``
:param trans: transaction doing the rendering
:type plugin: ``util.bunch.Bunch``
:param plugin: the plugin containing the template to render
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -2,1395 +2,9 @@
Galaxy web application framework
"""
-import hashlib
-import inspect
-import os
-import random
-import socket
-import string
-import time
-from traceback import format_exc
-from Cookie import CookieError
-from functools import wraps
-
from galaxy import eggs
-
-eggs.require( "Cheetah" )
-from Cheetah.Template import Template
-
-from galaxy import util
-from galaxy.exceptions import error_codes
-from galaxy.exceptions import MessageException
-from galaxy.util import asbool
-from galaxy.util import safe_str_cmp
-from galaxy.util.backports.importlib import import_module
-from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.web.framework import base, helpers
-
-import paste.httpexceptions
-
-eggs.require( "Mako" )
-import mako.template
-import mako.lookup
-import mako.runtime
-
-eggs.require( "pytz" ) # Used by Babel.
-eggs.require( "Babel" )
-from babel.support import Translations
-from babel import Locale
-
-eggs.require( "SQLAlchemy >= 0.4" )
-from sqlalchemy import and_
-from sqlalchemy.orm.exc import NoResultFound
-
eggs.require( "pexpect" )
eggs.require( "amqp" )
-import logging
-log = logging.getLogger( __name__ )
-
+import base
url_for = base.routes.url_for
-
-UCSC_SERVERS = (
- 'hgw1.cse.ucsc.edu',
- 'hgw2.cse.ucsc.edu',
- 'hgw3.cse.ucsc.edu',
- 'hgw4.cse.ucsc.edu',
- 'hgw5.cse.ucsc.edu',
- 'hgw6.cse.ucsc.edu',
- 'hgw7.cse.ucsc.edu',
- 'hgw8.cse.ucsc.edu',
-)
-
-JSON_CONTENT_TYPE = "application/json"
-
-
-# ----------------------------------------------------------------------------- web controller decorators
-def expose( func ):
- """
- Decorator: mark a function as 'exposed' and thus web accessible
- """
- func.exposed = True
- return func
-
-def json( func, **json_kwargs ):
- """
- Format the response as JSON and set the response content type to
- JSON_CONTENT_TYPE.
- """
- @wraps(func)
- def call_and_format( self, trans, *args, **kwargs ):
- trans.response.set_content_type( JSON_CONTENT_TYPE )
- return to_json_string( func( self, trans, *args, **kwargs ), **json_kwargs )
- if not hasattr(func, '_orig'):
- call_and_format._orig = func
- call_and_format.exposed = True
- return call_and_format
-
-def json_pretty( func ):
- """
- Indent and sort returned JSON.
- """
- return json( func, indent=4, sort_keys=True )
-
-def require_login( verb="perform this action", use_panels=False, webapp='galaxy' ):
- def argcatcher( func ):
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if trans.get_user():
- return func( self, trans, *args, **kwargs )
- else:
- return trans.show_error_message(
- 'You must be <a target="galaxy_main" href="%s">logged in</a> to %s.'
- % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
- return decorator
- return argcatcher
-
-def require_admin( func ):
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if not trans.user_is_admin():
- msg = "You must be an administrator to access this feature."
- user = trans.get_user()
- if not trans.app.config.admin_users_list:
- msg = "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration."
- elif not user:
- msg = "You must be logged in as an administrator to access this feature."
- trans.response.status = 403
- if trans.response.get_content_type() == 'application/json':
- return msg
- else:
- return trans.show_error_message( msg )
- return func( self, trans, *args, **kwargs )
- return decorator
-
-
-# ----------------------------------------------------------------------------- (original) api decorators
-def expose_api( func, to_json=True, user_required=True ):
- """
- Expose this function via the API.
- """
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- def error( environ, start_response ):
- start_response( error_status, [('Content-type', 'text/plain')] )
- return error_message
- error_status = '403 Forbidden'
- if trans.error_message:
- return trans.error_message
- if user_required and trans.anonymous:
- error_message = "API Authentication Required for this request"
- return error
- if trans.request.body:
- try:
- kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
- except ValueError:
- error_status = '400 Bad Request'
- error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
- return error
- trans.response.set_content_type( "application/json" )
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- # Perform api_run_as processing, possibly changing identity
- if 'payload' in kwargs and 'run_as' in kwargs['payload']:
- if not trans.user_can_do_run_as():
- error_message = 'User does not have permissions to run jobs as another user'
- return error
- try:
- decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
- except TypeError:
- trans.response.status = 400
- return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
- try:
- user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
- trans.api_inherit_admin = trans.user_is_admin()
- trans.set_user(user)
- except:
- trans.response.status = 400
- return "That user does not exist."
- try:
- rval = func( self, trans, *args, **kwargs)
- if to_json and trans.debug:
- rval = to_json_string( rval, indent=4, sort_keys=True )
- elif to_json:
- rval = to_json_string( rval )
- return rval
- except paste.httpexceptions.HTTPException:
- raise # handled
- except:
- log.exception( 'Uncaught exception in exposed API method:' )
- raise paste.httpexceptions.HTTPServerError()
- if not hasattr(func, '_orig'):
- decorator._orig = func
- decorator.exposed = True
- return decorator
-
-def __extract_payload_from_request(trans, func, kwargs):
- content_type = trans.request.headers['content-type']
- if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
- # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
- # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
- # are not a part of the request body. This is a problem because it's not possible to differentiate between values
- # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
- # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
- # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
- payload = kwargs.copy()
- named_args, _, _, _ = inspect.getargspec(func)
- for arg in named_args:
- payload.pop(arg, None)
- for k, v in payload.iteritems():
- if isinstance(v, (str, unicode)):
- try:
- payload[k] = from_json_string(v)
- except:
- # may not actually be json, just continue
- pass
- payload = util.recursively_stringify_dictionary_keys( payload )
- else:
- # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
- # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
- # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
- payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
- return payload
-
-def expose_api_raw( func ):
- """
- Expose this function via the API but don't dump the results
- to JSON.
- """
- return expose_api( func, to_json=False )
-
-def expose_api_raw_anonymous( func ):
- """
- Expose this function via the API but don't dump the results
- to JSON.
- """
- return expose_api( func, to_json=False, user_required=False )
-
-def expose_api_anonymous( func, to_json=True ):
- """
- Expose this function via the API but don't require a set user.
- """
- return expose_api( func, to_json=to_json, user_required=False )
-
-
-# ----------------------------------------------------------------------------- (new) api decorators
-# TODO: rename as expose_api and make default.
-def _future_expose_api( func, to_json=True, user_required=True ):
- """
- Expose this function via the API.
- """
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if trans.error_message:
- # TODO: Document this branch, when can this happen,
- # I don't understand it.
- return __api_error_response( trans, err_msg=trans.error_message )
- if user_required and trans.anonymous:
- error_code = error_codes.USER_NO_API_KEY
- # Use error codes default error message.
- err_msg = "API authentication required for this request"
- return __api_error_response( trans, err_code=error_code, err_msg=err_msg, status_code=403 )
- if trans.request.body:
- try:
- kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
- except ValueError:
- error_code = error_codes.USER_INVALID_JSON
- return __api_error_response( trans, status_code=400, err_code=error_code )
-
- trans.response.set_content_type( JSON_CONTENT_TYPE )
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- # TODO: Refactor next block out into a helper procedure.
- # Perform api_run_as processing, possibly changing identity
- if 'payload' in kwargs and 'run_as' in kwargs['payload']:
- if not trans.user_can_do_run_as():
- error_code = error_codes.USER_CANNOT_RUN_AS
- return __api_error_response( trans, err_code=error_code, status_code=403 )
- try:
- decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
- except TypeError:
- error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
- error_code = error_codes.USER_INVALID_RUN_AS
- return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
- try:
- user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
- trans.api_inherit_admin = trans.user_is_admin()
- trans.set_user(user)
- except:
- error_code = error_codes.USER_INVALID_RUN_AS
- return __api_error_response( trans, err_code=error_code, status_code=400 )
- try:
- rval = func( self, trans, *args, **kwargs)
- if to_json and trans.debug:
- rval = to_json_string( rval, indent=4, sort_keys=True )
- elif to_json:
- rval = to_json_string( rval )
- return rval
- except MessageException as e:
- traceback_string = format_exc()
- return __api_error_response( trans, exception=e, traceback=traceback_string )
- except paste.httpexceptions.HTTPException:
- # TODO: Allow to pass or format for the API???
- raise # handled
- except Exception as e:
- traceback_string = format_exc()
- error_message = 'Uncaught exception in exposed API method:'
- log.exception( error_message )
- return __api_error_response(
- trans,
- status_code=500,
- exception=e,
- traceback=traceback_string,
- err_msg=error_message,
- err_code=error_codes.UNKNOWN
- )
- if not hasattr(func, '_orig'):
- decorator._orig = func
- decorator.exposed = True
- return decorator
-
-def __api_error_message( trans, **kwds ):
- exception = kwds.get( "exception", None )
- if exception:
- # If we are passed a MessageException use err_msg.
- default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
- default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
- extra_error_info = getattr( exception, 'extra_error_info', {} )
- if not isinstance( extra_error_info, dict ):
- extra_error_info = {}
- else:
- default_error_message = "Error processing API request."
- default_error_code = error_codes.UNKNOWN
- extra_error_info = {}
- traceback_string = kwds.get( "traceback", "No traceback available." )
- err_msg = kwds.get( "err_msg", default_error_message )
- error_code_object = kwds.get( "err_code", default_error_code )
- try:
- error_code = error_code_object.code
- except AttributeError:
- # Some sort of bad error code sent in, logic failure on part of
- # Galaxy developer.
- error_code = error_codes.UNKNOWN.code
- # Would prefer the terminology of error_code and error_message, but
- # err_msg used a good number of places already. Might as well not change
- # it?
- error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
- if trans.debug: # TODO: Should admins get to see traceback as well?
- error_response[ "traceback" ] = traceback_string
- return error_response
-
-def __api_error_response( trans, **kwds ):
- error_dict = __api_error_message( trans, **kwds )
- exception = kwds.get( "exception", None )
- # If we are given an status code directly - use it - otherwise check
- # the exception for a status_code attribute.
- if "status_code" in kwds:
- status_code = int( kwds.get( "status_code" ) )
- elif hasattr( exception, "status_code" ):
- status_code = int( exception.status_code )
- else:
- status_code = 500
- response = trans.response
- if not response.status or str(response.status).startswith("20"):
- # Unset status code appears to be string '200 OK', if anything
- # non-success (i.e. not 200 or 201) has been set, do not override
- # underlying controller.
- response.status = status_code
- return to_json_string( error_dict )
-
-
-# TODO: rename as expose_api and make default.
-def _future_expose_api_anonymous( func, to_json=True ):
- """
- Expose this function via the API but don't require a set user.
- """
- return _future_expose_api( func, to_json=to_json, user_required=False )
-
-
-def _future_expose_api_raw( func ):
- return _future_expose_api( func, to_json=False, user_required=True )
-
-
-def _future_expose_api_raw_anonymous( func ):
- return _future_expose_api( func, to_json=False, user_required=False )
-
-
-
-NOT_SET = object()
-
-
-def error( message ):
- raise MessageException( message, type='error' )
-
-
-def form( *args, **kwargs ):
- return FormBuilder( *args, **kwargs )
-
-
-class WebApplication( base.WebApplication ):
-
- def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
- self.name = name
- base.WebApplication.__init__( self )
- self.set_transaction_factory( lambda e: self.transaction_chooser( e, galaxy_app, session_cookie ) )
- # Mako support
- self.mako_template_lookup = self.create_mako_template_lookup( galaxy_app, name )
- # Security helper
- self.security = galaxy_app.security
-
- def create_mako_template_lookup( self, galaxy_app, name ):
- paths = []
- # First look in webapp specific directory
- if name is not None:
- paths.append( os.path.join( galaxy_app.config.template_path, 'webapps', name ) )
- # Then look in root directory
- paths.append( galaxy_app.config.template_path )
- # Create TemplateLookup with a small cache
- return mako.lookup.TemplateLookup(directories=paths,
- module_directory=galaxy_app.config.template_cache,
- collection_size=500,
- output_encoding='utf-8' )
-
- def handle_controller_exception( self, e, trans, **kwargs ):
- if isinstance( e, MessageException ):
- # In the case of a controller exception, sanitize to make sure
- # unsafe html input isn't reflected back to the user
- return trans.show_message( sanitize_html(e.err_msg), e.type )
-
- def make_body_iterable( self, trans, body ):
- if isinstance( body, FormBuilder ):
- body = trans.show_form( body )
- return base.WebApplication.make_body_iterable( self, trans, body )
-
- def transaction_chooser( self, environ, galaxy_app, session_cookie ):
- return GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
-
- def add_ui_controllers( self, package_name, app ):
- """
- Search for UI controllers in `package_name` and add
- them to the webapp.
- """
- from galaxy.web.base.controller import BaseUIController
- from galaxy.web.base.controller import ControllerUnavailable
- package = import_module( package_name )
- controller_dir = package.__path__[0]
- for fname in os.listdir( controller_dir ):
- if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
- name = fname[:-3]
- module_name = package_name + "." + name
- try:
- module = import_module( module_name )
- except ControllerUnavailable, exc:
- log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
- continue
- # Look for a controller inside the modules
- for key in dir( module ):
- T = getattr( module, key )
- if inspect.isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
- controller = self._instantiate_controller( T, app )
- self.add_ui_controller( name, controller )
-
- def add_api_controllers( self, package_name, app ):
- """
- Search for UI controllers in `package_name` and add
- them to the webapp.
- """
- from galaxy.web.base.controller import BaseAPIController
- from galaxy.web.base.controller import ControllerUnavailable
- package = import_module( package_name )
- controller_dir = package.__path__[0]
- for fname in os.listdir( controller_dir ):
- if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
- name = fname[:-3]
- module_name = package_name + "." + name
- try:
- module = import_module( module_name )
- except ControllerUnavailable, exc:
- log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
- continue
- for key in dir( module ):
- T = getattr( module, key )
- # Exclude classes such as BaseAPIController and BaseTagItemsController
- if inspect.isclass( T ) and not key.startswith("Base") and issubclass( T, BaseAPIController ):
- # By default use module_name, but allow controller to override name
- controller_name = getattr( T, "controller_name", name )
- controller = self._instantiate_controller( T, app )
- self.add_api_controller( controller_name, controller )
-
- def _instantiate_controller( self, T, app ):
- """ Extension point, allow apps to contstruct controllers differently,
- really just used to stub out actual controllers for routes testing.
- """
- return T( app )
-
-
-class ProvidesAppContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- database and event handling.
-
- Mixed in class must provide `app` property.
- """
-
- def log_action( self, user=None, action=None, context=None, params=None):
- """
- Application-level logging of user actions.
- """
- if self.app.config.log_actions:
- action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
- try:
- if user:
- action.user = user
- else:
- action.user = self.user
- except:
- action.user = None
- try:
- action.session_id = self.galaxy_session.id
- except:
- action.session_id = None
- self.sa_session.add( action )
- self.sa_session.flush()
-
- def log_event( self, message, tool_id=None, **kwargs ):
- """
- Application level logging. Still needs fleshing out (log levels and such)
- Logging events is a config setting - if False, do not log.
- """
- if self.app.config.log_events:
- event = self.app.model.Event()
- event.tool_id = tool_id
- try:
- event.message = message % kwargs
- except:
- event.message = message
- try:
- event.history = self.get_history()
- except:
- event.history = None
- try:
- event.history_id = self.history.id
- except:
- event.history_id = None
- try:
- event.user = self.user
- except:
- event.user = None
- try:
- event.session_id = self.galaxy_session.id
- except:
- event.session_id = None
- self.sa_session.add( event )
- self.sa_session.flush()
-
- @property
- def sa_session( self ):
- """
- Returns a SQLAlchemy session -- currently just gets the current
- session from the threadlocal session context, but this is provided
- to allow migration toward a more SQLAlchemy 0.4 style of use.
- """
- return self.app.model.context.current
-
- def expunge_all( self ):
- app = self.app
- context = app.model.context
- context.expunge_all()
- # This is a bit hacky, should refctor this. Maybe refactor to app -> expunge_all()
- if hasattr(app, 'install_model'):
- install_model = app.install_model
- if install_model != app.model:
- install_model.context.expunge_all()
-
- def get_toolbox(self):
- """Returns the application toolbox"""
- return self.app.toolbox
-
- @property
- def model( self ):
- return self.app.model
-
- @property
- def install_model( self ):
- return self.app.install_model
-
- def request_types(self):
- if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
- return True
- return False
-
-
-class ProvidesUserContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about users.
-
- Mixed in class must provide `user`, `api_inherit_admin`, and `app`
- properties.
- """
-
- @property
- def anonymous( self ):
- return self.user is None and not self.api_inherit_admin
-
- def get_current_user_roles( self ):
- user = self.user
- if user:
- roles = user.all_roles()
- else:
- roles = []
- return roles
-
- def user_is_admin( self ):
- if self.api_inherit_admin:
- return True
- return self.user and self.user.email in self.app.config.admin_users_list
-
- def user_can_do_run_as( self ):
- run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
- if not run_as_users:
- return False
- user_in_run_as_users = self.user and self.user.email in run_as_users
- # Can do if explicitly in list or master_api_key supplied.
- can_do_run_as = user_in_run_as_users or self.api_inherit_admin
- return can_do_run_as
-
- @property
- def user_ftp_dir( self ):
- identifier = self.app.config.ftp_upload_dir_identifier
- return os.path.join( self.app.config.ftp_upload_dir, getattr(self.user, identifier) )
-
-
-class ProvidesHistoryContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about histories.
-
- Mixed in class must provide `user`, `history`, and `app`
- properties.
- """
-
- def db_dataset_for( self, dbkey ):
- """
- Returns the db_file dataset associated/needed by `dataset`, or `None`.
- """
- # If no history, return None.
- if self.history is None:
- return None
- if isinstance(self.history, Bunch):
- # The API presents a Bunch for a history. Until the API is
- # more fully featured for handling this, also return None.
- return None
- datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
- .filter_by( deleted=False, history_id=self.history.id, extension="len" )
- for ds in datasets:
- if dbkey == ds.dbkey:
- return ds
- return None
-
- @property
- def db_builds( self ):
- """
- Returns the builds defined by galaxy and the builds defined by
- the user (chromInfo in history).
- """
- # FIXME: This method should be removed
- return self.app.genome_builds.get_genome_build_names( trans=self )
-
-
-class GalaxyWebTransaction( base.DefaultWebTransaction, ProvidesAppContext, ProvidesUserContext, ProvidesHistoryContext ):
- """
- Encapsulates web transaction specific state for the Galaxy application
- (specifically the user's "cookie" session and history)
- """
-
- def __init__( self, environ, app, webapp, session_cookie=None):
- self.app = app
- self.webapp = webapp
- self.security = webapp.security
- base.DefaultWebTransaction.__init__( self, environ )
- self.setup_i18n()
- self.expunge_all()
- self.debug = asbool( self.app.config.get( 'debug', False ) )
- # Flag indicating whether we are in workflow building mode (means
- # that the current history should not be used for parameter values
- # and such).
- self.workflow_building_mode = False
- # Flag indicating whether this is an API call and the API key user is an administrator
- self.api_inherit_admin = False
- self.__user = None
- self.galaxy_session = None
- self.error_message = None
-
- if self.environ.get('is_api_request', False):
- # With API requests, if there's a key, use it and associate the
- # user with the transaction.
- # If not, check for an active session but do not create one.
- # If an error message is set here, it's sent back using
- # trans.show_error in the response -- in expose_api.
- self.error_message = self._authenticate_api( session_cookie )
- elif self.app.name == "reports":
- self.galaxy_session = None
- else:
- # This is a web request, get or create session.
- self._ensure_valid_session( session_cookie )
- if self.galaxy_session:
- # When we've authenticated by session, we have to check the
- # following.
- # Prevent deleted users from accessing Galaxy
- if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
- self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
- if self.app.config.require_login:
- self._ensure_logged_in_user( environ, session_cookie )
-
- def setup_i18n( self ):
- locales = []
- if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
- # locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
- client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
- for locale in client_locales:
- try:
- locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
- except Exception, e:
- log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
- if not locales:
- # Default to English
- locales = 'en'
- t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
- self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
-
- def get_user( self ):
- """Return the current user if logged in or None."""
- if self.galaxy_session:
- return self.galaxy_session.user
- else:
- return self.__user
-
- def set_user( self, user ):
- """Set the current user."""
- if self.galaxy_session:
- self.galaxy_session.user = user
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
- self.__user = user
-
- user = property( get_user, set_user )
-
- def get_cookie( self, name='galaxysession' ):
- """Convenience method for getting a session cookie"""
- try:
- # If we've changed the cookie during the request return the new value
- if name in self.response.cookies:
- return self.response.cookies[name].value
- else:
- return self.request.cookies[name].value
- except:
- return None
-
- def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
- """Convenience method for setting a session cookie"""
- # The galaxysession cookie value must be a high entropy 128 bit random number encrypted
- # using a server secret key. Any other value is invalid and could pose security issues.
- self.response.cookies[name] = value
- self.response.cookies[name]['path'] = path
- self.response.cookies[name]['max-age'] = 3600 * 24 * age # 90 days
- tstamp = time.localtime( time.time() + 3600 * 24 * age )
- self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
- self.response.cookies[name]['version'] = version
- try:
- self.response.cookies[name]['httponly'] = True
- except CookieError, e:
- log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
-
- def _authenticate_api( self, session_cookie ):
- """
- Authenticate for the API via key or session (if available).
- """
- api_key = self.request.params.get('key', None)
- secure_id = self.get_cookie( name=session_cookie )
- api_key_supplied = self.environ.get('is_api_request', False) and api_key
- if api_key_supplied and self._check_master_api_key( api_key ):
- self.api_inherit_admin = True
- log.info( "Session authenticated using Galaxy master api key" )
- self.user = None
- self.galaxy_session = None
- elif api_key_supplied:
- # Sessionless API transaction, we just need to associate a user.
- try:
- provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
- except NoResultFound:
- return 'Provided API key is not valid.'
- if provided_key.user.deleted:
- return 'User account is deactivated, please contact an administrator.'
- newest_key = provided_key.user.api_keys[0]
- if newest_key.key != provided_key.key:
- return 'Provided API key has expired.'
- self.set_user( provided_key.user )
- elif secure_id:
- # API authentication via active session
- # Associate user using existing session
- self._ensure_valid_session( session_cookie )
- else:
- # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
- self.user = None
- self.galaxy_session = None
-
- def _check_master_api_key( self, api_key ):
- master_api_key = getattr( self.app.config, 'master_api_key', None )
- if not master_api_key:
- return False
- # Hash keys to make them the same size, so we can do safe comparison.
- master_hash = hashlib.sha256( master_api_key ).hexdigest()
- provided_hash = hashlib.sha256( api_key ).hexdigest()
- return safe_str_cmp( master_hash, provided_hash )
-
- def _ensure_valid_session( self, session_cookie, create=True):
- """
- Ensure that a valid Galaxy session exists and is available as
- trans.session (part of initialization)
-
- Support for universe_session and universe_user cookies has been
- removed as of 31 Oct 2008.
- """
- # Try to load an existing session
- secure_id = self.get_cookie( name=session_cookie )
- galaxy_session = None
- prev_galaxy_session = None
- user_for_new_session = None
- invalidate_existing_session = False
- # Track whether the session has changed so we can avoid calling flush
- # in the most common case (session exists and is valid).
- galaxy_session_requires_flush = False
- if secure_id:
- # Decode the cookie value to get the session_key
- session_key = self.security.decode_guid( secure_id )
- try:
- # Make sure we have a valid UTF-8 string
- session_key = session_key.encode( 'utf8' )
- except UnicodeDecodeError:
- # We'll end up creating a new galaxy_session
- session_key = None
- if session_key:
- # Retrieve the galaxy_session id via the unique session_key
- galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
- .filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True ) ).first() #noqa
- # If remote user is in use it can invalidate the session and in some
- # cases won't have a cookie set above, so we need to to check some
- # things now.
- if self.app.config.use_remote_user:
- # If this is an api request, and they've passed a key, we let this go.
- assert self.app.config.remote_user_header in self.environ, \
- "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
- remote_user_email = self.environ[ self.app.config.remote_user_header ]
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- if galaxy_session:
- # An existing session, make sure correct association exists
- if galaxy_session.user is None:
- # No user, associate
- galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
- galaxy_session_requires_flush = True
- elif ((galaxy_session.user.email != remote_user_email) and
- ((not self.app.config.allow_user_impersonation) or
- (remote_user_email not in self.app.config.admin_users_list))):
- # Session exists but is not associated with the correct
- # remote user, and the currently set remote_user is not a
- # potentially impersonating admin.
- invalidate_existing_session = True
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
- remote_user_email, galaxy_session.user.email )
- else:
- # No session exists, get/create user for new session
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- else:
- if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
- # Remote user support is not enabled, but there is an existing
- # session with an external user, invalidate
- invalidate_existing_session = True
- log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
- galaxy_session.user.email )
- elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
- invalidate_existing_session = True
- log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
- # Do we need to invalidate the session for some reason?
- if invalidate_existing_session:
- prev_galaxy_session = galaxy_session
- prev_galaxy_session.is_valid = False
- galaxy_session = None
- # No relevant cookies, or couldn't find, or invalid, so create a new session
- if galaxy_session is None:
- galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
- galaxy_session_requires_flush = True
- self.galaxy_session = galaxy_session
- self.__update_session_cookie( name=session_cookie )
- else:
- self.galaxy_session = galaxy_session
- # Do we need to flush the session?
- if galaxy_session_requires_flush:
- self.sa_session.add( galaxy_session )
- # FIXME: If prev_session is a proper relation this would not
- # be needed.
- if prev_galaxy_session:
- self.sa_session.add( prev_galaxy_session )
- self.sa_session.flush()
- # If the old session was invalid, get a new history with our new session
- if invalidate_existing_session:
- self.new_history()
-
- def _ensure_logged_in_user( self, environ, session_cookie ):
- # The value of session_cookie can be one of
- # 'galaxysession' or 'galaxycommunitysession'
- # Currently this method does nothing unless session_cookie is 'galaxysession'
- if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
- # TODO: re-engineer to eliminate the use of allowed_paths
- # as maintenance overhead is far too high.
- allowed_paths = (
- url_for( controller='root', action='index' ),
- url_for( controller='root', action='tool_menu' ),
- url_for( controller='root', action='masthead' ),
- url_for( controller='root', action='history' ),
- url_for( controller='user', action='api_keys' ),
- url_for( controller='user', action='create' ),
- url_for( controller='user', action='index' ),
- url_for( controller='user', action='login' ),
- url_for( controller='user', action='logout' ),
- url_for( controller='user', action='manage_user_info' ),
- url_for( controller='user', action='set_default_permissions' ),
- url_for( controller='user', action='reset_password' ),
- url_for( controller='user', action='openid_auth' ),
- url_for( controller='user', action='openid_process' ),
- url_for( controller='user', action='openid_associate' ),
- url_for( controller='library', action='browse' ),
- url_for( controller='history', action='list' ),
- url_for( controller='dataset', action='list' )
- )
- display_as = url_for( controller='root', action='display_as' )
- if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
- try:
- host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
- except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
- host = None
- if host in UCSC_SERVERS:
- return
- external_display_path = url_for( controller='', action='display_application' )
- if self.request.path.startswith( external_display_path ):
- request_path_split = self.request.path.split( '/' )
- try:
- if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] )
- and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links
- and request_path_split[-3] != 'None'):
- return
- except IndexError:
- pass
- if self.request.path not in allowed_paths:
- self.response.send_redirect( url_for( controller='root', action='index' ) )
-
- def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
- """
- Create a new GalaxySession for this request, possibly with a connection
- to a previous session (in `prev_galaxy_session`) and an existing user
- (in `user_for_new_session`).
-
- Caller is responsible for flushing the returned session.
- """
- session_key = self.security.get_new_guid()
- galaxy_session = self.app.model.GalaxySession(
- session_key=session_key,
- is_valid=True,
- remote_host=self.request.remote_host,
- remote_addr=self.request.remote_addr,
- referer=self.request.headers.get( 'Referer', None ) )
- if prev_galaxy_session:
- # Invalidated an existing session for some reason, keep track
- galaxy_session.prev_session_id = prev_galaxy_session.id
- if user_for_new_session:
- # The new session should be associated with the user
- galaxy_session.user = user_for_new_session
- return galaxy_session
-
- def get_or_create_remote_user( self, remote_user_email ):
- """
- Create a remote user with the email remote_user_email and return it
- """
- if not self.app.config.use_remote_user:
- return None
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- user = self.sa_session.query( self.app.model.User
- ).filter( self.app.model.User.table.c.email==remote_user_email ).first() #noqa
- if user:
- # GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
- # role and default user / history permissions were not set for remote users. When a
- # remote user authenticates, we'll look for this information, and if missing, create it.
- if not self.app.security_agent.get_private_user_role( user ):
- self.app.security_agent.create_private_user_role( user )
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- if not user.default_permissions:
- self.app.security_agent.user_set_default_permissions( user )
- self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
- elif user is None:
- username = remote_user_email.split( '@', 1 )[0].lower()
- random.seed()
- user = self.app.model.User( email=remote_user_email )
- user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
- user.external = True
- # Replace invalid characters in the username
- for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
- username = username.replace( char, '-' )
- # Find a unique username - user can change it later
- if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
- i = 1
- while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
- i += 1
- username += '-' + str(i)
- user.username = username
- self.sa_session.add( user )
- self.sa_session.flush()
- self.app.security_agent.create_private_user_role( user )
- # We set default user permissions, before we log in and set the default history permissions
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- self.app.security_agent.user_set_default_permissions( user )
- # self.log_event( "Automatically created account '%s'", user.email )
- return user
-
- def __update_session_cookie( self, name='galaxysession' ):
- """
- Update the session cookie to match the current session.
- """
- self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
- name=name, path=self.app.config.cookie_path )
-
- def handle_user_login( self, user ):
- """
- Login a new user (possibly newly created)
-
- - create a new session
- - associate new session with user
- - if old session had a history and it was not associated with a user, associate it with the new session,
- otherwise associate the current session's history with the user
- - add the disk usage of the current session to the user's total disk usage
- """
- # Set the previous session
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- # Define a new current_session
- self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
- if self.webapp.name == 'galaxy':
- cookie_name = 'galaxysession'
- # Associated the current user's last accessed history (if exists) with their new session
- history = None
- try:
- users_last_session = user.galaxy_sessions[0]
- last_accessed = True
- except:
- users_last_session = None
- last_accessed = False
- if (prev_galaxy_session.current_history and not
- prev_galaxy_session.current_history.deleted and
- prev_galaxy_session.current_history.datasets):
- if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
- # If the previous galaxy session had a history, associate it with the new
- # session, but only if it didn't belong to a different user.
- history = prev_galaxy_session.current_history
- if prev_galaxy_session.user is None:
- # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
- for hda in history.datasets:
- user.total_disk_usage += hda.quota_amount( user )
- elif self.galaxy_session.current_history:
- history = self.galaxy_session.current_history
- if (not history and users_last_session and
- users_last_session.current_history and not
- users_last_session.current_history.deleted):
- history = users_last_session.current_history
- elif not history:
- history = self.get_history( create=True )
- if history not in self.galaxy_session.histories:
- self.galaxy_session.add_history( history )
- if history.user is None:
- history.user = user
- self.galaxy_session.current_history = history
- if not last_accessed:
- # Only set default history permissions if current history is not from a previous session
- self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
- else:
- cookie_name = 'galaxycommunitysession'
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- self.sa_session.flush()
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name=cookie_name )
-
- def handle_user_logout( self, logout_all=False ):
- """
- Logout the current user:
- - invalidate the current session
- - create a new session with no user associated
- """
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- galaxy_user_id = prev_galaxy_session.user_id
- if logout_all and galaxy_user_id is not None:
- for other_galaxy_session in self.sa_session.query( self.app.model.GalaxySession
- ).filter( and_( self.app.model.GalaxySession.table.c.user_id==galaxy_user_id, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True, #noqa
- self.app.model.GalaxySession.table.c.id!=prev_galaxy_session.id ) ): #noqa
- other_galaxy_session.is_valid = False
- self.sa_session.add( other_galaxy_session )
- self.sa_session.flush()
- if self.webapp.name == 'galaxy':
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name='galaxysession' )
- elif self.webapp.name == 'tool_shed':
- self.__update_session_cookie( name='galaxycommunitysession' )
-
- def get_galaxy_session( self ):
- """
- Return the current galaxy session
- """
- return self.galaxy_session
-
- def get_history( self, create=False ):
- """
-        Load the current history, creating a new one only if there is no
-        current history and we're told to create one.
- Transactions will not always have an active history (API requests), so
- None is a valid response.
- """
- history = None
- if self.galaxy_session:
- history = self.galaxy_session.current_history
- if not history and util.string_as_bool( create ):
- history = self.new_history()
- return history
-
- def set_history( self, history ):
- if history and not history.deleted:
- self.galaxy_session.current_history = history
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
-
- history = property( get_history, set_history )
-
- def get_or_create_default_history( self ):
- """
- Gets or creates a default history and associates it with the current
- session.
- """
-
- # There must be a user to fetch a default history.
- if not self.galaxy_session.user:
- return self.new_history()
-
- # Look for default history that (a) has default name + is not deleted and
- # (b) has no datasets. If suitable history found, use it; otherwise, create
- # new history.
- unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
- user=self.galaxy_session.user,
- name=self.app.model.History.default_name,
- deleted=False )
- default_history = None
- for history in unnamed_histories:
- if len( history.datasets ) == 0:
- # Found suitable default history.
- default_history = history
- break
-
-        # Set or create history.
- if default_history:
- history = default_history
- self.set_history( history )
- else:
- history = self.new_history()
-
- return history
-
- def new_history( self, name=None ):
- """
- Create a new history and associate it with the current session and
- its associated user (if set).
- """
- # Create new history
- history = self.app.model.History()
- if name:
- history.name = name
- # Associate with session
- history.add_galaxy_session( self.galaxy_session )
- # Make it the session's current history
- self.galaxy_session.current_history = history
- # Associate with user
- if self.galaxy_session.user:
- history.user = self.galaxy_session.user
- # Track genome_build with history
- history.genome_build = self.app.genome_builds.default_value
- # Set the user's default history permissions
- self.app.security_agent.history_set_default_permissions( history )
- # Save
- self.sa_session.add_all( ( self.galaxy_session, history ) )
- self.sa_session.flush()
- return history
-
- @base.lazy_property
- def template_context( self ):
- return dict()
-
- def make_form_data( self, name, **kwargs ):
- rval = self.template_context[name] = FormData()
- rval.values.update( kwargs )
- return rval
-
- def set_message( self, message, type=None ):
- """
- Convenience method for setting the 'message' and 'message_type'
- element of the template context.
- """
- self.template_context['message'] = message
- if type:
- self.template_context['status'] = type
-
- def get_message( self ):
- """
- Convenience method for getting the 'message' element of the template
- context.
- """
- return self.template_context['message']
-
- def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single message.
-
- `type`: one of "error", "warning", "info", or "done"; determines the
- type of dialog box and icon displayed with the message
-
- `refresh_frames`: names of frames in the interface that should be
- refreshed when the message is displayed
- """
- return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
-
- def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an error message. See `show_message`.
- """
- return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an ok message. See `show_message`.
- """
- return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
-        Convenience method for displaying a warning message. See `show_message`.
- """
- return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single HTML
- form.
- """
- return self.fill_template( template, form=form, header=header,
- use_panels=( form.use_panels or use_panels ),
- active_view=active_view )
-
- def fill_template(self, filename, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- # call get_user so we can invalidate sessions from external users,
- # if external auth has been disabled.
- self.get_user()
- if filename.endswith( ".mako" ):
- return self.fill_template_mako( filename, **kwargs )
- else:
- template = Template( file=os.path.join(self.app.config.template_path, filename),
- searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
- return str( template )
-
- def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
- template_lookup = template_lookup or self.webapp.mako_template_lookup
- template = template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
-
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util,
- request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
- return template.render( **data )
-
- def stream_template_mako( self, filename, **kwargs ):
- template = self.webapp.mako_template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
-
- def render( environ, start_response ):
- response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
-
- class StreamBuffer( object ):
- def write( self, d ):
- response_write( d.encode( 'utf-8' ) )
- buffer = StreamBuffer()
- context = mako.runtime.Context( buffer, **data )
- template.render_context( context )
- return []
- return render
-
- def fill_template_string(self, template_string, context=None, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- template = Template( source=template_string,
- searchList=[context or kwargs, dict(caller=self)] )
- return str(template)
-
-
-class FormBuilder( object ):
- """
- Simple class describing an HTML form
- """
- def __init__( self, action="", title="", name="form", submit_text="submit", use_panels=False ):
- self.title = title
- self.name = name
- self.action = action
- self.submit_text = submit_text
- self.inputs = []
- self.use_panels = use_panels
-
- def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
- self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
- return self
-
- def add_text( self, name, label, value=None, error=None, help=None ):
- return self.add_input( 'text', label, name, value, error, help )
-
- def add_password( self, name, label, value=None, error=None, help=None ):
- return self.add_input( 'password', label, name, value, error, help )
-
- def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
- self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label ) )
- return self
-
-
-class FormInput( object ):
- """
- Simple class describing a form input element
- """
- def __init__( self, type, name, label, value=None, error=None, help=None, use_label=True, extra_attributes={}, **kwargs ):
- self.type = type
- self.name = name
- self.label = label
- self.value = value
- self.error = error
- self.help = help
- self.use_label = use_label
- self.extra_attributes = extra_attributes
-
-
-class DatalistInput( FormInput ):
- """ Data list input """
-
- def __init__( self, name, *args, **kwargs ):
- if 'extra_attributes' not in kwargs:
- kwargs[ 'extra_attributes' ] = {}
- kwargs[ 'extra_attributes' ][ 'list' ] = name
- FormInput.__init__( self, None, name, *args, **kwargs )
- self.options = kwargs.get( 'options', {} )
-
- def body_html( self ):
- options = "".join( [ "<option value='%s'>%s</option>" % ( key, value ) for key, value in self.options.iteritems() ] )
- return """<datalist id="%s">%s</datalist>""" % ( self.name, options )
-
-
-class SelectInput( FormInput ):
- """ A select form input. """
- def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
- FormInput.__init__( self, "select", name, label, value=value, error=error, help=help, use_label=use_label )
- self.options = options
-
-
-class FormData( object ):
- """
- Class for passing data about a form to a template, very rudimentary, could
- be combined with the tool form handling to build something more general.
- """
- def __init__( self ):
- self.values = Bunch()
- self.errors = Bunch()
-
-
-class Bunch( dict ):
- """
- Bunch based on a dict
- """
- def __getattr__( self, key ):
- if key not in self:
- raise AttributeError(key)
- return self[key]
-
- def __setattr__( self, key, value ):
- self[key] = value
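For reference, the Bunch class above is just a dict with attribute access; a minimal usage sketch (illustrative only, not part of the changeset):

settings = Bunch()
settings.host = 'localhost'        # __setattr__ stores it as settings['host']
assert settings['host'] == 'localhost'
try:
    settings.port                  # missing keys raise AttributeError rather than KeyError
except AttributeError:
    pass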
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
14 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d57655849ac4/
Changeset: d57655849ac4
User: carlfeberhard
Date: 2014-09-03 14:57:25
Summary: Empty out web/framework/__init__ into decorator, transaction, webapp, and formbuilder, fix related imports
Affected #: 18 files
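For orientation, a minimal sketch of where the public names move with this changeset (paths taken from the lib/galaxy/web/__init__.py hunk below; illustrative only, not part of the commit):

# Before: decorators were importable from the framework package itself, e.g.
#   from galaxy.web.framework import expose, json, require_admin
# After: they live in dedicated submodules,
from galaxy.web.framework.decorators import expose, json, require_admin
from galaxy.web.framework.formbuilder import form, FormBuilder
# while code that goes through galaxy.web keeps working, since that package
# re-exports the same names from the new modules:
from galaxy import web   # web.expose, web.form, etc. still resolve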
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -6,7 +6,6 @@
import copy
import cPickle
import json
-import logging
import os
import shutil
import sys
@@ -15,13 +14,19 @@
from os.path import abspath
-import galaxy.model
-from galaxy.util import listify, stringify_dictionary_keys, string_as_bool
-from galaxy.util.odict import odict
-from galaxy.util import in_directory
-from galaxy.web import form_builder
+from galaxy import eggs
+eggs.require( "SQLAlchemy >= 0.4" )
from sqlalchemy.orm import object_session
+import galaxy.model
+from galaxy.util import listify
+from galaxy.util import stringify_dictionary_keys
+from galaxy.util import string_as_bool
+from galaxy.util import in_directory
+from galaxy.util.odict import odict
+from galaxy.web import form_builder
+
+import logging
log = logging.getLogger(__name__)
STATEMENTS = "__galaxy_statements__" #this is the name of the property in a Datatype class where new metadata spec element Statements are stored
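The reordering above also follows the usual Galaxy eggs pattern: require the pinned egg before importing the package it provides. A minimal sketch of that pattern, assuming the eggs framework is configured as in the hunk above:

from galaxy import eggs
eggs.require( "SQLAlchemy >= 0.4" )         # put the pinned egg on sys.path first
from sqlalchemy.orm import object_session   # ...then import from the packaged module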
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -1,23 +1,25 @@
"""
The Galaxy web application framework
"""
-from framework import expose
-from framework import json
-from framework import json_pretty
-from framework import require_login
-from framework import require_admin
from framework import url_for
-from framework import error
-from framework import form
-from framework import FormBuilder
-from framework import expose_api
-from framework import expose_api_anonymous
-from framework import expose_api_raw
-from framework import expose_api_raw_anonymous
-from framework.base import httpexceptions
+from framework.decorators import error
+from framework.decorators import expose
+from framework.decorators import json
+from framework.decorators import json_pretty
+from framework.decorators import require_login
+from framework.decorators import require_admin
+from framework.decorators import expose_api
+from framework.decorators import expose_api_anonymous
+from framework.decorators import expose_api_raw
+from framework.decorators import expose_api_raw_anonymous
# TODO: Drop and make these the default.
-from framework import _future_expose_api
-from framework import _future_expose_api_anonymous
-from framework import _future_expose_api_raw
-from framework import _future_expose_api_raw_anonymous
+from framework.decorators import _future_expose_api
+from framework.decorators import _future_expose_api_anonymous
+from framework.decorators import _future_expose_api_raw
+from framework.decorators import _future_expose_api_raw_anonymous
+
+from framework.formbuilder import form
+from framework.formbuilder import FormBuilder
+
+from framework.base import httpexceptions
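Since galaxy.web still re-exports the decorators, downstream controllers are unchanged by this split. A minimal, hypothetical controller sketch (names invented for illustration, not part of this changeset):

from galaxy import web
from galaxy.web.base.controller import BaseUIController

class ExampleController( BaseUIController ):
    # hypothetical controller; @web.expose marks a method as web-accessible,
    # @web.json additionally serializes the return value and sets the JSON content type
    @web.expose
    def index( self, trans, **kwd ):
        return trans.show_message( "hello" )

    @web.json
    def status( self, trans, **kwd ):
        return dict( ok=True )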
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -579,7 +579,7 @@
"""
Pass control over to trans and render ``template_filename``.
- :type trans: ``galaxy.web.framework.GalaxyWebTransaction``
+ :type trans: ``galaxy.web.framework.transaction.GalaxyWebTransaction``
:param trans: transaction doing the rendering
:type plugin: ``util.bunch.Bunch``
:param plugin: the plugin containing the template to render
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -2,1395 +2,9 @@
Galaxy web application framework
"""
-import hashlib
-import inspect
-import os
-import random
-import socket
-import string
-import time
-from traceback import format_exc
-from Cookie import CookieError
-from functools import wraps
-
from galaxy import eggs
-
-eggs.require( "Cheetah" )
-from Cheetah.Template import Template
-
-from galaxy import util
-from galaxy.exceptions import error_codes
-from galaxy.exceptions import MessageException
-from galaxy.util import asbool
-from galaxy.util import safe_str_cmp
-from galaxy.util.backports.importlib import import_module
-from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.web.framework import base, helpers
-
-import paste.httpexceptions
-
-eggs.require( "Mako" )
-import mako.template
-import mako.lookup
-import mako.runtime
-
-eggs.require( "pytz" ) # Used by Babel.
-eggs.require( "Babel" )
-from babel.support import Translations
-from babel import Locale
-
-eggs.require( "SQLAlchemy >= 0.4" )
-from sqlalchemy import and_
-from sqlalchemy.orm.exc import NoResultFound
-
eggs.require( "pexpect" )
eggs.require( "amqp" )
-import logging
-log = logging.getLogger( __name__ )
-
+import base
url_for = base.routes.url_for
-
-UCSC_SERVERS = (
- 'hgw1.cse.ucsc.edu',
- 'hgw2.cse.ucsc.edu',
- 'hgw3.cse.ucsc.edu',
- 'hgw4.cse.ucsc.edu',
- 'hgw5.cse.ucsc.edu',
- 'hgw6.cse.ucsc.edu',
- 'hgw7.cse.ucsc.edu',
- 'hgw8.cse.ucsc.edu',
-)
-
-JSON_CONTENT_TYPE = "application/json"
-
-
-# ----------------------------------------------------------------------------- web controller decorators
-def expose( func ):
- """
- Decorator: mark a function as 'exposed' and thus web accessible
- """
- func.exposed = True
- return func
-
-def json( func, **json_kwargs ):
- """
- Format the response as JSON and set the response content type to
- JSON_CONTENT_TYPE.
- """
- @wraps(func)
- def call_and_format( self, trans, *args, **kwargs ):
- trans.response.set_content_type( JSON_CONTENT_TYPE )
- return to_json_string( func( self, trans, *args, **kwargs ), **json_kwargs )
- if not hasattr(func, '_orig'):
- call_and_format._orig = func
- call_and_format.exposed = True
- return call_and_format
-
-def json_pretty( func ):
- """
- Indent and sort returned JSON.
- """
- return json( func, indent=4, sort_keys=True )
-
-def require_login( verb="perform this action", use_panels=False, webapp='galaxy' ):
- def argcatcher( func ):
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if trans.get_user():
- return func( self, trans, *args, **kwargs )
- else:
- return trans.show_error_message(
- 'You must be <a target="galaxy_main" href="%s">logged in</a> to %s.'
- % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
- return decorator
- return argcatcher
-
-def require_admin( func ):
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if not trans.user_is_admin():
- msg = "You must be an administrator to access this feature."
- user = trans.get_user()
- if not trans.app.config.admin_users_list:
- msg = "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration."
- elif not user:
- msg = "You must be logged in as an administrator to access this feature."
- trans.response.status = 403
- if trans.response.get_content_type() == 'application/json':
- return msg
- else:
- return trans.show_error_message( msg )
- return func( self, trans, *args, **kwargs )
- return decorator
-
-
-# ----------------------------------------------------------------------------- (original) api decorators
-def expose_api( func, to_json=True, user_required=True ):
- """
- Expose this function via the API.
- """
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- def error( environ, start_response ):
- start_response( error_status, [('Content-type', 'text/plain')] )
- return error_message
- error_status = '403 Forbidden'
- if trans.error_message:
- return trans.error_message
- if user_required and trans.anonymous:
- error_message = "API Authentication Required for this request"
- return error
- if trans.request.body:
- try:
- kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
- except ValueError:
- error_status = '400 Bad Request'
- error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
- return error
- trans.response.set_content_type( "application/json" )
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- # Perform api_run_as processing, possibly changing identity
- if 'payload' in kwargs and 'run_as' in kwargs['payload']:
- if not trans.user_can_do_run_as():
- error_message = 'User does not have permissions to run jobs as another user'
- return error
- try:
- decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
- except TypeError:
- trans.response.status = 400
- return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
- try:
- user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
- trans.api_inherit_admin = trans.user_is_admin()
- trans.set_user(user)
- except:
- trans.response.status = 400
- return "That user does not exist."
- try:
- rval = func( self, trans, *args, **kwargs)
- if to_json and trans.debug:
- rval = to_json_string( rval, indent=4, sort_keys=True )
- elif to_json:
- rval = to_json_string( rval )
- return rval
- except paste.httpexceptions.HTTPException:
- raise # handled
- except:
- log.exception( 'Uncaught exception in exposed API method:' )
- raise paste.httpexceptions.HTTPServerError()
- if not hasattr(func, '_orig'):
- decorator._orig = func
- decorator.exposed = True
- return decorator
-
-def __extract_payload_from_request(trans, func, kwargs):
- content_type = trans.request.headers['content-type']
- if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
- # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
- # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
- # are not a part of the request body. This is a problem because it's not possible to differentiate between values
- # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
- # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
- # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
- payload = kwargs.copy()
- named_args, _, _, _ = inspect.getargspec(func)
- for arg in named_args:
- payload.pop(arg, None)
- for k, v in payload.iteritems():
- if isinstance(v, (str, unicode)):
- try:
- payload[k] = from_json_string(v)
- except:
- # may not actually be json, just continue
- pass
- payload = util.recursively_stringify_dictionary_keys( payload )
- else:
- # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
- # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
- # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
- payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
- return payload
-
-def expose_api_raw( func ):
- """
- Expose this function via the API but don't dump the results
- to JSON.
- """
- return expose_api( func, to_json=False )
-
-def expose_api_raw_anonymous( func ):
- """
- Expose this function via the API but don't dump the results
- to JSON.
- """
- return expose_api( func, to_json=False, user_required=False )
-
-def expose_api_anonymous( func, to_json=True ):
- """
- Expose this function via the API but don't require a set user.
- """
- return expose_api( func, to_json=to_json, user_required=False )
-
-
-# ----------------------------------------------------------------------------- (new) api decorators
-# TODO: rename as expose_api and make default.
-def _future_expose_api( func, to_json=True, user_required=True ):
- """
- Expose this function via the API.
- """
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if trans.error_message:
- # TODO: Document this branch, when can this happen,
- # I don't understand it.
- return __api_error_response( trans, err_msg=trans.error_message )
- if user_required and trans.anonymous:
- error_code = error_codes.USER_NO_API_KEY
- # Use error codes default error message.
- err_msg = "API authentication required for this request"
- return __api_error_response( trans, err_code=error_code, err_msg=err_msg, status_code=403 )
- if trans.request.body:
- try:
- kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
- except ValueError:
- error_code = error_codes.USER_INVALID_JSON
- return __api_error_response( trans, status_code=400, err_code=error_code )
-
- trans.response.set_content_type( JSON_CONTENT_TYPE )
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- # TODO: Refactor next block out into a helper procedure.
- # Perform api_run_as processing, possibly changing identity
- if 'payload' in kwargs and 'run_as' in kwargs['payload']:
- if not trans.user_can_do_run_as():
- error_code = error_codes.USER_CANNOT_RUN_AS
- return __api_error_response( trans, err_code=error_code, status_code=403 )
- try:
- decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
- except TypeError:
- error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
- error_code = error_codes.USER_INVALID_RUN_AS
- return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
- try:
- user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
- trans.api_inherit_admin = trans.user_is_admin()
- trans.set_user(user)
- except:
- error_code = error_codes.USER_INVALID_RUN_AS
- return __api_error_response( trans, err_code=error_code, status_code=400 )
- try:
- rval = func( self, trans, *args, **kwargs)
- if to_json and trans.debug:
- rval = to_json_string( rval, indent=4, sort_keys=True )
- elif to_json:
- rval = to_json_string( rval )
- return rval
- except MessageException as e:
- traceback_string = format_exc()
- return __api_error_response( trans, exception=e, traceback=traceback_string )
- except paste.httpexceptions.HTTPException:
- # TODO: Allow to pass or format for the API???
- raise # handled
- except Exception as e:
- traceback_string = format_exc()
- error_message = 'Uncaught exception in exposed API method:'
- log.exception( error_message )
- return __api_error_response(
- trans,
- status_code=500,
- exception=e,
- traceback=traceback_string,
- err_msg=error_message,
- err_code=error_codes.UNKNOWN
- )
- if not hasattr(func, '_orig'):
- decorator._orig = func
- decorator.exposed = True
- return decorator
-
-def __api_error_message( trans, **kwds ):
- exception = kwds.get( "exception", None )
- if exception:
- # If we are passed a MessageException use err_msg.
- default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
- default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
- extra_error_info = getattr( exception, 'extra_error_info', {} )
- if not isinstance( extra_error_info, dict ):
- extra_error_info = {}
- else:
- default_error_message = "Error processing API request."
- default_error_code = error_codes.UNKNOWN
- extra_error_info = {}
- traceback_string = kwds.get( "traceback", "No traceback available." )
- err_msg = kwds.get( "err_msg", default_error_message )
- error_code_object = kwds.get( "err_code", default_error_code )
- try:
- error_code = error_code_object.code
- except AttributeError:
- # Some sort of bad error code sent in, logic failure on part of
- # Galaxy developer.
- error_code = error_codes.UNKNOWN.code
- # Would prefer the terminology of error_code and error_message, but
- # err_msg is used in a good number of places already. Might as well not change
- # it?
- error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
- if trans.debug: # TODO: Should admins get to see traceback as well?
- error_response[ "traceback" ] = traceback_string
- return error_response
-
-def __api_error_response( trans, **kwds ):
- error_dict = __api_error_message( trans, **kwds )
- exception = kwds.get( "exception", None )
- # If we are given a status code directly - use it - otherwise check
- # the exception for a status_code attribute.
- if "status_code" in kwds:
- status_code = int( kwds.get( "status_code" ) )
- elif hasattr( exception, "status_code" ):
- status_code = int( exception.status_code )
- else:
- status_code = 500
- response = trans.response
- if not response.status or str(response.status).startswith("20"):
- # Unset status code appears to be string '200 OK', if anything
- # non-success (i.e. not 200 or 201) has been set, do not override
- # underlying controller.
- response.status = status_code
- return to_json_string( error_dict )
-
-
-# TODO: rename as expose_api and make default.
-def _future_expose_api_anonymous( func, to_json=True ):
- """
- Expose this function via the API but don't require a set user.
- """
- return _future_expose_api( func, to_json=to_json, user_required=False )
-
-
-def _future_expose_api_raw( func ):
- return _future_expose_api( func, to_json=False, user_required=True )
-
-
-def _future_expose_api_raw_anonymous( func ):
- return _future_expose_api( func, to_json=False, user_required=False )
-
-
-
-NOT_SET = object()
-
-
-def error( message ):
- raise MessageException( message, type='error' )
-
-
-def form( *args, **kwargs ):
- return FormBuilder( *args, **kwargs )
-
-
-class WebApplication( base.WebApplication ):
-
- def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
- self.name = name
- base.WebApplication.__init__( self )
- self.set_transaction_factory( lambda e: self.transaction_chooser( e, galaxy_app, session_cookie ) )
- # Mako support
- self.mako_template_lookup = self.create_mako_template_lookup( galaxy_app, name )
- # Security helper
- self.security = galaxy_app.security
-
- def create_mako_template_lookup( self, galaxy_app, name ):
- paths = []
- # First look in webapp specific directory
- if name is not None:
- paths.append( os.path.join( galaxy_app.config.template_path, 'webapps', name ) )
- # Then look in root directory
- paths.append( galaxy_app.config.template_path )
- # Create TemplateLookup with a small cache
- return mako.lookup.TemplateLookup(directories=paths,
- module_directory=galaxy_app.config.template_cache,
- collection_size=500,
- output_encoding='utf-8' )
-
- def handle_controller_exception( self, e, trans, **kwargs ):
- if isinstance( e, MessageException ):
- # In the case of a controller exception, sanitize to make sure
- # unsafe html input isn't reflected back to the user
- return trans.show_message( sanitize_html(e.err_msg), e.type )
-
- def make_body_iterable( self, trans, body ):
- if isinstance( body, FormBuilder ):
- body = trans.show_form( body )
- return base.WebApplication.make_body_iterable( self, trans, body )
-
- def transaction_chooser( self, environ, galaxy_app, session_cookie ):
- return GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
-
- def add_ui_controllers( self, package_name, app ):
- """
- Search for UI controllers in `package_name` and add
- them to the webapp.
- """
- from galaxy.web.base.controller import BaseUIController
- from galaxy.web.base.controller import ControllerUnavailable
- package = import_module( package_name )
- controller_dir = package.__path__[0]
- for fname in os.listdir( controller_dir ):
- if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
- name = fname[:-3]
- module_name = package_name + "." + name
- try:
- module = import_module( module_name )
- except ControllerUnavailable, exc:
- log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
- continue
- # Look for a controller inside the modules
- for key in dir( module ):
- T = getattr( module, key )
- if inspect.isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
- controller = self._instantiate_controller( T, app )
- self.add_ui_controller( name, controller )
-
- def add_api_controllers( self, package_name, app ):
- """
- Search for API controllers in `package_name` and add
- them to the webapp.
- """
- from galaxy.web.base.controller import BaseAPIController
- from galaxy.web.base.controller import ControllerUnavailable
- package = import_module( package_name )
- controller_dir = package.__path__[0]
- for fname in os.listdir( controller_dir ):
- if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
- name = fname[:-3]
- module_name = package_name + "." + name
- try:
- module = import_module( module_name )
- except ControllerUnavailable, exc:
- log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
- continue
- for key in dir( module ):
- T = getattr( module, key )
- # Exclude classes such as BaseAPIController and BaseTagItemsController
- if inspect.isclass( T ) and not key.startswith("Base") and issubclass( T, BaseAPIController ):
- # By default use module_name, but allow controller to override name
- controller_name = getattr( T, "controller_name", name )
- controller = self._instantiate_controller( T, app )
- self.add_api_controller( controller_name, controller )
-
- def _instantiate_controller( self, T, app ):
- """ Extension point, allow apps to contstruct controllers differently,
- really just used to stub out actual controllers for routes testing.
- """
- return T( app )
-
-
-class ProvidesAppContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- database and event handling.
-
- Mixed in class must provide `app` property.
- """
-
- def log_action( self, user=None, action=None, context=None, params=None):
- """
- Application-level logging of user actions.
- """
- if self.app.config.log_actions:
- action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
- try:
- if user:
- action.user = user
- else:
- action.user = self.user
- except:
- action.user = None
- try:
- action.session_id = self.galaxy_session.id
- except:
- action.session_id = None
- self.sa_session.add( action )
- self.sa_session.flush()
-
- def log_event( self, message, tool_id=None, **kwargs ):
- """
- Application level logging. Still needs fleshing out (log levels and such)
- Logging events is a config setting - if False, do not log.
- """
- if self.app.config.log_events:
- event = self.app.model.Event()
- event.tool_id = tool_id
- try:
- event.message = message % kwargs
- except:
- event.message = message
- try:
- event.history = self.get_history()
- except:
- event.history = None
- try:
- event.history_id = self.history.id
- except:
- event.history_id = None
- try:
- event.user = self.user
- except:
- event.user = None
- try:
- event.session_id = self.galaxy_session.id
- except:
- event.session_id = None
- self.sa_session.add( event )
- self.sa_session.flush()
-
- @property
- def sa_session( self ):
- """
- Returns a SQLAlchemy session -- currently just gets the current
- session from the threadlocal session context, but this is provided
- to allow migration toward a more SQLAlchemy 0.4 style of use.
- """
- return self.app.model.context.current
-
- def expunge_all( self ):
- app = self.app
- context = app.model.context
- context.expunge_all()
- # This is a bit hacky, should refactor this. Maybe refactor to app -> expunge_all()
- if hasattr(app, 'install_model'):
- install_model = app.install_model
- if install_model != app.model:
- install_model.context.expunge_all()
-
- def get_toolbox(self):
- """Returns the application toolbox"""
- return self.app.toolbox
-
- @property
- def model( self ):
- return self.app.model
-
- @property
- def install_model( self ):
- return self.app.install_model
-
- def request_types(self):
- if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
- return True
- return False
-
-
-class ProvidesUserContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about users.
-
- Mixed in class must provide `user`, `api_inherit_admin`, and `app`
- properties.
- """
-
- @property
- def anonymous( self ):
- return self.user is None and not self.api_inherit_admin
-
- def get_current_user_roles( self ):
- user = self.user
- if user:
- roles = user.all_roles()
- else:
- roles = []
- return roles
-
- def user_is_admin( self ):
- if self.api_inherit_admin:
- return True
- return self.user and self.user.email in self.app.config.admin_users_list
-
- def user_can_do_run_as( self ):
- run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
- if not run_as_users:
- return False
- user_in_run_as_users = self.user and self.user.email in run_as_users
- # Can do if explicitly in list or master_api_key supplied.
- can_do_run_as = user_in_run_as_users or self.api_inherit_admin
- return can_do_run_as
-
- @property
- def user_ftp_dir( self ):
- identifier = self.app.config.ftp_upload_dir_identifier
- return os.path.join( self.app.config.ftp_upload_dir, getattr(self.user, identifier) )
-
-
-class ProvidesHistoryContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about histories.
-
- Mixed in class must provide `user`, `history`, and `app`
- properties.
- """
-
- def db_dataset_for( self, dbkey ):
- """
- Returns the db_file dataset associated with / needed by the given `dbkey`, or `None`.
- """
- # If no history, return None.
- if self.history is None:
- return None
- if isinstance(self.history, Bunch):
- # The API presents a Bunch for a history. Until the API is
- # more fully featured for handling this, also return None.
- return None
- datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
- .filter_by( deleted=False, history_id=self.history.id, extension="len" )
- for ds in datasets:
- if dbkey == ds.dbkey:
- return ds
- return None
-
- @property
- def db_builds( self ):
- """
- Returns the builds defined by galaxy and the builds defined by
- the user (chromInfo in history).
- """
- # FIXME: This method should be removed
- return self.app.genome_builds.get_genome_build_names( trans=self )
-
-
-class GalaxyWebTransaction( base.DefaultWebTransaction, ProvidesAppContext, ProvidesUserContext, ProvidesHistoryContext ):
- """
- Encapsulates web transaction specific state for the Galaxy application
- (specifically the user's "cookie" session and history)
- """
-
- def __init__( self, environ, app, webapp, session_cookie=None):
- self.app = app
- self.webapp = webapp
- self.security = webapp.security
- base.DefaultWebTransaction.__init__( self, environ )
- self.setup_i18n()
- self.expunge_all()
- self.debug = asbool( self.app.config.get( 'debug', False ) )
- # Flag indicating whether we are in workflow building mode (means
- # that the current history should not be used for parameter values
- # and such).
- self.workflow_building_mode = False
- # Flag indicating whether this is an API call and the API key user is an administrator
- self.api_inherit_admin = False
- self.__user = None
- self.galaxy_session = None
- self.error_message = None
-
- if self.environ.get('is_api_request', False):
- # With API requests, if there's a key, use it and associate the
- # user with the transaction.
- # If not, check for an active session but do not create one.
- # If an error message is set here, it's sent back using
- # trans.show_error in the response -- in expose_api.
- self.error_message = self._authenticate_api( session_cookie )
- elif self.app.name == "reports":
- self.galaxy_session = None
- else:
- # This is a web request, get or create session.
- self._ensure_valid_session( session_cookie )
- if self.galaxy_session:
- # When we've authenticated by session, we have to check the
- # following.
- # Prevent deleted users from accessing Galaxy
- if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
- self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
- if self.app.config.require_login:
- self._ensure_logged_in_user( environ, session_cookie )
-
- def setup_i18n( self ):
- locales = []
- if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
- # locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
- client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
- for locale in client_locales:
- try:
- locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
- except Exception, e:
- log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
- if not locales:
- # Default to English
- locales = 'en'
- t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
- self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
-
- def get_user( self ):
- """Return the current user if logged in or None."""
- if self.galaxy_session:
- return self.galaxy_session.user
- else:
- return self.__user
-
- def set_user( self, user ):
- """Set the current user."""
- if self.galaxy_session:
- self.galaxy_session.user = user
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
- self.__user = user
-
- user = property( get_user, set_user )
-
- def get_cookie( self, name='galaxysession' ):
- """Convenience method for getting a session cookie"""
- try:
- # If we've changed the cookie during the request return the new value
- if name in self.response.cookies:
- return self.response.cookies[name].value
- else:
- return self.request.cookies[name].value
- except:
- return None
-
- def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
- """Convenience method for setting a session cookie"""
- # The galaxysession cookie value must be a high entropy 128 bit random number encrypted
- # using a server secret key. Any other value is invalid and could pose security issues.
- self.response.cookies[name] = value
- self.response.cookies[name]['path'] = path
- self.response.cookies[name]['max-age'] = 3600 * 24 * age # 90 days
- tstamp = time.localtime( time.time() + 3600 * 24 * age )
- self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
- self.response.cookies[name]['version'] = version
- try:
- self.response.cookies[name]['httponly'] = True
- except CookieError, e:
- log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
-
- def _authenticate_api( self, session_cookie ):
- """
- Authenticate for the API via key or session (if available).
- """
- api_key = self.request.params.get('key', None)
- secure_id = self.get_cookie( name=session_cookie )
- api_key_supplied = self.environ.get('is_api_request', False) and api_key
- if api_key_supplied and self._check_master_api_key( api_key ):
- self.api_inherit_admin = True
- log.info( "Session authenticated using Galaxy master api key" )
- self.user = None
- self.galaxy_session = None
- elif api_key_supplied:
- # Sessionless API transaction, we just need to associate a user.
- try:
- provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
- except NoResultFound:
- return 'Provided API key is not valid.'
- if provided_key.user.deleted:
- return 'User account is deactivated, please contact an administrator.'
- newest_key = provided_key.user.api_keys[0]
- if newest_key.key != provided_key.key:
- return 'Provided API key has expired.'
- self.set_user( provided_key.user )
- elif secure_id:
- # API authentication via active session
- # Associate user using existing session
- self._ensure_valid_session( session_cookie )
- else:
- # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
- self.user = None
- self.galaxy_session = None
-
- def _check_master_api_key( self, api_key ):
- master_api_key = getattr( self.app.config, 'master_api_key', None )
- if not master_api_key:
- return False
- # Hash keys to make them the same size, so we can do safe comparison.
- master_hash = hashlib.sha256( master_api_key ).hexdigest()
- provided_hash = hashlib.sha256( api_key ).hexdigest()
- return safe_str_cmp( master_hash, provided_hash )
-
- def _ensure_valid_session( self, session_cookie, create=True):
- """
- Ensure that a valid Galaxy session exists and is available as
- trans.session (part of initialization)
-
- Support for universe_session and universe_user cookies has been
- removed as of 31 Oct 2008.
- """
- # Try to load an existing session
- secure_id = self.get_cookie( name=session_cookie )
- galaxy_session = None
- prev_galaxy_session = None
- user_for_new_session = None
- invalidate_existing_session = False
- # Track whether the session has changed so we can avoid calling flush
- # in the most common case (session exists and is valid).
- galaxy_session_requires_flush = False
- if secure_id:
- # Decode the cookie value to get the session_key
- session_key = self.security.decode_guid( secure_id )
- try:
- # Make sure we have a valid UTF-8 string
- session_key = session_key.encode( 'utf8' )
- except UnicodeDecodeError:
- # We'll end up creating a new galaxy_session
- session_key = None
- if session_key:
- # Retrieve the galaxy_session id via the unique session_key
- galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
- .filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True ) ).first() #noqa
- # If remote user is in use it can invalidate the session and in some
- # cases won't have a cookie set above, so we need to check some
- # things now.
- if self.app.config.use_remote_user:
- # If this is an api request, and they've passed a key, we let this go.
- assert self.app.config.remote_user_header in self.environ, \
- "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
- remote_user_email = self.environ[ self.app.config.remote_user_header ]
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- if galaxy_session:
- # An existing session, make sure correct association exists
- if galaxy_session.user is None:
- # No user, associate
- galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
- galaxy_session_requires_flush = True
- elif ((galaxy_session.user.email != remote_user_email) and
- ((not self.app.config.allow_user_impersonation) or
- (remote_user_email not in self.app.config.admin_users_list))):
- # Session exists but is not associated with the correct
- # remote user, and the currently set remote_user is not a
- # potentially impersonating admin.
- invalidate_existing_session = True
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
- remote_user_email, galaxy_session.user.email )
- else:
- # No session exists, get/create user for new session
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- else:
- if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
- # Remote user support is not enabled, but there is an existing
- # session with an external user, invalidate
- invalidate_existing_session = True
- log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
- galaxy_session.user.email )
- elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
- invalidate_existing_session = True
- log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
- # Do we need to invalidate the session for some reason?
- if invalidate_existing_session:
- prev_galaxy_session = galaxy_session
- prev_galaxy_session.is_valid = False
- galaxy_session = None
- # No relevant cookies, or couldn't find, or invalid, so create a new session
- if galaxy_session is None:
- galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
- galaxy_session_requires_flush = True
- self.galaxy_session = galaxy_session
- self.__update_session_cookie( name=session_cookie )
- else:
- self.galaxy_session = galaxy_session
- # Do we need to flush the session?
- if galaxy_session_requires_flush:
- self.sa_session.add( galaxy_session )
- # FIXME: If prev_session is a proper relation this would not
- # be needed.
- if prev_galaxy_session:
- self.sa_session.add( prev_galaxy_session )
- self.sa_session.flush()
- # If the old session was invalid, get a new history with our new session
- if invalidate_existing_session:
- self.new_history()
-
- def _ensure_logged_in_user( self, environ, session_cookie ):
- # The value of session_cookie can be one of
- # 'galaxysession' or 'galaxycommunitysession'
- # Currently this method does nothing unless session_cookie is 'galaxysession'
- if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
- # TODO: re-engineer to eliminate the use of allowed_paths
- # as maintenance overhead is far too high.
- allowed_paths = (
- url_for( controller='root', action='index' ),
- url_for( controller='root', action='tool_menu' ),
- url_for( controller='root', action='masthead' ),
- url_for( controller='root', action='history' ),
- url_for( controller='user', action='api_keys' ),
- url_for( controller='user', action='create' ),
- url_for( controller='user', action='index' ),
- url_for( controller='user', action='login' ),
- url_for( controller='user', action='logout' ),
- url_for( controller='user', action='manage_user_info' ),
- url_for( controller='user', action='set_default_permissions' ),
- url_for( controller='user', action='reset_password' ),
- url_for( controller='user', action='openid_auth' ),
- url_for( controller='user', action='openid_process' ),
- url_for( controller='user', action='openid_associate' ),
- url_for( controller='library', action='browse' ),
- url_for( controller='history', action='list' ),
- url_for( controller='dataset', action='list' )
- )
- display_as = url_for( controller='root', action='display_as' )
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
- try:
- host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
- except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
- host = None
- if host in UCSC_SERVERS:
- return
- external_display_path = url_for( controller='', action='display_application' )
- if self.request.path.startswith( external_display_path ):
- request_path_split = self.request.path.split( '/' )
- try:
- if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] )
- and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links
- and request_path_split[-3] != 'None'):
- return
- except IndexError:
- pass
- if self.request.path not in allowed_paths:
- self.response.send_redirect( url_for( controller='root', action='index' ) )
-
- def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
- """
- Create a new GalaxySession for this request, possibly with a connection
- to a previous session (in `prev_galaxy_session`) and an existing user
- (in `user_for_new_session`).
-
- Caller is responsible for flushing the returned session.
- """
- session_key = self.security.get_new_guid()
- galaxy_session = self.app.model.GalaxySession(
- session_key=session_key,
- is_valid=True,
- remote_host=self.request.remote_host,
- remote_addr=self.request.remote_addr,
- referer=self.request.headers.get( 'Referer', None ) )
- if prev_galaxy_session:
- # Invalidated an existing session for some reason, keep track
- galaxy_session.prev_session_id = prev_galaxy_session.id
- if user_for_new_session:
- # The new session should be associated with the user
- galaxy_session.user = user_for_new_session
- return galaxy_session
-
- def get_or_create_remote_user( self, remote_user_email ):
- """
- Create a remote user with the email remote_user_email and return it
- """
- if not self.app.config.use_remote_user:
- return None
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- user = self.sa_session.query( self.app.model.User
- ).filter( self.app.model.User.table.c.email==remote_user_email ).first() #noqa
- if user:
- # GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
- # role and default user / history permissions were not set for remote users. When a
- # remote user authenticates, we'll look for this information, and if missing, create it.
- if not self.app.security_agent.get_private_user_role( user ):
- self.app.security_agent.create_private_user_role( user )
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- if not user.default_permissions:
- self.app.security_agent.user_set_default_permissions( user )
- self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
- elif user is None:
- username = remote_user_email.split( '@', 1 )[0].lower()
- random.seed()
- user = self.app.model.User( email=remote_user_email )
- user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
- user.external = True
- # Replace invalid characters in the username
- for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
- username = username.replace( char, '-' )
- # Find a unique username - user can change it later
- if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
- i = 1
- while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
- i += 1
- username += '-' + str(i)
- user.username = username
- self.sa_session.add( user )
- self.sa_session.flush()
- self.app.security_agent.create_private_user_role( user )
- # We set default user permissions before we log in and set the default history permissions
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- self.app.security_agent.user_set_default_permissions( user )
- # self.log_event( "Automatically created account '%s'", user.email )
- return user
-
- def __update_session_cookie( self, name='galaxysession' ):
- """
- Update the session cookie to match the current session.
- """
- self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
- name=name, path=self.app.config.cookie_path )
-
- def handle_user_login( self, user ):
- """
- Login a new user (possibly newly created)
-
- - create a new session
- - associate new session with user
- - if old session had a history and it was not associated with a user, associate it with the new session,
- otherwise associate the current session's history with the user
- - add the disk usage of the current session to the user's total disk usage
- """
- # Set the previous session
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- # Define a new current_session
- self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
- if self.webapp.name == 'galaxy':
- cookie_name = 'galaxysession'
- # Associate the current user's last accessed history (if it exists) with their new session
- history = None
- try:
- users_last_session = user.galaxy_sessions[0]
- last_accessed = True
- except:
- users_last_session = None
- last_accessed = False
- if (prev_galaxy_session.current_history and not
- prev_galaxy_session.current_history.deleted and
- prev_galaxy_session.current_history.datasets):
- if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
- # If the previous galaxy session had a history, associate it with the new
- # session, but only if it didn't belong to a different user.
- history = prev_galaxy_session.current_history
- if prev_galaxy_session.user is None:
- # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
- for hda in history.datasets:
- user.total_disk_usage += hda.quota_amount( user )
- elif self.galaxy_session.current_history:
- history = self.galaxy_session.current_history
- if (not history and users_last_session and
- users_last_session.current_history and not
- users_last_session.current_history.deleted):
- history = users_last_session.current_history
- elif not history:
- history = self.get_history( create=True )
- if history not in self.galaxy_session.histories:
- self.galaxy_session.add_history( history )
- if history.user is None:
- history.user = user
- self.galaxy_session.current_history = history
- if not last_accessed:
- # Only set default history permissions if current history is not from a previous session
- self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
- else:
- cookie_name = 'galaxycommunitysession'
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- self.sa_session.flush()
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name=cookie_name )
-
- def handle_user_logout( self, logout_all=False ):
- """
-        Log out the current user:
- - invalidate the current session
- - create a new session with no user associated
- """
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- galaxy_user_id = prev_galaxy_session.user_id
- if logout_all and galaxy_user_id is not None:
- for other_galaxy_session in self.sa_session.query( self.app.model.GalaxySession
- ).filter( and_( self.app.model.GalaxySession.table.c.user_id==galaxy_user_id, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True, #noqa
- self.app.model.GalaxySession.table.c.id!=prev_galaxy_session.id ) ): #noqa
- other_galaxy_session.is_valid = False
- self.sa_session.add( other_galaxy_session )
- self.sa_session.flush()
- if self.webapp.name == 'galaxy':
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name='galaxysession' )
- elif self.webapp.name == 'tool_shed':
- self.__update_session_cookie( name='galaxycommunitysession' )
-
- def get_galaxy_session( self ):
- """
- Return the current galaxy session
- """
- return self.galaxy_session
-
- def get_history( self, create=False ):
- """
-        Load the current history, creating a new one only if there is no
- current history and we're told to create.
- Transactions will not always have an active history (API requests), so
- None is a valid response.
- """
- history = None
- if self.galaxy_session:
- history = self.galaxy_session.current_history
- if not history and util.string_as_bool( create ):
- history = self.new_history()
- return history
-
- def set_history( self, history ):
- if history and not history.deleted:
- self.galaxy_session.current_history = history
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
-
- history = property( get_history, set_history )
-
- def get_or_create_default_history( self ):
- """
- Gets or creates a default history and associates it with the current
- session.
- """
-
- # There must be a user to fetch a default history.
- if not self.galaxy_session.user:
- return self.new_history()
-
- # Look for default history that (a) has default name + is not deleted and
- # (b) has no datasets. If suitable history found, use it; otherwise, create
- # new history.
- unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
- user=self.galaxy_session.user,
- name=self.app.model.History.default_name,
- deleted=False )
- default_history = None
- for history in unnamed_histories:
- if len( history.datasets ) == 0:
- # Found suitable default history.
- default_history = history
- break
-
-        # Set or create history.
- if default_history:
- history = default_history
- self.set_history( history )
- else:
- history = self.new_history()
-
- return history
-
- def new_history( self, name=None ):
- """
- Create a new history and associate it with the current session and
- its associated user (if set).
- """
- # Create new history
- history = self.app.model.History()
- if name:
- history.name = name
- # Associate with session
- history.add_galaxy_session( self.galaxy_session )
- # Make it the session's current history
- self.galaxy_session.current_history = history
- # Associate with user
- if self.galaxy_session.user:
- history.user = self.galaxy_session.user
- # Track genome_build with history
- history.genome_build = self.app.genome_builds.default_value
- # Set the user's default history permissions
- self.app.security_agent.history_set_default_permissions( history )
- # Save
- self.sa_session.add_all( ( self.galaxy_session, history ) )
- self.sa_session.flush()
- return history
-
- @base.lazy_property
- def template_context( self ):
- return dict()
-
- def make_form_data( self, name, **kwargs ):
- rval = self.template_context[name] = FormData()
- rval.values.update( kwargs )
- return rval
-
- def set_message( self, message, type=None ):
- """
- Convenience method for setting the 'message' and 'message_type'
- element of the template context.
- """
- self.template_context['message'] = message
- if type:
- self.template_context['status'] = type
-
- def get_message( self ):
- """
- Convenience method for getting the 'message' element of the template
- context.
- """
- return self.template_context['message']
-
- def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single message.
-
- `type`: one of "error", "warning", "info", or "done"; determines the
- type of dialog box and icon displayed with the message
-
- `refresh_frames`: names of frames in the interface that should be
- refreshed when the message is displayed
- """
- return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
-
- def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an error message. See `show_message`.
- """
- return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an ok message. See `show_message`.
- """
- return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
-        Convenience method for displaying a warning message. See `show_message`.
- """
- return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single HTML
- form.
- """
- return self.fill_template( template, form=form, header=header,
- use_panels=( form.use_panels or use_panels ),
- active_view=active_view )
-
- def fill_template(self, filename, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- # call get_user so we can invalidate sessions from external users,
- # if external auth has been disabled.
- self.get_user()
- if filename.endswith( ".mako" ):
- return self.fill_template_mako( filename, **kwargs )
- else:
- template = Template( file=os.path.join(self.app.config.template_path, filename),
- searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
- return str( template )
-
- def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
- template_lookup = template_lookup or self.webapp.mako_template_lookup
- template = template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
-
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util,
- request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
- return template.render( **data )
-
- def stream_template_mako( self, filename, **kwargs ):
- template = self.webapp.mako_template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
-
- def render( environ, start_response ):
- response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
-
- class StreamBuffer( object ):
- def write( self, d ):
- response_write( d.encode( 'utf-8' ) )
- buffer = StreamBuffer()
- context = mako.runtime.Context( buffer, **data )
- template.render_context( context )
- return []
- return render
-
- def fill_template_string(self, template_string, context=None, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- template = Template( source=template_string,
- searchList=[context or kwargs, dict(caller=self)] )
- return str(template)
-
-
-class FormBuilder( object ):
- """
- Simple class describing an HTML form
- """
- def __init__( self, action="", title="", name="form", submit_text="submit", use_panels=False ):
- self.title = title
- self.name = name
- self.action = action
- self.submit_text = submit_text
- self.inputs = []
- self.use_panels = use_panels
-
- def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
- self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
- return self
-
- def add_text( self, name, label, value=None, error=None, help=None ):
- return self.add_input( 'text', label, name, value, error, help )
-
- def add_password( self, name, label, value=None, error=None, help=None ):
- return self.add_input( 'password', label, name, value, error, help )
-
- def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
- self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label ) )
- return self
-
-
-class FormInput( object ):
- """
- Simple class describing a form input element
- """
- def __init__( self, type, name, label, value=None, error=None, help=None, use_label=True, extra_attributes={}, **kwargs ):
- self.type = type
- self.name = name
- self.label = label
- self.value = value
- self.error = error
- self.help = help
- self.use_label = use_label
- self.extra_attributes = extra_attributes
-
-
-class DatalistInput( FormInput ):
- """ Data list input """
-
- def __init__( self, name, *args, **kwargs ):
- if 'extra_attributes' not in kwargs:
- kwargs[ 'extra_attributes' ] = {}
- kwargs[ 'extra_attributes' ][ 'list' ] = name
- FormInput.__init__( self, None, name, *args, **kwargs )
- self.options = kwargs.get( 'options', {} )
-
- def body_html( self ):
- options = "".join( [ "<option value='%s'>%s</option>" % ( key, value ) for key, value in self.options.iteritems() ] )
- return """<datalist id="%s">%s</datalist>""" % ( self.name, options )
-
-
-class SelectInput( FormInput ):
- """ A select form input. """
- def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
- FormInput.__init__( self, "select", name, label, value=value, error=error, help=help, use_label=use_label )
- self.options = options
-
-
-class FormData( object ):
- """
- Class for passing data about a form to a template, very rudimentary, could
- be combined with the tool form handling to build something more general.
- """
- def __init__( self ):
- self.values = Bunch()
- self.errors = Bunch()
-
-
-class Bunch( dict ):
- """
- Bunch based on a dict
- """
- def __getattr__( self, key ):
- if key not in self:
- raise AttributeError(key)
- return self[key]
-
- def __setattr__( self, key, value ):
- self[key] = value
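
A side note on the Bunch helper removed just above: it is simply a dict whose keys can also be read and written as attributes, raising AttributeError (rather than KeyError) so hasattr()/getattr() behave as callers expect. A minimal standalone sketch of the same idea follows; AttrDict and the sample values are illustrative only and are not the Galaxy class being removed.

class AttrDict( dict ):
    """Dict whose keys can also be read and written as attributes (sketch)."""
    def __getattr__( self, key ):
        # Raise AttributeError (not KeyError) so hasattr() and getattr()
        # with a default keep working as expected.
        if key not in self:
            raise AttributeError( key )
        return self[ key ]

    def __setattr__( self, key, value ):
        self[ key ] = value

# Hypothetical usage, mirroring how FormData stores form values and errors:
values = AttrDict()
values.email = 'user@example.org'         # equivalent to values['email'] = ...
assert values[ 'email' ] == values.email
assert not hasattr( values, 'missing' )   # the AttributeError is swallowed by hasattr
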
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/web/framework/decorators.py
--- /dev/null
+++ b/lib/galaxy/web/framework/decorators.py
@@ -0,0 +1,334 @@
+import inspect
+from traceback import format_exc
+from functools import wraps
+
+from galaxy import eggs
+eggs.require( "Paste" )
+import paste.httpexceptions
+
+from galaxy import util
+from galaxy.exceptions import error_codes
+from galaxy.exceptions import MessageException
+from galaxy.util.json import from_json_string, to_json_string
+
+import logging
+log = logging.getLogger( __name__ )
+
+
+JSON_CONTENT_TYPE = "application/json"
+
+
+def error( message ):
+ raise MessageException( message, type='error' )
+
+
+# ----------------------------------------------------------------------------- web controller decorators
+def _save_orig_fn( wrapped, orig ):
+ if not hasattr( orig, '_orig' ):
+ wrapped._orig = orig
+ return wrapped
+
+def expose( func ):
+ """
+ Decorator: mark a function as 'exposed' and thus web accessible
+ """
+ func.exposed = True
+ return func
+
+def json( func, **json_kwargs ):
+ """
+ Format the response as JSON and set the response content type to
+ JSON_CONTENT_TYPE.
+ """
+ @wraps(func)
+ def call_and_format( self, trans, *args, **kwargs ):
+ trans.response.set_content_type( JSON_CONTENT_TYPE )
+ return to_json_string( func( self, trans, *args, **kwargs ), **json_kwargs )
+ if not hasattr( func, '_orig' ):
+ call_and_format._orig = func
+ return expose( _save_orig_fn( call_and_format, func ) )
+
+def json_pretty( func ):
+ """
+ Indent and sort returned JSON.
+ """
+ return json( func, indent=4, sort_keys=True )
+
+def require_login( verb="perform this action", use_panels=False, webapp='galaxy' ):
+ def argcatcher( func ):
+ @wraps(func)
+ def decorator( self, trans, *args, **kwargs ):
+ if trans.get_user():
+ return func( self, trans, *args, **kwargs )
+ else:
+ return trans.show_error_message(
+ 'You must be <a target="galaxy_main" href="%s">logged in</a> to %s.'
+ % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
+ return decorator
+ return argcatcher
+
+def require_admin( func ):
+ @wraps(func)
+ def decorator( self, trans, *args, **kwargs ):
+ if not trans.user_is_admin():
+ msg = "You must be an administrator to access this feature."
+ user = trans.get_user()
+ if not trans.app.config.admin_users_list:
+ msg = "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration."
+ elif not user:
+ msg = "You must be logged in as an administrator to access this feature."
+ trans.response.status = 403
+ if trans.response.get_content_type() == 'application/json':
+ return msg
+ else:
+ return trans.show_error_message( msg )
+ return func( self, trans, *args, **kwargs )
+ return decorator
+
+
+# ----------------------------------------------------------------------------- (original) api decorators
+def expose_api( func, to_json=True, user_required=True ):
+ """
+ Expose this function via the API.
+ """
+ @wraps(func)
+ def decorator( self, trans, *args, **kwargs ):
+ def error( environ, start_response ):
+ start_response( error_status, [('Content-type', 'text/plain')] )
+ return error_message
+ error_status = '403 Forbidden'
+ if trans.error_message:
+ return trans.error_message
+ if user_required and trans.anonymous:
+ error_message = "API Authentication Required for this request"
+ return error
+ if trans.request.body:
+ try:
+ kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
+ except ValueError:
+ error_status = '400 Bad Request'
+ error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
+ return error
+ trans.response.set_content_type( "application/json" )
+ # send 'do not cache' headers to handle IE's caching of ajax get responses
+ trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+ # Perform api_run_as processing, possibly changing identity
+ if 'payload' in kwargs and 'run_as' in kwargs['payload']:
+ if not trans.user_can_do_run_as():
+ error_message = 'User does not have permissions to run jobs as another user'
+ return error
+ try:
+ decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
+ try:
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.api_inherit_admin = trans.user_is_admin()
+ trans.set_user(user)
+ except:
+ trans.response.status = 400
+ return "That user does not exist."
+ try:
+ rval = func( self, trans, *args, **kwargs)
+ if to_json and trans.debug:
+ rval = to_json_string( rval, indent=4, sort_keys=True )
+ elif to_json:
+ rval = to_json_string( rval )
+ return rval
+ except paste.httpexceptions.HTTPException:
+ raise # handled
+ except:
+ log.exception( 'Uncaught exception in exposed API method:' )
+ raise paste.httpexceptions.HTTPServerError()
+ return expose( _save_orig_fn( decorator, func ) )
+
+def __extract_payload_from_request(trans, func, kwargs):
+ content_type = trans.request.headers['content-type']
+ if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
+ # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
+ # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
+ # are not a part of the request body. This is a problem because it's not possible to differentiate between values
+ # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
+ # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
+ # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
+ payload = kwargs.copy()
+ named_args, _, _, _ = inspect.getargspec(func)
+ for arg in named_args:
+ payload.pop(arg, None)
+ for k, v in payload.iteritems():
+ if isinstance(v, (str, unicode)):
+ try:
+ payload[k] = from_json_string(v)
+ except:
+ # may not actually be json, just continue
+ pass
+ payload = util.recursively_stringify_dictionary_keys( payload )
+ else:
+ # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
+ # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
+ # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
+ payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
+ return payload
+
+def expose_api_raw( func ):
+ """
+ Expose this function via the API but don't dump the results
+ to JSON.
+ """
+ return expose_api( func, to_json=False )
+
+def expose_api_raw_anonymous( func ):
+ """
+ Expose this function via the API but don't dump the results
+ to JSON.
+ """
+ return expose_api( func, to_json=False, user_required=False )
+
+def expose_api_anonymous( func, to_json=True ):
+ """
+ Expose this function via the API but don't require a set user.
+ """
+ return expose_api( func, to_json=to_json, user_required=False )
+
+
+# ----------------------------------------------------------------------------- (new) api decorators
+# TODO: rename as expose_api and make default.
+def _future_expose_api( func, to_json=True, user_required=True ):
+ """
+ Expose this function via the API.
+ """
+ @wraps(func)
+ def decorator( self, trans, *args, **kwargs ):
+ if trans.error_message:
+            # TODO: Document this branch; when can this happen?
+            # I don't understand it.
+ return __api_error_response( trans, err_msg=trans.error_message )
+ if user_required and trans.anonymous:
+ error_code = error_codes.USER_NO_API_KEY
+ # Use error codes default error message.
+ err_msg = "API authentication required for this request"
+ return __api_error_response( trans, err_code=error_code, err_msg=err_msg, status_code=403 )
+ if trans.request.body:
+ try:
+ kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
+ except ValueError:
+ error_code = error_codes.USER_INVALID_JSON
+ return __api_error_response( trans, status_code=400, err_code=error_code )
+
+ trans.response.set_content_type( JSON_CONTENT_TYPE )
+ # send 'do not cache' headers to handle IE's caching of ajax get responses
+ trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+ # TODO: Refactor next block out into a helper procedure.
+ # Perform api_run_as processing, possibly changing identity
+ if 'payload' in kwargs and 'run_as' in kwargs['payload']:
+ if not trans.user_can_do_run_as():
+ error_code = error_codes.USER_CANNOT_RUN_AS
+ return __api_error_response( trans, err_code=error_code, status_code=403 )
+ try:
+ decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
+ except TypeError:
+ error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
+ error_code = error_codes.USER_INVALID_RUN_AS
+ return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
+ try:
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.api_inherit_admin = trans.user_is_admin()
+ trans.set_user(user)
+ except:
+ error_code = error_codes.USER_INVALID_RUN_AS
+ return __api_error_response( trans, err_code=error_code, status_code=400 )
+ try:
+ rval = func( self, trans, *args, **kwargs)
+ if to_json and trans.debug:
+ rval = to_json_string( rval, indent=4, sort_keys=True )
+ elif to_json:
+ rval = to_json_string( rval )
+ return rval
+ except MessageException as e:
+ traceback_string = format_exc()
+ return __api_error_response( trans, exception=e, traceback=traceback_string )
+ except paste.httpexceptions.HTTPException:
+ # TODO: Allow to pass or format for the API???
+ raise # handled
+ except Exception as e:
+ traceback_string = format_exc()
+ error_message = 'Uncaught exception in exposed API method:'
+ log.exception( error_message )
+ return __api_error_response(
+ trans,
+ status_code=500,
+ exception=e,
+ traceback=traceback_string,
+ err_msg=error_message,
+ err_code=error_codes.UNKNOWN
+ )
+ if not hasattr(func, '_orig'):
+ decorator._orig = func
+ decorator.exposed = True
+ return decorator
+
+def __api_error_message( trans, **kwds ):
+ exception = kwds.get( "exception", None )
+ if exception:
+ # If we are passed a MessageException use err_msg.
+ default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
+ default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
+ extra_error_info = getattr( exception, 'extra_error_info', {} )
+ if not isinstance( extra_error_info, dict ):
+ extra_error_info = {}
+ else:
+ default_error_message = "Error processing API request."
+ default_error_code = error_codes.UNKNOWN
+ extra_error_info = {}
+ traceback_string = kwds.get( "traceback", "No traceback available." )
+ err_msg = kwds.get( "err_msg", default_error_message )
+ error_code_object = kwds.get( "err_code", default_error_code )
+ try:
+ error_code = error_code_object.code
+ except AttributeError:
+        # Some sort of bad error code was sent in; a logic failure on the
+        # part of the Galaxy developer.
+ error_code = error_codes.UNKNOWN.code
+    # Would prefer the terminology of error_code and error_message, but
+    # err_msg is already used in a good number of places. Might as well not
+    # change it?
+ error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
+ if trans.debug: # TODO: Should admins get to see traceback as well?
+ error_response[ "traceback" ] = traceback_string
+ return error_response
+
+def __api_error_response( trans, **kwds ):
+ error_dict = __api_error_message( trans, **kwds )
+ exception = kwds.get( "exception", None )
+    # If we are given a status code directly, use it; otherwise check
+ # the exception for a status_code attribute.
+ if "status_code" in kwds:
+ status_code = int( kwds.get( "status_code" ) )
+ elif hasattr( exception, "status_code" ):
+ status_code = int( exception.status_code )
+ else:
+ status_code = 500
+ response = trans.response
+ if not response.status or str(response.status).startswith("20"):
+        # An unset status code appears as the string '200 OK'; if anything
+        # non-success (i.e. not 200 or 201) has already been set, do not
+        # override the underlying controller.
+ response.status = status_code
+ return to_json_string( error_dict )
+
+
+# TODO: rename as expose_api and make default.
+def _future_expose_api_anonymous( func, to_json=True ):
+ """
+ Expose this function via the API but don't require a set user.
+ """
+ return _future_expose_api( func, to_json=to_json, user_required=False )
+
+
+def _future_expose_api_raw( func ):
+ return _future_expose_api( func, to_json=False, user_required=True )
+
+
+def _future_expose_api_raw_anonymous( func ):
+ return _future_expose_api( func, to_json=False, user_required=False )
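
The new decorators module above centers on one pattern: wrap a controller method, set the JSON content type, serialize the return value, and convert exceptions into JSON error payloads with an appropriate status code. Below is a stripped-down sketch of that pattern; it is illustrative only, expose_api_sketch and ApiMessageException are invented stand-ins, and trans is assumed to expose response.set_content_type() and response.status as in the real transaction object.

import json
from functools import wraps


class ApiMessageException( Exception ):
    """Invented stand-in for galaxy.exceptions.MessageException."""
    status_code = 400


def expose_api_sketch( func ):
    """Serialize the wrapped method's result to JSON and map exceptions to
    JSON error bodies, roughly as _future_expose_api does above."""
    @wraps( func )
    def wrapper( self, trans, *args, **kwargs ):
        trans.response.set_content_type( 'application/json' )
        try:
            return json.dumps( func( self, trans, *args, **kwargs ) )
        except ApiMessageException as e:
            trans.response.status = e.status_code
            return json.dumps( { 'err_msg': str( e ) } )
        except Exception:
            trans.response.status = 500
            return json.dumps( { 'err_msg': 'Uncaught exception in exposed API method:' } )
    wrapper.exposed = True
    return wrapper

The real decorator additionally handles the run_as identity switch, the no-cache headers, and the pretty-printed JSON output when debug is enabled, as shown in the diff above.
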
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/web/framework/formbuilder.py
--- /dev/null
+++ b/lib/galaxy/web/framework/formbuilder.py
@@ -0,0 +1,93 @@
+from galaxy.util import bunch
+
+import logging
+log = logging.getLogger( __name__ )
+
+#class Bunch( dict ):
+# """
+# Bunch based on a dict
+# """
+# def __getattr__( self, key ):
+# if key not in self:
+# raise AttributeError(key)
+# return self[key]
+#
+# def __setattr__( self, key, value ):
+# self[key] = value
+
+def form( *args, **kwargs ):
+ return FormBuilder( *args, **kwargs )
+
+class FormBuilder( object ):
+ """
+ Simple class describing an HTML form
+ """
+ def __init__( self, action="", title="", name="form", submit_text="submit", use_panels=False ):
+ self.title = title
+ self.name = name
+ self.action = action
+ self.submit_text = submit_text
+ self.inputs = []
+ self.use_panels = use_panels
+
+ def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
+ self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
+ return self
+
+ def add_text( self, name, label, value=None, error=None, help=None ):
+ return self.add_input( 'text', label, name, value, error, help )
+
+ def add_password( self, name, label, value=None, error=None, help=None ):
+ return self.add_input( 'password', label, name, value, error, help )
+
+ def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
+ self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label ) )
+ return self
+
+
+class FormInput( object ):
+ """
+ Simple class describing a form input element
+ """
+ def __init__( self, type, name, label, value=None, error=None, help=None, use_label=True, extra_attributes={}, **kwargs ):
+ self.type = type
+ self.name = name
+ self.label = label
+ self.value = value
+ self.error = error
+ self.help = help
+ self.use_label = use_label
+ self.extra_attributes = extra_attributes
+
+
+class DatalistInput( FormInput ):
+ """ Data list input """
+
+ def __init__( self, name, *args, **kwargs ):
+ if 'extra_attributes' not in kwargs:
+ kwargs[ 'extra_attributes' ] = {}
+ kwargs[ 'extra_attributes' ][ 'list' ] = name
+ FormInput.__init__( self, None, name, *args, **kwargs )
+ self.options = kwargs.get( 'options', {} )
+
+ def body_html( self ):
+ options = "".join( [ "<option value='%s'>%s</option>" % ( key, value ) for key, value in self.options.iteritems() ] )
+ return """<datalist id="%s">%s</datalist>""" % ( self.name, options )
+
+
+class SelectInput( FormInput ):
+ """ A select form input. """
+ def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
+ FormInput.__init__( self, "select", name, label, value=value, error=error, help=help, use_label=use_label )
+ self.options = options
+
+
+class FormData( object ):
+ """
+ Class for passing data about a form to a template, very rudimentary, could
+ be combined with the tool form handling to build something more general.
+ """
+ def __init__( self ):
+#TODO: galaxy's two Bunch classes are defined differently. Is this right?
+ self.values = bunch.Bunch()
+ self.errors = bunch.Bunch()
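
Because each add_* method on FormBuilder returns self, forms can be declared by chaining. A hypothetical usage sketch follows; the field names, labels, and options are invented for illustration, and it assumes Galaxy's lib directory is on the import path so the new formbuilder module is importable.

from galaxy.web.framework.formbuilder import form

# Build a small form by chaining; each add_* call appends a FormInput
# (or SelectInput) to .inputs and returns the builder itself.
login_form = ( form( action='/user/login', title='Login', submit_text='Log in' )
               .add_text( 'email', 'Email address' )
               .add_password( 'password', 'Password' )
               .add_select( 'server', 'Server', options=[ ( 'main', 'Main' ), ( 'test', 'Test' ) ] ) )

for field in login_form.inputs:
    # Every input carries its type, name, and label for the template to render.
    assert field.name and field.label
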
diff -r 012e6eb765684020eb597d8925e4ca5b58b9d904 -r d57655849ac41b2b1f90b73e2835d618df45e985 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -5,7 +5,8 @@
from galaxy.util import sanitize_text
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.odict import odict
-from galaxy.web.framework import error, url_for
+from galaxy.web.framework import decorators
+from galaxy.web.framework import url_for
from galaxy.web.framework.helpers import iff
from sqlalchemy.sql.expression import and_, func, or_
@@ -317,7 +318,7 @@
try:
id = map( int, id )
except:
- error( "Invalid id" )
+ decorators.error( "Invalid id" )
return id
# ---- Override these ----------------------------------------------------
def handle_operation( self, trans, operation, ids, **kwargs ):
This diff is so big that we needed to truncate the remainder.
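For context on the grids.py hunk above: the error helper that grids previously imported from galaxy.web.framework now lives in galaxy.web.framework.decorators, where it simply raises MessageException( message, type='error' ). A hedged sketch of the call path is below; parse_ids and the sample ids are invented for illustration, and it assumes Galaxy's lib directory and eggs are available.

from galaxy.web.framework import decorators


def parse_ids( id ):
    # Mirror the grid code: coerce each encoded id to an int, or report a
    # user-facing error via the relocated helper, which raises MessageException.
    try:
        id = map( int, id )
    except ( TypeError, ValueError ):
        decorators.error( "Invalid id" )
    return id

# parse_ids( [ '1', '2' ] ) -> [ 1, 2 ]  (Python 2 map returns a list)
# parse_ids( [ 'x' ] ) raises MessageException( "Invalid id" ) with type 'error'
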
https://bitbucket.org/galaxy/galaxy-central/commits/360d9d7839d7/
Changeset: 360d9d7839d7
User: carlfeberhard
Date: 2014-09-03 15:59:59
Summary: merge central
Affected #: 3 files
diff -r d57655849ac41b2b1f90b73e2835d618df45e985 -r 360d9d7839d74f01a2da5b35fb1be10464956112 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -96,11 +96,9 @@
# Load history import/export tools.
load_history_imp_exp_tools( self.toolbox )
# visualizations registry: associates resources with visualizations, controls how to render
- self.visualizations_registry = None
- if self.config.visualization_plugins_directory:
- self.visualizations_registry = VisualizationsRegistry( self,
- directories_setting=self.config.visualization_plugins_directory,
- template_cache_dir=self.config.template_cache )
+ self.visualizations_registry = VisualizationsRegistry( self,
+ directories_setting=self.config.visualization_plugins_directory,
+ template_cache_dir=self.config.template_cache )
# Load security policy.
self.security_agent = self.model.security_agent
self.host_security_agent = galaxy.security.HostAgent( model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions )
diff -r d57655849ac41b2b1f90b73e2835d618df45e985 -r 360d9d7839d74f01a2da5b35fb1be10464956112 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -376,8 +376,9 @@
self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
- # visualization plugin framework
- self.visualization_plugins_directory = kwargs.get( 'visualization_plugins_directory', None )
+ # directory where the visualization/registry searches for plugins
+ self.visualization_plugins_directory = kwargs.get(
+ 'visualization_plugins_directory', 'config/plugins/visualizations' )
# Default chunk size for chunkable datatypes -- 64k
self.display_chunk_size = int( kwargs.get( 'display_chunk_size', 65536) )
diff -r d57655849ac41b2b1f90b73e2835d618df45e985 -r 360d9d7839d74f01a2da5b35fb1be10464956112 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -210,8 +210,8 @@
# Visualizations config directory: where to look for individual visualization plugins.
# The path is relative to the Galaxy root dir. To use an absolute path begin the path
-# with '/'.
-visualization_plugins_directory = config/plugins/visualizations
+# with '/'. Defaults to "config/plugins/visualizations".
+#visualization_plugins_directory = config/plugins/visualizations
# Each job is given a unique empty directory as its current working directory.
# This option defines in what parent directory those directories will be
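
The config.py and universe_wsgi.ini.sample changes in this changeset move the visualization plugin directory default out of the sample ini and into the code-side kwargs.get() fallback, which is why app.py can now build the VisualizationsRegistry unconditionally. A minimal sketch of that fallback pattern is below; ExampleConfig is an invented stand-in, not Galaxy's Configuration class.

class ExampleConfig( object ):
    """Invented stand-in illustrating the kwargs.get() default pattern
    used throughout lib/galaxy/config.py."""
    def __init__( self, **kwargs ):
        # A value parsed from the ini wins; otherwise the in-code default
        # applies, so the sample ini can leave the option commented out.
        self.visualization_plugins_directory = kwargs.get(
            'visualization_plugins_directory', 'config/plugins/visualizations' )

assert ExampleConfig().visualization_plugins_directory == 'config/plugins/visualizations'
assert ExampleConfig( visualization_plugins_directory='/opt/viz' ).visualization_plugins_directory == '/opt/viz'
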
https://bitbucket.org/galaxy/galaxy-central/commits/a0c16c8930b7/
Changeset: a0c16c8930b7
User: carlfeberhard
Date: 2014-09-03 17:22:15
Summary: Combine transaction into webapp, move trans mixins to managers/context.py
Affected #: 3 files
diff -r 360d9d7839d74f01a2da5b35fb1be10464956112 -r a0c16c8930b76cfe98574b2e8b3311f7b2504361 lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -579,7 +579,7 @@
"""
Pass control over to trans and render ``template_filename``.
- :type trans: ``galaxy.web.framework.transaction.GalaxyWebTransaction``
+ :type trans: ``galaxy.web.framework.webapp.GalaxyWebTransaction``
:param trans: transaction doing the rendering
:type plugin: ``util.bunch.Bunch``
:param plugin: the plugin containing the template to render
diff -r 360d9d7839d74f01a2da5b35fb1be10464956112 -r a0c16c8930b76cfe98574b2e8b3311f7b2504361 lib/galaxy/web/framework/transaction.py
--- a/lib/galaxy/web/framework/transaction.py
+++ /dev/null
@@ -1,868 +0,0 @@
-import hashlib
-import os
-import random
-import socket
-import string
-import time
-from Cookie import CookieError
-
-from galaxy import eggs
-eggs.require( "Cheetah" )
-from Cheetah.Template import Template
-eggs.require( "Mako" )
-import mako.runtime
-eggs.require( "pytz" ) # Used by Babel.
-eggs.require( "Babel" )
-from babel.support import Translations
-from babel import Locale
-eggs.require( "SQLAlchemy >= 0.4" )
-from sqlalchemy import and_
-from sqlalchemy.orm.exc import NoResultFound
-
-from galaxy import util
-from galaxy.util import asbool
-from galaxy.util import safe_str_cmp
-from galaxy.util.json import to_json_string
-from galaxy.web.framework import base
-from galaxy.web.framework import helpers
-
-import logging
-log = logging.getLogger( __name__ )
-
-
-UCSC_SERVERS = (
- 'hgw1.cse.ucsc.edu',
- 'hgw2.cse.ucsc.edu',
- 'hgw3.cse.ucsc.edu',
- 'hgw4.cse.ucsc.edu',
- 'hgw5.cse.ucsc.edu',
- 'hgw6.cse.ucsc.edu',
- 'hgw7.cse.ucsc.edu',
- 'hgw8.cse.ucsc.edu',
-)
-
-
-class ProvidesAppContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- database and event handling.
-
- Mixed in class must provide `app` property.
- """
-
- def log_action( self, user=None, action=None, context=None, params=None):
- """
- Application-level logging of user actions.
- """
- if self.app.config.log_actions:
- action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
- try:
- if user:
- action.user = user
- else:
- action.user = self.user
- except:
- action.user = None
- try:
- action.session_id = self.galaxy_session.id
- except:
- action.session_id = None
- self.sa_session.add( action )
- self.sa_session.flush()
-
- def log_event( self, message, tool_id=None, **kwargs ):
- """
- Application level logging. Still needs fleshing out (log levels and such)
- Logging events is a config setting - if False, do not log.
- """
- if self.app.config.log_events:
- event = self.app.model.Event()
- event.tool_id = tool_id
- try:
- event.message = message % kwargs
- except:
- event.message = message
- try:
- event.history = self.get_history()
- except:
- event.history = None
- try:
- event.history_id = self.history.id
- except:
- event.history_id = None
- try:
- event.user = self.user
- except:
- event.user = None
- try:
- event.session_id = self.galaxy_session.id
- except:
- event.session_id = None
- self.sa_session.add( event )
- self.sa_session.flush()
-
- @property
- def sa_session( self ):
- """
- Returns a SQLAlchemy session -- currently just gets the current
- session from the threadlocal session context, but this is provided
- to allow migration toward a more SQLAlchemy 0.4 style of use.
- """
- return self.app.model.context.current
-
- def expunge_all( self ):
- app = self.app
- context = app.model.context
- context.expunge_all()
-        # This is a bit hacky, should refactor this. Maybe refactor to app -> expunge_all()
- if hasattr(app, 'install_model'):
- install_model = app.install_model
- if install_model != app.model:
- install_model.context.expunge_all()
-
- def get_toolbox(self):
- """Returns the application toolbox"""
- return self.app.toolbox
-
- @property
- def model( self ):
- return self.app.model
-
- @property
- def install_model( self ):
- return self.app.install_model
-
- def request_types(self):
- if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
- return True
- return False
-
-
-class ProvidesUserContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about users.
-
- Mixed in class must provide `user`, `api_inherit_admin`, and `app`
- properties.
- """
-
- @property
- def anonymous( self ):
- return self.user is None and not self.api_inherit_admin
-
- def get_current_user_roles( self ):
- user = self.user
- if user:
- roles = user.all_roles()
- else:
- roles = []
- return roles
-
- def user_is_admin( self ):
- if self.api_inherit_admin:
- return True
- return self.user and self.user.email in self.app.config.admin_users_list
-
- def user_can_do_run_as( self ):
- run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
- if not run_as_users:
- return False
- user_in_run_as_users = self.user and self.user.email in run_as_users
- # Can do if explicitly in list or master_api_key supplied.
- can_do_run_as = user_in_run_as_users or self.api_inherit_admin
- return can_do_run_as
-
- @property
- def user_ftp_dir( self ):
- identifier = self.app.config.ftp_upload_dir_identifier
- return os.path.join( self.app.config.ftp_upload_dir, getattr(self.user, identifier) )
-
-
-class ProvidesHistoryContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about histories.
-
- Mixed in class must provide `user`, `history`, and `app`
- properties.
- """
-
- def db_dataset_for( self, dbkey ):
- """
- Returns the db_file dataset associated/needed by `dataset`, or `None`.
- """
- # If no history, return None.
- if self.history is None:
- return None
-#TODO: when does this happen? Is it Bunch or util.bunch.Bunch?
- if isinstance(self.history, util.bunch.Bunch):
- # The API presents a Bunch for a history. Until the API is
- # more fully featured for handling this, also return None.
- return None
- datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
- .filter_by( deleted=False, history_id=self.history.id, extension="len" )
- for ds in datasets:
- if dbkey == ds.dbkey:
- return ds
- return None
-
- @property
- def db_builds( self ):
- """
- Returns the builds defined by galaxy and the builds defined by
- the user (chromInfo in history).
- """
- # FIXME: This method should be removed
- return self.app.genome_builds.get_genome_build_names( trans=self )
-
-
-class GalaxyWebTransaction( base.DefaultWebTransaction, ProvidesAppContext, ProvidesUserContext, ProvidesHistoryContext ):
- """
- Encapsulates web transaction specific state for the Galaxy application
- (specifically the user's "cookie" session and history)
- """
-
- def __init__( self, environ, app, webapp, session_cookie=None):
- self.app = app
- self.webapp = webapp
- self.security = webapp.security
- base.DefaultWebTransaction.__init__( self, environ )
- self.setup_i18n()
- self.expunge_all()
- self.debug = asbool( self.app.config.get( 'debug', False ) )
- # Flag indicating whether we are in workflow building mode (means
- # that the current history should not be used for parameter values
- # and such).
- self.workflow_building_mode = False
- # Flag indicating whether this is an API call and the API key user is an administrator
- self.api_inherit_admin = False
- self.__user = None
- self.galaxy_session = None
- self.error_message = None
-
- if self.environ.get('is_api_request', False):
- # With API requests, if there's a key, use it and associate the
- # user with the transaction.
- # If not, check for an active session but do not create one.
- # If an error message is set here, it's sent back using
- # trans.show_error in the response -- in expose_api.
- self.error_message = self._authenticate_api( session_cookie )
- elif self.app.name == "reports":
- self.galaxy_session = None
- else:
- # This is a web request, get or create session.
- self._ensure_valid_session( session_cookie )
- if self.galaxy_session:
- # When we've authenticated by session, we have to check the
- # following.
- # Prevent deleted users from accessing Galaxy
- if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
- self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
- if self.app.config.require_login:
- self._ensure_logged_in_user( environ, session_cookie )
-
- def setup_i18n( self ):
- locales = []
- if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
- # locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
- client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
- for locale in client_locales:
- try:
- locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
- except Exception, e:
- log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
- if not locales:
- # Default to English
- locales = 'en'
- t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
- self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
-
- def get_user( self ):
- """Return the current user if logged in or None."""
- if self.galaxy_session:
- return self.galaxy_session.user
- else:
- return self.__user
-
- def set_user( self, user ):
- """Set the current user."""
- if self.galaxy_session:
- self.galaxy_session.user = user
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
- self.__user = user
-
- user = property( get_user, set_user )
-
- def get_cookie( self, name='galaxysession' ):
- """Convenience method for getting a session cookie"""
- try:
- # If we've changed the cookie during the request return the new value
- if name in self.response.cookies:
- return self.response.cookies[name].value
- else:
- return self.request.cookies[name].value
- except:
- return None
-
- def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
- """Convenience method for setting a session cookie"""
- # The galaxysession cookie value must be a high entropy 128 bit random number encrypted
- # using a server secret key. Any other value is invalid and could pose security issues.
- self.response.cookies[name] = value
- self.response.cookies[name]['path'] = path
-        self.response.cookies[name]['max-age'] = 3600 * 24 * age  # 'age' days (90 by default)
- tstamp = time.localtime( time.time() + 3600 * 24 * age )
- self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
- self.response.cookies[name]['version'] = version
- try:
- self.response.cookies[name]['httponly'] = True
- except CookieError, e:
- log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
-
- def _authenticate_api( self, session_cookie ):
- """
- Authenticate for the API via key or session (if available).
- """
- api_key = self.request.params.get('key', None)
- secure_id = self.get_cookie( name=session_cookie )
- api_key_supplied = self.environ.get('is_api_request', False) and api_key
- if api_key_supplied and self._check_master_api_key( api_key ):
- self.api_inherit_admin = True
- log.info( "Session authenticated using Galaxy master api key" )
- self.user = None
- self.galaxy_session = None
- elif api_key_supplied:
- # Sessionless API transaction, we just need to associate a user.
- try:
- provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
- except NoResultFound:
- return 'Provided API key is not valid.'
- if provided_key.user.deleted:
- return 'User account is deactivated, please contact an administrator.'
- newest_key = provided_key.user.api_keys[0]
- if newest_key.key != provided_key.key:
- return 'Provided API key has expired.'
- self.set_user( provided_key.user )
- elif secure_id:
- # API authentication via active session
- # Associate user using existing session
- self._ensure_valid_session( session_cookie )
- else:
- # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
- self.user = None
- self.galaxy_session = None
-
- def _check_master_api_key( self, api_key ):
- master_api_key = getattr( self.app.config, 'master_api_key', None )
- if not master_api_key:
- return False
- # Hash keys to make them the same size, so we can do safe comparison.
- master_hash = hashlib.sha256( master_api_key ).hexdigest()
- provided_hash = hashlib.sha256( api_key ).hexdigest()
- return safe_str_cmp( master_hash, provided_hash )
-
- def _ensure_valid_session( self, session_cookie, create=True):
- """
- Ensure that a valid Galaxy session exists and is available as
- trans.session (part of initialization)
-
- Support for universe_session and universe_user cookies has been
- removed as of 31 Oct 2008.
- """
- # Try to load an existing session
- secure_id = self.get_cookie( name=session_cookie )
- galaxy_session = None
- prev_galaxy_session = None
- user_for_new_session = None
- invalidate_existing_session = False
- # Track whether the session has changed so we can avoid calling flush
- # in the most common case (session exists and is valid).
- galaxy_session_requires_flush = False
- if secure_id:
- # Decode the cookie value to get the session_key
- session_key = self.security.decode_guid( secure_id )
- try:
- # Make sure we have a valid UTF-8 string
- session_key = session_key.encode( 'utf8' )
- except UnicodeDecodeError:
- # We'll end up creating a new galaxy_session
- session_key = None
- if session_key:
- # Retrieve the galaxy_session id via the unique session_key
- galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
- .filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True ) ).first() #noqa
- # If remote user is in use it can invalidate the session and in some
-        # cases won't have a cookie set above, so we need to check some
- # things now.
- if self.app.config.use_remote_user:
- # If this is an api request, and they've passed a key, we let this go.
- assert self.app.config.remote_user_header in self.environ, \
- "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
- remote_user_email = self.environ[ self.app.config.remote_user_header ]
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- if galaxy_session:
- # An existing session, make sure correct association exists
- if galaxy_session.user is None:
- # No user, associate
- galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
- galaxy_session_requires_flush = True
- elif ((galaxy_session.user.email != remote_user_email) and
- ((not self.app.config.allow_user_impersonation) or
- (remote_user_email not in self.app.config.admin_users_list))):
- # Session exists but is not associated with the correct
- # remote user, and the currently set remote_user is not a
- # potentially impersonating admin.
- invalidate_existing_session = True
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
- remote_user_email, galaxy_session.user.email )
- else:
- # No session exists, get/create user for new session
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- else:
- if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
- # Remote user support is not enabled, but there is an existing
- # session with an external user, invalidate
- invalidate_existing_session = True
- log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
- galaxy_session.user.email )
- elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
- invalidate_existing_session = True
- log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
- # Do we need to invalidate the session for some reason?
- if invalidate_existing_session:
- prev_galaxy_session = galaxy_session
- prev_galaxy_session.is_valid = False
- galaxy_session = None
- # No relevant cookies, or couldn't find, or invalid, so create a new session
- if galaxy_session is None:
- galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
- galaxy_session_requires_flush = True
- self.galaxy_session = galaxy_session
- self.__update_session_cookie( name=session_cookie )
- else:
- self.galaxy_session = galaxy_session
- # Do we need to flush the session?
- if galaxy_session_requires_flush:
- self.sa_session.add( galaxy_session )
- # FIXME: If prev_session is a proper relation this would not
- # be needed.
- if prev_galaxy_session:
- self.sa_session.add( prev_galaxy_session )
- self.sa_session.flush()
- # If the old session was invalid, get a new history with our new session
- if invalidate_existing_session:
- self.new_history()
-
- def _ensure_logged_in_user( self, environ, session_cookie ):
- # The value of session_cookie can be one of
- # 'galaxysession' or 'galaxycommunitysession'
- # Currently this method does nothing unless session_cookie is 'galaxysession'
- if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
- # TODO: re-engineer to eliminate the use of allowed_paths
- # as maintenance overhead is far too high.
- allowed_paths = (
- url_for( controller='root', action='index' ),
- url_for( controller='root', action='tool_menu' ),
- url_for( controller='root', action='masthead' ),
- url_for( controller='root', action='history' ),
- url_for( controller='user', action='api_keys' ),
- url_for( controller='user', action='create' ),
- url_for( controller='user', action='index' ),
- url_for( controller='user', action='login' ),
- url_for( controller='user', action='logout' ),
- url_for( controller='user', action='manage_user_info' ),
- url_for( controller='user', action='set_default_permissions' ),
- url_for( controller='user', action='reset_password' ),
- url_for( controller='user', action='openid_auth' ),
- url_for( controller='user', action='openid_process' ),
- url_for( controller='user', action='openid_associate' ),
- url_for( controller='library', action='browse' ),
- url_for( controller='history', action='list' ),
- url_for( controller='dataset', action='list' )
- )
- display_as = url_for( controller='root', action='display_as' )
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
- try:
- host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
- except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
- host = None
- if host in UCSC_SERVERS:
- return
- external_display_path = url_for( controller='', action='display_application' )
- if self.request.path.startswith( external_display_path ):
- request_path_split = self.request.path.split( '/' )
- try:
- if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] )
- and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links
- and request_path_split[-3] != 'None'):
- return
- except IndexError:
- pass
- if self.request.path not in allowed_paths:
- self.response.send_redirect( url_for( controller='root', action='index' ) )
-
- def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
- """
- Create a new GalaxySession for this request, possibly with a connection
- to a previous session (in `prev_galaxy_session`) and an existing user
- (in `user_for_new_session`).
-
- Caller is responsible for flushing the returned session.
- """
- session_key = self.security.get_new_guid()
- galaxy_session = self.app.model.GalaxySession(
- session_key=session_key,
- is_valid=True,
- remote_host=self.request.remote_host,
- remote_addr=self.request.remote_addr,
- referer=self.request.headers.get( 'Referer', None ) )
- if prev_galaxy_session:
-            # An existing session was invalidated for some reason; keep track of it
- galaxy_session.prev_session_id = prev_galaxy_session.id
- if user_for_new_session:
- # The new session should be associated with the user
- galaxy_session.user = user_for_new_session
- return galaxy_session
-
- def get_or_create_remote_user( self, remote_user_email ):
- """
- Create a remote user with the email remote_user_email and return it
- """
- if not self.app.config.use_remote_user:
- return None
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- user = self.sa_session.query( self.app.model.User
- ).filter( self.app.model.User.table.c.email==remote_user_email ).first() #noqa
- if user:
- # GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
- # role and default user / history permissions were not set for remote users. When a
- # remote user authenticates, we'll look for this information, and if missing, create it.
- if not self.app.security_agent.get_private_user_role( user ):
- self.app.security_agent.create_private_user_role( user )
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- if not user.default_permissions:
- self.app.security_agent.user_set_default_permissions( user )
- self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
- elif user is None:
- username = remote_user_email.split( '@', 1 )[0].lower()
- random.seed()
- user = self.app.model.User( email=remote_user_email )
- user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
- user.external = True
- # Replace invalid characters in the username
- for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
- username = username.replace( char, '-' )
- # Find a unique username - user can change it later
- if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
- i = 1
- while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
- i += 1
- username += '-' + str(i)
- user.username = username
- self.sa_session.add( user )
- self.sa_session.flush()
- self.app.security_agent.create_private_user_role( user )
- # We set default user permissions, before we log in and set the default history permissions
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- self.app.security_agent.user_set_default_permissions( user )
- # self.log_event( "Automatically created account '%s'", user.email )
- return user
-
- def __update_session_cookie( self, name='galaxysession' ):
- """
- Update the session cookie to match the current session.
- """
- self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
- name=name, path=self.app.config.cookie_path )
-
- def handle_user_login( self, user ):
- """
-        Log in a new user (possibly newly created)
-
- - create a new session
- - associate new session with user
- - if old session had a history and it was not associated with a user, associate it with the new session,
- otherwise associate the current session's history with the user
- - add the disk usage of the current session to the user's total disk usage
- """
- # Set the previous session
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- # Define a new current_session
- self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
- if self.webapp.name == 'galaxy':
- cookie_name = 'galaxysession'
-            # Associate the current user's last accessed history (if it exists) with their new session
- history = None
- try:
- users_last_session = user.galaxy_sessions[0]
- last_accessed = True
- except:
- users_last_session = None
- last_accessed = False
- if (prev_galaxy_session.current_history and not
- prev_galaxy_session.current_history.deleted and
- prev_galaxy_session.current_history.datasets):
- if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
- # If the previous galaxy session had a history, associate it with the new
- # session, but only if it didn't belong to a different user.
- history = prev_galaxy_session.current_history
- if prev_galaxy_session.user is None:
- # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
- for hda in history.datasets:
- user.total_disk_usage += hda.quota_amount( user )
- elif self.galaxy_session.current_history:
- history = self.galaxy_session.current_history
- if (not history and users_last_session and
- users_last_session.current_history and not
- users_last_session.current_history.deleted):
- history = users_last_session.current_history
- elif not history:
- history = self.get_history( create=True )
- if history not in self.galaxy_session.histories:
- self.galaxy_session.add_history( history )
- if history.user is None:
- history.user = user
- self.galaxy_session.current_history = history
- if not last_accessed:
- # Only set default history permissions if current history is not from a previous session
- self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
- else:
- cookie_name = 'galaxycommunitysession'
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- self.sa_session.flush()
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name=cookie_name )
-
- def handle_user_logout( self, logout_all=False ):
- """
-        Log out the current user:
- - invalidate the current session
- - create a new session with no user associated
- """
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- galaxy_user_id = prev_galaxy_session.user_id
- if logout_all and galaxy_user_id is not None:
- for other_galaxy_session in self.sa_session.query( self.app.model.GalaxySession
- ).filter( and_( self.app.model.GalaxySession.table.c.user_id==galaxy_user_id, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True, #noqa
- self.app.model.GalaxySession.table.c.id!=prev_galaxy_session.id ) ): #noqa
- other_galaxy_session.is_valid = False
- self.sa_session.add( other_galaxy_session )
- self.sa_session.flush()
- if self.webapp.name == 'galaxy':
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name='galaxysession' )
- elif self.webapp.name == 'tool_shed':
- self.__update_session_cookie( name='galaxycommunitysession' )
-
- def get_galaxy_session( self ):
- """
- Return the current galaxy session
- """
- return self.galaxy_session
-
- def get_history( self, create=False ):
- """
-        Load the current history, creating a new one only if there is no
- current history and we're told to create.
- Transactions will not always have an active history (API requests), so
- None is a valid response.
- """
- history = None
- if self.galaxy_session:
- history = self.galaxy_session.current_history
- if not history and util.string_as_bool( create ):
- history = self.new_history()
- return history
-
- def set_history( self, history ):
- if history and not history.deleted:
- self.galaxy_session.current_history = history
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
-
- history = property( get_history, set_history )
-
- def get_or_create_default_history( self ):
- """
- Gets or creates a default history and associates it with the current
- session.
- """
-
- # There must be a user to fetch a default history.
- if not self.galaxy_session.user:
- return self.new_history()
-
- # Look for default history that (a) has default name + is not deleted and
- # (b) has no datasets. If suitable history found, use it; otherwise, create
- # new history.
- unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
- user=self.galaxy_session.user,
- name=self.app.model.History.default_name,
- deleted=False )
- default_history = None
- for history in unnamed_histories:
- if len( history.datasets ) == 0:
- # Found suitable default history.
- default_history = history
- break
-
-        # Set or create history.
- if default_history:
- history = default_history
- self.set_history( history )
- else:
- history = self.new_history()
-
- return history
-
- def new_history( self, name=None ):
- """
- Create a new history and associate it with the current session and
- its associated user (if set).
- """
- # Create new history
- history = self.app.model.History()
- if name:
- history.name = name
- # Associate with session
- history.add_galaxy_session( self.galaxy_session )
- # Make it the session's current history
- self.galaxy_session.current_history = history
- # Associate with user
- if self.galaxy_session.user:
- history.user = self.galaxy_session.user
- # Track genome_build with history
- history.genome_build = self.app.genome_builds.default_value
- # Set the user's default history permissions
- self.app.security_agent.history_set_default_permissions( history )
- # Save
- self.sa_session.add_all( ( self.galaxy_session, history ) )
- self.sa_session.flush()
- return history
-
- @base.lazy_property
- def template_context( self ):
- return dict()
-
- def make_form_data( self, name, **kwargs ):
- rval = self.template_context[name] = FormData()
- rval.values.update( kwargs )
- return rval
-
- def set_message( self, message, type=None ):
- """
- Convenience method for setting the 'message' and 'message_type'
- element of the template context.
- """
- self.template_context['message'] = message
- if type:
- self.template_context['status'] = type
-
- def get_message( self ):
- """
- Convenience method for getting the 'message' element of the template
- context.
- """
- return self.template_context['message']
-
- def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single message.
-
- `type`: one of "error", "warning", "info", or "done"; determines the
- type of dialog box and icon displayed with the message
-
- `refresh_frames`: names of frames in the interface that should be
- refreshed when the message is displayed
- """
- return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
-
- def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an error message. See `show_message`.
- """
- return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an ok message. See `show_message`.
- """
- return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
-        Convenience method for displaying a warning message. See `show_message`.
- """
- return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single HTML
- form.
- """
- return self.fill_template( template, form=form, header=header,
- use_panels=( form.use_panels or use_panels ),
- active_view=active_view )
-
- def fill_template(self, filename, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- # call get_user so we can invalidate sessions from external users,
- # if external auth has been disabled.
- self.get_user()
- if filename.endswith( ".mako" ):
- return self.fill_template_mako( filename, **kwargs )
- else:
- template = Template( file=os.path.join(self.app.config.template_path, filename),
- searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
- return str( template )
-
- def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
- template_lookup = template_lookup or self.webapp.mako_template_lookup
- template = template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
-
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util,
- request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
- return template.render( **data )
-
- def stream_template_mako( self, filename, **kwargs ):
- template = self.webapp.mako_template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
-
- def render( environ, start_response ):
- response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
-
- class StreamBuffer( object ):
- def write( self, d ):
- response_write( d.encode( 'utf-8' ) )
- buffer = StreamBuffer()
- context = mako.runtime.Context( buffer, **data )
- template.render_context( context )
- return []
- return render
-
- def fill_template_string(self, template_string, context=None, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- template = Template( source=template_string,
- searchList=[context or kwargs, dict(caller=self)] )
- return str(template)
-
-
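As an aside on the code being moved out of this module: the username handling in get_or_create_remote_user boils down to lowercasing the local part of the email, replacing anything outside [a-z0-9-] with '-', and appending -1, -2, ... until the name is unused. A minimal standalone sketch of that logic, with an in-memory set standing in for the database query (the function and variable names below are illustrative, not Galaxy APIs):

    import string

    ALLOWED_CHARS = set(string.ascii_lowercase + string.digits + '-')

    def propose_username(remote_user_email, taken_usernames):
        # Lowercase the local part of the email address.
        username = remote_user_email.split('@', 1)[0].lower()
        # Replace any character outside [a-z0-9-] with '-'.
        username = ''.join(c if c in ALLOWED_CHARS else '-' for c in username)
        # Append -1, -2, ... until the name is not taken.
        if username in taken_usernames:
            i = 1
            while '%s-%d' % (username, i) in taken_usernames:
                i += 1
            username = '%s-%d' % (username, i)
        return username

    # propose_username('Jane.Doe@example.org', {'jane-doe'}) -> 'jane-doe-1'
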
diff -r 360d9d7839d74f01a2da5b35fb1be10464956112 -r a0c16c8930b76cfe98574b2e8b3311f7b2504361 lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -1,16 +1,40 @@
+"""
+"""
import inspect
import os
+import hashlib
+import random
+import socket
+import string
+import time
+from Cookie import CookieError
from galaxy import eggs
eggs.require( "Cheetah" )
from Cheetah.Template import Template
eggs.require( "Mako" )
+import mako.runtime
import mako.lookup
+# pytz is used by Babel.
+eggs.require( "pytz" )
+eggs.require( "Babel" )
+from babel.support import Translations
+from babel import Locale
+eggs.require( "SQLAlchemy >= 0.4" )
+from sqlalchemy import and_
+from sqlalchemy.orm.exc import NoResultFound
from galaxy.exceptions import MessageException
+
+from galaxy import util
+from galaxy.util import asbool
+from galaxy.util import safe_str_cmp
from galaxy.util.backports.importlib import import_module
from galaxy.util.sanitize_html import sanitize_html
+
+from galaxy.managers import context
from galaxy.web.framework import base
+from galaxy.web.framework import helpers
from galaxy.web.framework import transaction
from galaxy.web.framework import formbuilder
@@ -18,8 +42,27 @@
log = logging.getLogger( __name__ )
+UCSC_SERVERS = (
+ 'hgw1.cse.ucsc.edu',
+ 'hgw2.cse.ucsc.edu',
+ 'hgw3.cse.ucsc.edu',
+ 'hgw4.cse.ucsc.edu',
+ 'hgw5.cse.ucsc.edu',
+ 'hgw6.cse.ucsc.edu',
+ 'hgw7.cse.ucsc.edu',
+ 'hgw8.cse.ucsc.edu',
+)
+
+
class WebApplication( base.WebApplication ):
-
+ """
+ A web application that:
+ * adds API and UI controllers by scanning given directories and
+ importing all modules found there.
+ * has a security object.
+ * builds mako template lookups.
+ * generates GalaxyWebTransactions.
+ """
def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
self.name = name
base.WebApplication.__init__( self )
@@ -54,7 +97,7 @@
return base.WebApplication.make_body_iterable( self, trans, body )
def transaction_chooser( self, environ, galaxy_app, session_cookie ):
- return transaction.GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
+ return GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
def add_ui_controllers( self, package_name, app ):
"""
@@ -113,3 +156,656 @@
really just used to stub out actual controllers for routes testing.
"""
return T( app )
+
+
+class GalaxyWebTransaction( base.DefaultWebTransaction,
+ context.ProvidesAppContext, context.ProvidesUserContext, context.ProvidesHistoryContext ):
+ """
+ Encapsulates web transaction specific state for the Galaxy application
+ (specifically the user's "cookie" session and history)
+ """
+
+ def __init__( self, environ, app, webapp, session_cookie=None):
+ self.app = app
+ self.webapp = webapp
+ self.security = webapp.security
+ base.DefaultWebTransaction.__init__( self, environ )
+ self.setup_i18n()
+ self.expunge_all()
+ self.debug = asbool( self.app.config.get( 'debug', False ) )
+ # Flag indicating whether we are in workflow building mode (means
+ # that the current history should not be used for parameter values
+ # and such).
+ self.workflow_building_mode = False
+ # Flag indicating whether this is an API call and the API key user is an administrator
+ self.api_inherit_admin = False
+ self.__user = None
+ self.galaxy_session = None
+ self.error_message = None
+
+ if self.environ.get('is_api_request', False):
+ # With API requests, if there's a key, use it and associate the
+ # user with the transaction.
+ # If not, check for an active session but do not create one.
+ # If an error message is set here, it's sent back using
+ # trans.show_error in the response -- in expose_api.
+ self.error_message = self._authenticate_api( session_cookie )
+ elif self.app.name == "reports":
+ self.galaxy_session = None
+ else:
+ # This is a web request, get or create session.
+ self._ensure_valid_session( session_cookie )
+ if self.galaxy_session:
+ # When we've authenticated by session, we have to check the
+ # following.
+ # Prevent deleted users from accessing Galaxy
+ if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
+ self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
+ if self.app.config.require_login:
+ self._ensure_logged_in_user( environ, session_cookie )
+
+ def setup_i18n( self ):
+ locales = []
+ if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
+ # locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
+ client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
+ for locale in client_locales:
+ try:
+ locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
+ except Exception, e:
+ log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
+ if not locales:
+ # Default to English
+ locales = 'en'
+ t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
+ self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
+
+ def get_user( self ):
+ """Return the current user if logged in or None."""
+ if self.galaxy_session:
+ return self.galaxy_session.user
+ else:
+ return self.__user
+
+ def set_user( self, user ):
+ """Set the current user."""
+ if self.galaxy_session:
+ self.galaxy_session.user = user
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
+ self.__user = user
+
+ user = property( get_user, set_user )
+
+ def get_cookie( self, name='galaxysession' ):
+ """Convenience method for getting a session cookie"""
+ try:
+ # If we've changed the cookie during the request return the new value
+ if name in self.response.cookies:
+ return self.response.cookies[name].value
+ else:
+ return self.request.cookies[name].value
+ except:
+ return None
+
+ def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
+ """Convenience method for setting a session cookie"""
+ # The galaxysession cookie value must be a high entropy 128 bit random number encrypted
+ # using a server secret key. Any other value is invalid and could pose security issues.
+ self.response.cookies[name] = value
+ self.response.cookies[name]['path'] = path
+ self.response.cookies[name]['max-age'] = 3600 * 24 * age # 90 days
+ tstamp = time.localtime( time.time() + 3600 * 24 * age )
+ self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
+ self.response.cookies[name]['version'] = version
+ try:
+ self.response.cookies[name]['httponly'] = True
+ except CookieError, e:
+ log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
+
+ def _authenticate_api( self, session_cookie ):
+ """
+ Authenticate for the API via key or session (if available).
+ """
+ api_key = self.request.params.get('key', None)
+ secure_id = self.get_cookie( name=session_cookie )
+ api_key_supplied = self.environ.get('is_api_request', False) and api_key
+ if api_key_supplied and self._check_master_api_key( api_key ):
+ self.api_inherit_admin = True
+ log.info( "Session authenticated using Galaxy master api key" )
+ self.user = None
+ self.galaxy_session = None
+ elif api_key_supplied:
+ # Sessionless API transaction, we just need to associate a user.
+ try:
+ provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
+ except NoResultFound:
+ return 'Provided API key is not valid.'
+ if provided_key.user.deleted:
+ return 'User account is deactivated, please contact an administrator.'
+ newest_key = provided_key.user.api_keys[0]
+ if newest_key.key != provided_key.key:
+ return 'Provided API key has expired.'
+ self.set_user( provided_key.user )
+ elif secure_id:
+ # API authentication via active session
+ # Associate user using existing session
+ self._ensure_valid_session( session_cookie )
+ else:
+ # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
+ self.user = None
+ self.galaxy_session = None
+
+ def _check_master_api_key( self, api_key ):
+ master_api_key = getattr( self.app.config, 'master_api_key', None )
+ if not master_api_key:
+ return False
+ # Hash keys to make them the same size, so we can do safe comparison.
+ master_hash = hashlib.sha256( master_api_key ).hexdigest()
+ provided_hash = hashlib.sha256( api_key ).hexdigest()
+ return safe_str_cmp( master_hash, provided_hash )
+
+ def _ensure_valid_session( self, session_cookie, create=True):
+ """
+ Ensure that a valid Galaxy session exists and is available as
+ trans.session (part of initialization)
+
+ Support for universe_session and universe_user cookies has been
+ removed as of 31 Oct 2008.
+ """
+ # Try to load an existing session
+ secure_id = self.get_cookie( name=session_cookie )
+ galaxy_session = None
+ prev_galaxy_session = None
+ user_for_new_session = None
+ invalidate_existing_session = False
+ # Track whether the session has changed so we can avoid calling flush
+ # in the most common case (session exists and is valid).
+ galaxy_session_requires_flush = False
+ if secure_id:
+ # Decode the cookie value to get the session_key
+ session_key = self.security.decode_guid( secure_id )
+ try:
+ # Make sure we have a valid UTF-8 string
+ session_key = session_key.encode( 'utf8' )
+ except UnicodeDecodeError:
+ # We'll end up creating a new galaxy_session
+ session_key = None
+ if session_key:
+ # Retrieve the galaxy_session id via the unique session_key
+ galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
+ .filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key, #noqa
+ self.app.model.GalaxySession.table.c.is_valid==True ) ).first() #noqa
+ # If remote user is in use it can invalidate the session and in some
+        # cases won't have a cookie set above, so we need to check some
+ # things now.
+ if self.app.config.use_remote_user:
+ # If this is an api request, and they've passed a key, we let this go.
+ assert self.app.config.remote_user_header in self.environ, \
+ "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
+ remote_user_email = self.environ[ self.app.config.remote_user_header ]
+ if getattr( self.app.config, "normalize_remote_user_email", False ):
+ remote_user_email = remote_user_email.lower()
+ if galaxy_session:
+ # An existing session, make sure correct association exists
+ if galaxy_session.user is None:
+ # No user, associate
+ galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
+ galaxy_session_requires_flush = True
+ elif ((galaxy_session.user.email != remote_user_email) and
+ ((not self.app.config.allow_user_impersonation) or
+ (remote_user_email not in self.app.config.admin_users_list))):
+ # Session exists but is not associated with the correct
+ # remote user, and the currently set remote_user is not a
+ # potentially impersonating admin.
+ invalidate_existing_session = True
+ user_for_new_session = self.get_or_create_remote_user( remote_user_email )
+ log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
+ remote_user_email, galaxy_session.user.email )
+ else:
+ # No session exists, get/create user for new session
+ user_for_new_session = self.get_or_create_remote_user( remote_user_email )
+ else:
+ if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
+ # Remote user support is not enabled, but there is an existing
+ # session with an external user, invalidate
+ invalidate_existing_session = True
+ log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
+ galaxy_session.user.email )
+ elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
+ invalidate_existing_session = True
+ log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
+ # Do we need to invalidate the session for some reason?
+ if invalidate_existing_session:
+ prev_galaxy_session = galaxy_session
+ prev_galaxy_session.is_valid = False
+ galaxy_session = None
+ # No relevant cookies, or couldn't find, or invalid, so create a new session
+ if galaxy_session is None:
+ galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
+ galaxy_session_requires_flush = True
+ self.galaxy_session = galaxy_session
+ self.__update_session_cookie( name=session_cookie )
+ else:
+ self.galaxy_session = galaxy_session
+ # Do we need to flush the session?
+ if galaxy_session_requires_flush:
+ self.sa_session.add( galaxy_session )
+ # FIXME: If prev_session is a proper relation this would not
+ # be needed.
+ if prev_galaxy_session:
+ self.sa_session.add( prev_galaxy_session )
+ self.sa_session.flush()
+ # If the old session was invalid, get a new history with our new session
+ if invalidate_existing_session:
+ self.new_history()
+
+ def _ensure_logged_in_user( self, environ, session_cookie ):
+ # The value of session_cookie can be one of
+ # 'galaxysession' or 'galaxycommunitysession'
+ # Currently this method does nothing unless session_cookie is 'galaxysession'
+ if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
+ # TODO: re-engineer to eliminate the use of allowed_paths
+ # as maintenance overhead is far too high.
+ allowed_paths = (
+ url_for( controller='root', action='index' ),
+ url_for( controller='root', action='tool_menu' ),
+ url_for( controller='root', action='masthead' ),
+ url_for( controller='root', action='history' ),
+ url_for( controller='user', action='api_keys' ),
+ url_for( controller='user', action='create' ),
+ url_for( controller='user', action='index' ),
+ url_for( controller='user', action='login' ),
+ url_for( controller='user', action='logout' ),
+ url_for( controller='user', action='manage_user_info' ),
+ url_for( controller='user', action='set_default_permissions' ),
+ url_for( controller='user', action='reset_password' ),
+ url_for( controller='user', action='openid_auth' ),
+ url_for( controller='user', action='openid_process' ),
+ url_for( controller='user', action='openid_associate' ),
+ url_for( controller='library', action='browse' ),
+ url_for( controller='history', action='list' ),
+ url_for( controller='dataset', action='list' )
+ )
+ display_as = url_for( controller='root', action='display_as' )
+ if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ try:
+ host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
+ except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
+ host = None
+ if host in UCSC_SERVERS:
+ return
+ external_display_path = url_for( controller='', action='display_application' )
+ if self.request.path.startswith( external_display_path ):
+ request_path_split = self.request.path.split( '/' )
+ try:
+ if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] )
+ and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links
+ and request_path_split[-3] != 'None'):
+ return
+ except IndexError:
+ pass
+ if self.request.path not in allowed_paths:
+ self.response.send_redirect( url_for( controller='root', action='index' ) )
+
+ def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
+ """
+ Create a new GalaxySession for this request, possibly with a connection
+ to a previous session (in `prev_galaxy_session`) and an existing user
+ (in `user_for_new_session`).
+
+ Caller is responsible for flushing the returned session.
+ """
+ session_key = self.security.get_new_guid()
+ galaxy_session = self.app.model.GalaxySession(
+ session_key=session_key,
+ is_valid=True,
+ remote_host=self.request.remote_host,
+ remote_addr=self.request.remote_addr,
+ referer=self.request.headers.get( 'Referer', None ) )
+ if prev_galaxy_session:
+ # Invalidated an existing session for some reason, keep track
+ galaxy_session.prev_session_id = prev_galaxy_session.id
+ if user_for_new_session:
+ # The new session should be associated with the user
+ galaxy_session.user = user_for_new_session
+ return galaxy_session
+
+ def get_or_create_remote_user( self, remote_user_email ):
+ """
+        Get or create a remote user with the email remote_user_email and return it
+ """
+ if not self.app.config.use_remote_user:
+ return None
+ if getattr( self.app.config, "normalize_remote_user_email", False ):
+ remote_user_email = remote_user_email.lower()
+ user = self.sa_session.query( self.app.model.User
+ ).filter( self.app.model.User.table.c.email==remote_user_email ).first() #noqa
+ if user:
+ # GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
+ # role and default user / history permissions were not set for remote users. When a
+ # remote user authenticates, we'll look for this information, and if missing, create it.
+ if not self.app.security_agent.get_private_user_role( user ):
+ self.app.security_agent.create_private_user_role( user )
+ if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
+ if not user.default_permissions:
+ self.app.security_agent.user_set_default_permissions( user )
+ self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
+ elif user is None:
+ username = remote_user_email.split( '@', 1 )[0].lower()
+ random.seed()
+ user = self.app.model.User( email=remote_user_email )
+ user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
+ user.external = True
+ # Replace invalid characters in the username
+ for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
+ username = username.replace( char, '-' )
+ # Find a unique username - user can change it later
+ if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
+ i = 1
+ while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
+ i += 1
+ username += '-' + str(i)
+ user.username = username
+ self.sa_session.add( user )
+ self.sa_session.flush()
+ self.app.security_agent.create_private_user_role( user )
+ # We set default user permissions, before we log in and set the default history permissions
+ if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
+ self.app.security_agent.user_set_default_permissions( user )
+ # self.log_event( "Automatically created account '%s'", user.email )
+ return user
+
+ def __update_session_cookie( self, name='galaxysession' ):
+ """
+ Update the session cookie to match the current session.
+ """
+ self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
+ name=name, path=self.app.config.cookie_path )
+
+ def handle_user_login( self, user ):
+ """
+        Log in a new user (possibly newly created)
+
+ - create a new session
+ - associate new session with user
+ - if old session had a history and it was not associated with a user, associate it with the new session,
+ otherwise associate the current session's history with the user
+ - add the disk usage of the current session to the user's total disk usage
+ """
+ # Set the previous session
+ prev_galaxy_session = self.galaxy_session
+ prev_galaxy_session.is_valid = False
+ # Define a new current_session
+ self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
+ if self.webapp.name == 'galaxy':
+ cookie_name = 'galaxysession'
+            # Associate the current user's last accessed history (if it exists) with their new session
+ history = None
+ try:
+ users_last_session = user.galaxy_sessions[0]
+ last_accessed = True
+ except:
+ users_last_session = None
+ last_accessed = False
+ if (prev_galaxy_session.current_history and not
+ prev_galaxy_session.current_history.deleted and
+ prev_galaxy_session.current_history.datasets):
+ if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
+ # If the previous galaxy session had a history, associate it with the new
+ # session, but only if it didn't belong to a different user.
+ history = prev_galaxy_session.current_history
+ if prev_galaxy_session.user is None:
+ # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
+ for hda in history.datasets:
+ user.total_disk_usage += hda.quota_amount( user )
+ elif self.galaxy_session.current_history:
+ history = self.galaxy_session.current_history
+ if (not history and users_last_session and
+ users_last_session.current_history and not
+ users_last_session.current_history.deleted):
+ history = users_last_session.current_history
+ elif not history:
+ history = self.get_history( create=True )
+ if history not in self.galaxy_session.histories:
+ self.galaxy_session.add_history( history )
+ if history.user is None:
+ history.user = user
+ self.galaxy_session.current_history = history
+ if not last_accessed:
+ # Only set default history permissions if current history is not from a previous session
+ self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
+ else:
+ cookie_name = 'galaxycommunitysession'
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+ self.sa_session.flush()
+ # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
+ self.__update_session_cookie( name=cookie_name )
+
+ def handle_user_logout( self, logout_all=False ):
+ """
+        Log out the current user:
+ - invalidate the current session
+ - create a new session with no user associated
+ """
+ prev_galaxy_session = self.galaxy_session
+ prev_galaxy_session.is_valid = False
+ self.galaxy_session = self.__create_new_session( prev_galaxy_session )
+ self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
+ galaxy_user_id = prev_galaxy_session.user_id
+ if logout_all and galaxy_user_id is not None:
+ for other_galaxy_session in self.sa_session.query( self.app.model.GalaxySession
+ ).filter( and_( self.app.model.GalaxySession.table.c.user_id==galaxy_user_id, #noqa
+ self.app.model.GalaxySession.table.c.is_valid==True, #noqa
+ self.app.model.GalaxySession.table.c.id!=prev_galaxy_session.id ) ): #noqa
+ other_galaxy_session.is_valid = False
+ self.sa_session.add( other_galaxy_session )
+ self.sa_session.flush()
+ if self.webapp.name == 'galaxy':
+ # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
+ self.__update_session_cookie( name='galaxysession' )
+ elif self.webapp.name == 'tool_shed':
+ self.__update_session_cookie( name='galaxycommunitysession' )
+
+ def get_galaxy_session( self ):
+ """
+ Return the current galaxy session
+ """
+ return self.galaxy_session
+
+ def get_history( self, create=False ):
+ """
+        Load the current history, creating a new one only if there is no
+ current history and we're told to create.
+ Transactions will not always have an active history (API requests), so
+ None is a valid response.
+ """
+ history = None
+ if self.galaxy_session:
+ history = self.galaxy_session.current_history
+ if not history and util.string_as_bool( create ):
+ history = self.new_history()
+ return history
+
+ def set_history( self, history ):
+ if history and not history.deleted:
+ self.galaxy_session.current_history = history
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
+
+ history = property( get_history, set_history )
+
+ def get_or_create_default_history( self ):
+ """
+ Gets or creates a default history and associates it with the current
+ session.
+ """
+
+ # There must be a user to fetch a default history.
+ if not self.galaxy_session.user:
+ return self.new_history()
+
+ # Look for default history that (a) has default name + is not deleted and
+ # (b) has no datasets. If suitable history found, use it; otherwise, create
+ # new history.
+ unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
+ user=self.galaxy_session.user,
+ name=self.app.model.History.default_name,
+ deleted=False )
+ default_history = None
+ for history in unnamed_histories:
+ if len( history.datasets ) == 0:
+ # Found suitable default history.
+ default_history = history
+ break
+
+        # Set or create history.
+ if default_history:
+ history = default_history
+ self.set_history( history )
+ else:
+ history = self.new_history()
+
+ return history
+
+ def new_history( self, name=None ):
+ """
+ Create a new history and associate it with the current session and
+ its associated user (if set).
+ """
+ # Create new history
+ history = self.app.model.History()
+ if name:
+ history.name = name
+ # Associate with session
+ history.add_galaxy_session( self.galaxy_session )
+ # Make it the session's current history
+ self.galaxy_session.current_history = history
+ # Associate with user
+ if self.galaxy_session.user:
+ history.user = self.galaxy_session.user
+ # Track genome_build with history
+ history.genome_build = self.app.genome_builds.default_value
+ # Set the user's default history permissions
+ self.app.security_agent.history_set_default_permissions( history )
+ # Save
+ self.sa_session.add_all( ( self.galaxy_session, history ) )
+ self.sa_session.flush()
+ return history
+
+ @base.lazy_property
+ def template_context( self ):
+ return dict()
+
+ def make_form_data( self, name, **kwargs ):
+ rval = self.template_context[name] = FormData()
+ rval.values.update( kwargs )
+ return rval
+
+ def set_message( self, message, type=None ):
+ """
+ Convenience method for setting the 'message' and 'message_type'
+ element of the template context.
+ """
+ self.template_context['message'] = message
+ if type:
+ self.template_context['status'] = type
+
+ def get_message( self ):
+ """
+ Convenience method for getting the 'message' element of the template
+ context.
+ """
+ return self.template_context['message']
+
+ def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
+ """
+ Convenience method for displaying a simple page with a single message.
+
+ `type`: one of "error", "warning", "info", or "done"; determines the
+ type of dialog box and icon displayed with the message
+
+ `refresh_frames`: names of frames in the interface that should be
+ refreshed when the message is displayed
+ """
+ return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
+
+ def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
+ """
+ Convenience method for displaying an error message. See `show_message`.
+ """
+ return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
+
+ def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
+ """
+ Convenience method for displaying an ok message. See `show_message`.
+ """
+ return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
+
+ def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
+ """
+        Convenience method for displaying a warning message. See `show_message`.
+ """
+ return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
+
+ def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
+ """
+ Convenience method for displaying a simple page with a single HTML
+ form.
+ """
+ return self.fill_template( template, form=form, header=header,
+ use_panels=( form.use_panels or use_panels ),
+ active_view=active_view )
+
+ def fill_template(self, filename, **kwargs):
+ """
+ Fill in a template, putting any keyword arguments on the context.
+ """
+ # call get_user so we can invalidate sessions from external users,
+ # if external auth has been disabled.
+ self.get_user()
+ if filename.endswith( ".mako" ):
+ return self.fill_template_mako( filename, **kwargs )
+ else:
+ template = Template( file=os.path.join(self.app.config.template_path, filename),
+ searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
+ return str( template )
+
+ def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
+ template_lookup = template_lookup or self.webapp.mako_template_lookup
+ template = template_lookup.get_template( filename )
+ template.output_encoding = 'utf-8'
+
+ data = dict( caller=self, t=self, trans=self, h=helpers, util=util,
+ request=self.request, response=self.response, app=self.app )
+ data.update( self.template_context )
+ data.update( kwargs )
+ return template.render( **data )
+
+ def stream_template_mako( self, filename, **kwargs ):
+ template = self.webapp.mako_template_lookup.get_template( filename )
+ template.output_encoding = 'utf-8'
+ data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
+ data.update( self.template_context )
+ data.update( kwargs )
+
+ def render( environ, start_response ):
+ response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
+
+ class StreamBuffer( object ):
+ def write( self, d ):
+ response_write( d.encode( 'utf-8' ) )
+ buffer = StreamBuffer()
+ context = mako.runtime.Context( buffer, **data )
+ template.render_context( context )
+ return []
+ return render
+
+ def fill_template_string(self, template_string, context=None, **kwargs):
+ """
+ Fill in a template, putting any keyword arguments on the context.
+ """
+ template = Template( source=template_string,
+ searchList=[context or kwargs, dict(caller=self)] )
+ return str(template)
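A note on _check_master_api_key above: hashing both the configured master key and the supplied key to fixed-length SHA-256 digests before comparing them lets the comparison run in constant time regardless of where the strings first differ. A rough standalone sketch of the same idea, using the standard library's hmac.compare_digest in place of Galaxy's safe_str_cmp (assumed interchangeable here, purely for illustration):

    import hashlib
    import hmac

    def check_master_api_key(provided_key, master_api_key):
        # No master key configured means the check always fails.
        if not master_api_key:
            return False
        # Hash both values so they have the same length before comparing.
        master_hash = hashlib.sha256(master_api_key.encode('utf-8')).hexdigest()
        provided_hash = hashlib.sha256(provided_key.encode('utf-8')).hexdigest()
        # Constant-time comparison of the two digests.
        return hmac.compare_digest(master_hash, provided_hash)
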
https://bitbucket.org/galaxy/galaxy-central/commits/d66fefbc6bd6/
Changeset: d66fefbc6bd6
User: carlfeberhard
Date: 2014-09-03 17:22:29
Summary: merge central
Affected #: 1 file
diff -r a0c16c8930b76cfe98574b2e8b3311f7b2504361 -r d66fefbc6bd67201f8833e7c5d5a881d4830951f lib/galaxy/managers/folders.py
--- a/lib/galaxy/managers/folders.py
+++ b/lib/galaxy/managers/folders.py
@@ -83,7 +83,7 @@
Check whether the folder is accessible to current user.
By default every folder is accessible (contents have their own permissions).
"""
- return True
+ return folder
def get_folder_dict( self, trans, folder ):
"""
@@ -156,6 +156,19 @@
add_library_item_role_list = [ ( add_role.name, trans.security.encode_id( add_role.id ) ) for add_role in add_roles ]
return dict( modify_folder_role_list=modify_folder_role_list, manage_folder_role_list=manage_folder_role_list, add_library_item_role_list=add_library_item_role_list )
+ def can_add_item( self, trans, folder ):
+ """
+        Return True if the user has permission to add an item to the given folder.
+ """
+ if trans.user_is_admin:
+ return True
+ current_user_roles = trans.get_current_user_roles()
+ add_roles = set( trans.app.security_agent.get_roles_for_action( folder, trans.app.security_agent.permitted_actions.LIBRARY_ADD ) )
+ for role in current_user_roles:
+ if role in add_roles:
+ return True
+ return False
+
def cut_the_prefix( self, encoded_folder_id ):
"""
Remove the prefix from the encoded folder id.
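For context, can_add_item above is an admin short-circuit followed by a membership test of the user's roles against the folder's LIBRARY_ADD roles; the loop amounts to asking whether the two role sets intersect. A small sketch of that check in isolation (plain values stand in for role objects and for the security agent call):

    def can_add_item(is_admin, current_user_roles, add_roles):
        # Admins can always add items.
        if is_admin:
            return True
        # Any role shared between the user and the folder's LIBRARY_ADD roles grants access.
        return bool(set(current_user_roles) & set(add_roles))

    # can_add_item(False, ['lab-group', 'jane'], {'lab-group'}) -> True
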
https://bitbucket.org/galaxy/galaxy-central/commits/5bc83c22c468/
Changeset: 5bc83c22c468
User: carlfeberhard
Date: 2014-09-03 17:44:25
Summary: Add managers/context.py, remove import of transaction
Affected #: 1 file
diff -r d66fefbc6bd67201f8833e7c5d5a881d4830951f -r 5bc83c22c4685bae000f6ff7bd2b43a695138022 lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -35,7 +35,6 @@
from galaxy.managers import context
from galaxy.web.framework import base
from galaxy.web.framework import helpers
-from galaxy.web.framework import transaction
from galaxy.web.framework import formbuilder
import logging
https://bitbucket.org/galaxy/galaxy-central/commits/efd4b01d8ae6/
Changeset: efd4b01d8ae6
User: carlfeberhard
Date: 2014-09-03 17:46:38
Summary: Fix to 5bc83c2
Affected #: 1 file
diff -r 5bc83c22c4685bae000f6ff7bd2b43a695138022 -r efd4b01d8ae64f6a7d25011295a919fea3324f1d lib/galaxy/managers/context.py
--- /dev/null
+++ b/lib/galaxy/managers/context.py
@@ -0,0 +1,179 @@
+"""
+Mixins for transaction-like objects.
+"""
+
+import os
+
+from galaxy.util.json import to_json_string
+from galaxy.util import bunch
+
+class ProvidesAppContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ database and event handling.
+
+ Mixed in class must provide `app` property.
+ """
+
+ def log_action( self, user=None, action=None, context=None, params=None):
+ """
+ Application-level logging of user actions.
+ """
+ if self.app.config.log_actions:
+ action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
+ try:
+ if user:
+ action.user = user
+ else:
+ action.user = self.user
+ except:
+ action.user = None
+ try:
+ action.session_id = self.galaxy_session.id
+ except:
+ action.session_id = None
+ self.sa_session.add( action )
+ self.sa_session.flush()
+
+ def log_event( self, message, tool_id=None, **kwargs ):
+ """
+ Application level logging. Still needs fleshing out (log levels and such)
+ Logging events is a config setting - if False, do not log.
+ """
+ if self.app.config.log_events:
+ event = self.app.model.Event()
+ event.tool_id = tool_id
+ try:
+ event.message = message % kwargs
+ except:
+ event.message = message
+ try:
+ event.history = self.get_history()
+ except:
+ event.history = None
+ try:
+ event.history_id = self.history.id
+ except:
+ event.history_id = None
+ try:
+ event.user = self.user
+ except:
+ event.user = None
+ try:
+ event.session_id = self.galaxy_session.id
+ except:
+ event.session_id = None
+ self.sa_session.add( event )
+ self.sa_session.flush()
+
+ @property
+ def sa_session( self ):
+ """
+ Returns a SQLAlchemy session -- currently just gets the current
+ session from the threadlocal session context, but this is provided
+ to allow migration toward a more SQLAlchemy 0.4 style of use.
+ """
+ return self.app.model.context.current
+
+ def expunge_all( self ):
+ app = self.app
+ context = app.model.context
+ context.expunge_all()
+        # This is a bit hacky, should refactor this. Maybe refactor to app -> expunge_all()
+ if hasattr(app, 'install_model'):
+ install_model = app.install_model
+ if install_model != app.model:
+ install_model.context.expunge_all()
+
+ def get_toolbox(self):
+ """Returns the application toolbox"""
+ return self.app.toolbox
+
+ @property
+ def model( self ):
+ return self.app.model
+
+ @property
+ def install_model( self ):
+ return self.app.install_model
+
+ def request_types(self):
+ if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
+ return True
+ return False
+
+
+class ProvidesUserContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ reasoning about users.
+
+ Mixed in class must provide `user`, `api_inherit_admin`, and `app`
+ properties.
+ """
+
+ @property
+ def anonymous( self ):
+ return self.user is None and not self.api_inherit_admin
+
+ def get_current_user_roles( self ):
+ user = self.user
+ if user:
+ roles = user.all_roles()
+ else:
+ roles = []
+ return roles
+
+ def user_is_admin( self ):
+ if self.api_inherit_admin:
+ return True
+ return self.user and self.user.email in self.app.config.admin_users_list
+
+ def user_can_do_run_as( self ):
+ run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
+ if not run_as_users:
+ return False
+ user_in_run_as_users = self.user and self.user.email in run_as_users
+ # Can do if explicitly in list or master_api_key supplied.
+ can_do_run_as = user_in_run_as_users or self.api_inherit_admin
+ return can_do_run_as
+
+ @property
+ def user_ftp_dir( self ):
+ identifier = self.app.config.ftp_upload_dir_identifier
+ return os.path.join( self.app.config.ftp_upload_dir, getattr( self.user, identifier ) )
+
+
+class ProvidesHistoryContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ reasoning about histories.
+
+ Mixed in class must provide `user`, `history`, and `app`
+ properties.
+ """
+
+ def db_dataset_for( self, dbkey ):
+ """
+        Returns the db_file dataset associated with / needed by the given `dbkey`, or `None`.
+ """
+ # If no history, return None.
+ if self.history is None:
+ return None
+#TODO: when does this happen? is it Bunch or util.bunch.Bunch?
+ if isinstance( self.history, bunch.Bunch ):
+ # The API presents a Bunch for a history. Until the API is
+ # more fully featured for handling this, also return None.
+ return None
+ datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+ .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ # FIXME: This method should be removed
+ return self.app.genome_builds.get_genome_build_names( trans=self )
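The mixins above expect the concrete transaction class to supply a few attributes: ProvidesAppContext needs app, and ProvidesUserContext needs user, api_inherit_admin, and app. A minimal sketch of wiring one of them onto a stub object, in the spirit of the unit test updated in the next changeset (assumes Galaxy's lib/ is importable; the stub classes and the example admin address are illustrative only):

    from galaxy.util import bunch
    from galaxy.managers import context

    class StubApp(object):
        # Just enough configuration for ProvidesUserContext.user_is_admin().
        def __init__(self):
            self.config = bunch.Bunch(admin_users_list=['admin@example.org'])

    class StubTransaction(context.ProvidesUserContext):
        def __init__(self, app, user=None):
            self.app = app
            self.user = user
            self.api_inherit_admin = False

    trans = StubTransaction(StubApp())
    assert trans.anonymous                       # no user, no master-key admin
    assert trans.get_current_user_roles() == []  # no user means no roles
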
https://bitbucket.org/galaxy/galaxy-central/commits/35a335db606f/
Changeset: 35a335db606f
User: carlfeberhard
Date: 2014-09-03 21:18:55
Summary: Fix bunch import; fix unit test imports
Affected #: 3 files
diff -r efd4b01d8ae64f6a7d25011295a919fea3324f1d -r 35a335db606f3f53e5028ef5463ba78fe6f72000 lib/galaxy/webapps/demo_sequencer/framework/__init__.py
--- a/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
+++ b/lib/galaxy/webapps/demo_sequencer/framework/__init__.py
@@ -32,7 +32,7 @@
from galaxy.web.framework.formbuilder import FormData
import galaxy.web.framework.base
-from galaxy.web.util.bunch import Bunch
+from galaxy.util.bunch import Bunch
from galaxy.exceptions import MessageException
from galaxy.util import asbool
diff -r efd4b01d8ae64f6a7d25011295a919fea3324f1d -r 35a335db606f3f53e5028ef5463ba78fe6f72000 test/unit/test_galaxy_transactions.py
--- a/test/unit/test_galaxy_transactions.py
+++ b/test/unit/test_galaxy_transactions.py
@@ -1,10 +1,10 @@
from galaxy import model
from galaxy.model import mapping
from galaxy.util import bunch
-from galaxy.web import framework
+from galaxy.managers import context
-class TestTransaction( framework.ProvidesAppContext ):
+class TestTransaction( context.ProvidesAppContext ):
def __init__( self ):
self.app = TestApp()
diff -r efd4b01d8ae64f6a7d25011295a919fea3324f1d -r 35a335db606f3f53e5028ef5463ba78fe6f72000 test/unit/test_routes.py
--- a/test/unit/test_routes.py
+++ b/test/unit/test_routes.py
@@ -1,7 +1,7 @@
#import routes
# Unused but adds some stuff to routes.Mapper.
-from galaxy.web.framework import WebApplication
+from galaxy.web.framework.webapp import WebApplication
from galaxy.webapps.galaxy import buildapp as galaxy_buildapp
from galaxy.web import url_for
https://bitbucket.org/galaxy/galaxy-central/commits/19b109c4caa6/
Changeset: 19b109c4caa6
User: carlfeberhard
Date: 2014-09-03 21:19:09
Summary: merge central
Affected #: 1 file
diff -r 35a335db606f3f53e5028ef5463ba78fe6f72000 -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 test/functional/tools/multi_output_configured.xml
--- a/test/functional/tools/multi_output_configured.xml
+++ b/test/functional/tools/multi_output_configured.xml
@@ -10,6 +10,8 @@
echo "3" > subdir2/CUSTOM_3.txt;
mkdir subdir3;
echo "Foo" > subdir3/Foo;
+ echo "mapped reads" > split_bam_.MAPPED.bam;
+ echo "unmapped reads" > split_bam_.UNMAPPED.bam;
    </command>
    <inputs>
        <param name="num_param" type="integer" value="7" />
@@ -17,9 +19,10 @@
    </inputs>
    <outputs>
        <data format="txt" name="report">
- <discover_datasets pattern="__designation_and_ext__" directory="subdir1" />
- <discover_datasets pattern="CUSTOM_(?P<designation>.+)\.(?P<ext>.+)" directory="subdir2" />
- <discover_datasets pattern="__designation__" directory="subdir3" ext="input" />
+ <discover_datasets pattern="__designation_and_ext__" directory="subdir1" visible="true" />
+ <discover_datasets pattern="CUSTOM_(?P<designation>.+)\.(?P<ext>.+)" directory="subdir2" visible="true" />
+ <discover_datasets pattern="__designation__" directory="subdir3" ext="input" visible="true" />
+ <discover_datasets pattern="split_bam_\.(?P<designation>([A-Z-])\w+)\.bam" ext="txt" visible="true" /></data></outputs><tests>
@@ -45,6 +48,12 @@
<discovered_dataset designation="Foo" ftype="txt"><assert_contents><has_line line="Foo" /></assert_contents></discovered_dataset>
+ <discovered_dataset designation="MAPPED" ftype="txt">
+ <assert_contents><has_line line="mapped reads" /></assert_contents>
+ </discovered_dataset>
+ <discovered_dataset designation="UNMAPPED" ftype="txt">
+ <assert_contents><has_line line="unmapped reads" /></assert_contents>
+            </discovered_dataset>
        </output>
    </test>
</tests>
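For readers checking the new pattern above: the regex split_bam_\.(?P<designation>([A-Z-])\w+)\.bam is what turns the two files echoed in the <command> block into discovered datasets named MAPPED and UNMAPPED. A quick way to sanity-check such a pattern with plain Python, outside Galaxy:

    import re

    pattern = re.compile(r"split_bam_\.(?P<designation>([A-Z-])\w+)\.bam")

    for filename in ["split_bam_.MAPPED.bam", "split_bam_.UNMAPPED.bam", "report.txt"]:
        match = pattern.match(filename)
        if match:
            # Galaxy uses the named 'designation' group to name the discovered dataset.
            print("%s -> %s" % (filename, match.group("designation")))
        else:
            print("%s -> not discovered" % filename)
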
https://bitbucket.org/galaxy/galaxy-central/commits/520eb21f9a1e/
Changeset: 520eb21f9a1e
User: carlfeberhard
Date: 2014-09-04 16:09:13
Summary: merge central
Affected #: 16 files
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -18,4 +18,4 @@
81fbe25bd02edcd53065e8e4476dd1dfb5a72cf2 latest_2013.11.04
2a756ca2cb1826db7796018e77d12e2dd7b67603 latest_2014.02.10
ca45b78adb4152fc6e7395514d46eba6b7d0b838 release_2014.08.11
-ea12550fbc34260ae70bde38db59a4024f35f988 latest_2014.08.11
+20f4fdf1735aeff23a6e7ab00389781fe3f3142c latest_2014.08.11
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -248,6 +248,10 @@
def get_initial_value( self, trans, context, history=None ):
return self.value
+ def to_dict( self, trans, view='collection', value_mapper=None ):
+ d = super(TextToolParameter, self).to_dict(trans)
+ d['area'] = self.area
+ return d
class IntegerToolParameter( TextToolParameter ):
"""
@@ -459,6 +463,13 @@
else:
return self.falsevalue
+ def to_dict( self, trans, view='collection', value_mapper=None ):
+ d = super(BooleanToolParameter, self).to_dict(trans)
+ d['value'] = self.checked
+ d['truevalue'] = self.truevalue
+ d['falsevalue'] = self.falsevalue
+ return d
+
@property
def legal_values( self ):
return [ self.truevalue, self.falsevalue ]
@@ -992,7 +1003,8 @@
value = option[1]
d[ 'value' ] = value
- d[ 'display' ] = self.display
+ d['display'] = self.display
+ d['multiple'] = self.multiple
return d
@@ -1251,7 +1263,10 @@
d = super( ColumnListParameter, self ).to_dict( trans )
# add data reference
- d[ 'data_ref' ] = self.data_ref
+ d['data_ref'] = self.data_ref
+
+ # add numerical flag
+ d['numerical'] = self.numerical
# return
return d
@@ -2008,6 +2023,11 @@
ref = ref()
return ref
+ def to_dict( self, trans, view='collection', value_mapper=None ):
+ d = super( DataToolParameter, self ).to_dict( trans )
+ d['extensions'] = self.extensions
+ d['multiple'] = self.multiple
+ return d
class DataCollectionToolParameter( BaseDataToolParameter ):
"""
@@ -2110,6 +2130,8 @@
elif isinstance( value, basestring ):
if value.startswith( "dce:" ):
rval = trans.sa_session.query( trans.app.model.DatasetCollectionElement ).get( value[ len( "dce:"): ] )
+ elif value.startswith( "hdca:" ):
+ rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( value[ len( "hdca:"): ] )
else:
rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( value )
if rval and isinstance( rval, trans.app.model.HistoryDatasetCollectionAssociation ):
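On the DataCollectionToolParameter hunk above: the change adds an 'hdca:' prefix alongside the existing 'dce:' prefix when mapping a submitted string back to a model object, with an un-prefixed value still treated as an HDCA id. A small sketch of that dispatch on its own, with dictionaries standing in for the SQLAlchemy queries (the ids and lookup tables below are made up for illustration):

    def resolve_collection_value(value, dce_by_id, hdca_by_id):
        # 'dce:<id>' refers to a DatasetCollectionElement.
        if value.startswith("dce:"):
            return dce_by_id.get(value[len("dce:"):])
        # 'hdca:<id>' refers to a HistoryDatasetCollectionAssociation.
        elif value.startswith("hdca:"):
            return hdca_by_id.get(value[len("hdca:"):])
        # No prefix: fall back to treating the value as an HDCA id.
        return hdca_by_id.get(value)

    # resolve_collection_value("hdca:3", {}, {"3": "an HDCA"}) -> "an HDCA"
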
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/mvc/tools/tools-form.js
--- a/static/scripts/mvc/tools/tools-form.js
+++ b/static/scripts/mvc/tools/tools-form.js
@@ -38,9 +38,6 @@
// reset sequential input definition list
this.input_list = {};
- // create data model
- this.data = new Backbone.Model();
-
// initialize datasets
this.datasets = new ToolDatasets({
success: function() {
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/mvc/tools/tools-section.js
--- a/static/scripts/mvc/tools/tools-section.js
+++ b/static/scripts/mvc/tools/tools-section.js
@@ -11,12 +11,6 @@
// link inputs
this.inputs = options.inputs;
- // link datasets
- this.datasets = app.datasets;
-
- // link data model
- this.data = app.data;
-
// add table class for tr tag
// this assist in transforming the form into a json structure
options.cls_tr = 'section-row';
@@ -38,12 +32,12 @@
// load settings elements into table
for (var i in this.inputs) {
- this._add(this.inputs[i], this.data);
+ this._add(this.inputs[i]);
}
},
// add table row
- _add: function(input, data) {
+ _add: function(input) {
// link this
var self = this;
@@ -61,7 +55,7 @@
switch(type) {
// conditional field
case 'conditional':
- this._addConditional(input_def, data);
+ this._addConditional(input_def);
break;
// repeat block
case 'repeat':
@@ -69,12 +63,12 @@
break;
// default single element row
default:
- this._addRow(type, input_def, data);
+ this._addRow(type, input_def);
}
},
// add conditional block
- _addConditional: function(input_def, data) {
+ _addConditional: function(input_def) {
// add label to input definition root
input_def.label = input_def.test_param.label;
@@ -82,7 +76,7 @@
input_def.value = input_def.test_param.value;
// build options field
- this._addRow('conditional', input_def, data);
+ this._addRow('conditional', input_def);
// add fields
for (var i in input_def.cases) {
@@ -182,7 +176,7 @@
},
// add table row
- _addRow: function(field_type, input_def, data) {
+ _addRow: function(field_type, input_def) {
// get id
var id = input_def.id;
@@ -193,65 +187,59 @@
switch(field_type) {
// text input field
case 'text' :
- field = this._field_text(input_def, data);
+ field = this._field_text(input_def);
break;
// select field
case 'select' :
- field = this._field_select(input_def, data);
+ field = this._field_select(input_def);
break;
- // radiobox field
- case 'radiobutton' :
- field = this._field_radio(input_def, data);
- break;
-
// dataset
case 'data':
- field = this._field_data(input_def, data);
+ field = this._field_data(input_def);
break;
// dataset column
case 'data_column':
- field = this._field_column(input_def, data);
- break;
-
- // text area field
- case 'textarea' :
- field = this._field_textarea(input_def, data);
+ field = this._field_column(input_def);
break;
// conditional select field
case 'conditional':
- field = this._field_conditional(input_def, data);
+ field = this._field_conditional(input_def);
break;
// hidden field
case 'hidden':
- field = this._field_hidden(input_def, data);
+ field = this._field_hidden(input_def);
break;
// integer field
case 'integer':
- field = this._field_integer(input_def, data);
+ field = this._field_slider(input_def);
break;
+ // float field
+ case 'float':
+ field = this._field_slider(input_def);
+ break;
+
// boolean field
case 'boolean':
- field = this._field_radiobutton(input_def, data);
+ field = this._field_boolean(input_def);
break;
// default
default:
- field = this._field_text(input_def, data);
+ field = this._field_text(input_def);
console.debug('tools-form::_addRow() : Unmatched field type (' + field_type + ').');
}
- // set value
- if (!data.get(id)) {
- data.set(id, input_def.value);
+ // set field value
+ if (input_def.value !== undefined) {
+ field.value(input_def.value);
}
- field.value(data.get(id));
// add to field list
this.app.field_list[id] = field;
@@ -272,7 +260,7 @@
},
// conditional input field
- _field_conditional : function(input_def, data) {
+ _field_conditional : function(input_def) {
// link this
var self = this;
@@ -287,15 +275,10 @@
}
// select field
- var id = input_def.id;
return new Ui.Select.View({
- id : 'field-' + id,
+ id : 'field-' + input_def.id,
data : options,
- value : data.get(id),
onchange : function(value) {
- // update value
- data.set(id, value);
-
// check value in order to hide/show options
for (var i in input_def.cases) {
// get case
@@ -329,7 +312,7 @@
},
// data input field
- _field_data : function(input_def, data) {
+ _field_data : function(input_def) {
// link this
var self = this;
@@ -337,7 +320,7 @@
var id = input_def.id;
// get datasets
- var datasets = this.datasets.filterType();
+ var datasets = this.app.datasets.filterType();
// configure options fields
var options = [];
@@ -353,15 +336,19 @@
id : 'field-' + id,
data : options,
value : options[0].value,
+ multiple : input_def.multiple,
onchange : function(value) {
- // update value
- data.set(id, value);
+ // pick the first dataset if multiple might be selected
+ // TODO: iterate over all datasets and filter common/consistent columns
+ if (input_def.multiple) {
+ value = value[0];
+ }
// get referenced columns
- var column_list = self.app.tree.findReferences(id);
+ var column_list = self.app.tree.findReferences(id, 'data_column');
// find selected dataset
- var dataset = self.datasets.filter(value);
+ var dataset = self.app.datasets.filter(value);
// check dataset
if (dataset && column_list.length > 0) {
@@ -376,19 +363,34 @@
console.debug('tool-form::field_data() - FAILED: Could not find metadata for dataset ' + value + '.');
}
- // load column options
- var columns = [];
- for (var key in meta) {
- // add to selection
- columns.push({
- 'label' : 'Column: ' + (parseInt(key) + 1) + ' [' + meta[key] + ']',
- 'value' : key
- });
- }
-
// update referenced columns
for (var i in column_list) {
- var column_field = self.app.field_list[column_list[i]]
+ // get column input/field
+ var column_input = self.app.input_list[column_list[i]];
+ var column_field = self.app.field_list[column_list[i]];
+ if (!column_input || !column_field) {
+ console.debug('tool-form::field_data() - FAILED: Column not found.');
+ }
+
+ // is numerical?
+ var numerical = column_input.numerical;
+
+ // identify column options
+ var columns = [];
+ for (var key in meta) {
+ // get column type
+ var column_type = meta[key];
+
+ // add to selection
+ if (column_type == 'int' || column_type == 'float' || !numerical) {
+ columns.push({
+ 'label' : 'Column: ' + (parseInt(key) + 1) + ' [' + meta[key] + ']',
+ 'value' : key
+ });
+ }
+ }
+
+ // update field
if (column_field) {
column_field.update(columns);
if (!column_field.exists(column_field.value())) {
@@ -404,20 +406,8 @@
});
},
- // column selection field
- _field_column : function (input_def, data) {
- var id = input_def.id;
- return new Ui.Select.View({
- id : 'field-' + id,
- value : data.get(id),
- onchange : function(value) {
- data.set(id, value);
- }
- });
- },
-
// select field
- _field_select : function (input_def, data) {
+ _field_select : function (input_def) {
// configure options fields
var options = [];
for (var i in input_def.options) {
@@ -430,95 +420,66 @@
// identify display type
var SelectClass = Ui.Select;
- if (input_def.display == 'checkboxes') {
+ switch (input_def.display) {
+ case 'checkboxes':
+ SelectClass = Ui.Checkbox;
+ break;
+ case 'radio':
+ SelectClass = Ui.RadioButton;
+ break;
+ }
+
+ // force checkboxes if multiple has been selected
+ if (input_def.multiple) {
SelectClass = Ui.Checkbox;
}
// select field
- var id = input_def.id;
return new SelectClass.View({
- id : 'field-' + id,
- data : options,
- value : data.get(id),
- onchange : function(value) {
- data.set(id, value);
- }
+ id : 'field-' + input_def.id,
+ data : options
});
},
-
+
+ // column selection field
+ _field_column : function (input_def) {
+ return new Ui.Select.View({
+ id : 'field-' + input_def.id,
+ multiple: input_def.multiple
+ });
+ },
+
// text input field
- _field_text : function(input_def, data) {
- var id = input_def.id;
+ _field_text : function(input_def) {
return new Ui.Input({
- id : 'field-' + id,
- value : data.get(id),
- onchange : function(value) {
- data.set(id, value);
- }
+ id : 'field-' + input_def.id,
+ area : input_def.area
});
},
// integer field
- _field_integer: function(input_def, data) {
- var id = input_def.id;
+ _field_slider: function(input_def) {
return new Ui.Slider.View({
- id : 'field-' + id,
- value : data.get(id),
- onchange : function(value) {
- data.set(id, value);
- }
- });
- },
-
- // text area
- _field_textarea : function(input_def, data) {
- var id = input_def.id;
- return new Ui.Textarea({
- id : 'field-' + id,
- onchange : function() {
- data.set(id, field.value());
- }
- });
- },
-
- // radio field
- _field_radio : function(input_def, data) {
- var id = input_def.id;
- return new Ui.RadioButton({
- id : 'field-' + id,
- data : input_def.data,
- value : data.get(id),
- onchange : function(value) {
- data.set(id, value);
- }
+ id : 'field-' + input_def.id,
+ min : input_def.min || 0,
+ max : input_def.max || 1000,
+ decimal : input_def.type == 'float'
});
},
// hidden field
- _field_hidden : function(input_def, data) {
- var id = input_def.id;
+ _field_hidden : function(input_def) {
return new Ui.Hidden({
- id : 'field-' + id,
- value : data.get(id)
+ id : 'field-' + input_def.id
});
},
- // hidden field
- _field_radiobutton : function(input_def, data) {
- var id = input_def.id;
+ // boolean field
+ _field_boolean : function(input_def) {
return new Ui.RadioButton.View({
- id : 'field-' + id,
- value : data.get(id),
- data : [
- {
- label : 'Yes',
- value : 'true'
- },
- {
- label : 'No',
- value : 'false'
- }
- ]
+ id : 'field-' + input_def.id,
+ data : [ { label : 'Yes', value : true },
+ { label : 'No', value : false }]
});
}
});
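
For reference, a minimal standalone sketch of the column filtering introduced in _field_data above, not code from this changeset: given the selected dataset's metadata_column_types mapping and the 'numerical' flag of a referenced data_column parameter, only int/float columns are turned into options when the flag is set. The helper name filterColumns is hypothetical.

    // Sketch: meta maps column index -> column type, e.g. { 0: 'str', 1: 'int' };
    // when numerical is true, only int and float columns become options.
    function filterColumns(meta, numerical) {
        var columns = [];
        for (var key in meta) {
            var column_type = meta[key];
            if (column_type == 'int' || column_type == 'float' || !numerical) {
                columns.push({
                    label : 'Column: ' + (parseInt(key) + 1) + ' [' + column_type + ']',
                    value : key
                });
            }
        }
        return columns;
    }

    // e.g. filterColumns({ 0: 'str', 1: 'int' }, true)
    //      -> [ { label: 'Column: 2 [int]', value: '1' } ]
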
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/mvc/tools/tools-tree.js
--- a/static/scripts/mvc/tools/tools-tree.js
+++ b/static/scripts/mvc/tools/tools-tree.js
@@ -66,7 +66,7 @@
},
// find referenced elements
- findReferences: function(identifier) {
+ findReferences: function(identifier, type) {
// referenced elements
var referenced = [];
@@ -105,7 +105,7 @@
}
// check for referenced element
- if (input.data_ref == name) {
+ if (input.data_ref == name && input.type == type) {
list.push(id);
}
}
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/mvc/ui/ui-misc.js
--- a/static/scripts/mvc/ui/ui-misc.js
+++ b/static/scripts/mvc/ui/ui-misc.js
@@ -312,7 +312,8 @@
placeholder : '',
disabled : false,
visible : true,
- cls : ''
+ cls : '',
+ area : false
},
// initialize
@@ -352,60 +353,11 @@
// element
_template: function(options) {
- return '<input id="' + options.id + '" type="' + options.type + '" value="' + options.value + '" placeholder="' + options.placeholder + '" class="ui-input ' + options.cls + '">';
- }
-});
-
-// plugin
-var Textarea = Backbone.View.extend({
- // options
- optionsDefault: {
- value : '',
- type : 'text',
- placeholder : '',
- disabled : false,
- visible : true,
- cls : ''
- },
-
- // initialize
- initialize : function(options) {
- // configure options
- this.options = Utils.merge(options, this.optionsDefault);
-
- // create new element
- this.setElement(this._template(this.options));
-
- // disable input field
- if (this.options.disabled) {
- this.$el.prop('disabled', true);
+ if (options.area) {
+ return '<textarea id="' + options.id + '" class="ui-textarea ' + options.cls + '"></textarea>';
+ } else {
+ return '<input id="' + options.id + '" type="' + options.type + '" value="' + options.value + '" placeholder="' + options.placeholder + '" class="ui-input ' + options.cls + '">';
}
-
- // hide input field
- if (!this.options.visible) {
- this.$el.hide();
- }
-
- // onchange event handler. fires on user activity.
- var self = this;
- this.$el.on('input', function() {
- if (self.options.onchange) {
- self.options.onchange(self.$el.val());
- }
- });
- },
-
- // value
- value : function (new_val) {
- if (new_val !== undefined) {
- this.$el.val(new_val);
- }
- return this.$el.val();
- },
-
- // element
- _template: function(options) {
- return '<textarea id="' + options.id + '" class="ui-textarea ' + options.cls + '" rows="5"></textarea>';
}
});
@@ -455,7 +407,6 @@
Checkbox : Checkbox,
Searchbox : Searchbox,
Select : Select,
- Textarea : Textarea,
Hidden : Hidden,
Slider : Slider
}
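
The removed Textarea view is folded into Input: the template now renders either a textarea or a text input, selected by the new 'area' option. A minimal sketch of that branch as a free function (hypothetical name, illustration only):

    // One view, two renderings, chosen by options.area.
    function inputTemplate(options) {
        if (options.area) {
            return '<textarea id="' + options.id + '" class="ui-textarea ' + options.cls + '"></textarea>';
        }
        return '<input id="' + options.id + '" type="' + options.type + '" value="' + options.value +
               '" placeholder="' + options.placeholder + '" class="ui-input ' + options.cls + '">';
    }
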
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/mvc/ui/ui-select-default.js
--- a/static/scripts/mvc/ui/ui-select-default.js
+++ b/static/scripts/mvc/ui/ui-select-default.js
@@ -7,24 +7,19 @@
var View = Backbone.View.extend({
// options
optionsDefault : {
- id : '',
- cls : '',
- empty : 'No data available',
- visible : true,
- wait : false
+ id : '',
+ cls : '',
+ empty : 'No data available',
+ visible : true,
+ wait : false,
+ multiple : false
},
- // value
- selected : null,
-
// initialize
initialize : function(options) {
// configure options
this.options = Utils.merge(options, this.optionsDefault);
- // initial value
- this.selected = this.options.value;
-
// create new element
this.setElement(this._template(this.options));
@@ -32,21 +27,17 @@
this.$select = this.$el.find('#select');
this.$icon = this.$el.find('#icon');
- // add change event. fires only on user activity
- var self = this;
- this.$select.on('change', function() {
- self.value(self.$select.val());
- });
-
- // add change event. fires on trigger
- this.on('change', function() {
- if (self.options.onchange) {
- self.options.onchange(self.value());
- }
- });
+ // configure multiple
+ if (this.options.multiple) {
+ this.$select.prop('multiple', true);
+ this.$select.addClass('ui-select-multiple');
+ this.$icon.remove();
+ } else {
+ this.$el.addClass('ui-select');
+ }
// refresh
- this._refresh();
+ this.update(this.options.data);
// show/hide
if (!this.options.visible) {
@@ -59,31 +50,25 @@
} else {
this.show();
}
+
+ // add change event. fires only on user activity
+ var self = this;
+ this.$select.on('change', function() {
+ self._change();
+ });
+
+ // add change event. fires on trigger
+ this.on('change', function() {
+ self._change();
+ });
},
// value
value : function (new_value) {
-
- // get current id/value
- var before = this.selected;
-
- // check if new_value is defined
if (new_value !== undefined) {
- this.selected = new_value;
this.$select.val(new_value);
}
-
- // get current id/value
- var after = this.selected;
- if (after) {
- // fire onchange
- if (after != before && this.options.onchange) {
- this.options.onchange(after);
- }
- }
-
- // return
- return after;
+ return this.$select.val();
},
// first
@@ -157,6 +142,9 @@
// render
update: function(options) {
+ // backup current value
+ var current = this.$select.val();
+
// remove all options
this.$select.find('option').remove();
@@ -167,6 +155,14 @@
// refresh
this._refresh();
+
+ // set previous value
+ this.$select.val(current);
+
+ // check if any value was set
+ if (!this.$select.val()) {
+ this.$select.val(this.first());
+ }
},
// set on change event
@@ -179,6 +175,13 @@
return this.$select.find('option[value=' + value + ']').length > 0;
},
+ // change
+ _change: function() {
+ if (this.options.onchange) {
+ this.options.onchange(this.$select.val());
+ }
+ },
+
// refresh
_refresh: function() {
// remove placeholder
@@ -196,41 +199,21 @@
// enable select field
this.enable();
}
-
- // update value
- if (this.selected) {
- this.$select.val(this.selected);
- }
},
- // option
+ // template option
_templateOption: function(options) {
return '<option value="' + options.value + '">' + options.label + '</option>';
},
- // element
+ // template
_template: function(options) {
- var tmpl = '<div id="' + options.id + '" class="ui-select">' +
- '<div class="button">' +
- '<i id="icon"/>' +
- '</div>' +
- '<select id="select" class="select ' + options.cls + ' ' + options.id + '">';
- for (key in options.data) {
- // options
- var item = options.data[key];
-
- // identify selected value
- var tag = '';
- if (item.value == options.value || item.value == '') {
- tag = 'selected';
- }
-
- // add template string
- tmpl += '<option value="' + item.value + '" ' + tag + '>' + item.label + '</option>';
- }
- tmpl += '</select>' +
- '</div>';
- return tmpl;
+ return '<div id="' + options.id + '">' +
+ '<div class="button">' +
+ '<i id="icon"/>' +
+ '</div>' +
+ '<select id="select" class="select ' + options.cls + ' ' + options.id + '"></select>' +
+ '</div>';
}
});
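
The reworked update() preserves the user's selection across an option refresh and falls back to the first option when the previous value no longer exists. A minimal sketch of that pattern against a plain jQuery-wrapped select element (hypothetical helper name, not the view code itself):

    // Value-preserving option refresh, assuming a jQuery-wrapped <select>.
    function updateOptions($select, options) {
        // remember the current selection
        var current = $select.val();

        // rebuild the option list
        $select.find('option').remove();
        for (var i in options) {
            $select.append('<option value="' + options[i].value + '">' + options[i].label + '</option>');
        }

        // restore the previous selection, falling back to the first option
        $select.val(current);
        if (!$select.val()) {
            $select.val($select.find('option').first().val());
        }
    }
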
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/mvc/ui/ui-slider.js
--- a/static/scripts/mvc/ui/ui-slider.js
+++ b/static/scripts/mvc/ui/ui-slider.js
@@ -7,7 +7,9 @@
optionsDefault: {
value : '',
min : 1,
- max : 100
+ max : 100,
+ step : 0.1,
+ decimal : false
},
// initialize
@@ -27,6 +29,11 @@
// backup integer field
this.$text = this.$el.find('#text');
+ // set step size
+ if (!this.options.decimal) {
+ this.options.step = 1;
+ }
+
// load slider plugin
this.$slider.slider(this.options);
@@ -38,7 +45,8 @@
// add text field event
this.$text.on('keydown', function (event) {
var v = event.which;
- if (!(v == 13 || v == 8 || v == 189 || (v >= 48 && v <= 57))) {
+ if (!(v == 13 || v == 8 || v == 37 || v == 39 || v == 189 || (v >= 48 && v <= 57)
+ || (self.options.decimal && $(this).val().indexOf('.') == -1) && v == 190)) {
event.preventDefault();
}
});
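
The extended keydown condition above whitelists digits, enter, backspace, the left/right arrows, minus, and the period, the last only in decimal mode and only while no '.' is present yet. A sketch of the same rule as a plain predicate (hypothetical helper, illustration only):

    // Returns true when the pressed key should be accepted by the slider's
    // text field; the view would call event.preventDefault() otherwise.
    function allowSliderKey(keyCode, currentValue, decimal) {
        var isDigit   = keyCode >= 48 && keyCode <= 57;
        var isControl = keyCode == 13 || keyCode == 8 || keyCode == 37 || keyCode == 39;
        var isMinus   = keyCode == 189;
        var isDot     = keyCode == 190 && decimal && currentValue.indexOf('.') == -1;
        return isDigit || isControl || isMinus || isDot;
    }

    // e.g. allowSliderKey(190, '3.1', true) -> false (only one '.' allowed)
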
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/packed/mvc/tools/tools-form.js
--- a/static/scripts/packed/mvc/tools/tools-form.js
+++ b/static/scripts/packed/mvc/tools/tools-form.js
@@ -1,1 +1,1 @@
-define(["mvc/ui/ui-portlet","mvc/ui/ui-misc","mvc/citation/citation-model","mvc/citation/citation-view","mvc/tools","mvc/tools/tools-template","mvc/tools/tools-datasets","mvc/tools/tools-section","mvc/tools/tools-tree"],function(g,k,i,a,f,d,h,j,c){var e=Backbone.Model.extend({initialize:function(l){this.url=galaxy_config.root+"api/tools/"+l.id+"?io_details=true"}});var b=Backbone.View.extend({main_el:"body",initialize:function(m){var l=this;this.options=m;this.model=new e({id:m.id});this.tree=new c(this);this.field_list={};this.input_list={};this.data=new Backbone.Model();this.datasets=new h({success:function(){l._initializeToolForm()}})},_initializeToolForm:function(){var l=this;this.model.fetch({error:function(m){console.debug("tools-form::_initializeToolForm() : Attempt to fetch tool model failed.")},success:function(){l.inputs=l.model.get("inputs");l.portlet=new g.View({icon:"fa-wrench",title:"<b>"+l.model.get("name")+"</b> "+l.model.get("description"),buttons:{execute:new k.ButtonIcon({icon:"fa-check",tooltip:"Execute the tool",title:"Execute",floating:"clear",onclick:function(){console.log(l.tree.create(l))}})}});l.message=new k.Message();l.portlet.append(l.message.$el);$(l.main_el).append(l.portlet.$el);if(l.options.help!=""){$(l.main_el).append(d.help(l.options.help))}if(l.options.citations){$(l.main_el).append(d.citations());var m=new i.ToolCitationCollection();m.tool_id=l.options.id;var n=new a.CitationListView({collection:m});n.render();m.fetch()}l.setElement(l.portlet.content());l.section=new j.View(l,{inputs:l.model.get("inputs")});l.portlet.append(l.section.$el);l.refresh()}})},refresh:function(){this.tree.refresh();for(var l in this.field_list){this.field_list[l].trigger("change")}console.debug("tools-form::refresh() - Recreated tree structure. Refresh.")}});return{View:b}});
\ No newline at end of file
+define(["mvc/ui/ui-portlet","mvc/ui/ui-misc","mvc/citation/citation-model","mvc/citation/citation-view","mvc/tools","mvc/tools/tools-template","mvc/tools/tools-datasets","mvc/tools/tools-section","mvc/tools/tools-tree"],function(g,k,i,a,f,d,h,j,c){var e=Backbone.Model.extend({initialize:function(l){this.url=galaxy_config.root+"api/tools/"+l.id+"?io_details=true"}});var b=Backbone.View.extend({main_el:"body",initialize:function(m){var l=this;this.options=m;this.model=new e({id:m.id});this.tree=new c(this);this.field_list={};this.input_list={};this.datasets=new h({success:function(){l._initializeToolForm()}})},_initializeToolForm:function(){var l=this;this.model.fetch({error:function(m){console.debug("tools-form::_initializeToolForm() : Attempt to fetch tool model failed.")},success:function(){l.inputs=l.model.get("inputs");l.portlet=new g.View({icon:"fa-wrench",title:"<b>"+l.model.get("name")+"</b> "+l.model.get("description"),buttons:{execute:new k.ButtonIcon({icon:"fa-check",tooltip:"Execute the tool",title:"Execute",floating:"clear",onclick:function(){console.log(l.tree.create(l))}})}});l.message=new k.Message();l.portlet.append(l.message.$el);$(l.main_el).append(l.portlet.$el);if(l.options.help!=""){$(l.main_el).append(d.help(l.options.help))}if(l.options.citations){$(l.main_el).append(d.citations());var m=new i.ToolCitationCollection();m.tool_id=l.options.id;var n=new a.CitationListView({collection:m});n.render();m.fetch()}l.setElement(l.portlet.content());l.section=new j.View(l,{inputs:l.model.get("inputs")});l.portlet.append(l.section.$el);l.refresh()}})},refresh:function(){this.tree.refresh();for(var l in this.field_list){this.field_list[l].trigger("change")}console.debug("tools-form::refresh() - Recreated tree structure. Refresh.")}});return{View:b}});
\ No newline at end of file
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/packed/mvc/tools/tools-section.js
--- a/static/scripts/packed/mvc/tools/tools-section.js
+++ b/static/scripts/packed/mvc/tools/tools-section.js
@@ -1,1 +1,1 @@
-define(["utils/utils","mvc/ui/ui-table","mvc/ui/ui-misc","mvc/ui/ui-tabs"],function(c,b,e,a){var d=Backbone.View.extend({initialize:function(g,f){this.app=g;this.inputs=f.inputs;this.datasets=g.datasets;this.data=g.data;f.cls_tr="section-row";this.table=new b.View(f);this.setElement(this.table.$el);this.render()},render:function(){this.table.delAll();for(var f in this.inputs){this._add(this.inputs[f],this.data)}},_add:function(h,j){var g=this;var f=jQuery.extend(true,{},h);f.id=c.uuid();this.app.input_list[f.id]=f;var i=f.type;switch(i){case"conditional":this._addConditional(f,j);break;case"repeat":this._addRepeat(f);break;default:this._addRow(i,f,j)}},_addConditional:function(f,j){f.label=f.test_param.label;f.value=f.test_param.value;this._addRow("conditional",f,j);for(var h in f.cases){var g=f.id+"-section-"+h;var k=new d(this.app,{inputs:f.cases[h].inputs,cls:"ui-table-plain"});this.table.add("");this.table.add(k.$el);this.table.append(g)}},_addRepeat:function(f){var g=this;var k=new a.View({title_new:"Add "+f.title,max:f.max,onnew:function(){var i=f.id+"-section-"+c.uuid();var m=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:i,title:f.title,$el:m.$el,ondel:function(){k.del(i);k.retitle(f.title);g.app.refresh()}});k.retitle(f.title);k.show(i);g.app.refresh()}});for(var j=0;j<f.min;j++){var h=f.id+"-section-"+c.uuid();var l=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:h,title:f.title,$el:l.$el})}k.retitle(f.title);this.table.add("");this.table.add(k.$el);this.table.append(f.id)},_addRow:function(i,f,g){var k=f.id;var h=null;switch(i){case"text":h=this._field_text(f,g);break;case"select":h=this._field_select(f,g);break;case"radiobutton":h=this._field_radio(f,g);break;case"data":h=this._field_data(f,g);break;case"data_column":h=this._field_column(f,g);break;case"textarea":h=this._field_textarea(f,g);break;case"conditional":h=this._field_conditional(f,g);break;case"hidden":h=this._field_hidden(f,g);break;case"integer":h=this._field_integer(f,g);break;case"boolean":h=this._field_radiobutton(f,g);break;default:h=this._field_text(f,g);console.debug("tools-form::_addRow() : Unmatched field type ("+i+").")}if(!g.get(k)){g.set(k,f.value)}h.value(g.get(k));this.app.field_list[k]=h;var j=$("<div/>");j.append(h.$el);if(f.help){j.append('<div class="ui-table-form-info">'+f.help+"</div>")}this.table.add('<span class="ui-table-form-title">'+f.label+"</span>","20%");this.table.add(j);this.table.append(k)},_field_conditional:function(f,l){var g=this;var h=[];for(var j in f.test_param.options){var k=f.test_param.options[j];h.push({label:k[0],value:k[1]})}var m=f.id;return new e.Select.View({id:"field-"+m,data:h,value:l.get(m),onchange:function(u){l.set(m,u);for(var s in f.cases){var o=f.cases[s];var r=f.id+"-section-"+s;var n=g.table.get(r);var q=false;for(var p in o.inputs){var t=o.inputs[p].type;if(t&&t!=="hidden"){q=true;break}}if(o.value==u&&q){n.fadeIn("fast")}else{n.hide()}}}})},_field_data:function(f,l){var g=this;var m=f.id;var k=this.datasets.filterType();var h=[];for(var j in k){h.push({label:k[j].get("name"),value:k[j].get("id")})}return new e.Select.View({id:"field-"+m,data:h,value:h[0].value,onchange:function(s){l.set(m,s);var o=g.app.tree.findReferences(m);var u=g.datasets.filter(s);if(u&&o.length>0){console.debug("tool-form::field_data() - Selected dataset "+s+".");var t=u.get("metadata_column_types");if(!t){console.debug("tool-form::field_data() - FAILED: Could not find metadata for dataset "+s+".")}var r=[];for(var q in t){r.push({label:"Column: 
"+(parseInt(q)+1)+" ["+t[q]+"]",value:q})}for(var p in o){var n=g.app.field_list[o[p]];if(n){n.update(r);if(!n.exists(n.value())){n.value(n.first())}}}}else{console.debug("tool-form::field_data() - FAILED: Could not find dataset "+s+".")}}})},_field_column:function(f,g){var h=f.id;return new e.Select.View({id:"field-"+h,value:g.get(h),onchange:function(i){g.set(h,i)}})},_field_select:function(f,l){var g=[];for(var h in f.options){var j=f.options[h];g.push({label:j[0],value:j[1]})}var k=e.Select;if(f.display=="checkboxes"){k=e.Checkbox}var m=f.id;return new k.View({id:"field-"+m,data:g,value:l.get(m),onchange:function(i){l.set(m,i)}})},_field_text:function(f,g){var h=f.id;return new e.Input({id:"field-"+h,value:g.get(h),onchange:function(i){g.set(h,i)}})},_field_integer:function(f,g){var h=f.id;return new e.Slider.View({id:"field-"+h,value:g.get(h),onchange:function(i){g.set(h,i)}})},_field_textarea:function(f,g){var h=f.id;return new e.Textarea({id:"field-"+h,onchange:function(){g.set(h,field.value())}})},_field_radio:function(f,g){var h=f.id;return new e.RadioButton({id:"field-"+h,data:f.data,value:g.get(h),onchange:function(i){g.set(h,i)}})},_field_hidden:function(f,g){var h=f.id;return new e.Hidden({id:"field-"+h,value:g.get(h)})},_field_radiobutton:function(f,g){var h=f.id;return new e.RadioButton.View({id:"field-"+h,value:g.get(h),data:[{label:"Yes",value:"true"},{label:"No",value:"false"}]})}});return{View:d}});
\ No newline at end of file
+define(["utils/utils","mvc/ui/ui-table","mvc/ui/ui-misc","mvc/ui/ui-tabs"],function(c,b,e,a){var d=Backbone.View.extend({initialize:function(g,f){this.app=g;this.inputs=f.inputs;f.cls_tr="section-row";this.table=new b.View(f);this.setElement(this.table.$el);this.render()},render:function(){this.table.delAll();for(var f in this.inputs){this._add(this.inputs[f])}},_add:function(h){var g=this;var f=jQuery.extend(true,{},h);f.id=c.uuid();this.app.input_list[f.id]=f;var i=f.type;switch(i){case"conditional":this._addConditional(f);break;case"repeat":this._addRepeat(f);break;default:this._addRow(i,f)}},_addConditional:function(f){f.label=f.test_param.label;f.value=f.test_param.value;this._addRow("conditional",f);for(var h in f.cases){var g=f.id+"-section-"+h;var j=new d(this.app,{inputs:f.cases[h].inputs,cls:"ui-table-plain"});this.table.add("");this.table.add(j.$el);this.table.append(g)}},_addRepeat:function(f){var g=this;var k=new a.View({title_new:"Add "+f.title,max:f.max,onnew:function(){var i=f.id+"-section-"+c.uuid();var m=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:i,title:f.title,$el:m.$el,ondel:function(){k.del(i);k.retitle(f.title);g.app.refresh()}});k.retitle(f.title);k.show(i);g.app.refresh()}});for(var j=0;j<f.min;j++){var h=f.id+"-section-"+c.uuid();var l=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:h,title:f.title,$el:l.$el})}k.retitle(f.title);this.table.add("");this.table.add(k.$el);this.table.append(f.id)},_addRow:function(h,f){var j=f.id;var g=null;switch(h){case"text":g=this._field_text(f);break;case"select":g=this._field_select(f);break;case"data":g=this._field_data(f);break;case"data_column":g=this._field_column(f);break;case"conditional":g=this._field_conditional(f);break;case"hidden":g=this._field_hidden(f);break;case"integer":g=this._field_slider(f);break;case"float":g=this._field_slider(f);break;case"boolean":g=this._field_boolean(f);break;default:g=this._field_text(f);console.debug("tools-form::_addRow() : Unmatched field type ("+h+").")}if(f.value!==undefined){g.value(f.value)}this.app.field_list[j]=g;var i=$("<div/>");i.append(g.$el);if(f.help){i.append('<div class="ui-table-form-info">'+f.help+"</div>")}this.table.add('<span class="ui-table-form-title">'+f.label+"</span>","20%");this.table.add(i);this.table.append(j)},_field_conditional:function(f){var g=this;var h=[];for(var j in f.test_param.options){var k=f.test_param.options[j];h.push({label:k[0],value:k[1]})}return new e.Select.View({id:"field-"+f.id,data:h,onchange:function(s){for(var q in f.cases){var m=f.cases[q];var p=f.id+"-section-"+q;var l=g.table.get(p);var o=false;for(var n in m.inputs){var r=m.inputs[n].type;if(r&&r!=="hidden"){o=true;break}}if(m.value==s&&o){l.fadeIn("fast")}else{l.hide()}}}})},_field_data:function(f){var g=this;var l=f.id;var k=this.app.datasets.filterType();var h=[];for(var j in k){h.push({label:k[j].get("name"),value:k[j].get("id")})}return new e.Select.View({id:"field-"+l,data:h,value:h[0].value,onchange:function(u){var s=g.app.tree.findReferences(l,"data_column");var n=g.app.datasets.filter(u);if(n&&s.length>0){console.debug("tool-form::field_data() - Selected dataset "+u+".");var w=n.get("metadata_column_types");if(!w){console.debug("tool-form::field_data() - FAILED: Could not find metadata for dataset "+u+".")}for(var p in s){var q=g.app.input_list[s[p]];var r=g.app.field_list[s[p]];if(!q||!r){console.debug("tool-form::field_data() - FAILED: Column not found.")}var o=q.numerical;var m=[];for(var v in w){var 
t=w[v];if(t=="int"||t=="float"||!o){m.push({label:"Column: "+(parseInt(v)+1)+" ["+w[v]+"]",value:v})}}if(r){r.update(m);if(!r.exists(r.value())){r.value(r.first())}}}}else{console.debug("tool-form::field_data() - FAILED: Could not find dataset "+u+".")}}})},_field_select:function(f){var g=[];for(var h in f.options){var j=f.options[h];g.push({label:j[0],value:j[1]})}var k=e.Select;switch(f.display){case"checkboxes":k=e.Checkbox;break;case"radio":k=e.RadioButton;break}if(f.multiple){k=e.Checkbox}return new k.View({id:"field-"+f.id,data:g})},_field_column:function(f){return new e.Select.View({id:"field-"+f.id,multiple:f.multiple})},_field_text:function(f){return new e.Input({id:"field-"+f.id,area:f.area})},_field_slider:function(f){return new e.Slider.View({id:"field-"+f.id,min:f.min||0,max:f.max||1000,decimal:f.type=="float"})},_field_hidden:function(f){return new e.Hidden({id:"field-"+f.id})},_field_boolean:function(f){return new e.RadioButton.View({id:"field-"+f.id,data:[{label:"Yes",value:true},{label:"No",value:false}]})}});return{View:d}});
\ No newline at end of file
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/packed/mvc/tools/tools-tree.js
--- a/static/scripts/packed/mvc/tools/tools-tree.js
+++ b/static/scripts/packed/mvc/tools/tools-tree.js
@@ -1,1 +1,1 @@
-define([],function(){return Backbone.Model.extend({initialize:function(a){this.app=a},refresh:function(){if(!this.app.section){return{}}this.dict={};this.xml=$("<div/>");this._iterate(this.app.section.$el,this.dict,this.xml)},_iterate:function(d,e,b){var a=this;var c=$(d).children();c.each(function(){var i=this;var h=$(i).attr("id");if($(i).hasClass("section-row")||$(i).hasClass("tab-pane")){e[h]={};var f=a.app.input_list[h];if(f){e[h]={input:f}}var g=$('<div id="'+h+'"/>');b.append(g);a._iterate(i,e[h],g)}else{a._iterate(i,e,b)}})},findReferences:function(c){var f=[];var b=this;function d(g,i){var h=$(i).children();var k=[];var j=false;h.each(function(){var n=this;var m=$(n).attr("id");if(m!==c){var l=b.app.input_list[m];if(l){if(l.name==g){j=true;return false}if(l.data_ref==g){k.push(m)}}}});if(!j){f=f.concat(k);h.each(function(){d(g,this)})}}var e=this.xml.find("#"+c);if(e.length>0){var a=this.app.input_list[c];if(a){d(a.name,e.parent())}}return f}})});
\ No newline at end of file
+define([],function(){return Backbone.Model.extend({initialize:function(a){this.app=a},refresh:function(){if(!this.app.section){return{}}this.dict={};this.xml=$("<div/>");this._iterate(this.app.section.$el,this.dict,this.xml)},_iterate:function(d,e,b){var a=this;var c=$(d).children();c.each(function(){var i=this;var h=$(i).attr("id");if($(i).hasClass("section-row")||$(i).hasClass("tab-pane")){e[h]={};var f=a.app.input_list[h];if(f){e[h]={input:f}}var g=$('<div id="'+h+'"/>');b.append(g);a._iterate(i,e[h],g)}else{a._iterate(i,e,b)}})},findReferences:function(c,e){var g=[];var b=this;function d(h,j){var i=$(j).children();var l=[];var k=false;i.each(function(){var o=this;var n=$(o).attr("id");if(n!==c){var m=b.app.input_list[n];if(m){if(m.name==h){k=true;return false}if(m.data_ref==h&&m.type==e){l.push(n)}}}});if(!k){g=g.concat(l);i.each(function(){d(h,this)})}}var f=this.xml.find("#"+c);if(f.length>0){var a=this.app.input_list[c];if(a){d(a.name,f.parent())}}return g}})});
\ No newline at end of file
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/packed/mvc/ui/ui-misc.js
--- a/static/scripts/packed/mvc/ui/ui-misc.js
+++ b/static/scripts/packed/mvc/ui/ui-misc.js
@@ -1,1 +1,1 @@
-define(["utils/utils","mvc/ui/ui-select-default","mvc/ui/ui-slider","mvc/ui/ui-checkbox","mvc/ui/ui-radiobutton","mvc/ui/ui-button-menu","mvc/ui/ui-modal"],function(e,h,k,i,b,r,c){var q=Backbone.View.extend({optionsDefault:{url:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options))},_template:function(s){return'<img class="ui-image '+s.cls+'" src="'+s.url+'"/>'}});var l=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options))},title:function(s){this.$el.html(s)},_template:function(s){return'<label class="ui-label '+s.cls+'">'+s.title+"</label>"},value:function(){return options.title}});var d=Backbone.View.extend({optionsDefault:{floating:"right",icon:"",tooltip:"",placement:"bottom",title:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).tooltip({title:s.tooltip,placement:"bottom"})},_template:function(s){return'<div><span class="fa '+s.icon+'" class="ui-icon"/> '+s.title+"</div>"}});var g=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"btn btn-default",icon:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",s.onclick);$(this.el).tooltip({title:s.tooltip,placement:"bottom"})},_template:function(s){var t='<button id="'+s.id+'" type="submit" style="float: '+s.floating+';" type="button" class="ui-button '+s.cls+'">';if(s.icon){t+='<i class="icon fa '+s.icon+'"></i> '}t+=s.title+"</button>";return t}});var o=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"icon-btn",icon:"",tooltip:"",onclick:null},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",s.onclick);$(this.el).tooltip({title:s.tooltip,placement:"bottom"})},_template:function(s){var t="";if(s.title){t="width: auto;"}var u='<div id="'+s.id+'" style="float: '+s.floating+"; "+t+'" class="ui-button-icon '+s.cls+'">';if(s.title){u+='<div class="button"><i class="icon fa '+s.icon+'"/> <span class="title">'+s.title+"</span></div>"}else{u+='<i class="icon fa '+s.icon+'"/>'}u+="</div>";return u}});var p=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",s.onclick)},_template:function(s){return'<div><a href="javascript:void(0)" class="ui-anchor '+s.cls+'">'+s.title+"</a></div>"}});var a=Backbone.View.extend({optionsDefault:{message:"",status:"info",persistent:false},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement("<div></div>")},update:function(t){this.options=e.merge(t,this.optionsDefault);if(t.message!=""){this.$el.html(this._template(this.options));this.$el.find(".alert").append(t.message);this.$el.fadeIn();if(!t.persistent){var s=this;window.setTimeout(function(){if(s.$el.is(":visible")){s.$el.fadeOut()}else{s.$el.hide()}},3000)}}else{this.$el.fadeOut()}},_template:function(s){return'<div class="ui-message alert alert-'+s.status+'"/>'}});var f=Backbone.View.extend({optionsDefault:{onclick:null,searchword:""},initialize:function(t){this.options=e.merge(t,this.optionsDefault);this.setElement(this._template(this.options));var 
s=this;if(this.options.onclick){this.$el.on("submit",function(v){var u=s.$el.find("#search");s.options.onclick(u.val())})}},_template:function(s){return'<div class="ui-search"><form onsubmit="return false;"><input id="search" class="form-control input-sm" type="text" name="search" placeholder="Search..." value="'+s.searchword+'"><button type="submit" class="btn search-btn"><i class="fa fa-search"></i></button></form></div>'}});var n=Backbone.View.extend({optionsDefault:{value:"",type:"text",placeholder:"",disabled:false,visible:true,cls:""},initialize:function(t){this.options=e.merge(t,this.optionsDefault);this.setElement(this._template(this.options));if(this.options.disabled){this.$el.prop("disabled",true)}if(!this.options.visible){this.$el.hide()}var s=this;this.$el.on("input",function(){if(s.options.onchange){s.options.onchange(s.$el.val())}})},value:function(s){if(s!==undefined){this.$el.val(s)}return this.$el.val()},_template:function(s){return'<input id="'+s.id+'" type="'+s.type+'" value="'+s.value+'" placeholder="'+s.placeholder+'" class="ui-input '+s.cls+'">'}});var j=Backbone.View.extend({optionsDefault:{value:"",type:"text",placeholder:"",disabled:false,visible:true,cls:""},initialize:function(t){this.options=e.merge(t,this.optionsDefault);this.setElement(this._template(this.options));if(this.options.disabled){this.$el.prop("disabled",true)}if(!this.options.visible){this.$el.hide()}var s=this;this.$el.on("input",function(){if(s.options.onchange){s.options.onchange(s.$el.val())}})},value:function(s){if(s!==undefined){this.$el.val(s)}return this.$el.val()},_template:function(s){return'<textarea id="'+s.id+'" class="ui-textarea '+s.cls+'" rows="5"></textarea>'}});var m=Backbone.View.extend({optionsDefault:{value:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options))},value:function(s){if(s!==undefined){this.$el.val(s)}return this.$el.val()},_template:function(s){return'<hidden id="'+s.id+'" value="'+s.value+'"/>'}});return{Anchor:p,Button:g,ButtonIcon:o,ButtonMenu:r,Icon:d,Image:q,Input:n,Label:l,Message:a,Modal:c,RadioButton:b,Checkbox:i,Searchbox:f,Select:h,Textarea:j,Hidden:m,Slider:k}});
\ No newline at end of file
+define(["utils/utils","mvc/ui/ui-select-default","mvc/ui/ui-slider","mvc/ui/ui-checkbox","mvc/ui/ui-radiobutton","mvc/ui/ui-button-menu","mvc/ui/ui-modal"],function(l,b,f,e,m,q,n){var d=Backbone.View.extend({optionsDefault:{url:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options))},_template:function(r){return'<img class="ui-image '+r.cls+'" src="'+r.url+'"/>'}});var k=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options))},title:function(r){this.$el.html(r)},_template:function(r){return'<label class="ui-label '+r.cls+'">'+r.title+"</label>"},value:function(){return options.title}});var c=Backbone.View.extend({optionsDefault:{floating:"right",icon:"",tooltip:"",placement:"bottom",title:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).tooltip({title:r.tooltip,placement:"bottom"})},_template:function(r){return'<div><span class="fa '+r.icon+'" class="ui-icon"/> '+r.title+"</div>"}});var h=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"btn btn-default",icon:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",r.onclick);$(this.el).tooltip({title:r.tooltip,placement:"bottom"})},_template:function(r){var s='<button id="'+r.id+'" type="submit" style="float: '+r.floating+';" type="button" class="ui-button '+r.cls+'">';if(r.icon){s+='<i class="icon fa '+r.icon+'"></i> '}s+=r.title+"</button>";return s}});var i=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"icon-btn",icon:"",tooltip:"",onclick:null},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",r.onclick);$(this.el).tooltip({title:r.tooltip,placement:"bottom"})},_template:function(r){var s="";if(r.title){s="width: auto;"}var t='<div id="'+r.id+'" style="float: '+r.floating+"; "+s+'" class="ui-button-icon '+r.cls+'">';if(r.title){t+='<div class="button"><i class="icon fa '+r.icon+'"/> <span class="title">'+r.title+"</span></div>"}else{t+='<i class="icon fa '+r.icon+'"/>'}t+="</div>";return t}});var g=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",r.onclick)},_template:function(r){return'<div><a href="javascript:void(0)" class="ui-anchor '+r.cls+'">'+r.title+"</a></div>"}});var o=Backbone.View.extend({optionsDefault:{message:"",status:"info",persistent:false},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement("<div></div>")},update:function(s){this.options=l.merge(s,this.optionsDefault);if(s.message!=""){this.$el.html(this._template(this.options));this.$el.find(".alert").append(s.message);this.$el.fadeIn();if(!s.persistent){var r=this;window.setTimeout(function(){if(r.$el.is(":visible")){r.$el.fadeOut()}else{r.$el.hide()}},3000)}}else{this.$el.fadeOut()}},_template:function(r){return'<div class="ui-message alert alert-'+r.status+'"/>'}});var a=Backbone.View.extend({optionsDefault:{onclick:null,searchword:""},initialize:function(s){this.options=l.merge(s,this.optionsDefault);this.setElement(this._template(this.options));var 
r=this;if(this.options.onclick){this.$el.on("submit",function(u){var t=r.$el.find("#search");r.options.onclick(t.val())})}},_template:function(r){return'<div class="ui-search"><form onsubmit="return false;"><input id="search" class="form-control input-sm" type="text" name="search" placeholder="Search..." value="'+r.searchword+'"><button type="submit" class="btn search-btn"><i class="fa fa-search"></i></button></form></div>'}});var j=Backbone.View.extend({optionsDefault:{value:"",type:"text",placeholder:"",disabled:false,visible:true,cls:"",area:false},initialize:function(s){this.options=l.merge(s,this.optionsDefault);this.setElement(this._template(this.options));if(this.options.disabled){this.$el.prop("disabled",true)}if(!this.options.visible){this.$el.hide()}var r=this;this.$el.on("input",function(){if(r.options.onchange){r.options.onchange(r.$el.val())}})},value:function(r){if(r!==undefined){this.$el.val(r)}return this.$el.val()},_template:function(r){if(r.area){return'<textarea id="'+r.id+'" class="ui-textarea '+r.cls+'"></textarea>'}else{return'<input id="'+r.id+'" type="'+r.type+'" value="'+r.value+'" placeholder="'+r.placeholder+'" class="ui-input '+r.cls+'">'}}});var p=Backbone.View.extend({optionsDefault:{value:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options))},value:function(r){if(r!==undefined){this.$el.val(r)}return this.$el.val()},_template:function(r){return'<hidden id="'+r.id+'" value="'+r.value+'"/>'}});return{Anchor:g,Button:h,ButtonIcon:i,ButtonMenu:q,Icon:c,Image:d,Input:j,Label:k,Message:o,Modal:n,RadioButton:m,Checkbox:e,Searchbox:a,Select:b,Hidden:p,Slider:f}});
\ No newline at end of file
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/packed/mvc/ui/ui-select-default.js
--- a/static/scripts/packed/mvc/ui/ui-select-default.js
+++ b/static/scripts/packed/mvc/ui/ui-select-default.js
@@ -1,1 +1,1 @@
-define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{id:"",cls:"",empty:"No data available",visible:true,wait:false},selected:null,initialize:function(d){this.options=a.merge(d,this.optionsDefault);this.selected=this.options.value;this.setElement(this._template(this.options));this.$select=this.$el.find("#select");this.$icon=this.$el.find("#icon");var c=this;this.$select.on("change",function(){c.value(c.$select.val())});this.on("change",function(){if(c.options.onchange){c.options.onchange(c.value())}});this._refresh();if(!this.options.visible){this.hide()}if(this.options.wait){this.wait()}else{this.show()}},value:function(c){var d=this.selected;if(c!==undefined){this.selected=c;this.$select.val(c)}var e=this.selected;if(e){if(e!=d&&this.options.onchange){this.options.onchange(e)}}return e},first:function(){var c=this.$select.find("option");if(c.length>0){return c.val()}else{return undefined}},text:function(){return this.$select.find("option:selected").text()},show:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-caret-down");this.$select.show();this.$el.show()},hide:function(){this.$el.hide()},wait:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-spinner fa-spin");this.$select.hide()},disabled:function(){return this.$select.is(":disabled")},enable:function(){this.$select.prop("disabled",false)},disable:function(){this.$select.prop("disabled",true)},add:function(c){this.$select.append(this._templateOption(c));this._refresh()},del:function(c){this.$select.find("option[value="+c+"]").remove();this.$select.trigger("change");this._refresh()},update:function(c){this.$select.find("option").remove();for(var d in c){this.$select.append(this._templateOption(c[d]))}this._refresh()},setOnChange:function(c){this.options.onchange=c},exists:function(c){return this.$select.find("option[value="+c+"]").length>0},_refresh:function(){this.$select.find("option[value=null]").remove();var c=this.$select.find("option").length;if(c==0){this.disable();this.$select.append(this._templateOption({value:"null",label:this.options.empty}))}else{this.enable()}if(this.selected){this.$select.val(this.selected)}},_templateOption:function(c){return'<option value="'+c.value+'">'+c.label+"</option>"},_template:function(e){var d='<div id="'+e.id+'" class="ui-select"><div class="button"><i id="icon"/></div><select id="select" class="select '+e.cls+" "+e.id+'">';for(key in e.data){var f=e.data[key];var c="";if(f.value==e.value||f.value==""){c="selected"}d+='<option value="'+f.value+'" '+c+">"+f.label+"</option>"}d+="</select></div>";return d}});return{View:b}});
\ No newline at end of file
+define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{id:"",cls:"",empty:"No data available",visible:true,wait:false,multiple:false},initialize:function(d){this.options=a.merge(d,this.optionsDefault);this.setElement(this._template(this.options));this.$select=this.$el.find("#select");this.$icon=this.$el.find("#icon");if(this.options.multiple){this.$select.prop("multiple",true);this.$select.addClass("ui-select-multiple");this.$icon.remove()}else{this.$el.addClass("ui-select")}this.update(this.options.data);if(!this.options.visible){this.hide()}if(this.options.wait){this.wait()}else{this.show()}var c=this;this.$select.on("change",function(){c._change()});this.on("change",function(){c._change()})},value:function(c){if(c!==undefined){this.$select.val(c)}return this.$select.val()},first:function(){var c=this.$select.find("option");if(c.length>0){return c.val()}else{return undefined}},text:function(){return this.$select.find("option:selected").text()},show:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-caret-down");this.$select.show();this.$el.show()},hide:function(){this.$el.hide()},wait:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-spinner fa-spin");this.$select.hide()},disabled:function(){return this.$select.is(":disabled")},enable:function(){this.$select.prop("disabled",false)},disable:function(){this.$select.prop("disabled",true)},add:function(c){this.$select.append(this._templateOption(c));this._refresh()},del:function(c){this.$select.find("option[value="+c+"]").remove();this.$select.trigger("change");this._refresh()},update:function(c){var e=this.$select.val();this.$select.find("option").remove();for(var d in c){this.$select.append(this._templateOption(c[d]))}this._refresh();this.$select.val(e);if(!this.$select.val()){this.$select.val(this.first())}},setOnChange:function(c){this.options.onchange=c},exists:function(c){return this.$select.find("option[value="+c+"]").length>0},_change:function(){if(this.options.onchange){this.options.onchange(this.$select.val())}},_refresh:function(){this.$select.find("option[value=null]").remove();var c=this.$select.find("option").length;if(c==0){this.disable();this.$select.append(this._templateOption({value:"null",label:this.options.empty}))}else{this.enable()}},_templateOption:function(c){return'<option value="'+c.value+'">'+c.label+"</option>"},_template:function(c){return'<div id="'+c.id+'"><div class="button"><i id="icon"/></div><select id="select" class="select '+c.cls+" "+c.id+'"></select></div>'}});return{View:b}});
\ No newline at end of file
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/scripts/packed/mvc/ui/ui-slider.js
--- a/static/scripts/packed/mvc/ui/ui-slider.js
+++ b/static/scripts/packed/mvc/ui/ui-slider.js
@@ -1,1 +1,1 @@
-define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{value:"",min:1,max:100},initialize:function(d){var c=this;this.options=a.merge(d,this.optionsDefault);this.setElement(this._template(this.options));this.$slider=this.$el.find("#slider");this.$text=this.$el.find("#text");this.$slider.slider(this.options);this.$text.on("change",function(){c.value($(this).val())});this.$text.on("keydown",function(f){var e=f.which;if(!(e==13||e==8||e==189||(e>=48&&e<=57))){f.preventDefault()}});this.$slider.on("slide",function(e,f){c.value(f.value)})},value:function(c){if(c!==undefined){c=Math.max(Math.min(c,this.options.max),this.options.min);if(this.options.onchange){this.options.onchange(c)}this.$slider.slider("value",c);this.$text.val(c)}return this.$text.val()},_template:function(c){return'<div id="'+c.id+'" style=""><input id="text" type="text" class="ui-input" style="width: 50px; float: left"/><div id="slider" style="width: calc(100% - 60px); float: left; top: 8px; left: 10px;"/></div>'}});return{View:b}});
\ No newline at end of file
+define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{value:"",min:1,max:100,step:0.1,decimal:false},initialize:function(d){var c=this;this.options=a.merge(d,this.optionsDefault);this.setElement(this._template(this.options));this.$slider=this.$el.find("#slider");this.$text=this.$el.find("#text");if(!this.options.decimal){this.options.step=1}this.$slider.slider(this.options);this.$text.on("change",function(){c.value($(this).val())});this.$text.on("keydown",function(f){var e=f.which;if(!(e==13||e==8||e==37||e==39||e==189||(e>=48&&e<=57)||(c.options.decimal&&$(this).val().indexOf(".")==-1)&&e==190)){f.preventDefault()}});this.$slider.on("slide",function(e,f){c.value(f.value)})},value:function(c){if(c!==undefined){c=Math.max(Math.min(c,this.options.max),this.options.min);if(this.options.onchange){this.options.onchange(c)}this.$slider.slider("value",c);this.$text.val(c)}return this.$text.val()},_template:function(c){return'<div id="'+c.id+'" style=""><input id="text" type="text" class="ui-input" style="width: 50px; float: left"/><div id="slider" style="width: calc(100% - 60px); float: left; top: 8px; left: 10px;"/></div>'}});return{View:b}});
\ No newline at end of file
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -148,7 +148,7 @@
.form-control::-moz-placeholder{color:#999}
.form-control:-ms-input-placeholder{color:#999}
.form-control::-webkit-input-placeholder{color:#999}
-.form-control,.ui-input{display:block;width:100%;height:27px;padding:4px 10px;font-size:12px;line-height:1.428571429;color:#555;vertical-align:middle;background-color:#fff;border:1px solid #aaa;border-radius:3px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6)}
+.form-control,.ui-input,.ui-textarea{display:block;width:100%;height:27px;padding:4px 10px;font-size:12px;line-height:1.428571429;color:#555;vertical-align:middle;background-color:#fff;border:1px solid #aaa;border-radius:3px;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);box-shadow:inset 0 1px 1px rgba(0,0,0,0.075);-webkit-transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s;transition:border-color ease-in-out .15s, box-shadow ease-in-out .15s}.form-control:focus{border-color:#66afe9;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6);box-shadow:inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(102, 175, 233, 0.6)}
.form-control[disabled],.form-control[readonly],fieldset[disabled] .form-control{cursor:not-allowed;background-color:#eee}
textarea.form-control{height:auto}
.form-group{margin-bottom:15px}
@@ -1282,6 +1282,7 @@
.ui-table tbody{cursor:pointer}
.ui-table-plain tbody td{padding:5px 0px 5px 5px !important;border:none !important}
.ui-table-plain tbody{cursor:auto !important}
+.ui-table-form-info{clear:both !important}
.ui-table-form-separator{font-weight:bold;font-size:0.9em}
.ui-label{font-weight:bold}
.ui-message{padding:2px 2px 2px 10px}
@@ -1289,6 +1290,7 @@
.ui-button-icon{margin-right:5px}.ui-button-icon ul i{font-size:1.2em;margin-right:5px;position:relative;top:1px}
.ui-button-icon .button{margin-right:5px;margin-left:5px}
.ui-button-icon .title{position:relative;font-size:0.8em;font-weight:normal;top:-1px}
+.ui-textarea{height:100px !important}
.ui-tabs .ui-tabs-add{font-size:0.8em;margin-right:5px}
.ui-tabs .ui-tabs-delete{font-size:0.8em;margin-left:5px;cursor:pointer}
.no-highlight{-webkit-user-select:none;-moz-user-select:none;-khtml-user-select:none;-ms-user-select:none;}
@@ -1298,8 +1300,9 @@
.ui-portlet .no-scroll{height:calc(100% - 80px)}
.ui-popover{max-width:700px;display:none}.ui-popover .popover-close{position:absolute;right:10px;top:7px;font-size:1.2em;cursor:pointer}
.ui-popover .popover-title{padding:4px 10px}
-.ui-select{position:relative;height:27px;overflow:hidden;border:1px solid #bfbfbf;-moz-border-radius:3px;border-radius:3px}.ui-select .button{position:relative;width:25px;height:100%;float:right;border-left:1px solid #bfbfbf;padding-left:9px;padding-top:4px;background:#f2f2f2}
+.ui-select,.ui-select-multiple{position:relative;height:27px;overflow:hidden;border:1px solid #bfbfbf;-moz-border-radius:3px;border-radius:3px}.ui-select .button{position:relative;width:25px;height:100%;float:right;border-left:1px solid #bfbfbf;padding-left:9px;padding-top:4px;background:#f2f2f2}
.ui-select select{position:absolute;top:0px;height:100%;width:100%;padding-left:5px;cursor:pointer;background:transparent;border:0;border-radius:0;-webkit-appearance:none}
+.ui-select-multiple{height:100% !important;width:100% !important}
.libraryRow{background-color:#ebd9b2}
.datasetHighlighted{background-color:#f9f9f9}
.libraryItemDeleted-True{font-style:italic}
diff -r 19b109c4caa6b89e5c3e3e7a8f81e4fe1ef21746 -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 static/style/src/less/ui.less
--- a/static/style/src/less/ui.less
+++ b/static/style/src/less/ui.less
@@ -32,6 +32,7 @@
.ui-table-form-info {
&:extend(.toolParamHelp);
+ clear: both !important;
}
.ui-table-form-separator {
@@ -76,6 +77,11 @@
&:extend(.form-control);
}
+.ui-textarea {
+ &:extend(.form-control);
+ height: 100px !important;
+}
+
.ui-tabs {
.ui-tabs-add {
font-size : 0.8em;
@@ -203,4 +209,10 @@
border-radius: 0;
-webkit-appearance: none;
}
+}
+
+.ui-select-multiple {
+ &:extend(.ui-select);
+ height: 100% !important;
+ width: 100% !important;
}
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/commits/b50fbeda0365/
Changeset: b50fbeda0365
User: carlfeberhard
Date: 2014-09-04 16:33:41
Summary: merge central
Affected #: 1 file
diff -r 520eb21f9a1ef33eed6b4451bf6022e1750ff932 -r b50fbeda0365140ea4b91f332f52e56cc4b9d25a lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2466,6 +2466,7 @@
val = getattr( ldda.datatype, name )
rval['metadata_' + name] = val
return rval
+
def get_template_widgets( self, trans, get_contents=True ):
             # See if we have any associated templates. The get_contents
# param is passed by callers that are inheriting a template - these
@@ -2492,6 +2493,7 @@
else:
return template.get_widgets( trans.user )
return []
+
def templates_dict( self, use_name=False ):
"""
Returns a dict of template info
@@ -2510,6 +2512,7 @@
tmp_dict[ name ] = content.get( field[ 'name' ] )
template_data[template.name] = tmp_dict
return template_data
+
def templates_json( self, use_name=False ):
return json.dumps( self.templates_dict( use_name=use_name ) )
@@ -2533,6 +2536,7 @@
self.info = info
self.inheritable = inheritable
+
class LibraryFolderInfoAssociation( object ):
def __init__( self, folder, form_definition, info, inheritable=False ):
self.folder = folder
@@ -2540,15 +2544,18 @@
self.info = info
self.inheritable = inheritable
+
class LibraryDatasetDatasetInfoAssociation( object ):
def __init__( self, library_dataset_dataset_association, form_definition, info ):
# TODO: need to figure out if this should be inheritable to the associated LibraryDataset
self.library_dataset_dataset_association = library_dataset_dataset_association
self.template = form_definition
self.info = info
+
@property
def inheritable( self ):
- return True #always allow inheriting, used for replacement
+ return True # always allow inheriting, used for replacement
+
class ValidationError( object ):
def __init__( self, message=None, err_type=None, attributes=None ):
@@ -2556,42 +2563,47 @@
self.err_type = err_type
self.attributes = attributes
+
class DatasetToValidationErrorAssociation( object ):
def __init__( self, dataset, validation_error ):
self.dataset = dataset
self.validation_error = validation_error
+
class ImplicitlyConvertedDatasetAssociation( object ):
- def __init__( self, id = None, parent = None, dataset = None, file_type = None, deleted = False, purged = False, metadata_safe = True ):
+
+ def __init__( self, id=None, parent=None, dataset=None, file_type=None, deleted=False, purged=False, metadata_safe=True ):
self.id = id
if isinstance(dataset, HistoryDatasetAssociation):
self.dataset = dataset
elif isinstance(dataset, LibraryDatasetDatasetAssociation):
self.dataset_ldda = dataset
else:
- raise AttributeError, 'Unknown dataset type provided for dataset: %s' % type( dataset )
+ raise AttributeError( 'Unknown dataset type provided for dataset: %s' % type( dataset ) )
if isinstance(parent, HistoryDatasetAssociation):
self.parent_hda = parent
elif isinstance(parent, LibraryDatasetDatasetAssociation):
self.parent_ldda = parent
else:
- raise AttributeError, 'Unknown dataset type provided for parent: %s' % type( parent )
+ raise AttributeError( 'Unknown dataset type provided for parent: %s' % type( parent ) )
self.type = file_type
self.deleted = deleted
self.purged = purged
self.metadata_safe = metadata_safe
- def clear( self, purge = False, delete_dataset = True ):
+ def clear( self, purge=False, delete_dataset=True ):
self.deleted = True
if self.dataset:
if delete_dataset:
self.dataset.deleted = True
if purge:
self.dataset.purged = True
- if purge and self.dataset.deleted: #do something with purging
+ if purge and self.dataset.deleted: # do something with purging
self.purged = True
- try: os.unlink( self.file_name )
- except Exception, e: print "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e )
+ try:
+ os.unlink( self.file_name )
+ except Exception, e:
+ print "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e )
DEFAULT_COLLECTION_NAME = "Unnamed Collection"
@@ -2903,6 +2915,7 @@
self.tool_id = None
self.message = message
+
class GalaxySession( object ):
def __init__( self,
id=None,
@@ -2924,24 +2937,29 @@
self.is_valid = is_valid
self.prev_session_id = prev_session_id
self.histories = []
+
def add_history( self, history, association=None ):
if association is None:
self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
else:
self.histories.append( association )
+
def get_disk_usage( self ):
if self.disk_usage is None:
return 0
return self.disk_usage
+
def set_disk_usage( self, bytes ):
self.disk_usage = bytes
total_disk_usage = property( get_disk_usage, set_disk_usage )
+
class GalaxySessionToHistoryAssociation( object ):
def __init__( self, galaxy_session, history ):
self.galaxy_session = galaxy_session
self.history = history
+
class UCI( object ):
def __init__( self ):
self.id = None
@@ -2962,14 +2980,14 @@
self.latest_workflow_id = None
self.workflows = []
- def copy_tags_from(self,target_user,source_workflow):
+ def copy_tags_from(self, target_user, source_workflow):
for src_swta in source_workflow.owner_tags:
new_swta = src_swta.copy()
new_swta.user = target_user
self.tags.append(new_swta)
- def to_dict( self, view='collection', value_mapper = None ):
- rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper = value_mapper )
+ def to_dict( self, view='collection', value_mapper=None ):
+ rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper=value_mapper )
tags_str_list = []
for tag in self.tags:
tag_str = tag.user_tname
@@ -3007,12 +3025,11 @@
return False
def to_dict( self, view='collection', value_mapper=None):
- rval = super( Workflow, self ).to_dict( view=view, value_mapper = value_mapper )
- rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
+ rval = super( Workflow, self ).to_dict( view=view, value_mapper=value_mapper )
+ rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
return rval
-
class WorkflowStep( object ):
def __init__( self ):
@@ -3061,7 +3078,7 @@
dict_collection_visible_keys = ( 'id', 'update_time', 'workflow_id' )
dict_element_visible_keys = ( 'id', 'update_time', 'workflow_id' )
- def to_dict( self, view='collection', value_mapper = None ):
+ def to_dict( self, view='collection', value_mapper=None ):
rval = super( WorkflowInvocation, self ).to_dict( view=view, value_mapper=value_mapper )
if view == 'element':
steps = {}
@@ -3072,12 +3089,12 @@
inputs = {}
for step in self.steps:
- if step.workflow_step.type =='tool':
+ if step.workflow_step.type == 'tool':
for step_input in step.workflow_step.input_connections:
if step_input.output_step.type == 'data_input':
for job_input in step.job.input_datasets:
if job_input.name == step_input.input_name:
- inputs[str(step_input.output_step.order_index)] = { "id" : job_input.dataset_id, "src" : "hda"}
+ inputs[str(step_input.output_step.order_index)] = { "id": job_input.dataset_id, "src": "hda"}
rval['inputs'] = inputs
return rval
@@ -3086,19 +3103,20 @@
dict_collection_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id' )
dict_element_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id' )
- def to_dict( self, view='collection', value_mapper = None ):
+ def to_dict( self, view='collection', value_mapper=None ):
rval = super( WorkflowInvocationStep, self ).to_dict( view=view, value_mapper=value_mapper )
rval['order_index'] = self.workflow_step.order_index
return rval
class MetadataFile( object ):
- def __init__( self, dataset = None, name = None ):
+ def __init__( self, dataset=None, name=None ):
if isinstance( dataset, HistoryDatasetAssociation ):
self.history_dataset = dataset
elif isinstance( dataset, LibraryDatasetDatasetAssociation ):
self.library_dataset = dataset
self.name = name
+
@property
def file_name( self ):
assert self.id is not None, "ID must be set before filename used (commit the object)"
@@ -3129,14 +3147,15 @@
class FormDefinition( object, Dictifiable ):
# The following form_builder classes are supported by the FormDefinition class.
supported_field_types = [ AddressField, CheckboxField, PasswordField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
- types = Bunch( REQUEST = 'Sequencing Request Form',
- SAMPLE = 'Sequencing Sample Form',
- EXTERNAL_SERVICE = 'External Service Information Form',
- RUN_DETAILS_TEMPLATE = 'Sample run details template',
- LIBRARY_INFO_TEMPLATE = 'Library information template',
- USER_INFO = 'User Information' )
+ types = Bunch( REQUEST='Sequencing Request Form',
+ SAMPLE='Sequencing Sample Form',
+ EXTERNAL_SERVICE='External Service Information Form',
+ RUN_DETAILS_TEMPLATE='Sample run details template',
+ LIBRARY_INFO_TEMPLATE='Library information template',
+ USER_INFO='User Information' )
dict_collection_visible_keys = ( 'id', 'name' )
dict_element_visible_keys = ( 'id', 'name', 'desc', 'form_definition_current_id', 'fields', 'layout' )
+
def __init__( self, name=None, desc=None, fields=[], form_definition_current=None, form_type=None, layout=None ):
self.name = name
self.desc = desc
@@ -3144,6 +3163,7 @@
self.form_definition_current = form_definition_current
self.type = form_type
self.layout = layout
+
def grid_fields( self, grid_index ):
# Returns a dictionary whose keys are integers corresponding to field positions
# on the grid and whose values are the field.
@@ -3152,6 +3172,7 @@
if str( f[ 'layout' ] ) == str( grid_index ):
gridfields[i] = f
return gridfields
+
def get_widgets( self, user, contents={}, **kwd ):
'''
Return the list of widgets that comprise a form definition,
@@ -3213,24 +3234,28 @@
field_widget.params = params
elif field_type == 'SelectField':
for option in field[ 'selectlist' ]:
+
if option == value:
field_widget.add_option( option, option, selected=True )
else:
field_widget.add_option( option, option )
elif field_type == 'CheckboxField':
+
field_widget.set_checked( value )
if field[ 'required' ] == 'required':
req = 'Required'
else:
req = 'Optional'
if field[ 'helptext' ]:
- helptext='%s (%s)' % ( field[ 'helptext' ], req )
+ helptext = '%s (%s)' % ( field[ 'helptext' ], req )
else:
helptext = '(%s)' % req
widgets.append( dict( label=field[ 'label' ],
+
widget=field_widget,
helptext=helptext ) )
return widgets
+
def field_as_html( self, field ):
"""Generates disabled html for a field"""
type = field[ 'type' ]
@@ -3245,21 +3270,25 @@
# Return None if unsupported field type
return None
+
class FormDefinitionCurrent( object ):
def __init__(self, form_definition=None):
self.latest_form = form_definition
+
class FormValues( object ):
def __init__(self, form_def=None, content=None):
self.form_definition = form_def
self.content = content
+
class Request( object, Dictifiable ):
- states = Bunch( NEW = 'New',
- SUBMITTED = 'In Progress',
- REJECTED = 'Rejected',
- COMPLETE = 'Complete' )
+ states = Bunch( NEW='New',
+ SUBMITTED='In Progress',
+ REJECTED='Rejected',
+ COMPLETE='Complete' )
dict_collection_visible_keys = ( 'id', 'name', 'state' )
+
def __init__( self, name=None, desc=None, request_type=None, user=None, form_values=None, notification=None ):
self.name = name
self.desc = desc
@@ -3268,17 +3297,20 @@
self.user = user
self.notification = notification
self.samples_list = []
+
@property
def state( self ):
latest_event = self.latest_event
if latest_event:
return latest_event.state
return None
+
@property
def latest_event( self ):
if self.events:
return self.events[0]
return None
+
@property
def samples_have_common_state( self ):
"""
@@ -3294,6 +3326,7 @@
if s.state.id != state_for_comparison.id:
return False
return state_for_comparison
+
@property
def last_comment( self ):
latest_event = self.latest_event
@@ -3302,26 +3335,34 @@
return latest_event.comment
return ''
return 'No comment'
+
def get_sample( self, sample_name ):
for sample in self.samples:
if sample.name == sample_name:
return sample
return None
+
@property
def is_unsubmitted( self ):
return self.state in [ self.states.REJECTED, self.states.NEW ]
+
@property
def is_rejected( self ):
return self.state == self.states.REJECTED
+
@property
def is_submitted( self ):
return self.state == self.states.SUBMITTED
+
@property
def is_new( self ):
+
return self.state == self.states.NEW
+
@property
def is_complete( self ):
return self.state == self.states.COMPLETE
+
@property
def samples_without_library_destinations( self ):
# Return all samples that are not associated with a library
@@ -3330,6 +3371,7 @@
if not sample.library:
samples.append( sample )
return samples
+
@property
def samples_with_bar_code( self ):
# Return all samples that have associated bar code
@@ -3338,6 +3380,7 @@
if sample.bar_code:
samples.append( sample )
return samples
+
def send_email_notification( self, trans, common_state, final_state=False ):
# Check if an email notification is configured to be sent when the samples
# are in this state
@@ -3390,7 +3433,7 @@
try:
send_mail( frm, to, subject, body, trans.app.config )
comments = "Email notification sent to %s." % ", ".join( to ).strip().strip( ',' )
- except Exception,e:
+ except Exception, e:
comments = "Email notification failed. (%s)" % str(e)
# update the request history with the email notification event
elif not trans.app.config.smtp_server:
@@ -3401,16 +3444,19 @@
trans.sa_session.flush()
return comments
+
class RequestEvent( object ):
def __init__(self, request=None, request_state=None, comment=''):
self.request = request
self.state = request_state
self.comment = comment
+
class ExternalService( object ):
- data_transfer_protocol = Bunch( HTTP = 'http',
- HTTPS = 'https',
- SCP = 'scp' )
+ data_transfer_protocol = Bunch( HTTP='http',
+ HTTPS='https',
+ SCP='scp' )
+
def __init__( self, name=None, description=None, external_service_type_id=None, version=None, form_definition_id=None, form_values_id=None, deleted=None ):
self.name = name
self.description = description
@@ -3419,9 +3465,11 @@
self.form_definition_id = form_definition_id
self.form_values_id = form_values_id
self.deleted = deleted
- self.label = None # Used in the request_type controller's __build_external_service_select_field() method
+ self.label = None # Used in the request_type controller's __build_external_service_select_field() method
+
def get_external_service_type( self, trans ):
return trans.app.external_service_types.all_external_service_types[ self.external_service_type_id ]
+
def load_data_transfer_settings( self, trans ):
trans.app.external_service_types.reload( self.external_service_type_id )
self.data_transfer = {}
@@ -3442,33 +3490,39 @@
automatic_transfer = data_transfer_obj.config.get( 'automatic_transfer', 'false' )
http_configs[ 'automatic_transfer' ] = galaxy.util.string_as_bool( automatic_transfer )
self.data_transfer[ self.data_transfer_protocol.HTTP ] = http_configs
+
def populate_actions( self, trans, item, param_dict=None ):
return self.get_external_service_type( trans ).actions.populate( self, item, param_dict=param_dict )
+
class RequestType( object, Dictifiable ):
dict_collection_visible_keys = ( 'id', 'name', 'desc' )
dict_element_visible_keys = ( 'id', 'name', 'desc', 'request_form_id', 'sample_form_id' )
- rename_dataset_options = Bunch( NO = 'Do not rename',
- SAMPLE_NAME = 'Preprend sample name',
- EXPERIMENT_NAME = 'Prepend experiment name',
- EXPERIMENT_AND_SAMPLE_NAME = 'Prepend experiment and sample name')
+ rename_dataset_options = Bunch( NO='Do not rename',
+ SAMPLE_NAME='Preprend sample name',
+ EXPERIMENT_NAME='Prepend experiment name',
+ EXPERIMENT_AND_SAMPLE_NAME='Prepend experiment and sample name')
permitted_actions = get_permitted_actions( filter='REQUEST_TYPE' )
+
def __init__( self, name=None, desc=None, request_form=None, sample_form=None ):
self.name = name
self.desc = desc
self.request_form = request_form
self.sample_form = sample_form
+
@property
def external_services( self ):
external_services = []
for rtesa in self.external_service_associations:
external_services.append( rtesa.external_service )
return external_services
+
def get_external_service( self, external_service_type_id ):
for rtesa in self.external_service_associations:
if rtesa.external_service.external_service_type_id == external_service_type_id:
return rtesa.external_service
return None
+
def get_external_services_for_manual_data_transfer( self, trans ):
'''Returns all external services that use manual data transfer'''
external_services = []
@@ -3481,6 +3535,7 @@
if not transfer_type_settings[ 'automatic_transfer' ]:
external_services.append( external_service )
return external_services
+
def delete_external_service_associations( self, trans ):
'''Deletes all external service associations.'''
flush_needed = False
@@ -3489,20 +3544,24 @@
flush_needed = True
if flush_needed:
trans.sa_session.flush()
+
def add_external_service_association( self, trans, external_service ):
rtesa = trans.model.RequestTypeExternalServiceAssociation( self, external_service )
trans.sa_session.add( rtesa )
trans.sa_session.flush()
+
@property
def final_sample_state( self ):
# The states mapper for this object orders ascending
return self.states[-1]
+
@property
def run_details( self ):
if self.run:
# self.run[0] is [RequestTypeRunAssociation]
return self.run[0]
return None
+
def get_template_widgets( self, trans, get_contents=True ):
# See if we have any associated templates. The get_contents param
# is passed by callers that are inheriting a template - these are
@@ -3520,23 +3579,27 @@
return template.get_widgets( trans.user )
return []
+
class RequestTypeExternalServiceAssociation( object ):
def __init__( self, request_type, external_service ):
self.request_type = request_type
self.external_service = external_service
+
class RequestTypePermissions( object ):
def __init__( self, action, request_type, role ):
self.action = action
self.request_type = request_type
self.role = role
+
class Sample( object, Dictifiable ):
# The following form_builder classes are supported by the Sample class.
supported_field_types = [ CheckboxField, SelectField, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
- bulk_operations = Bunch( CHANGE_STATE = 'Change state',
- SELECT_LIBRARY = 'Select data library and folder' )
+ bulk_operations = Bunch( CHANGE_STATE='Change state',
+ SELECT_LIBRARY='Select data library and folder' )
dict_collection_visible_keys = ( 'id', 'name' )
+
def __init__(self, name=None, desc=None, request=None, form_values=None, bar_code=None, library=None, folder=None, workflow=None, history=None):
self.name = name
self.desc = desc
@@ -3547,17 +3610,20 @@
self.folder = folder
self.history = history
self.workflow = workflow
+
@property
def state( self ):
latest_event = self.latest_event
if latest_event:
return latest_event.state
return None
+
@property
def latest_event( self ):
if self.events:
return self.events[0]
return None
+
@property
def adding_to_library_dataset_files( self ):
adding_to_library_datasets = []
@@ -3565,6 +3631,7 @@
if dataset.status == SampleDataset.transfer_status.ADD_TO_LIBRARY:
adding_to_library_datasets.append( dataset )
return adding_to_library_datasets
+
@property
def inprogress_dataset_files( self ):
inprogress_datasets = []
@@ -3572,6 +3639,7 @@
if dataset.status not in [ SampleDataset.transfer_status.NOT_STARTED, SampleDataset.transfer_status.COMPLETE ]:
inprogress_datasets.append( dataset )
return inprogress_datasets
+
@property
def queued_dataset_files( self ):
queued_datasets = []
@@ -3579,6 +3647,7 @@
if dataset.status == SampleDataset.transfer_status.IN_QUEUE:
queued_datasets.append( dataset )
return queued_datasets
+
@property
def transfer_error_dataset_files( self ):
transfer_error_datasets = []
@@ -3586,6 +3655,7 @@
if dataset.status == SampleDataset.transfer_status.ERROR:
transfer_error_datasets.append( dataset )
return transfer_error_datasets
+
@property
def transferred_dataset_files( self ):
transferred_datasets = []
@@ -3593,6 +3663,7 @@
if dataset.status == SampleDataset.transfer_status.COMPLETE:
transferred_datasets.append( dataset )
return transferred_datasets
+
@property
def transferring_dataset_files( self ):
transferring_datasets = []
@@ -3600,6 +3671,7 @@
if dataset.status == SampleDataset.transfer_status.TRANSFERRING:
transferring_datasets.append( dataset )
return transferring_datasets
+
@property
def untransferred_dataset_files( self ):
untransferred_datasets = []
@@ -3607,6 +3679,7 @@
if dataset.status != SampleDataset.transfer_status.COMPLETE:
untransferred_datasets.append( dataset )
return untransferred_datasets
+
def get_untransferred_dataset_size( self, filepath, scp_configs ):
def print_ticks( d ):
pass
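Most of the model changes in this merge are mechanical style cleanups: blank lines between classes and methods, no spaces around '=' in keyword arguments, and the old comma forms of raise and one-line try/except rewritten as parenthesized, one-statement-per-line forms. A minimal before/after sketch of those patterns (a hypothetical helper, not taken from the diff):

import os

# Old style, as seen on the removed lines:
#     raise AttributeError, 'Unknown dataset type provided for dataset: %s' % type( dataset )
#     try: os.unlink( path )
#     except Exception, e: print "Failed to purge associated file (%s) from disk: %s" % ( path, e )

def purge_file( path ):
    # New style: parenthesized exception instances and one statement per line.
    if path is None:
        raise AttributeError( 'No path provided for purge: %s' % type( path ) )
    try:
        os.unlink( path )
    except OSError, e:
        print "Failed to purge associated file (%s) from disk: %s" % ( path, e )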
https://bitbucket.org/galaxy/galaxy-central/commits/cebadec812f5/
Changeset: cebadec812f5
User: carlfeberhard
Date: 2014-09-04 17:06:08
Summary: merge central
Affected #: 2 files
diff -r b50fbeda0365140ea4b91f332f52e56cc4b9d25a -r cebadec812f5b1ca10abdebdb60456796e061c56 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -98,6 +98,21 @@
return False
+def is_uuid( value ):
+ """
+ This method returns True if value is a UUID, otherwise False.
+ >>> is_uuid( "123e4567-e89b-12d3-a456-426655440000" )
+ True
+ >>> is_uuid( "0x3242340298902834" )
+ False
+ """
+ uuid_re = re.compile( "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" )
+ if re.match( uuid_re, str( value ) ):
+ return True
+ else:
+ return False
+
+
def get_charset_from_http_headers( headers, default=None ):
rval = headers.get('content-type', None )
if rval and 'charset=' in rval:
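The new is_uuid helper only pattern-matches the canonical dashed, lower-case hex form; note that re.match anchors at the start of the string only, so a value with a valid UUID prefix plus trailing characters would also pass. A small usage sketch (sample values are illustrative; assumes Galaxy's lib directory is on the Python path):

from galaxy import util

print util.is_uuid( "123e4567-e89b-12d3-a456-426655440000" )   # True
print util.is_uuid( "f2db41e1fa331b3e" )                       # False - looks like an encoded id, not a UUID
print util.is_uuid( "123E4567-E89B-12D3-A456-426655440000" )   # False - the pattern is lower-case only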
diff -r b50fbeda0365140ea4b91f332f52e56cc4b9d25a -r cebadec812f5b1ca10abdebdb60456796e061c56 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -433,24 +433,19 @@
return stored_workflow
def __get_stored_workflow( self, trans, workflow_id ):
- try:
+ if util.is_uuid(workflow_id):
+ # see if they have passed in the UUID for a workflow that is attached to a stored workflow
+ workflow_uuid = uuid.UUID(workflow_id)
+ stored_workflow = trans.sa_session.query(trans.app.model.StoredWorkflow).filter( and_(
+ trans.app.model.StoredWorkflow.latest_workflow_id == trans.app.model.Workflow.id,
+ trans.app.model.Workflow.uuid == workflow_uuid
+ )).first()
+ if stored_workflow is None:
+ raise exceptions.ObjectNotFound( "Workflow not found: %s" % workflow_id )
+ else:
workflow_id = self.__decode_id( trans, workflow_id )
query = trans.sa_session.query( trans.app.model.StoredWorkflow )
stored_workflow = query.get( workflow_id )
- except Exception:
- try:
- #see if they have passed in the UUID for a workflow that is attached to a stored workflow
- workflow_uuid = uuid.UUID(workflow_id)
- stored_workflow = trans.sa_session.query(trans.app.model.StoredWorkflow).filter( and_(
- trans.app.model.StoredWorkflow.latest_workflow_id == trans.app.model.Workflow.id,
- trans.app.model.Workflow.uuid == workflow_uuid
- )).first()
- if stored_workflow is None:
- raise exceptions.ObjectNotFound( "Workflow not found: %s" % workflow_id )
- return stored_workflow
- except:
- pass #let the outer raise exception happen
- raise exceptions.ObjectNotFound( "No such workflow found - invalid workflow identifier." )
if stored_workflow is None:
raise exceptions.ObjectNotFound( "No such workflow found." )
return stored_workflow
https://bitbucket.org/galaxy/galaxy-central/commits/404c21fa0de7/
Changeset: 404c21fa0de7
User: carlfeberhard
Date: 2014-09-04 17:06:39
Summary: Fix doc
Affected #: 1 file
diff -r cebadec812f5b1ca10abdebdb60456796e061c56 -r 404c21fa0de76e20deba7b63eebc15cca617173c lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -55,6 +55,8 @@
class WebApplication( base.WebApplication ):
"""
+ Base WSGI application instantiated for all Galaxy webapps.
+
A web application that:
* adds API and UI controllers by scanning given directories and
importing all modules found there.
https://bitbucket.org/galaxy/galaxy-central/commits/e6546f41d7b2/
Changeset: e6546f41d7b2
User: carlfeberhard
Date: 2014-09-04 22:28:53
Summary: merge central
Affected #: 28 files
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -312,4 +312,22 @@
<sniffer type="galaxy.datatypes.assembly:Amos"/>
    -->
</sniffers>
+ <build_sites>
+ <!--
+ Build sites define the builds (dbkeys) available at sites used by display
+ applications and the URL to those sites.
+
+ The `display` attributes on the `ucsc` and `gbrowse` sites replace the
+ `ucsc_display_sites` and `gbrowse_display_sites` options in
+ universe_wsgi.ini. Because these are used by "old-style" display
+ applications, their types cannot change if you want the old-style display
+ links for these sites to work.
+ -->
+ <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
+ <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" display="modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225"/>
+ <site type="ensembl" file="tool-data/shared/ensembl/ensembl_sites.txt"/>
+ <site type="ensembl_data_url" file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt"/>
+ <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
+ <site type="rviewer" file="tool-data/shared/rviewer/rviewer_build_sites.txt"/>
+  </build_sites>
</datatypes>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ensembl/ensembl_bam.xml
--- a/display_applications/ensembl/ensembl_bam.xml
+++ b/display_applications/ensembl/ensembl_bam.xml
@@ -1,7 +1,7 @@
<display id="ensembl_bam" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ensembl/ensembl_gff.xml
--- a/display_applications/ensembl/ensembl_gff.xml
+++ b/display_applications/ensembl/ensembl_gff.xml
@@ -1,7 +1,7 @@
<display id="ensembl_gff" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL -->
    <!-- Load links from file: one line to one link -->
-    <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+    <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1">
    <!-- Define parameters by column from file, allow splitting on builds -->
    <dynamic_param name="site_id" value="0"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ensembl/ensembl_interval_as_bed.xml
--- a/display_applications/ensembl/ensembl_interval_as_bed.xml
+++ b/display_applications/ensembl/ensembl_interval_as_bed.xml
@@ -1,7 +1,7 @@
<display id="ensembl_interval" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL -->
    <!-- Load links from file: one line to one link -->
-    <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+    <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1">
    <!-- Define parameters by column from file, allow splitting on builds -->
    <dynamic_param name="site_id" value="0"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/gbrowse/gbrowse_gff.xml
--- a/display_applications/gbrowse/gbrowse_gff.xml
+++ b/display_applications/gbrowse/gbrowse_gff.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_gff" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/gbrowse/gbrowse_interval_as_bed.xml
--- a/display_applications/gbrowse/gbrowse_interval_as_bed.xml
+++ b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_interval_as_bed" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/gbrowse/gbrowse_wig.xml
--- a/display_applications/gbrowse/gbrowse_wig.xml
+++ b/display_applications/gbrowse/gbrowse_wig.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_wig" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/igv/bam.xml
--- a/display_applications/igv/bam.xml
+++ b/display_applications/igv/bam.xml
@@ -2,7 +2,7 @@
<display id="igv_bam" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/igv/vcf.xml
--- a/display_applications/igv/vcf.xml
+++ b/display_applications/igv/vcf.xml
@@ -2,7 +2,7 @@
<display id="igv_vcf" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/rviewer/bed.xml
--- a/display_applications/rviewer/bed.xml
+++ b/display_applications/rviewer/bed.xml
@@ -2,7 +2,7 @@
<display id="rviewer_interval" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/rviewer/vcf.xml
--- a/display_applications/rviewer/vcf.xml
+++ b/display_applications/rviewer/vcf.xml
@@ -2,7 +2,7 @@
<display id="rviewer_vcf" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml
+++ b/display_applications/ucsc/bam.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bam" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ucsc/bigbed.xml
--- a/display_applications/ucsc/bigbed.xml
+++ b/display_applications/ucsc/bigbed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigbed" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ucsc/bigwig.xml
--- a/display_applications/ucsc/bigwig.xml
+++ b/display_applications/ucsc/bigwig.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigwig" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml
+++ b/display_applications/ucsc/interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ucsc/maf_customtrack.xml
--- a/display_applications/ucsc/maf_customtrack.xml
+++ b/display_applications/ucsc/maf_customtrack.xml
@@ -1,6 +1,6 @@
<display id="ucsc_maf_customtrack" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 display_applications/ucsc/vcf.xml
--- a/display_applications/ucsc/vcf.xml
+++ b/display_applications/ucsc/vcf.xml
@@ -1,6 +1,6 @@
<display id="ucsc_vcf" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -64,9 +64,6 @@
# Load dbkey / genome build manager
self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
- # Load build sites (old-style)
- self._configure_build_sites()
-
# Genomes
self.genomes = Genomes( self )
# Data providers registry.
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -16,7 +16,6 @@
from galaxy.util import string_as_bool
from galaxy.util import listify
from galaxy.util.dbkeys import GenomeBuilds
-from galaxy.util.build_sites import BuildSites
from galaxy import eggs
log = logging.getLogger( __name__ )
@@ -224,10 +223,6 @@
self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
- self.ucsc_build_sites = resolve_path( kwargs.get( 'ucsc_build_sites', os.path.join( self.tool_data_path, 'shared', 'ucsc', 'ucsc_build_sites.txt') ), self.root )
- self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
- self.gbrowse_build_sites = resolve_path( kwargs.get( 'gbrowse_build_sites', os.path.join( self.tool_data_path, 'shared', 'gbrowse', 'gbrowse_build_sites.txt') ), self.root )
- self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
self.brand = kwargs.get( 'brand', None )
self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
# Configuration for the message box directly below the masthead.
@@ -575,9 +570,6 @@
def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
- def _configure_build_sites( self ):
- self.build_sites = BuildSites( self )
-
def _configure_toolbox( self ):
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -81,9 +81,13 @@
class DynamicDisplayApplicationBuilder( object ):
@classmethod
- def __init__( self, elem, display_application ):
+ def __init__( self, elem, display_application, build_sites ):
rval = []
- filename = elem.get( 'from_file', None )
+ filename = None
+ if elem.get( 'site_type', None ) is not None:
+ filename = build_sites.get( elem.get( 'site_type' ) )
+ else:
+ filename = elem.get( 'from_file', None )
assert filename is not None, 'Filename and id attributes required for dynamic_links'
skip_startswith = elem.get( 'skip_startswith', None )
separator = elem.get( 'separator', '\t' )
@@ -176,7 +180,7 @@
if link:
rval.links[ link.id ] = link
for dynamic_links in elem.findall( 'dynamic_links' ):
- for link in DynamicDisplayApplicationBuilder( dynamic_links, rval ):
+ for link in DynamicDisplayApplicationBuilder( dynamic_links, rval, datatypes_registry.build_sites ):
rval.links[ link.id ] = link
return rval
def __init__( self, display_id, name, datatypes_registry, version = None ):
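With this change DynamicDisplayApplicationBuilder resolves the links file from the registry's build_sites mapping when the display XML supplies a site_type, and only falls back to the literal from_file path otherwise, which is what keeps older display XML working. A toy sketch of that precedence (hypothetical function and data, not the builder itself):

def resolve_links_file( attrs, build_sites ):
    # Prefer a registered site_type; fall back to an explicit from_file path.
    if attrs.get( 'site_type' ) is not None:
        return build_sites.get( attrs.get( 'site_type' ) )
    return attrs.get( 'from_file' )

build_sites = { 'ucsc': 'tool-data/shared/ucsc/ucsc_build_sites.txt' }
print resolve_links_file( { 'site_type': 'ucsc' }, build_sites )
print resolve_links_file( { 'from_file': 'tool-data/shared/example/sites.txt' }, build_sites )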
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -83,29 +83,31 @@
ret_val = []
ggtail = 'hgGenome_doSubmitUpload=submit'
if not dataset.dbkey:
- dataset.dbkey = 'hg18' # punt!
+ dataset.dbkey = 'hg18' # punt!
if dataset.has_data():
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
- site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
- internal_url = "%s" % url_for( controller='dataset',
- dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
- display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
- display_url = urllib.quote_plus( display_url )
- # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
- #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
- sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
- #sl.append("&hgt.customText=%s")
- sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
- sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
- sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
- sl.append("&hgGenome_doSubmitUpload=submit")
- sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
- s = ''.join(sl)
- s = urllib.quote_plus(s)
- redirect_url = s
- link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
- ret_val.append( (site_name, link) )
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
+ if site_name in datatypes_registry.get_display_sites('ucsc'):
+ site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
+ internal_url = "%s" % url_for( controller='dataset',
+ dataset_id=dataset.id,
+ action='display_at',
+ filename='ucsc_' + site_name )
+ display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
+ display_url = urllib.quote_plus( display_url )
+ # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+ sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
+ #sl.append("&hgt.customText=%s")
+ sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+ sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+ sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+ sl.append("&hgGenome_doSubmitUpload=submit")
+ sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+ s = ''.join(sl)
+ s = urllib.quote_plus(s)
+ redirect_url = s
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( (site_name, link) )
return ret_val
def make_html_table( self, dataset, skipchars=[] ):
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -234,8 +234,8 @@
# Filter UCSC sites to only those that are supported by this build and
# enabled.
valid_sites = [ ( name, url )
- for name, url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey )
- if name in app.config.ucsc_display_sites ]
+ for name, url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey )
+ if name in app.datatypes_registry.get_display_sites('ucsc') ]
if not valid_sites:
return []
# If there are any valid sites, we need to generate the estimated
@@ -750,8 +750,8 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" %
( site_url, dataset.dbkey, seqid, start, stop ) )
@@ -762,8 +762,8 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if seqid.startswith( 'chr' ) and len ( seqid ) > 3:
seqid = seqid[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, seqid, start, stop ) )
@@ -1091,8 +1091,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if chrom.startswith( 'chr' ) and len ( chrom ) > 3:
chrom = chrom[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, chrom, start, stop ) )
@@ -1103,8 +1103,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % ( site_url, dataset.dbkey, chrom, start, stop ) )
link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
ret_val.append( ( site_name, link ) )
@@ -1285,8 +1285,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc',dataset.dbkey):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop ) )
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -69,6 +69,10 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
+ # Build sites
+ self.build_sites = {}
+ self.display_sites = {}
+ self.legacy_build_sites = {}
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
@@ -285,6 +289,8 @@
handling_proprietary_datatypes=handling_proprietary_datatypes,
override=override )
self.upload_file_formats.sort()
+ # Load build sites
+ self.load_build_sites( root )
# Persist the xml form of the registry into a temporary file so that it can be loaded from the command line by tools and
# set_metadata processing.
self.to_xml_file()
@@ -303,6 +309,34 @@
self.sniff_order.append( datatype )
append_to_sniff_order()
+ def load_build_sites( self, root ):
+ if root.find( 'build_sites' ):
+ for elem in root.find( 'build_sites' ).findall( 'site' ):
+ if not (elem.get( 'type' ) and elem.get( 'file' )):
+ self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
+ else:
+ site_type = elem.get( 'type' )
+ file = elem.get( 'file' )
+ self.build_sites[site_type] = file
+ if site_type in ('ucsc', 'gbrowse'):
+ self.legacy_build_sites[site_type] = galaxy.util.read_build_sites( file )
+ if elem.get( 'display', None ):
+ display = elem.get( 'display' )
+ self.display_sites[site_type] = [ x.strip() for x in display.lower().split( ',' ) ]
+ self.log.debug( "Loaded build site '%s': %s with display sites: %s", site_type, file, display )
+ else:
+ self.log.debug( "Loaded build site '%s': %s", site_type, file )
+
+ def get_legacy_sites_by_build( self, site_type, build ):
+ sites = []
+ for site in self.legacy_build_sites.get(site_type, []):
+ if build in site['builds']:
+ sites.append((site['name'], site['url']))
+ return sites
+
+ def get_display_sites( self, site_type ):
+ return self.display_sites.get( site_type, [] )
+
def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
"""
Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
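Once load_build_sites has parsed the <build_sites> section, the registry can answer the two questions the display code above needs: which legacy sites carry a given build, and which site names are enabled for display. A hedged usage sketch (the registry variable and the build value are hypothetical; get_legacy_sites_by_build returns (name, url) pairs and get_display_sites returns the lower-cased names from the display attribute):

# Assuming `registry` is a loaded galaxy.datatypes.registry.Registry whose
# datatypes_conf.xml contains a <build_sites> section.
for name, url in registry.get_legacy_sites_by_build( 'ucsc', 'hg19' ):
    if name in registry.get_display_sites( 'ucsc' ):
        print "would offer a display link at %s: %s" % ( name, url )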
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -844,6 +844,30 @@
return db_names
+def read_build_sites( filename, check_builds=True ):
+ """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
+ build_sites = []
+ try:
+ for line in open(filename):
+ try:
+ if line[0:1] == "#":
+ continue
+ fields = line.replace("\r", "").replace("\n", "").split("\t")
+ site_name = fields[0]
+ site = fields[1]
+ if check_builds:
+ site_builds = fields[2].split(",")
+ site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
+ else:
+ site_dict = {'name': site_name, 'url': site}
+ build_sites.append( site_dict )
+ except:
+ continue
+ except:
+ print "ERROR: Unable to read builds for site file %s" % filename
+ return build_sites
+
+
def relativize_symlinks( path, start=None, followlinks=False):
for root, dirs, files in os.walk( path, followlinks=followlinks ):
rel_start = None
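read_build_sites expects one site per line with tab-separated fields: site name, base URL and, unless check_builds=False, a comma-separated list of builds; lines beginning with '#' and malformed lines are skipped. A self-contained sketch with made-up content (assumes Galaxy's lib directory is on the Python path):

import tempfile

from galaxy import util

# Hypothetical two-site file in the format read_build_sites() parses.
content = ( "#site name\turl\tbuilds\n"
            "main\thttp://genome.ucsc.edu/cgi-bin/hgTracks?\thg18,hg19\n"
            "test\thttp://genome-test.cse.ucsc.edu/cgi-bin/hgTracks?\thg19\n" )

site_file = tempfile.NamedTemporaryFile( delete=False )
site_file.write( content )
site_file.close()

print util.read_build_sites( site_file.name )
# -> [{'url': '...', 'name': 'main', 'builds': ['hg18', 'hg19']}, {'url': '...', 'name': 'test', 'builds': ['hg19']}]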
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/util/build_sites.py
--- a/lib/galaxy/util/build_sites.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Functionality for dealing with build sites for legacy display applications.
-"""
-import os.path
-
-
-class BuildSites( object ):
-
- def __init__( self, app ):
- self._app = app
- self._build_sites = {}
- self.load_build_sites()
-
- def read_build_sites( self, filename, check_builds=True ):
- """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
- build_sites = []
- try:
- for line in open(filename):
- try:
- if line[0:1] == "#":
- continue
- fields = line.replace("\r", "").replace("\n", "").split("\t")
- site_name = fields[0]
- site = fields[1]
- if check_builds:
- site_builds = fields[2].split(",")
- site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
- else:
- site_dict = {'name': site_name, 'url': site}
- build_sites.append( site_dict )
- except:
- continue
- except:
- print "ERROR: Unable to read builds for site file %s" % filename
- return build_sites
-
- def load_build_sites( self ):
- self._build_sites['ucsc'] = self.read_build_sites( self._app.config.ucsc_build_sites )
- self._build_sites['gbrowse'] = self.read_build_sites( self._app.config.gbrowse_build_sites )
-
- def _get_site_by_build( self, site_type, build ):
- sites = []
- for site in self._build_sites[site_type]:
- if build in site['builds']:
- sites.append((site['name'], site['url']))
- return sites
-
- def get_ucsc_sites_by_build( self, build ):
- return self._get_site_by_build( 'ucsc', build )
-
- def get_gbrowse_sites_by_build( self, build ):
- return self._get_site_by_build( 'gbrowse', build )
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 lib/galaxy/web/framework/webapp.py
--- a/lib/galaxy/web/framework/webapp.py
+++ b/lib/galaxy/web/framework/webapp.py
@@ -429,7 +429,7 @@
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
try:
host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
diff -r 404c21fa0de76e20deba7b63eebc15cca617173c -r e6546f41d7b2a11dbf26246662a55c6e86495269 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -364,14 +364,6 @@
# which browsers should be available. URLs and builds available at these
# browsers are defined in the specifield files.
-# UCSC browsers:
-#ucsc_build_sites = tool-data/shared/ucsc/ucsc_build_sites.txt
-#ucsc_display_sites = main,test,archaea,ucla
-
-# GBrowse servers:
-#gbrowse_build_sites = tool-data/shared/gbrowse/gbrowse_build_sites.txt
-#gbrowse_display_sites = modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225
-
# If use_remote_user = True, display application servers will be denied access
# to Galaxy and so displaying datasets in these sites will fail.
# display_servers contains a list of hostnames which should be allowed to
https://bitbucket.org/galaxy/galaxy-central/commits/83e45a9e4cc9/
Changeset: 83e45a9e4cc9
User: dannon
Date: 2014-09-04 22:30:50
Summary: Merged in carlfeberhard/galaxy-central-fork0 (pull request #484)
Break up web/framework/__init__ into more focused modules
Affected #: 20 files
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -6,7 +6,6 @@
import copy
import cPickle
import json
-import logging
import os
import shutil
import sys
@@ -15,13 +14,19 @@
from os.path import abspath
-import galaxy.model
-from galaxy.util import listify, stringify_dictionary_keys, string_as_bool
-from galaxy.util.odict import odict
-from galaxy.util import in_directory
-from galaxy.web import form_builder
+from galaxy import eggs
+eggs.require( "SQLAlchemy >= 0.4" )
from sqlalchemy.orm import object_session
+import galaxy.model
+from galaxy.util import listify
+from galaxy.util import stringify_dictionary_keys
+from galaxy.util import string_as_bool
+from galaxy.util import in_directory
+from galaxy.util.odict import odict
+from galaxy.web import form_builder
+
+import logging
log = logging.getLogger(__name__)
STATEMENTS = "__galaxy_statements__" #this is the name of the property in a Datatype class where new metadata spec element Statements are stored
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/managers/context.py
--- /dev/null
+++ b/lib/galaxy/managers/context.py
@@ -0,0 +1,179 @@
+"""
+Mixins for transaction-like objects.
+"""
+
+import os
+
+from galaxy.util.json import to_json_string
+from galaxy.util import bunch
+
+class ProvidesAppContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ database and event handling.
+
+ Mixed in class must provide `app` property.
+ """
+
+ def log_action( self, user=None, action=None, context=None, params=None):
+ """
+ Application-level logging of user actions.
+ """
+ if self.app.config.log_actions:
+ action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
+ try:
+ if user:
+ action.user = user
+ else:
+ action.user = self.user
+ except:
+ action.user = None
+ try:
+ action.session_id = self.galaxy_session.id
+ except:
+ action.session_id = None
+ self.sa_session.add( action )
+ self.sa_session.flush()
+
+ def log_event( self, message, tool_id=None, **kwargs ):
+ """
+ Application level logging. Still needs fleshing out (log levels and such)
+ Logging events is a config setting - if False, do not log.
+ """
+ if self.app.config.log_events:
+ event = self.app.model.Event()
+ event.tool_id = tool_id
+ try:
+ event.message = message % kwargs
+ except:
+ event.message = message
+ try:
+ event.history = self.get_history()
+ except:
+ event.history = None
+ try:
+ event.history_id = self.history.id
+ except:
+ event.history_id = None
+ try:
+ event.user = self.user
+ except:
+ event.user = None
+ try:
+ event.session_id = self.galaxy_session.id
+ except:
+ event.session_id = None
+ self.sa_session.add( event )
+ self.sa_session.flush()
+
+ @property
+ def sa_session( self ):
+ """
+ Returns a SQLAlchemy session -- currently just gets the current
+ session from the threadlocal session context, but this is provided
+ to allow migration toward a more SQLAlchemy 0.4 style of use.
+ """
+ return self.app.model.context.current
+
+ def expunge_all( self ):
+ app = self.app
+ context = app.model.context
+ context.expunge_all()
+ # This is a bit hacky, should refactor this. Maybe refactor to app -> expunge_all()
+ if hasattr(app, 'install_model'):
+ install_model = app.install_model
+ if install_model != app.model:
+ install_model.context.expunge_all()
+
+ def get_toolbox(self):
+ """Returns the application toolbox"""
+ return self.app.toolbox
+
+ @property
+ def model( self ):
+ return self.app.model
+
+ @property
+ def install_model( self ):
+ return self.app.install_model
+
+ def request_types(self):
+ if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
+ return True
+ return False
+
+
+class ProvidesUserContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ reasoning about users.
+
+ Mixed in class must provide `user`, `api_inherit_admin`, and `app`
+ properties.
+ """
+
+ @property
+ def anonymous( self ):
+ return self.user is None and not self.api_inherit_admin
+
+ def get_current_user_roles( self ):
+ user = self.user
+ if user:
+ roles = user.all_roles()
+ else:
+ roles = []
+ return roles
+
+ def user_is_admin( self ):
+ if self.api_inherit_admin:
+ return True
+ return self.user and self.user.email in self.app.config.admin_users_list
+
+ def user_can_do_run_as( self ):
+ run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
+ if not run_as_users:
+ return False
+ user_in_run_as_users = self.user and self.user.email in run_as_users
+ # Can do if explicitly in list or master_api_key supplied.
+ can_do_run_as = user_in_run_as_users or self.api_inherit_admin
+ return can_do_run_as
+
+ @property
+ def user_ftp_dir( self ):
+ identifier = self.app.config.ftp_upload_dir_identifier
+ return os.path.join( self.app.config.ftp_upload_dir, getattr( self.user, identifier ) )
+
+
+class ProvidesHistoryContext( object ):
+ """ For transaction-like objects to provide Galaxy convience layer for
+ reasoning about histories.
+
+ Mixed in class must provide `user`, `history`, and `app`
+ properties.
+ """
+
+ def db_dataset_for( self, dbkey ):
+ """
+ Returns the db_file dataset associated/needed by `dataset`, or `None`.
+ """
+ # If no history, return None.
+ if self.history is None:
+ return None
+#TODO: when does this happen? is it Bunch or util.bunch.Bunch?
+ if isinstance( self.history, bunch.Bunch ):
+ # The API presents a Bunch for a history. Until the API is
+ # more fully featured for handling this, also return None.
+ return None
+ datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
+ .filter_by( deleted=False, history_id=self.history.id, extension="len" )
+ for ds in datasets:
+ if dbkey == ds.dbkey:
+ return ds
+ return None
+
+ @property
+ def db_builds( self ):
+ """
+ Returns the builds defined by galaxy and the builds defined by
+ the user (chromInfo in history).
+ """
+ # FIXME: This method should be removed
+ return self.app.genome_builds.get_genome_build_names( trans=self )
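These mixins assume the host class supplies `app`, `user`, `history`, and related attributes; the GalaxyWebTransaction removed from framework/__init__.py further down (and relocated to framework/webapp.py) is their primary consumer. A minimal sketch of how a transaction-like object composes them, where everything except the mixin names is illustrative:

    from galaxy.managers.context import ProvidesAppContext, ProvidesUserContext

    class MinimalTransaction( ProvidesAppContext, ProvidesUserContext ):
        """Illustrative sketch: supplies the attributes the mixin docstrings require."""
        def __init__( self, app, user=None ):
            self.app = app
            self.user = user
            self.api_inherit_admin = False

    # Usage, assuming a configured Galaxy `app`:
    # trans = MinimalTransaction( app )
    # trans.anonymous          # True when no user is set and no master key was used
    # trans.user_is_admin()    # consults app.config.admin_users_list
    # trans.sa_session         # returns app.model.context.current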
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -1,23 +1,25 @@
"""
The Galaxy web application framework
"""
-from framework import expose
-from framework import json
-from framework import json_pretty
-from framework import require_login
-from framework import require_admin
from framework import url_for
-from framework import error
-from framework import form
-from framework import FormBuilder
-from framework import expose_api
-from framework import expose_api_anonymous
-from framework import expose_api_raw
-from framework import expose_api_raw_anonymous
-from framework.base import httpexceptions
+from framework.decorators import error
+from framework.decorators import expose
+from framework.decorators import json
+from framework.decorators import json_pretty
+from framework.decorators import require_login
+from framework.decorators import require_admin
+from framework.decorators import expose_api
+from framework.decorators import expose_api_anonymous
+from framework.decorators import expose_api_raw
+from framework.decorators import expose_api_raw_anonymous
# TODO: Drop and make these the default.
-from framework import _future_expose_api
-from framework import _future_expose_api_anonymous
-from framework import _future_expose_api_raw
-from framework import _future_expose_api_raw_anonymous
+from framework.decorators import _future_expose_api
+from framework.decorators import _future_expose_api_anonymous
+from framework.decorators import _future_expose_api_raw
+from framework.decorators import _future_expose_api_raw_anonymous
+
+from framework.formbuilder import form
+from framework.formbuilder import FormBuilder
+
+from framework.base import httpexceptions
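Because galaxy.web keeps re-exporting the decorators and form helpers from their new submodules, package-level imports in controller code are unaffected. A small illustrative example (the controller class itself is hypothetical; real controllers subclass BaseUIController):

    from galaxy import web

    class ExampleController( object ):
        # Decorator names resolve via framework.decorators behind the scenes.
        @web.expose
        @web.require_admin
        def index( self, trans, **kwd ):
            return trans.show_message( "admin only", 'info' )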
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/web/base/pluginframework.py
--- a/lib/galaxy/web/base/pluginframework.py
+++ b/lib/galaxy/web/base/pluginframework.py
@@ -579,7 +579,7 @@
"""
Pass control over to trans and render ``template_filename``.
- :type trans: ``galaxy.web.framework.GalaxyWebTransaction``
+ :type trans: ``galaxy.web.framework.webapp.GalaxyWebTransaction``
:param trans: transaction doing the rendering
:type plugin: ``util.bunch.Bunch``
:param plugin: the plugin containing the template to render
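The docstring now points at the class's new module; code that needs the transaction class directly, rather than receiving it as `trans`, would import it from there:

    # GalaxyWebTransaction now lives in the webapp module split out of framework/__init__.py.
    from galaxy.web.framework.webapp import GalaxyWebTransaction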
diff -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f -r 83e45a9e4cc994daed44e406a334660d9db7ed30 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -2,1395 +2,9 @@
Galaxy web application framework
"""
-import hashlib
-import inspect
-import os
-import random
-import socket
-import string
-import time
-from traceback import format_exc
-from Cookie import CookieError
-from functools import wraps
-
from galaxy import eggs
-
-eggs.require( "Cheetah" )
-from Cheetah.Template import Template
-
-from galaxy import util
-from galaxy.exceptions import error_codes
-from galaxy.exceptions import MessageException
-from galaxy.util import asbool
-from galaxy.util import safe_str_cmp
-from galaxy.util.backports.importlib import import_module
-from galaxy.util.json import from_json_string, to_json_string
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.web.framework import base, helpers
-
-import paste.httpexceptions
-
-eggs.require( "Mako" )
-import mako.template
-import mako.lookup
-import mako.runtime
-
-eggs.require( "pytz" ) # Used by Babel.
-eggs.require( "Babel" )
-from babel.support import Translations
-from babel import Locale
-
-eggs.require( "SQLAlchemy >= 0.4" )
-from sqlalchemy import and_
-from sqlalchemy.orm.exc import NoResultFound
-
eggs.require( "pexpect" )
eggs.require( "amqp" )
-import logging
-log = logging.getLogger( __name__ )
-
+import base
url_for = base.routes.url_for
-
-UCSC_SERVERS = (
- 'hgw1.cse.ucsc.edu',
- 'hgw2.cse.ucsc.edu',
- 'hgw3.cse.ucsc.edu',
- 'hgw4.cse.ucsc.edu',
- 'hgw5.cse.ucsc.edu',
- 'hgw6.cse.ucsc.edu',
- 'hgw7.cse.ucsc.edu',
- 'hgw8.cse.ucsc.edu',
-)
-
-JSON_CONTENT_TYPE = "application/json"
-
-
-# ----------------------------------------------------------------------------- web controller decorators
-def expose( func ):
- """
- Decorator: mark a function as 'exposed' and thus web accessible
- """
- func.exposed = True
- return func
-
-def json( func, **json_kwargs ):
- """
- Format the response as JSON and set the response content type to
- JSON_CONTENT_TYPE.
- """
- @wraps(func)
- def call_and_format( self, trans, *args, **kwargs ):
- trans.response.set_content_type( JSON_CONTENT_TYPE )
- return to_json_string( func( self, trans, *args, **kwargs ), **json_kwargs )
- if not hasattr(func, '_orig'):
- call_and_format._orig = func
- call_and_format.exposed = True
- return call_and_format
-
-def json_pretty( func ):
- """
- Indent and sort returned JSON.
- """
- return json( func, indent=4, sort_keys=True )
-
-def require_login( verb="perform this action", use_panels=False, webapp='galaxy' ):
- def argcatcher( func ):
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if trans.get_user():
- return func( self, trans, *args, **kwargs )
- else:
- return trans.show_error_message(
- 'You must be <a target="galaxy_main" href="%s">logged in</a> to %s.'
- % ( url_for( controller='user', action='login', webapp=webapp ), verb ), use_panels=use_panels )
- return decorator
- return argcatcher
-
-def require_admin( func ):
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if not trans.user_is_admin():
- msg = "You must be an administrator to access this feature."
- user = trans.get_user()
- if not trans.app.config.admin_users_list:
- msg = "You must be logged in as an administrator to access this feature, but no administrators are set in the Galaxy configuration."
- elif not user:
- msg = "You must be logged in as an administrator to access this feature."
- trans.response.status = 403
- if trans.response.get_content_type() == 'application/json':
- return msg
- else:
- return trans.show_error_message( msg )
- return func( self, trans, *args, **kwargs )
- return decorator
-
-
-# ----------------------------------------------------------------------------- (original) api decorators
-def expose_api( func, to_json=True, user_required=True ):
- """
- Expose this function via the API.
- """
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- def error( environ, start_response ):
- start_response( error_status, [('Content-type', 'text/plain')] )
- return error_message
- error_status = '403 Forbidden'
- if trans.error_message:
- return trans.error_message
- if user_required and trans.anonymous:
- error_message = "API Authentication Required for this request"
- return error
- if trans.request.body:
- try:
- kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
- except ValueError:
- error_status = '400 Bad Request'
- error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
- return error
- trans.response.set_content_type( "application/json" )
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- # Perform api_run_as processing, possibly changing identity
- if 'payload' in kwargs and 'run_as' in kwargs['payload']:
- if not trans.user_can_do_run_as():
- error_message = 'User does not have permissions to run jobs as another user'
- return error
- try:
- decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
- except TypeError:
- trans.response.status = 400
- return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
- try:
- user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
- trans.api_inherit_admin = trans.user_is_admin()
- trans.set_user(user)
- except:
- trans.response.status = 400
- return "That user does not exist."
- try:
- rval = func( self, trans, *args, **kwargs)
- if to_json and trans.debug:
- rval = to_json_string( rval, indent=4, sort_keys=True )
- elif to_json:
- rval = to_json_string( rval )
- return rval
- except paste.httpexceptions.HTTPException:
- raise # handled
- except:
- log.exception( 'Uncaught exception in exposed API method:' )
- raise paste.httpexceptions.HTTPServerError()
- if not hasattr(func, '_orig'):
- decorator._orig = func
- decorator.exposed = True
- return decorator
-
-def __extract_payload_from_request(trans, func, kwargs):
- content_type = trans.request.headers['content-type']
- if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
- # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
- # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
- # are not a part of the request body. This is a problem because it's not possible to differentiate between values
- # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
- # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
- # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
- payload = kwargs.copy()
- named_args, _, _, _ = inspect.getargspec(func)
- for arg in named_args:
- payload.pop(arg, None)
- for k, v in payload.iteritems():
- if isinstance(v, (str, unicode)):
- try:
- payload[k] = from_json_string(v)
- except:
- # may not actually be json, just continue
- pass
- payload = util.recursively_stringify_dictionary_keys( payload )
- else:
- # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
- # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
- # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
- payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
- return payload
-
-def expose_api_raw( func ):
- """
- Expose this function via the API but don't dump the results
- to JSON.
- """
- return expose_api( func, to_json=False )
-
-def expose_api_raw_anonymous( func ):
- """
- Expose this function via the API but don't dump the results
- to JSON.
- """
- return expose_api( func, to_json=False, user_required=False )
-
-def expose_api_anonymous( func, to_json=True ):
- """
- Expose this function via the API but don't require a set user.
- """
- return expose_api( func, to_json=to_json, user_required=False )
-
-
-# ----------------------------------------------------------------------------- (new) api decorators
-# TODO: rename as expose_api and make default.
-def _future_expose_api( func, to_json=True, user_required=True ):
- """
- Expose this function via the API.
- """
- @wraps(func)
- def decorator( self, trans, *args, **kwargs ):
- if trans.error_message:
- # TODO: Document this branch, when can this happen,
- # I don't understand it.
- return __api_error_response( trans, err_msg=trans.error_message )
- if user_required and trans.anonymous:
- error_code = error_codes.USER_NO_API_KEY
- # Use error codes default error message.
- err_msg = "API authentication required for this request"
- return __api_error_response( trans, err_code=error_code, err_msg=err_msg, status_code=403 )
- if trans.request.body:
- try:
- kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
- except ValueError:
- error_code = error_codes.USER_INVALID_JSON
- return __api_error_response( trans, status_code=400, err_code=error_code )
-
- trans.response.set_content_type( JSON_CONTENT_TYPE )
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- # TODO: Refactor next block out into a helper procedure.
- # Perform api_run_as processing, possibly changing identity
- if 'payload' in kwargs and 'run_as' in kwargs['payload']:
- if not trans.user_can_do_run_as():
- error_code = error_codes.USER_CANNOT_RUN_AS
- return __api_error_response( trans, err_code=error_code, status_code=403 )
- try:
- decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
- except TypeError:
- error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
- error_code = error_codes.USER_INVALID_RUN_AS
- return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
- try:
- user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
- trans.api_inherit_admin = trans.user_is_admin()
- trans.set_user(user)
- except:
- error_code = error_codes.USER_INVALID_RUN_AS
- return __api_error_response( trans, err_code=error_code, status_code=400 )
- try:
- rval = func( self, trans, *args, **kwargs)
- if to_json and trans.debug:
- rval = to_json_string( rval, indent=4, sort_keys=True )
- elif to_json:
- rval = to_json_string( rval )
- return rval
- except MessageException as e:
- traceback_string = format_exc()
- return __api_error_response( trans, exception=e, traceback=traceback_string )
- except paste.httpexceptions.HTTPException:
- # TODO: Allow to pass or format for the API???
- raise # handled
- except Exception as e:
- traceback_string = format_exc()
- error_message = 'Uncaught exception in exposed API method:'
- log.exception( error_message )
- return __api_error_response(
- trans,
- status_code=500,
- exception=e,
- traceback=traceback_string,
- err_msg=error_message,
- err_code=error_codes.UNKNOWN
- )
- if not hasattr(func, '_orig'):
- decorator._orig = func
- decorator.exposed = True
- return decorator
-
-def __api_error_message( trans, **kwds ):
- exception = kwds.get( "exception", None )
- if exception:
- # If we are passed a MessageException use err_msg.
- default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
- default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
- extra_error_info = getattr( exception, 'extra_error_info', {} )
- if not isinstance( extra_error_info, dict ):
- extra_error_info = {}
- else:
- default_error_message = "Error processing API request."
- default_error_code = error_codes.UNKNOWN
- extra_error_info = {}
- traceback_string = kwds.get( "traceback", "No traceback available." )
- err_msg = kwds.get( "err_msg", default_error_message )
- error_code_object = kwds.get( "err_code", default_error_code )
- try:
- error_code = error_code_object.code
- except AttributeError:
- # Some sort of bad error code sent in, logic failure on part of
- # Galaxy developer.
- error_code = error_codes.UNKNOWN.code
- # Would prefer the terminology of error_code and error_message, but
- # err_msg used a good number of places already. Might as well not change
- # it?
- error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
- if trans.debug: # TODO: Should admins get to see traceback as well?
- error_response[ "traceback" ] = traceback_string
- return error_response
-
-def __api_error_response( trans, **kwds ):
- error_dict = __api_error_message( trans, **kwds )
- exception = kwds.get( "exception", None )
- # If we are given a status code directly - use it - otherwise check
- # the exception for a status_code attribute.
- if "status_code" in kwds:
- status_code = int( kwds.get( "status_code" ) )
- elif hasattr( exception, "status_code" ):
- status_code = int( exception.status_code )
- else:
- status_code = 500
- response = trans.response
- if not response.status or str(response.status).startswith("20"):
- # Unset status code appears to be string '200 OK', if anything
- # non-success (i.e. not 200 or 201) has been set, do not override
- # underlying controller.
- response.status = status_code
- return to_json_string( error_dict )
-
-
-# TODO: rename as expose_api and make default.
-def _future_expose_api_anonymous( func, to_json=True ):
- """
- Expose this function via the API but don't require a set user.
- """
- return _future_expose_api( func, to_json=to_json, user_required=False )
-
-
-def _future_expose_api_raw( func ):
- return _future_expose_api( func, to_json=False, user_required=True )
-
-
-def _future_expose_api_raw_anonymous( func ):
- return _future_expose_api( func, to_json=False, user_required=False )
-
-
-
-NOT_SET = object()
-
-
-def error( message ):
- raise MessageException( message, type='error' )
-
-
-def form( *args, **kwargs ):
- return FormBuilder( *args, **kwargs )
-
-
-class WebApplication( base.WebApplication ):
-
- def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
- self.name = name
- base.WebApplication.__init__( self )
- self.set_transaction_factory( lambda e: self.transaction_chooser( e, galaxy_app, session_cookie ) )
- # Mako support
- self.mako_template_lookup = self.create_mako_template_lookup( galaxy_app, name )
- # Security helper
- self.security = galaxy_app.security
-
- def create_mako_template_lookup( self, galaxy_app, name ):
- paths = []
- # First look in webapp specific directory
- if name is not None:
- paths.append( os.path.join( galaxy_app.config.template_path, 'webapps', name ) )
- # Then look in root directory
- paths.append( galaxy_app.config.template_path )
- # Create TemplateLookup with a small cache
- return mako.lookup.TemplateLookup(directories=paths,
- module_directory=galaxy_app.config.template_cache,
- collection_size=500,
- output_encoding='utf-8' )
-
- def handle_controller_exception( self, e, trans, **kwargs ):
- if isinstance( e, MessageException ):
- # In the case of a controller exception, sanitize to make sure
- # unsafe html input isn't reflected back to the user
- return trans.show_message( sanitize_html(e.err_msg), e.type )
-
- def make_body_iterable( self, trans, body ):
- if isinstance( body, FormBuilder ):
- body = trans.show_form( body )
- return base.WebApplication.make_body_iterable( self, trans, body )
-
- def transaction_chooser( self, environ, galaxy_app, session_cookie ):
- return GalaxyWebTransaction( environ, galaxy_app, self, session_cookie )
-
- def add_ui_controllers( self, package_name, app ):
- """
- Search for UI controllers in `package_name` and add
- them to the webapp.
- """
- from galaxy.web.base.controller import BaseUIController
- from galaxy.web.base.controller import ControllerUnavailable
- package = import_module( package_name )
- controller_dir = package.__path__[0]
- for fname in os.listdir( controller_dir ):
- if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
- name = fname[:-3]
- module_name = package_name + "." + name
- try:
- module = import_module( module_name )
- except ControllerUnavailable, exc:
- log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
- continue
- # Look for a controller inside the modules
- for key in dir( module ):
- T = getattr( module, key )
- if inspect.isclass( T ) and T is not BaseUIController and issubclass( T, BaseUIController ):
- controller = self._instantiate_controller( T, app )
- self.add_ui_controller( name, controller )
-
- def add_api_controllers( self, package_name, app ):
- """
- Search for UI controllers in `package_name` and add
- them to the webapp.
- """
- from galaxy.web.base.controller import BaseAPIController
- from galaxy.web.base.controller import ControllerUnavailable
- package = import_module( package_name )
- controller_dir = package.__path__[0]
- for fname in os.listdir( controller_dir ):
- if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
- name = fname[:-3]
- module_name = package_name + "." + name
- try:
- module = import_module( module_name )
- except ControllerUnavailable, exc:
- log.debug("%s could not be loaded: %s" % (module_name, str(exc)))
- continue
- for key in dir( module ):
- T = getattr( module, key )
- # Exclude classes such as BaseAPIController and BaseTagItemsController
- if inspect.isclass( T ) and not key.startswith("Base") and issubclass( T, BaseAPIController ):
- # By default use module_name, but allow controller to override name
- controller_name = getattr( T, "controller_name", name )
- controller = self._instantiate_controller( T, app )
- self.add_api_controller( controller_name, controller )
-
- def _instantiate_controller( self, T, app ):
- """ Extension point, allow apps to contstruct controllers differently,
- really just used to stub out actual controllers for routes testing.
- """
- return T( app )
-
-
-class ProvidesAppContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- database and event handling.
-
- Mixed in class must provide `app` property.
- """
-
- def log_action( self, user=None, action=None, context=None, params=None):
- """
- Application-level logging of user actions.
- """
- if self.app.config.log_actions:
- action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) )
- try:
- if user:
- action.user = user
- else:
- action.user = self.user
- except:
- action.user = None
- try:
- action.session_id = self.galaxy_session.id
- except:
- action.session_id = None
- self.sa_session.add( action )
- self.sa_session.flush()
-
- def log_event( self, message, tool_id=None, **kwargs ):
- """
- Application level logging. Still needs fleshing out (log levels and such)
- Logging events is a config setting - if False, do not log.
- """
- if self.app.config.log_events:
- event = self.app.model.Event()
- event.tool_id = tool_id
- try:
- event.message = message % kwargs
- except:
- event.message = message
- try:
- event.history = self.get_history()
- except:
- event.history = None
- try:
- event.history_id = self.history.id
- except:
- event.history_id = None
- try:
- event.user = self.user
- except:
- event.user = None
- try:
- event.session_id = self.galaxy_session.id
- except:
- event.session_id = None
- self.sa_session.add( event )
- self.sa_session.flush()
-
- @property
- def sa_session( self ):
- """
- Returns a SQLAlchemy session -- currently just gets the current
- session from the threadlocal session context, but this is provided
- to allow migration toward a more SQLAlchemy 0.4 style of use.
- """
- return self.app.model.context.current
-
- def expunge_all( self ):
- app = self.app
- context = app.model.context
- context.expunge_all()
- # This is a bit hacky, should refactor this. Maybe refactor to app -> expunge_all()
- if hasattr(app, 'install_model'):
- install_model = app.install_model
- if install_model != app.model:
- install_model.context.expunge_all()
-
- def get_toolbox(self):
- """Returns the application toolbox"""
- return self.app.toolbox
-
- @property
- def model( self ):
- return self.app.model
-
- @property
- def install_model( self ):
- return self.app.install_model
-
- def request_types(self):
- if self.sa_session.query( self.app.model.RequestType ).filter_by( deleted=False ).count() > 0:
- return True
- return False
-
-
-class ProvidesUserContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about users.
-
- Mixed in class must provide `user`, `api_inherit_admin`, and `app`
- properties.
- """
-
- @property
- def anonymous( self ):
- return self.user is None and not self.api_inherit_admin
-
- def get_current_user_roles( self ):
- user = self.user
- if user:
- roles = user.all_roles()
- else:
- roles = []
- return roles
-
- def user_is_admin( self ):
- if self.api_inherit_admin:
- return True
- return self.user and self.user.email in self.app.config.admin_users_list
-
- def user_can_do_run_as( self ):
- run_as_users = [ user for user in self.app.config.get( "api_allow_run_as", "" ).split( "," ) if user ]
- if not run_as_users:
- return False
- user_in_run_as_users = self.user and self.user.email in run_as_users
- # Can do if explicitly in list or master_api_key supplied.
- can_do_run_as = user_in_run_as_users or self.api_inherit_admin
- return can_do_run_as
-
- @property
- def user_ftp_dir( self ):
- identifier = self.app.config.ftp_upload_dir_identifier
- return os.path.join( self.app.config.ftp_upload_dir, getattr(self.user, identifier) )
-
-
-class ProvidesHistoryContext( object ):
- """ For transaction-like objects to provide Galaxy convience layer for
- reasoning about histories.
-
- Mixed in class must provide `user`, `history`, and `app`
- properties.
- """
-
- def db_dataset_for( self, dbkey ):
- """
- Returns the db_file dataset associated/needed by `dataset`, or `None`.
- """
- # If no history, return None.
- if self.history is None:
- return None
- if isinstance(self.history, Bunch):
- # The API presents a Bunch for a history. Until the API is
- # more fully featured for handling this, also return None.
- return None
- datasets = self.sa_session.query( self.app.model.HistoryDatasetAssociation ) \
- .filter_by( deleted=False, history_id=self.history.id, extension="len" )
- for ds in datasets:
- if dbkey == ds.dbkey:
- return ds
- return None
-
- @property
- def db_builds( self ):
- """
- Returns the builds defined by galaxy and the builds defined by
- the user (chromInfo in history).
- """
- # FIXME: This method should be removed
- return self.app.genome_builds.get_genome_build_names( trans=self )
-
-
-class GalaxyWebTransaction( base.DefaultWebTransaction, ProvidesAppContext, ProvidesUserContext, ProvidesHistoryContext ):
- """
- Encapsulates web transaction specific state for the Galaxy application
- (specifically the user's "cookie" session and history)
- """
-
- def __init__( self, environ, app, webapp, session_cookie=None):
- self.app = app
- self.webapp = webapp
- self.security = webapp.security
- base.DefaultWebTransaction.__init__( self, environ )
- self.setup_i18n()
- self.expunge_all()
- self.debug = asbool( self.app.config.get( 'debug', False ) )
- # Flag indicating whether we are in workflow building mode (means
- # that the current history should not be used for parameter values
- # and such).
- self.workflow_building_mode = False
- # Flag indicating whether this is an API call and the API key user is an administrator
- self.api_inherit_admin = False
- self.__user = None
- self.galaxy_session = None
- self.error_message = None
-
- if self.environ.get('is_api_request', False):
- # With API requests, if there's a key, use it and associate the
- # user with the transaction.
- # If not, check for an active session but do not create one.
- # If an error message is set here, it's sent back using
- # trans.show_error in the response -- in expose_api.
- self.error_message = self._authenticate_api( session_cookie )
- elif self.app.name == "reports":
- self.galaxy_session = None
- else:
- # This is a web request, get or create session.
- self._ensure_valid_session( session_cookie )
- if self.galaxy_session:
- # When we've authenticated by session, we have to check the
- # following.
- # Prevent deleted users from accessing Galaxy
- if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
- self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
- if self.app.config.require_login:
- self._ensure_logged_in_user( environ, session_cookie )
-
- def setup_i18n( self ):
- locales = []
- if 'HTTP_ACCEPT_LANGUAGE' in self.environ:
- # locales looks something like: ['en', 'en-us;q=0.7', 'ja;q=0.3']
- client_locales = self.environ['HTTP_ACCEPT_LANGUAGE'].split( ',' )
- for locale in client_locales:
- try:
- locales.append( Locale.parse( locale.split( ';' )[0].strip(), sep='-' ).language )
- except Exception, e:
- log.debug( "Error parsing locale '%s'. %s: %s", locale, type( e ), e )
- if not locales:
- # Default to English
- locales = 'en'
- t = Translations.load( dirname='locale', locales=locales, domain='ginga' )
- self.template_context.update( dict( _=t.ugettext, n_=t.ugettext, N_=t.ungettext ) )
-
- def get_user( self ):
- """Return the current user if logged in or None."""
- if self.galaxy_session:
- return self.galaxy_session.user
- else:
- return self.__user
-
- def set_user( self, user ):
- """Set the current user."""
- if self.galaxy_session:
- self.galaxy_session.user = user
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
- self.__user = user
-
- user = property( get_user, set_user )
-
- def get_cookie( self, name='galaxysession' ):
- """Convenience method for getting a session cookie"""
- try:
- # If we've changed the cookie during the request return the new value
- if name in self.response.cookies:
- return self.response.cookies[name].value
- else:
- return self.request.cookies[name].value
- except:
- return None
-
- def set_cookie( self, value, name='galaxysession', path='/', age=90, version='1' ):
- """Convenience method for setting a session cookie"""
- # The galaxysession cookie value must be a high entropy 128 bit random number encrypted
- # using a server secret key. Any other value is invalid and could pose security issues.
- self.response.cookies[name] = value
- self.response.cookies[name]['path'] = path
- self.response.cookies[name]['max-age'] = 3600 * 24 * age # 90 days
- tstamp = time.localtime( time.time() + 3600 * 24 * age )
- self.response.cookies[name]['expires'] = time.strftime( '%a, %d-%b-%Y %H:%M:%S GMT', tstamp )
- self.response.cookies[name]['version'] = version
- try:
- self.response.cookies[name]['httponly'] = True
- except CookieError, e:
- log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
-
- def _authenticate_api( self, session_cookie ):
- """
- Authenticate for the API via key or session (if available).
- """
- api_key = self.request.params.get('key', None)
- secure_id = self.get_cookie( name=session_cookie )
- api_key_supplied = self.environ.get('is_api_request', False) and api_key
- if api_key_supplied and self._check_master_api_key( api_key ):
- self.api_inherit_admin = True
- log.info( "Session authenticated using Galaxy master api key" )
- self.user = None
- self.galaxy_session = None
- elif api_key_supplied:
- # Sessionless API transaction, we just need to associate a user.
- try:
- provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
- except NoResultFound:
- return 'Provided API key is not valid.'
- if provided_key.user.deleted:
- return 'User account is deactivated, please contact an administrator.'
- newest_key = provided_key.user.api_keys[0]
- if newest_key.key != provided_key.key:
- return 'Provided API key has expired.'
- self.set_user( provided_key.user )
- elif secure_id:
- # API authentication via active session
- # Associate user using existing session
- self._ensure_valid_session( session_cookie )
- else:
- # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
- self.user = None
- self.galaxy_session = None
-
- def _check_master_api_key( self, api_key ):
- master_api_key = getattr( self.app.config, 'master_api_key', None )
- if not master_api_key:
- return False
- # Hash keys to make them the same size, so we can do safe comparison.
- master_hash = hashlib.sha256( master_api_key ).hexdigest()
- provided_hash = hashlib.sha256( api_key ).hexdigest()
- return safe_str_cmp( master_hash, provided_hash )
-
- def _ensure_valid_session( self, session_cookie, create=True):
- """
- Ensure that a valid Galaxy session exists and is available as
- trans.session (part of initialization)
-
- Support for universe_session and universe_user cookies has been
- removed as of 31 Oct 2008.
- """
- # Try to load an existing session
- secure_id = self.get_cookie( name=session_cookie )
- galaxy_session = None
- prev_galaxy_session = None
- user_for_new_session = None
- invalidate_existing_session = False
- # Track whether the session has changed so we can avoid calling flush
- # in the most common case (session exists and is valid).
- galaxy_session_requires_flush = False
- if secure_id:
- # Decode the cookie value to get the session_key
- session_key = self.security.decode_guid( secure_id )
- try:
- # Make sure we have a valid UTF-8 string
- session_key = session_key.encode( 'utf8' )
- except UnicodeDecodeError:
- # We'll end up creating a new galaxy_session
- session_key = None
- if session_key:
- # Retrieve the galaxy_session id via the unique session_key
- galaxy_session = self.sa_session.query( self.app.model.GalaxySession ) \
- .filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True ) ).first() #noqa
- # If remote user is in use it can invalidate the session and in some
- # cases won't have a cookie set above, so we need to check some
- # things now.
- if self.app.config.use_remote_user:
- # If this is an api request, and they've passed a key, we let this go.
- assert self.app.config.remote_user_header in self.environ, \
- "use_remote_user is set but %s header was not provided" % self.app.config.remote_user_header
- remote_user_email = self.environ[ self.app.config.remote_user_header ]
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- if galaxy_session:
- # An existing session, make sure correct association exists
- if galaxy_session.user is None:
- # No user, associate
- galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
- galaxy_session_requires_flush = True
- elif ((galaxy_session.user.email != remote_user_email) and
- ((not self.app.config.allow_user_impersonation) or
- (remote_user_email not in self.app.config.admin_users_list))):
- # Session exists but is not associated with the correct
- # remote user, and the currently set remote_user is not a
- # potentially impersonating admin.
- invalidate_existing_session = True
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
- remote_user_email, galaxy_session.user.email )
- else:
- # No session exists, get/create user for new session
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- else:
- if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
- # Remote user support is not enabled, but there is an existing
- # session with an external user, invalidate
- invalidate_existing_session = True
- log.warning( "User '%s' is an external user with an existing session, invalidating session since external auth is disabled",
- galaxy_session.user.email )
- elif galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted:
- invalidate_existing_session = True
- log.warning( "User '%s' is marked deleted, invalidating session" % galaxy_session.user.email )
- # Do we need to invalidate the session for some reason?
- if invalidate_existing_session:
- prev_galaxy_session = galaxy_session
- prev_galaxy_session.is_valid = False
- galaxy_session = None
- # No relevant cookies, or couldn't find, or invalid, so create a new session
- if galaxy_session is None:
- galaxy_session = self.__create_new_session( prev_galaxy_session, user_for_new_session )
- galaxy_session_requires_flush = True
- self.galaxy_session = galaxy_session
- self.__update_session_cookie( name=session_cookie )
- else:
- self.galaxy_session = galaxy_session
- # Do we need to flush the session?
- if galaxy_session_requires_flush:
- self.sa_session.add( galaxy_session )
- # FIXME: If prev_session is a proper relation this would not
- # be needed.
- if prev_galaxy_session:
- self.sa_session.add( prev_galaxy_session )
- self.sa_session.flush()
- # If the old session was invalid, get a new history with our new session
- if invalidate_existing_session:
- self.new_history()
-
- def _ensure_logged_in_user( self, environ, session_cookie ):
- # The value of session_cookie can be one of
- # 'galaxysession' or 'galaxycommunitysession'
- # Currently this method does nothing unless session_cookie is 'galaxysession'
- if session_cookie == 'galaxysession' and self.galaxy_session.user is None:
- # TODO: re-engineer to eliminate the use of allowed_paths
- # as maintenance overhead is far too high.
- allowed_paths = (
- url_for( controller='root', action='index' ),
- url_for( controller='root', action='tool_menu' ),
- url_for( controller='root', action='masthead' ),
- url_for( controller='root', action='history' ),
- url_for( controller='user', action='api_keys' ),
- url_for( controller='user', action='create' ),
- url_for( controller='user', action='index' ),
- url_for( controller='user', action='login' ),
- url_for( controller='user', action='logout' ),
- url_for( controller='user', action='manage_user_info' ),
- url_for( controller='user', action='set_default_permissions' ),
- url_for( controller='user', action='reset_password' ),
- url_for( controller='user', action='openid_auth' ),
- url_for( controller='user', action='openid_process' ),
- url_for( controller='user', action='openid_associate' ),
- url_for( controller='library', action='browse' ),
- url_for( controller='history', action='list' ),
- url_for( controller='dataset', action='list' )
- )
- display_as = url_for( controller='root', action='display_as' )
- if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
- try:
- host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
- except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
- host = None
- if host in UCSC_SERVERS:
- return
- external_display_path = url_for( controller='', action='display_application' )
- if self.request.path.startswith( external_display_path ):
- request_path_split = self.request.path.split( '/' )
- try:
- if (self.app.datatypes_registry.display_applications.get( request_path_split[-5] )
- and request_path_split[-4] in self.app.datatypes_registry.display_applications.get( request_path_split[-5] ).links
- and request_path_split[-3] != 'None'):
- return
- except IndexError:
- pass
- if self.request.path not in allowed_paths:
- self.response.send_redirect( url_for( controller='root', action='index' ) )
-
- def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
- """
- Create a new GalaxySession for this request, possibly with a connection
- to a previous session (in `prev_galaxy_session`) and an existing user
- (in `user_for_new_session`).
-
- Caller is responsible for flushing the returned session.
- """
- session_key = self.security.get_new_guid()
- galaxy_session = self.app.model.GalaxySession(
- session_key=session_key,
- is_valid=True,
- remote_host=self.request.remote_host,
- remote_addr=self.request.remote_addr,
- referer=self.request.headers.get( 'Referer', None ) )
- if prev_galaxy_session:
- # Invalidated an existing session for some reason, keep track
- galaxy_session.prev_session_id = prev_galaxy_session.id
- if user_for_new_session:
- # The new session should be associated with the user
- galaxy_session.user = user_for_new_session
- return galaxy_session
-
- def get_or_create_remote_user( self, remote_user_email ):
- """
- Create a remote user with the email remote_user_email and return it
- """
- if not self.app.config.use_remote_user:
- return None
- if getattr( self.app.config, "normalize_remote_user_email", False ):
- remote_user_email = remote_user_email.lower()
- user = self.sa_session.query( self.app.model.User
- ).filter( self.app.model.User.table.c.email==remote_user_email ).first() #noqa
- if user:
- # GVK: June 29, 2009 - This is to correct the behavior of a previous bug where a private
- # role and default user / history permissions were not set for remote users. When a
- # remote user authenticates, we'll look for this information, and if missing, create it.
- if not self.app.security_agent.get_private_user_role( user ):
- self.app.security_agent.create_private_user_role( user )
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- if not user.default_permissions:
- self.app.security_agent.user_set_default_permissions( user )
- self.app.security_agent.user_set_default_permissions( user, history=True, dataset=True )
- elif user is None:
- username = remote_user_email.split( '@', 1 )[0].lower()
- random.seed()
- user = self.app.model.User( email=remote_user_email )
- user.set_password_cleartext( ''.join( random.sample( string.letters + string.digits, 12 ) ) )
- user.external = True
- # Replace invalid characters in the username
- for char in filter( lambda x: x not in string.ascii_lowercase + string.digits + '-', username ):
- username = username.replace( char, '-' )
- # Find a unique username - user can change it later
- if ( self.sa_session.query( self.app.model.User ).filter_by( username=username ).first() ):
- i = 1
- while ( self.sa_session.query( self.app.model.User ).filter_by( username=(username + '-' + str(i) ) ).first() ):
- i += 1
- username += '-' + str(i)
- user.username = username
- self.sa_session.add( user )
- self.sa_session.flush()
- self.app.security_agent.create_private_user_role( user )
- # We set default user permissions, before we log in and set the default history permissions
- if 'webapp' not in self.environ or self.environ['webapp'] != 'tool_shed':
- self.app.security_agent.user_set_default_permissions( user )
- # self.log_event( "Automatically created account '%s'", user.email )
- return user
-
- def __update_session_cookie( self, name='galaxysession' ):
- """
- Update the session cookie to match the current session.
- """
- self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
- name=name, path=self.app.config.cookie_path )
-
- def handle_user_login( self, user ):
- """
- Login a new user (possibly newly created)
-
- - create a new session
- - associate new session with user
- - if old session had a history and it was not associated with a user, associate it with the new session,
- otherwise associate the current session's history with the user
- - add the disk usage of the current session to the user's total disk usage
- """
- # Set the previous session
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- # Define a new current_session
- self.galaxy_session = self.__create_new_session( prev_galaxy_session, user )
- if self.webapp.name == 'galaxy':
- cookie_name = 'galaxysession'
- # Associated the current user's last accessed history (if exists) with their new session
- history = None
- try:
- users_last_session = user.galaxy_sessions[0]
- last_accessed = True
- except:
- users_last_session = None
- last_accessed = False
- if (prev_galaxy_session.current_history and not
- prev_galaxy_session.current_history.deleted and
- prev_galaxy_session.current_history.datasets):
- if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
- # If the previous galaxy session had a history, associate it with the new
- # session, but only if it didn't belong to a different user.
- history = prev_galaxy_session.current_history
- if prev_galaxy_session.user is None:
- # Increase the user's disk usage by the amount of the previous history's datasets if they didn't already own it.
- for hda in history.datasets:
- user.total_disk_usage += hda.quota_amount( user )
- elif self.galaxy_session.current_history:
- history = self.galaxy_session.current_history
- if (not history and users_last_session and
- users_last_session.current_history and not
- users_last_session.current_history.deleted):
- history = users_last_session.current_history
- elif not history:
- history = self.get_history( create=True )
- if history not in self.galaxy_session.histories:
- self.galaxy_session.add_history( history )
- if history.user is None:
- history.user = user
- self.galaxy_session.current_history = history
- if not last_accessed:
- # Only set default history permissions if current history is not from a previous session
- self.app.security_agent.history_set_default_permissions( history, dataset=True, bypass_manage_permission=True )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session, history ) )
- else:
- cookie_name = 'galaxycommunitysession'
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- self.sa_session.flush()
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name=cookie_name )
-
- def handle_user_logout( self, logout_all=False ):
- """
- Logout the current user:
- - invalidate the current session
- - create a new session with no user associated
- """
- prev_galaxy_session = self.galaxy_session
- prev_galaxy_session.is_valid = False
- self.galaxy_session = self.__create_new_session( prev_galaxy_session )
- self.sa_session.add_all( ( prev_galaxy_session, self.galaxy_session ) )
- galaxy_user_id = prev_galaxy_session.user_id
- if logout_all and galaxy_user_id is not None:
- for other_galaxy_session in self.sa_session.query( self.app.model.GalaxySession
- ).filter( and_( self.app.model.GalaxySession.table.c.user_id==galaxy_user_id, #noqa
- self.app.model.GalaxySession.table.c.is_valid==True, #noqa
- self.app.model.GalaxySession.table.c.id!=prev_galaxy_session.id ) ): #noqa
- other_galaxy_session.is_valid = False
- self.sa_session.add( other_galaxy_session )
- self.sa_session.flush()
- if self.webapp.name == 'galaxy':
- # This method is not called from the Galaxy reports, so the cookie will always be galaxysession
- self.__update_session_cookie( name='galaxysession' )
- elif self.webapp.name == 'tool_shed':
- self.__update_session_cookie( name='galaxycommunitysession' )
-
- def get_galaxy_session( self ):
- """
- Return the current galaxy session
- """
- return self.galaxy_session
-
- def get_history( self, create=False ):
- """
- Load the current history, creating a new one only if there is no
- current history and we're told to create.
- Transactions will not always have an active history (API requests), so
- None is a valid response.
- """
- history = None
- if self.galaxy_session:
- history = self.galaxy_session.current_history
- if not history and util.string_as_bool( create ):
- history = self.new_history()
- return history
-
- def set_history( self, history ):
- if history and not history.deleted:
- self.galaxy_session.current_history = history
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
-
- history = property( get_history, set_history )
-
- def get_or_create_default_history( self ):
- """
- Gets or creates a default history and associates it with the current
- session.
- """
-
- # There must be a user to fetch a default history.
- if not self.galaxy_session.user:
- return self.new_history()
-
- # Look for default history that (a) has default name + is not deleted and
- # (b) has no datasets. If suitable history found, use it; otherwise, create
- # new history.
- unnamed_histories = self.sa_session.query( self.app.model.History ).filter_by(
- user=self.galaxy_session.user,
- name=self.app.model.History.default_name,
- deleted=False )
- default_history = None
- for history in unnamed_histories:
- if len( history.datasets ) == 0:
- # Found suitable default history.
- default_history = history
- break
-
- # Set or create history.
- if default_history:
- history = default_history
- self.set_history( history )
- else:
- history = self.new_history()
-
- return history
-
- def new_history( self, name=None ):
- """
- Create a new history and associate it with the current session and
- its associated user (if set).
- """
- # Create new history
- history = self.app.model.History()
- if name:
- history.name = name
- # Associate with session
- history.add_galaxy_session( self.galaxy_session )
- # Make it the session's current history
- self.galaxy_session.current_history = history
- # Associate with user
- if self.galaxy_session.user:
- history.user = self.galaxy_session.user
- # Track genome_build with history
- history.genome_build = self.app.genome_builds.default_value
- # Set the user's default history permissions
- self.app.security_agent.history_set_default_permissions( history )
- # Save
- self.sa_session.add_all( ( self.galaxy_session, history ) )
- self.sa_session.flush()
- return history
-
- @base.lazy_property
- def template_context( self ):
- return dict()
-
- def make_form_data( self, name, **kwargs ):
- rval = self.template_context[name] = FormData()
- rval.values.update( kwargs )
- return rval
-
- def set_message( self, message, type=None ):
- """
- Convenience method for setting the 'message' and 'message_type'
- element of the template context.
- """
- self.template_context['message'] = message
- if type:
- self.template_context['status'] = type
-
- def get_message( self ):
- """
- Convenience method for getting the 'message' element of the template
- context.
- """
- return self.template_context['message']
-
- def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single message.
-
- `type`: one of "error", "warning", "info", or "done"; determines the
- type of dialog box and icon displayed with the message
-
- `refresh_frames`: names of frames in the interface that should be
- refreshed when the message is displayed
- """
- return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
-
- def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an error message. See `show_message`.
- """
- return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying an ok message. See `show_message`.
- """
- return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
- """
- Convenience method for displaying a warning message. See `show_message`.
- """
- return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
-
- def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
- """
- Convenience method for displaying a simple page with a single HTML
- form.
- """
- return self.fill_template( template, form=form, header=header,
- use_panels=( form.use_panels or use_panels ),
- active_view=active_view )
-
- def fill_template(self, filename, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- # call get_user so we can invalidate sessions from external users,
- # if external auth has been disabled.
- self.get_user()
- if filename.endswith( ".mako" ):
- return self.fill_template_mako( filename, **kwargs )
- else:
- template = Template( file=os.path.join(self.app.config.template_path, filename),
- searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
- return str( template )
-
- def fill_template_mako( self, filename, template_lookup=None, **kwargs ):
- template_lookup = template_lookup or self.webapp.mako_template_lookup
- template = template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
-
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util,
- request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
- return template.render( **data )
-
- def stream_template_mako( self, filename, **kwargs ):
- template = self.webapp.mako_template_lookup.get_template( filename )
- template.output_encoding = 'utf-8'
- data = dict( caller=self, t=self, trans=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app )
- data.update( self.template_context )
- data.update( kwargs )
-
- def render( environ, start_response ):
- response_write = start_response( self.response.wsgi_status(), self.response.wsgi_headeritems() )
-
- class StreamBuffer( object ):
- def write( self, d ):
- response_write( d.encode( 'utf-8' ) )
- buffer = StreamBuffer()
- context = mako.runtime.Context( buffer, **data )
- template.render_context( context )
- return []
- return render
-
- def fill_template_string(self, template_string, context=None, **kwargs):
- """
- Fill in a template, putting any keyword arguments on the context.
- """
- template = Template( source=template_string,
- searchList=[context or kwargs, dict(caller=self)] )
- return str(template)
-
-
-class FormBuilder( object ):
- """
- Simple class describing an HTML form
- """
- def __init__( self, action="", title="", name="form", submit_text="submit", use_panels=False ):
- self.title = title
- self.name = name
- self.action = action
- self.submit_text = submit_text
- self.inputs = []
- self.use_panels = use_panels
-
- def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
- self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
- return self
-
- def add_text( self, name, label, value=None, error=None, help=None ):
- return self.add_input( 'text', label, name, value, error, help )
-
- def add_password( self, name, label, value=None, error=None, help=None ):
- return self.add_input( 'password', label, name, value, error, help )
-
- def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
- self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label ) )
- return self
-
-
-class FormInput( object ):
- """
- Simple class describing a form input element
- """
- def __init__( self, type, name, label, value=None, error=None, help=None, use_label=True, extra_attributes={}, **kwargs ):
- self.type = type
- self.name = name
- self.label = label
- self.value = value
- self.error = error
- self.help = help
- self.use_label = use_label
- self.extra_attributes = extra_attributes
-
-
-class DatalistInput( FormInput ):
- """ Data list input """
-
- def __init__( self, name, *args, **kwargs ):
- if 'extra_attributes' not in kwargs:
- kwargs[ 'extra_attributes' ] = {}
- kwargs[ 'extra_attributes' ][ 'list' ] = name
- FormInput.__init__( self, None, name, *args, **kwargs )
- self.options = kwargs.get( 'options', {} )
-
- def body_html( self ):
- options = "".join( [ "<option value='%s'>%s</option>" % ( key, value ) for key, value in self.options.iteritems() ] )
- return """<datalist id="%s">%s</datalist>""" % ( self.name, options )
-
-
-class SelectInput( FormInput ):
- """ A select form input. """
- def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
- FormInput.__init__( self, "select", name, label, value=value, error=error, help=help, use_label=use_label )
- self.options = options
-
-
-class FormData( object ):
- """
- Class for passing data about a form to a template, very rudimentary, could
- be combined with the tool form handling to build something more general.
- """
- def __init__( self ):
- self.values = Bunch()
- self.errors = Bunch()
-
-
-class Bunch( dict ):
- """
- Bunch based on a dict
- """
- def __getattr__( self, key ):
- if key not in self:
- raise AttributeError(key)
- return self[key]
-
- def __setattr__( self, key, value ):
- self[key] = value
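The Bunch class at the end of this excerpt is a small dict subclass that exposes its keys as attributes, and FormData above simply pairs two of them (values and errors) for template rendering. A minimal standalone sketch of the same idea, outside the diff, showing how it behaves:

class Bunch(dict):
    # Keys double as attributes; missing keys raise AttributeError rather than KeyError.
    def __getattr__(self, key):
        if key not in self:
            raise AttributeError(key)
        return self[key]

    def __setattr__(self, key, value):
        self[key] = value

b = Bunch()
b.status = 'done'            # stored as b['status']
assert b['status'] == 'done'
assert b.status == 'done'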
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dannon: Merged in lance_parsons/galaxy-central-pull-requests/venv (pull request #486)
by commits-noreply@bitbucket.org 04 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1eedbc8a3a8d/
Changeset: 1eedbc8a3a8d
User: dannon
Date: 2014-09-04 22:25:37
Summary: Merged in lance_parsons/galaxy-central-pull-requests/venv (pull request #486)
Source virtualenv prior to python check and source venv for reports webapp
Affected #: 2 files
diff -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f run.sh
--- a/run.sh
+++ b/run.sh
@@ -2,6 +2,13 @@
cd `dirname $0`
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+if [ -d .venv ];
+then
+ . .venv/bin/activate
+fi
+
python ./scripts/check_python.py
[ $? -ne 0 ] && exit 1
@@ -11,13 +18,6 @@
python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
fi
-# If there is a .venv/ directory, assume it contains a virtualenv that we
-# should run this instance in.
-if [ -d .venv ];
-then
- . .venv/bin/activate
-fi
-
if [ -n "$GALAXY_RUN_ALL" ]; then
servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' universe_wsgi.ini | xargs echo`
daemon=`echo "$@" | grep -q daemon`
diff -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f run_reports.sh
--- a/run_reports.sh
+++ b/run_reports.sh
@@ -11,6 +11,13 @@
cd `dirname $0`
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+if [ -d .venv ];
+then
+ . .venv/bin/activate
+fi
+
./scripts/common_startup.sh --skip-samples
GALAXY_REPORTS_CONFIG=${GALAXY_REPORTS_CONFIG:-reports_wsgi.ini}
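The point of moving the .venv activation above the interpreter check is that scripts/check_python.py, and everything started afterwards, now runs under the virtualenv's python instead of whichever interpreter the system PATH resolves first. A standalone way to confirm which interpreter a launcher ended up with (illustrative only, not Galaxy's check script), assuming Python 2 as used here:

import sys

# Inside an activated virtualenv the interpreter lives under .venv/ and its
# prefix differs from the system one (old virtualenv sets sys.real_prefix,
# newer stdlib venvs set sys.base_prefix).
in_venv = hasattr(sys, 'real_prefix') or sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
print "interpreter: %s (virtualenv: %s)" % (sys.executable, in_venv)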
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/22c4bcbfc20f/
Changeset: 22c4bcbfc20f
Branch: venv
User: lance_parsons
Date: 2014-09-04 21:04:34
Summary: Created new branch venv
Affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/commits/619a8d213032/
Changeset: 619a8d213032
Branch: venv
User: lance_parsons
Date: 2014-09-04 21:12:04
Summary: Source virtualenv before python check and for reports app
Affected #: 2 files
diff -r 22c4bcbfc20f116d45398f93b88666ad8a94a4e7 -r 619a8d213032b726eded009572c2ebfc78c70834 run.sh
--- a/run.sh
+++ b/run.sh
@@ -2,6 +2,13 @@
cd `dirname $0`
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+if [ -d .venv ];
+then
+ . .venv/bin/activate
+fi
+
python ./scripts/check_python.py
[ $? -ne 0 ] && exit 1
@@ -11,13 +18,6 @@
python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
fi
-# If there is a .venv/ directory, assume it contains a virtualenv that we
-# should run this instance in.
-if [ -d .venv ];
-then
- . .venv/bin/activate
-fi
-
if [ -n "$GALAXY_RUN_ALL" ]; then
servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' universe_wsgi.ini | xargs echo`
daemon=`echo "$@" | grep -q daemon`
diff -r 22c4bcbfc20f116d45398f93b88666ad8a94a4e7 -r 619a8d213032b726eded009572c2ebfc78c70834 run_reports.sh
--- a/run_reports.sh
+++ b/run_reports.sh
@@ -11,6 +11,13 @@
cd `dirname $0`
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+if [ -d .venv ];
+then
+ . .venv/bin/activate
+fi
+
./scripts/common_startup.sh --skip-samples
GALAXY_REPORTS_CONFIG=${GALAXY_REPORTS_CONFIG:-reports_wsgi.ini}
https://bitbucket.org/galaxy/galaxy-central/commits/1eedbc8a3a8d/
Changeset: 1eedbc8a3a8d
User: dannon
Date: 2014-09-04 22:25:37
Summary: Merged in lance_parsons/galaxy-central-pull-requests/venv (pull request #486)
Source virtualenv prior to python check and source venv for reports webapp
Affected #: 2 files
diff -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f run.sh
--- a/run.sh
+++ b/run.sh
@@ -2,6 +2,13 @@
cd `dirname $0`
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+if [ -d .venv ];
+then
+ . .venv/bin/activate
+fi
+
python ./scripts/check_python.py
[ $? -ne 0 ] && exit 1
@@ -11,13 +18,6 @@
python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
fi
-# If there is a .venv/ directory, assume it contains a virtualenv that we
-# should run this instance in.
-if [ -d .venv ];
-then
- . .venv/bin/activate
-fi
-
if [ -n "$GALAXY_RUN_ALL" ]; then
servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' universe_wsgi.ini | xargs echo`
daemon=`echo "$@" | grep -q daemon`
diff -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 -r 1eedbc8a3a8d6abcb8da2a7db246cb4f924b763f run_reports.sh
--- a/run_reports.sh
+++ b/run_reports.sh
@@ -11,6 +11,13 @@
cd `dirname $0`
+# If there is a .venv/ directory, assume it contains a virtualenv that we
+# should run this instance in.
+if [ -d .venv ];
+then
+ . .venv/bin/activate
+fi
+
./scripts/common_startup.sh --skip-samples
GALAXY_REPORTS_CONFIG=${GALAXY_REPORTS_CONFIG:-reports_wsgi.ini}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Merged in natefoo/galaxy-central (pull request #481)
by commits-noreply@bitbucket.org 04 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9b6cccb3af2d/
Changeset: 9b6cccb3af2d
User: natefoo
Date: 2014-09-04 17:38:26
Summary: Merged in natefoo/galaxy-central (pull request #481)
Make build sites files configurable
Affected #: 27 files
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -312,4 +312,22 @@
<sniffer type="galaxy.datatypes.assembly:Amos"/>
--></sniffers>
+ <build_sites>
+ <!--
+ Build sites define the builds (dbkeys) available at sites used by display
+ applications and the URL to those sites.
+
+ The `display` attributes on the `ucsc` and `gbrowse` sites replace the
+ `ucsc_display_sites` and `gbrowse_display_sites` options in
+ universe_wsgi.ini. Because these are used by "old-style" display
+ applications, their types cannot change if you want the old-style display
+ links for these sites to work.
+ -->
+ <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
+ <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" display="modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225"/>
+ <site type="ensembl" file="tool-data/shared/ensembl/ensembl_sites.txt"/>
+ <site type="ensembl_data_url" file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt"/>
+ <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
+ <site type="rviewer" file="tool-data/shared/rviewer/rviewer_build_sites.txt"/>
+ </build_sites></datatypes>
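As the comment block above explains, each <site> entry maps a site type to its builds file and, for the old-style UCSC and GBrowse applications, an optional display list. A rough sketch of reading such a section with the standard library's ElementTree (the variable names are illustrative, not the registry's actual code):

import xml.etree.ElementTree as ElementTree

sample = """<datatypes><build_sites>
  <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
  <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
</build_sites></datatypes>"""

build_sites, display_sites = {}, {}
for elem in ElementTree.fromstring(sample).find('build_sites').findall('site'):
    site_type = elem.get('type')
    build_sites[site_type] = elem.get('file')
    if elem.get('display'):
        # Only these site ids are actually offered as display links.
        display_sites[site_type] = [x.strip() for x in elem.get('display').lower().split(',')]

assert build_sites['igv'] == 'tool-data/shared/igv/igv_build_sites.txt'
assert display_sites['ucsc'] == ['main', 'test', 'archaea', 'ucla']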
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ensembl/ensembl_bam.xml
--- a/display_applications/ensembl/ensembl_bam.xml
+++ b/display_applications/ensembl/ensembl_bam.xml
@@ -1,7 +1,7 @@
<display id="ensembl_bam" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ensembl/ensembl_gff.xml
--- a/display_applications/ensembl/ensembl_gff.xml
+++ b/display_applications/ensembl/ensembl_gff.xml
@@ -1,7 +1,7 @@
<display id="ensembl_gff" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ensembl/ensembl_interval_as_bed.xml
--- a/display_applications/ensembl/ensembl_interval_as_bed.xml
+++ b/display_applications/ensembl/ensembl_interval_as_bed.xml
@@ -1,7 +1,7 @@
<display id="ensembl_interval" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/gbrowse/gbrowse_gff.xml
--- a/display_applications/gbrowse/gbrowse_gff.xml
+++ b/display_applications/gbrowse/gbrowse_gff.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_gff" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/gbrowse/gbrowse_interval_as_bed.xml
--- a/display_applications/gbrowse/gbrowse_interval_as_bed.xml
+++ b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_interval_as_bed" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/gbrowse/gbrowse_wig.xml
--- a/display_applications/gbrowse/gbrowse_wig.xml
+++ b/display_applications/gbrowse/gbrowse_wig.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_wig" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/igv/bam.xml
--- a/display_applications/igv/bam.xml
+++ b/display_applications/igv/bam.xml
@@ -2,7 +2,7 @@
<display id="igv_bam" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/igv/vcf.xml
--- a/display_applications/igv/vcf.xml
+++ b/display_applications/igv/vcf.xml
@@ -2,7 +2,7 @@
<display id="igv_vcf" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/rviewer/bed.xml
--- a/display_applications/rviewer/bed.xml
+++ b/display_applications/rviewer/bed.xml
@@ -2,7 +2,7 @@
<display id="rviewer_interval" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/rviewer/vcf.xml
--- a/display_applications/rviewer/vcf.xml
+++ b/display_applications/rviewer/vcf.xml
@@ -2,7 +2,7 @@
<display id="rviewer_vcf" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml
+++ b/display_applications/ucsc/bam.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bam" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/bigbed.xml
--- a/display_applications/ucsc/bigbed.xml
+++ b/display_applications/ucsc/bigbed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigbed" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/bigwig.xml
--- a/display_applications/ucsc/bigwig.xml
+++ b/display_applications/ucsc/bigwig.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigwig" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml
+++ b/display_applications/ucsc/interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/maf_customtrack.xml
--- a/display_applications/ucsc/maf_customtrack.xml
+++ b/display_applications/ucsc/maf_customtrack.xml
@@ -1,6 +1,6 @@
<display id="ucsc_maf_customtrack" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/vcf.xml
--- a/display_applications/ucsc/vcf.xml
+++ b/display_applications/ucsc/vcf.xml
@@ -1,6 +1,6 @@
<display id="ucsc_vcf" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -64,9 +64,6 @@
# Load dbkey / genome build manager
self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
- # Load build sites (old-style)
- self._configure_build_sites()
-
# Genomes
self.genomes = Genomes( self )
# Data providers registry.
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -16,7 +16,6 @@
from galaxy.util import string_as_bool
from galaxy.util import listify
from galaxy.util.dbkeys import GenomeBuilds
-from galaxy.util.build_sites import BuildSites
from galaxy import eggs
log = logging.getLogger( __name__ )
@@ -224,10 +223,6 @@
self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
- self.ucsc_build_sites = resolve_path( kwargs.get( 'ucsc_build_sites', os.path.join( self.tool_data_path, 'shared', 'ucsc', 'ucsc_build_sites.txt') ), self.root )
- self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
- self.gbrowse_build_sites = resolve_path( kwargs.get( 'gbrowse_build_sites', os.path.join( self.tool_data_path, 'shared', 'gbrowse', 'gbrowse_build_sites.txt') ), self.root )
- self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
self.brand = kwargs.get( 'brand', None )
self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
# Configuration for the message box directly below the masthead.
@@ -575,9 +570,6 @@
def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
- def _configure_build_sites( self ):
- self.build_sites = BuildSites( self )
-
def _configure_toolbox( self ):
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -81,9 +81,13 @@
class DynamicDisplayApplicationBuilder( object ):
@classmethod
- def __init__( self, elem, display_application ):
+ def __init__( self, elem, display_application, build_sites ):
rval = []
- filename = elem.get( 'from_file', None )
+ filename = None
+ if elem.get( 'site_type', None ) is not None:
+ filename = build_sites.get( elem.get( 'site_type' ) )
+ else:
+ filename = elem.get( 'from_file', None )
assert filename is not None, 'Filename and id attributes required for dynamic_links'
skip_startswith = elem.get( 'skip_startswith', None )
separator = elem.get( 'separator', '\t' )
@@ -176,7 +180,7 @@
if link:
rval.links[ link.id ] = link
for dynamic_links in elem.findall( 'dynamic_links' ):
- for link in DynamicDisplayApplicationBuilder( dynamic_links, rval ):
+ for link in DynamicDisplayApplicationBuilder( dynamic_links, rval, datatypes_registry.build_sites ):
rval.links[ link.id ] = link
return rval
def __init__( self, display_id, name, datatypes_registry, version = None ):
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -83,29 +83,31 @@
ret_val = []
ggtail = 'hgGenome_doSubmitUpload=submit'
if not dataset.dbkey:
- dataset.dbkey = 'hg18' # punt!
+ dataset.dbkey = 'hg18' # punt!
if dataset.has_data():
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
- site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
- internal_url = "%s" % url_for( controller='dataset',
- dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
- display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
- display_url = urllib.quote_plus( display_url )
- # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
- #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
- sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
- #sl.append("&hgt.customText=%s")
- sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
- sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
- sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
- sl.append("&hgGenome_doSubmitUpload=submit")
- sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
- s = ''.join(sl)
- s = urllib.quote_plus(s)
- redirect_url = s
- link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
- ret_val.append( (site_name, link) )
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
+ if site_name in datatypes_registry.get_display_sites('ucsc'):
+ site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
+ internal_url = "%s" % url_for( controller='dataset',
+ dataset_id=dataset.id,
+ action='display_at',
+ filename='ucsc_' + site_name )
+ display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
+ display_url = urllib.quote_plus( display_url )
+ # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+ sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
+ #sl.append("&hgt.customText=%s")
+ sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+ sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+ sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+ sl.append("&hgGenome_doSubmitUpload=submit")
+ sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+ s = ''.join(sl)
+ s = urllib.quote_plus(s)
+ redirect_url = s
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( (site_name, link) )
return ret_val
def make_html_table( self, dataset, skipchars=[] ):
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -234,8 +234,8 @@
# Filter UCSC sites to only those that are supported by this build and
# enabled.
valid_sites = [ ( name, url )
- for name, url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey )
- if name in app.config.ucsc_display_sites ]
+ for name, url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey )
+ if name in app.datatypes_registry.get_display_sites('ucsc') ]
if not valid_sites:
return []
# If there are any valid sites, we need to generate the estimated
@@ -750,8 +750,8 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" %
( site_url, dataset.dbkey, seqid, start, stop ) )
@@ -762,8 +762,8 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if seqid.startswith( 'chr' ) and len ( seqid ) > 3:
seqid = seqid[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, seqid, start, stop ) )
@@ -1091,8 +1091,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if chrom.startswith( 'chr' ) and len ( chrom ) > 3:
chrom = chrom[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, chrom, start, stop ) )
@@ -1103,8 +1103,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % ( site_url, dataset.dbkey, chrom, start, stop ) )
link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
ret_val.append( ( site_name, link ) )
@@ -1285,8 +1285,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc',dataset.dbkey):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop ) )
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -69,6 +69,10 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
+ # Build sites
+ self.build_sites = {}
+ self.display_sites = {}
+ self.legacy_build_sites = {}
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
@@ -285,6 +289,8 @@
handling_proprietary_datatypes=handling_proprietary_datatypes,
override=override )
self.upload_file_formats.sort()
+ # Load build sites
+ self.load_build_sites( root )
# Persist the xml form of the registry into a temporary file so that it can be loaded from the command line by tools and
# set_metadata processing.
self.to_xml_file()
@@ -303,6 +309,34 @@
self.sniff_order.append( datatype )
append_to_sniff_order()
+ def load_build_sites( self, root ):
+ if root.find( 'build_sites' ):
+ for elem in root.find( 'build_sites' ).findall( 'site' ):
+ if not (elem.get( 'type' ) and elem.get( 'file' )):
+ self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
+ else:
+ site_type = elem.get( 'type' )
+ file = elem.get( 'file' )
+ self.build_sites[site_type] = file
+ if site_type in ('ucsc', 'gbrowse'):
+ self.legacy_build_sites[site_type] = galaxy.util.read_build_sites( file )
+ if elem.get( 'display', None ):
+ display = elem.get( 'display' )
+ self.display_sites[site_type] = [ x.strip() for x in display.lower().split( ',' ) ]
+ self.log.debug( "Loaded build site '%s': %s with display sites: %s", site_type, file, display )
+ else:
+ self.log.debug( "Loaded build site '%s': %s", site_type, file )
+
+ def get_legacy_sites_by_build( self, site_type, build ):
+ sites = []
+ for site in self.legacy_build_sites.get(site_type, []):
+ if build in site['builds']:
+ sites.append((site['name'], site['url']))
+ return sites
+
+ def get_display_sites( self, site_type ):
+ return self.display_sites.get( site_type, [] )
+
def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
"""
Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -844,6 +844,30 @@
return db_names
+def read_build_sites( filename, check_builds=True ):
+ """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
+ build_sites = []
+ try:
+ for line in open(filename):
+ try:
+ if line[0:1] == "#":
+ continue
+ fields = line.replace("\r", "").replace("\n", "").split("\t")
+ site_name = fields[0]
+ site = fields[1]
+ if check_builds:
+ site_builds = fields[2].split(",")
+ site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
+ else:
+ site_dict = {'name': site_name, 'url': site}
+ build_sites.append( site_dict )
+ except:
+ continue
+ except:
+ print "ERROR: Unable to read builds for site file %s" % filename
+ return build_sites
+
+
def relativize_symlinks( path, start=None, followlinks=False):
for root, dirs, files in os.walk( path, followlinks=followlinks ):
rel_start = None
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/util/build_sites.py
--- a/lib/galaxy/util/build_sites.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Functionality for dealing with build sites for legacy display applications.
-"""
-import os.path
-
-
-class BuildSites( object ):
-
- def __init__( self, app ):
- self._app = app
- self._build_sites = {}
- self.load_build_sites()
-
- def read_build_sites( self, filename, check_builds=True ):
- """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
- build_sites = []
- try:
- for line in open(filename):
- try:
- if line[0:1] == "#":
- continue
- fields = line.replace("\r", "").replace("\n", "").split("\t")
- site_name = fields[0]
- site = fields[1]
- if check_builds:
- site_builds = fields[2].split(",")
- site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
- else:
- site_dict = {'name': site_name, 'url': site}
- build_sites.append( site_dict )
- except:
- continue
- except:
- print "ERROR: Unable to read builds for site file %s" % filename
- return build_sites
-
- def load_build_sites( self ):
- self._build_sites['ucsc'] = self.read_build_sites( self._app.config.ucsc_build_sites )
- self._build_sites['gbrowse'] = self.read_build_sites( self._app.config.gbrowse_build_sites )
-
- def _get_site_by_build( self, site_type, build ):
- sites = []
- for site in self._build_sites[site_type]:
- if build in site['builds']:
- sites.append((site['name'], site['url']))
- return sites
-
- def get_ucsc_sites_by_build( self, build ):
- return self._get_site_by_build( 'ucsc', build )
-
- def get_gbrowse_sites_by_build( self, build ):
- return self._get_site_by_build( 'gbrowse', build )
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -926,7 +926,7 @@
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
try:
host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -364,14 +364,6 @@
# which browsers should be available. URLs and builds available at these
# browsers are defined in the specified files.
-# UCSC browsers:
-#ucsc_build_sites = tool-data/shared/ucsc/ucsc_build_sites.txt
-#ucsc_display_sites = main,test,archaea,ucla
-
-# GBrowse servers:
-#gbrowse_build_sites = tool-data/shared/gbrowse/gbrowse_build_sites.txt
-#gbrowse_display_sites = modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225
-
# If use_remote_user = True, display application servers will be denied access
# to Galaxy and so displaying datasets in these sites will fail.
# display_servers contains a list of hostnames which should be allowed to
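Taken together, the datatype code above now asks the registry both which sites know about a given build and which of those are enabled for display, instead of reading paths and site lists from app.config. A simplified, self-contained sketch of that lookup with stand-in data (not the real registry class or site files):

# Stand-in for the structures load_build_sites() populates on the registry.
legacy_build_sites = {
    'ucsc': [
        {'name': 'main', 'url': 'http://genome.ucsc.edu/cgi-bin/hgTracks?', 'builds': ['hg18', 'hg19']},
        {'name': 'archaea', 'url': 'http://archaea.ucsc.edu/cgi-bin/hgTracks?', 'builds': ['methJann1']},
    ],
}
display_sites = {'ucsc': ['main', 'test', 'archaea', 'ucla']}


def get_legacy_sites_by_build(site_type, build):
    return [(site['name'], site['url'])
            for site in legacy_build_sites.get(site_type, [])
            if build in site['builds']]


def get_display_sites(site_type):
    return display_sites.get(site_type, [])


# Mirrors the filtering in interval.py above: a site must support the dataset's
# build and be listed as a display site before a link is generated.
valid_sites = [(name, url)
               for name, url in get_legacy_sites_by_build('ucsc', 'hg19')
               if name in get_display_sites('ucsc')]
assert valid_sites == [('main', 'http://genome.ucsc.edu/cgi-bin/hgTracks?')]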
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
6 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a3c5b729592b/
Changeset: a3c5b729592b
User: natefoo
Date: 2014-09-03 00:20:23
Summary: Remove the BuildSites class I recently created and merge its functionality back into util and the datatypes registry. As a result, the build_sites configuration moves to datatypes_conf.xml and display applications can now use configured build sites so that the path is not hardcoded into the individual committed display application files. `from_file` in the display applications is still supported.
Affected #: 25 files
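Because `from_file` remains supported alongside the new `site_type` attribute, the dynamic_links loader effectively prefers a configured site type and falls back to a literal path. A small illustrative sketch of that resolution (the helper name is hypothetical, not the DynamicDisplayApplicationBuilder code itself):

def resolve_links_file(elem_attrs, build_sites):
    # Prefer a site type registered via datatypes_conf.xml; otherwise fall back
    # to an explicit from_file path written into the display application XML.
    site_type = elem_attrs.get('site_type')
    if site_type is not None:
        return build_sites.get(site_type)
    return elem_attrs.get('from_file')

build_sites = {'ucsc': 'tool-data/shared/ucsc/ucsc_build_sites.txt'}
assert resolve_links_file({'site_type': 'ucsc'}, build_sites) == 'tool-data/shared/ucsc/ucsc_build_sites.txt'
assert resolve_links_file({'from_file': 'tool-data/shared/custom/my_sites.txt'}, build_sites) == 'tool-data/shared/custom/my_sites.txt'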
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -312,4 +312,12 @@
<sniffer type="galaxy.datatypes.assembly:Amos"/>
--></sniffers>
+ <build_sites>
+ <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt"/>
+ <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt"/>
+ <site type="ensembl" file="tool-data/shared/ensembl/ensembl_sites.txt"/>
+ <site type="ensembl_data_url" file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt"/>
+ <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
+ <site type="rviewer" file="tool-data/shared/rviewer/rviewer_build_sites.txt"/>
+ </build_sites></datatypes>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ensembl/ensembl_bam.xml
--- a/display_applications/ensembl/ensembl_bam.xml
+++ b/display_applications/ensembl/ensembl_bam.xml
@@ -1,7 +1,7 @@
<display id="ensembl_bam" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ensembl/ensembl_gff.xml
--- a/display_applications/ensembl/ensembl_gff.xml
+++ b/display_applications/ensembl/ensembl_gff.xml
@@ -1,7 +1,7 @@
<display id="ensembl_gff" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ensembl/ensembl_interval_as_bed.xml
--- a/display_applications/ensembl/ensembl_interval_as_bed.xml
+++ b/display_applications/ensembl/ensembl_interval_as_bed.xml
@@ -1,7 +1,7 @@
<display id="ensembl_interval" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/gbrowse/gbrowse_gff.xml
--- a/display_applications/gbrowse/gbrowse_gff.xml
+++ b/display_applications/gbrowse/gbrowse_gff.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_gff" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/gbrowse/gbrowse_interval_as_bed.xml
--- a/display_applications/gbrowse/gbrowse_interval_as_bed.xml
+++ b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_interval_as_bed" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/gbrowse/gbrowse_wig.xml
--- a/display_applications/gbrowse/gbrowse_wig.xml
+++ b/display_applications/gbrowse/gbrowse_wig.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_wig" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/igv/bam.xml
--- a/display_applications/igv/bam.xml
+++ b/display_applications/igv/bam.xml
@@ -2,7 +2,7 @@
<display id="igv_bam" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/igv/vcf.xml
--- a/display_applications/igv/vcf.xml
+++ b/display_applications/igv/vcf.xml
@@ -2,7 +2,7 @@
<display id="igv_vcf" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/rviewer/bed.xml
--- a/display_applications/rviewer/bed.xml
+++ b/display_applications/rviewer/bed.xml
@@ -2,7 +2,7 @@
<display id="rviewer_interval" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/rviewer/vcf.xml
--- a/display_applications/rviewer/vcf.xml
+++ b/display_applications/rviewer/vcf.xml
@@ -2,7 +2,7 @@
<display id="rviewer_vcf" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml
+++ b/display_applications/ucsc/bam.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bam" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ucsc/bigbed.xml
--- a/display_applications/ucsc/bigbed.xml
+++ b/display_applications/ucsc/bigbed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigbed" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ucsc/bigwig.xml
--- a/display_applications/ucsc/bigwig.xml
+++ b/display_applications/ucsc/bigwig.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigwig" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml
+++ b/display_applications/ucsc/interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ucsc/maf_customtrack.xml
--- a/display_applications/ucsc/maf_customtrack.xml
+++ b/display_applications/ucsc/maf_customtrack.xml
@@ -1,6 +1,6 @@
<display id="ucsc_maf_customtrack" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d display_applications/ucsc/vcf.xml
--- a/display_applications/ucsc/vcf.xml
+++ b/display_applications/ucsc/vcf.xml
@@ -1,6 +1,6 @@
<display id="ucsc_vcf" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -64,9 +64,6 @@
# Load dbkey / genome build manager
self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
- # Load build sites (old-style)
- self._configure_build_sites()
-
# Genomes
self.genomes = Genomes( self )
# Data providers registry.
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -16,7 +16,6 @@
from galaxy.util import string_as_bool
from galaxy.util import listify
from galaxy.util.dbkeys import GenomeBuilds
-from galaxy.util.build_sites import BuildSites
from galaxy import eggs
log = logging.getLogger( __name__ )
@@ -224,9 +223,7 @@
self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
- self.ucsc_build_sites = resolve_path( kwargs.get( 'ucsc_build_sites', os.path.join( self.tool_data_path, 'shared', 'ucsc', 'ucsc_build_sites.txt') ), self.root )
self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
- self.gbrowse_build_sites = resolve_path( kwargs.get( 'gbrowse_build_sites', os.path.join( self.tool_data_path, 'shared', 'gbrowse', 'gbrowse_build_sites.txt') ), self.root )
self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
self.brand = kwargs.get( 'brand', None )
self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
@@ -574,9 +571,6 @@
def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
- def _configure_build_sites( self ):
- self.build_sites = BuildSites( self )
-
def _configure_toolbox( self ):
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -81,9 +81,13 @@
class DynamicDisplayApplicationBuilder( object ):
@classmethod
- def __init__( self, elem, display_application ):
+ def __init__( self, elem, display_application, build_sites ):
rval = []
- filename = elem.get( 'from_file', None )
+ filename = None
+ if elem.get( 'site_type', None ) is not None:
+ filename = build_sites.get( elem.get( 'site_type' ) )
+ else:
+ filename = elem.get( 'from_file', None )
assert filename is not None, 'Filename and id attributes required for dynamic_links'
skip_startswith = elem.get( 'skip_startswith', None )
separator = elem.get( 'separator', '\t' )
@@ -176,7 +180,7 @@
if link:
rval.links[ link.id ] = link
for dynamic_links in elem.findall( 'dynamic_links' ):
- for link in DynamicDisplayApplicationBuilder( dynamic_links, rval ):
+ for link in DynamicDisplayApplicationBuilder( dynamic_links, rval, datatypes_registry.build_sites ):
rval.links[ link.id ] = link
return rval
def __init__( self, display_id, name, datatypes_registry, version = None ):
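A note on the hunk above: the builder now receives the registry's build_sites mapping and resolves the links file from the new site_type attribute, falling back to the legacy from_file attribute only when site_type is absent. A minimal, standalone sketch of that resolution (the mapping, paths and elements below are illustrative stand-ins, not Galaxy's actual objects):

from xml.etree import ElementTree as ET

# Illustrative site_type -> links file mapping, standing in for Registry.build_sites.
build_sites = {
    'ucsc': 'tool-data/shared/ucsc/ucsc_build_sites.txt',
    'gbrowse': 'tool-data/shared/gbrowse/gbrowse_build_sites.txt',
}

def resolve_links_file( elem, build_sites ):
    # Prefer the registry-managed site_type; fall back to the legacy from_file attribute.
    if elem.get( 'site_type' ) is not None:
        return build_sites.get( elem.get( 'site_type' ) )
    return elem.get( 'from_file' )

new_style = ET.fromstring( '<dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"/>' )
old_style = ET.fromstring( '<dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" id="0" name="0"/>' )

print( resolve_links_file( new_style, build_sites ) )  # tool-data/shared/ucsc/ucsc_build_sites.txt
print( resolve_links_file( old_style, build_sites ) )  # tool-data/shared/ucsc/ucsc_build_sites.txt

Both forms end up pointing at the same file, which is why the existing display application XML keeps working while the new site_type form is introduced.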
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -85,7 +85,7 @@
if not dataset.dbkey:
dataset.dbkey = 'hg18' # punt!
if dataset.has_data():
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
if site_name in app.config.ucsc_display_sites:
site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
internal_url = "%s" % url_for( controller='dataset',
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -234,7 +234,7 @@
# Filter UCSC sites to only those that are supported by this build and
# enabled.
valid_sites = [ ( name, url )
- for name, url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey )
+ for name, url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey )
if name in app.config.ucsc_display_sites ]
if not valid_sites:
return []
@@ -750,7 +750,7 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
if site_name in app.config.ucsc_display_sites:
redirect_url = urllib.quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" %
@@ -762,7 +762,7 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
if site_name in app.config.gbrowse_display_sites:
if seqid.startswith( 'chr' ) and len ( seqid ) > 3:
seqid = seqid[3:]
@@ -1091,7 +1091,7 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
if site_name in app.config.gbrowse_display_sites:
if chrom.startswith( 'chr' ) and len ( chrom ) > 3:
chrom = chrom[3:]
@@ -1103,7 +1103,7 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
if site_name in app.config.ucsc_display_sites:
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % ( site_url, dataset.dbkey, chrom, start, stop ) )
link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
@@ -1285,7 +1285,7 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc',dataset.dbkey):
if site_name in app.config.ucsc_display_sites:
internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -69,6 +69,9 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
+ # Build sites
+ self.build_sites = {}
+ self.legacy_build_sites = {}
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
@@ -285,6 +288,8 @@
handling_proprietary_datatypes=handling_proprietary_datatypes,
override=override )
self.upload_file_formats.sort()
+ # Load build sites
+ self.load_build_sites( root )
# Persist the xml form of the registry into a temporary file so that it can be loaded from the command line by tools and
# set_metadata processing.
self.to_xml_file()
@@ -303,6 +308,26 @@
self.sniff_order.append( datatype )
append_to_sniff_order()
+ def load_build_sites( self, root ):
+ if root.find( 'build_sites' ):
+ for elem in root.find( 'build_sites' ).findall( 'site' ):
+ if not (elem.get( 'type' ) and elem.get( 'file' )):
+ self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
+ else:
+ self.build_sites[elem.get( 'type' )] = elem.get( 'file' )
+ self.log.debug( "Loaded build site '%s': %s", elem.get( 'type' ), elem.get( 'file' ) )
+ for site, file in self.build_sites.items():
+ if site in ( 'ucsc', 'gbrowse' ):
+ self.legacy_build_sites[site] = galaxy.util.read_build_sites( file )
+ self.log.debug( "Loaded legacy build site '%s': %s", site, file )
+
+ def get_legacy_sites_by_build( self, site_type, build ):
+ sites = []
+ for site in self.legacy_build_sites[site_type]:
+ if build in site['builds']:
+ sites.append((site['name'], site['url']))
+ return sites
+
def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
"""
Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
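For context, the registry methods added above keep two views of the same data: build_sites maps a site type to its file, while legacy_build_sites holds the parsed records used by the old-style UCSC/GBrowse display code. A standalone sketch of the (site type, build) lookup, with hand-written records standing in for read_build_sites() output (names, URLs and builds here are made up, and this sketch tolerates unknown site types):

# Illustrative parsed records, same shape as read_build_sites() produces.
legacy_build_sites = {
    'ucsc': [
        { 'name': 'main', 'url': 'http://genome.ucsc.edu/cgi-bin/hgTracks?', 'builds': [ 'hg18', 'hg19' ] },
        { 'name': 'archaea', 'url': 'http://archaea.ucsc.edu/cgi-bin/hgTracks?', 'builds': [ 'methJann1' ] },
    ],
}

def get_legacy_sites_by_build( site_type, build ):
    # Return (name, url) pairs for every site of this type that lists the build.
    sites = []
    for site in legacy_build_sites.get( site_type, [] ):
        if build in site[ 'builds' ]:
            sites.append( ( site[ 'name' ], site[ 'url' ] ) )
    return sites

print( get_legacy_sites_by_build( 'ucsc', 'hg19' ) )  # [('main', 'http://genome.ucsc.edu/cgi-bin/hgTracks?')]
print( get_legacy_sites_by_build( 'ucsc', 'mm9' ) )   # [] -- build not listed for any site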
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -829,6 +829,30 @@
return db_names
+def read_build_sites( filename, check_builds=True ):
+ """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
+ build_sites = []
+ try:
+ for line in open(filename):
+ try:
+ if line[0:1] == "#":
+ continue
+ fields = line.replace("\r", "").replace("\n", "").split("\t")
+ site_name = fields[0]
+ site = fields[1]
+ if check_builds:
+ site_builds = fields[2].split(",")
+ site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
+ else:
+ site_dict = {'name': site_name, 'url': site}
+ build_sites.append( site_dict )
+ except:
+ continue
+ except:
+ print "ERROR: Unable to read builds for site file %s" % filename
+ return build_sites
+
+
def relativize_symlinks( path, start=None, followlinks=False):
for root, dirs, files in os.walk( path, followlinks=followlinks ):
rel_start = None
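The read_build_sites() helper moved into galaxy.util above expects a tab-separated file: display name, base URL, and a comma-separated list of builds, with '#' comment lines skipped. A self-contained sketch of that layout and an equivalent parse (the file contents and the parse function are illustrative, not the Galaxy helper itself):

import tempfile

# Made-up file contents showing the expected columns:
# <site name> TAB <base URL> TAB <comma-separated builds>, '#' lines ignored.
sample = ( "#site name\turl\tbuilds\n"
           "main\thttp://genome.ucsc.edu/cgi-bin/hgTracks?\thg18,hg19\n"
           "archaea\thttp://archaea.ucsc.edu/cgi-bin/hgTracks?\tmethJann1\n" )

with tempfile.NamedTemporaryFile( 'w', suffix='.txt', delete=False ) as fh:
    fh.write( sample )
    path = fh.name

def parse_build_sites( filename ):
    # Produces the same shape of result as galaxy.util.read_build_sites():
    # a list of dicts with 'name', 'url' and 'builds' keys.
    sites = []
    for line in open( filename ):
        if line.startswith( '#' ) or not line.strip():
            continue
        name, url, builds = line.rstrip( '\r\n' ).split( '\t' )
        sites.append( { 'name': name, 'url': url, 'builds': builds.split( ',' ) } )
    return sites

print( parse_build_sites( path ) )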
diff -r 8e699330dfcd889b02acccb058f16b28c95ac848 -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d lib/galaxy/util/build_sites.py
--- a/lib/galaxy/util/build_sites.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Functionality for dealing with build sites for legacy display applications.
-"""
-import os.path
-
-
-class BuildSites( object ):
-
- def __init__( self, app ):
- self._app = app
- self._build_sites = {}
- self.load_build_sites()
-
- def read_build_sites( self, filename, check_builds=True ):
- """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
- build_sites = []
- try:
- for line in open(filename):
- try:
- if line[0:1] == "#":
- continue
- fields = line.replace("\r", "").replace("\n", "").split("\t")
- site_name = fields[0]
- site = fields[1]
- if check_builds:
- site_builds = fields[2].split(",")
- site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
- else:
- site_dict = {'name': site_name, 'url': site}
- build_sites.append( site_dict )
- except:
- continue
- except:
- print "ERROR: Unable to read builds for site file %s" % filename
- return build_sites
-
- def load_build_sites( self ):
- self._build_sites['ucsc'] = self.read_build_sites( self._app.config.ucsc_build_sites )
- self._build_sites['gbrowse'] = self.read_build_sites( self._app.config.gbrowse_build_sites )
-
- def _get_site_by_build( self, site_type, build ):
- sites = []
- for site in self._build_sites[site_type]:
- if build in site['builds']:
- sites.append((site['name'], site['url']))
- return sites
-
- def get_ucsc_sites_by_build( self, build ):
- return self._get_site_by_build( 'ucsc', build )
-
- def get_gbrowse_sites_by_build( self, build ):
- return self._get_site_by_build( 'gbrowse', build )
https://bitbucket.org/galaxy/galaxy-central/commits/6347caa946d7/
Changeset: 6347caa946d7
User: natefoo
Date: 2014-09-03 00:21:50
Summary: Remove config options from sample.
Affected #: 1 file
diff -r a3c5b729592b9f1d18a9e911a0125ba10f74c68d -r 6347caa946d76480267766122718db064ac3e0a1 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -364,12 +364,10 @@
# which browsers should be available. URLs and builds available at these
# browsers are defined in the specifield files.
-# UCSC browsers:
-#ucsc_build_sites = tool-data/shared/ucsc/ucsc_build_sites.txt
+# UCSC browsers: tool-data/shared/ucsc/ucsc_build_sites.txt
#ucsc_display_sites = main,test,archaea,ucla
-# GBrowse servers:
-#gbrowse_build_sites = tool-data/shared/gbrowse/gbrowse_build_sites.txt
+# GBrowse servers: tool-data/shared/gbrowse/gbrowse_build_sites.txt
#gbrowse_display_sites = modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225
# If use_remote_user = True, display application servers will be denied access
https://bitbucket.org/galaxy/galaxy-central/commits/4888400b34d1/
Changeset: 4888400b34d1
User: natefoo
Date: 2014-09-03 18:25:22
Summary: Move ucsc_display_sites and gbrowse_display_sites into datatypes_conf.xml's new <build_sites> section, as per the suggestion by @nsoranzo.
Affected #: 10 files
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -313,8 +313,18 @@
-->
</sniffers>
<build_sites>
- <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt"/>
- <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt"/>
+ <!--
+ Build sites define the builds (dbkeys) available at sites used by display
+ applications and the URL to those sites.
+
+ The `display` attributes on the `ucsc` and `gbrowse` sites replace the
+ `ucsc_display_sites` and `gbrowse_display_sites` options in
+ universe_wsgi.ini. Because these are used by "old-style" display
+ applications, their types cannot not change if you want the old-style
+ display links for these sites to work.
+ -->
+ <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
+ <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" display="modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225"/><site type="ensembl" file="tool-data/shared/ensembl/ensembl_sites.txt"/><site type="ensembl_data_url" file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt"/><site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb display_applications/gbrowse/gbrowse_gff.xml
--- a/display_applications/gbrowse/gbrowse_gff.xml
+++ b/display_applications/gbrowse/gbrowse_gff.xml
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb display_applications/gbrowse/gbrowse_interval_as_bed.xml
--- a/display_applications/gbrowse/gbrowse_interval_as_bed.xml
+++ b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb display_applications/gbrowse/gbrowse_wig.xml
--- a/display_applications/gbrowse/gbrowse_wig.xml
+++ b/display_applications/gbrowse/gbrowse_wig.xml
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -223,8 +223,6 @@
self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
- self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
- self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
self.brand = kwargs.get( 'brand', None )
self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
# Configuration for the message box directly below the masthead.
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -86,7 +86,7 @@
dataset.dbkey = 'hg18' # punt!
if dataset.has_data():
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
+ if site_name in datatypes_registry.get_display_sites('ucsc'):
site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
internal_url = "%s" % url_for( controller='dataset',
dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -235,7 +235,7 @@
# enabled.
valid_sites = [ ( name, url )
for name, url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey )
- if name in app.config.ucsc_display_sites ]
+ if name in app.datatypes_registry.get_display_sites('ucsc') ]
if not valid_sites:
return []
# If there are any valid sites, we need to generate the estimated
@@ -751,7 +751,7 @@
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" %
( site_url, dataset.dbkey, seqid, start, stop ) )
@@ -763,7 +763,7 @@
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if seqid.startswith( 'chr' ) and len ( seqid ) > 3:
seqid = seqid[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, seqid, start, stop ) )
@@ -1092,7 +1092,7 @@
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if chrom.startswith( 'chr' ) and len ( chrom ) > 3:
chrom = chrom[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, chrom, start, stop ) )
@@ -1104,7 +1104,7 @@
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % ( site_url, dataset.dbkey, chrom, start, stop ) )
link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
ret_val.append( ( site_name, link ) )
@@ -1286,7 +1286,7 @@
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc',dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop ) )
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -71,6 +71,7 @@
self.xml_filename = None
# Build sites
self.build_sites = {}
+ self.display_sites = {}
self.legacy_build_sites = {}
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
@@ -314,20 +315,28 @@
if not (elem.get( 'type' ) and elem.get( 'file' )):
self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
else:
- self.build_sites[elem.get( 'type' )] = elem.get( 'file' )
- self.log.debug( "Loaded build site '%s': %s", elem.get( 'type' ), elem.get( 'file' ) )
- for site, file in self.build_sites.items():
- if site in ( 'ucsc', 'gbrowse' ):
- self.legacy_build_sites[site] = galaxy.util.read_build_sites( file )
- self.log.debug( "Loaded legacy build site '%s': %s", site, file )
+ site_type = elem.get( 'type' )
+ file = elem.get( 'file' )
+ self.build_sites[site_type] = file
+ if site_type in ('ucsc', 'gbrowse'):
+ self.legacy_build_sites[site_type] = galaxy.util.read_build_sites( file )
+ if elem.get( 'display', None ):
+ display = elem.get( 'display' )
+ self.display_sites[site_type] = [ x.strip() for x in display.lower().split( ',' ) ]
+ self.log.debug( "Loaded build site '%s': %s with display sites: %s", site_type, file, display )
+ else:
+ self.log.debug( "Loaded build site '%s': %s", site_type, file )
def get_legacy_sites_by_build( self, site_type, build ):
sites = []
- for site in self.legacy_build_sites[site_type]:
+ for site in self.legacy_build_sites.get(site_type, []):
if build in site['builds']:
sites.append((site['name'], site['url']))
return sites
+ def get_display_sites( self, site_type ):
+ return self.display_sites.get( site_type, [] )
+
def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
"""
Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
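To make the effect of the new display attribute concrete: the registry now parses it from each <site> element into display_sites, and get_display_sites() replaces the old app.config.ucsc_display_sites / gbrowse_display_sites lookups used by the filters above. A standalone sketch under those assumptions (the XML snippet and helper below are illustrative, not Galaxy's registry):

from xml.etree import ElementTree as ET

# Illustrative <build_sites> snippet; only sites with a display attribute get an
# entry in display_sites, mirroring the parsing added to load_build_sites() above.
conf = ET.fromstring( """
<build_sites>
    <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
    <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
</build_sites>
""" )

display_sites = {}
for elem in conf.findall( 'site' ):
    display = elem.get( 'display' )
    if display:
        display_sites[ elem.get( 'type' ) ] = [ x.strip() for x in display.lower().split( ',' ) ]

def get_display_sites( site_type ):
    # Empty list when a site type has no display attribute (e.g. igv above).
    return display_sites.get( site_type, [] )

print( get_display_sites( 'ucsc' ) )  # ['main', 'test', 'archaea', 'ucla']
print( get_display_sites( 'igv' ) )   # []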
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -926,7 +926,7 @@
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
try:
host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
diff -r 6347caa946d76480267766122718db064ac3e0a1 -r 4888400b34d120140d163ea3d92d053ba04e2ffb universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -364,12 +364,6 @@
# which browsers should be available. URLs and builds available at these
# browsers are defined in the specifield files.
-# UCSC browsers: tool-data/shared/ucsc/ucsc_build_sites.txt
-#ucsc_display_sites = main,test,archaea,ucla
-
-# GBrowse servers: tool-data/shared/gbrowse/gbrowse_build_sites.txt
-#gbrowse_display_sites = modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225
-
# If use_remote_user = True, display application servers will be denied access
# to Galaxy and so displaying datasets in these sites will fail.
# display_servers contains a list of hostnames which should be allowed to
https://bitbucket.org/galaxy/galaxy-central/commits/608391566ac8/
Changeset: 608391566ac8
User: natefoo
Date: 2014-09-03 18:27:01
Summary: Fix nonstandard indentation.
Affected #: 1 file
diff -r 4888400b34d120140d163ea3d92d053ba04e2ffb -r 608391566ac80318f703ddf9b8f9506fc46dc866 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -83,29 +83,31 @@
ret_val = []
ggtail = 'hgGenome_doSubmitUpload=submit'
if not dataset.dbkey:
- dataset.dbkey = 'hg18' # punt!
+ dataset.dbkey = 'hg18' # punt!
if dataset.has_data():
- for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
- if site_name in datatypes_registry.get_display_sites('ucsc'):
- site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
- internal_url = "%s" % url_for( controller='dataset',
- dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
- display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
- display_url = urllib.quote_plus( display_url )
- # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
- #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
- sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
- #sl.append("&hgt.customText=%s")
- sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
- sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
- sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
- sl.append("&hgGenome_doSubmitUpload=submit")
- sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
- s = ''.join(sl)
- s = urllib.quote_plus(s)
- redirect_url = s
- link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
- ret_val.append( (site_name, link) )
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
+ if site_name in datatypes_registry.get_display_sites('ucsc'):
+ site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
+ internal_url = "%s" % url_for( controller='dataset',
+ dataset_id=dataset.id,
+ action='display_at',
+ filename='ucsc_' + site_name )
+ display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
+ display_url = urllib.quote_plus( display_url )
+ # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+ sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
+ #sl.append("&hgt.customText=%s")
+ sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+ sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+ sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+ sl.append("&hgGenome_doSubmitUpload=submit")
+ sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+ s = ''.join(sl)
+ s = urllib.quote_plus(s)
+ redirect_url = s
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( (site_name, link) )
return ret_val
def make_html_table( self, dataset, skipchars=[] ):
https://bitbucket.org/galaxy/galaxy-central/commits/f7e396d29268/
Changeset: f7e396d29268
User: natefoo
Date: 2014-09-03 19:13:48
Summary: Fix typo in sample config.
Affected #: 1 file
diff -r 608391566ac80318f703ddf9b8f9506fc46dc866 -r f7e396d292685cd03145225f40ad7eb6f74b9408 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -320,8 +320,8 @@
The `display` attributes on the `ucsc` and `gbrowse` sites replace the
`ucsc_display_sites` and `gbrowse_display_sites` options in
universe_wsgi.ini. Because these are used by "old-style" display
- applications, their types cannot not change if you want the old-style
- display links for these sites to work.
+ applications, their types cannot change if you want the old-style display
+ links for these sites to work.
--><site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/><site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" display="modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225"/>
https://bitbucket.org/galaxy/galaxy-central/commits/9b6cccb3af2d/
Changeset: 9b6cccb3af2d
User: natefoo
Date: 2014-09-04 17:38:26
Summary: Merged in natefoo/galaxy-central (pull request #481)
Make build sites files configurable
Affected #: 27 files
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -312,4 +312,22 @@
<sniffer type="galaxy.datatypes.assembly:Amos"/>
-->
</sniffers>
+ <build_sites>
+ <!--
+ Build sites define the builds (dbkeys) available at sites used by display
+ applications and the URL to those sites.
+
+ The `display` attributes on the `ucsc` and `gbrowse` sites replace the
+ `ucsc_display_sites` and `gbrowse_display_sites` options in
+ universe_wsgi.ini. Because these are used by "old-style" display
+ applications, their types cannot change if you want the old-style display
+ links for these sites to work.
+ -->
+ <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt" display="main,test,archaea,ucla"/>
+ <site type="gbrowse" file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" display="modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225"/>
+ <site type="ensembl" file="tool-data/shared/ensembl/ensembl_sites.txt"/>
+ <site type="ensembl_data_url" file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt"/>
+ <site type="igv" file="tool-data/shared/igv/igv_build_sites.txt"/>
+ <site type="rviewer" file="tool-data/shared/rviewer/rviewer_build_sites.txt"/>
+ </build_sites>
</datatypes>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ensembl/ensembl_bam.xml
--- a/display_applications/ensembl/ensembl_bam.xml
+++ b/display_applications/ensembl/ensembl_bam.xml
@@ -1,7 +1,7 @@
<display id="ensembl_bam" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ensembl/ensembl_gff.xml
--- a/display_applications/ensembl/ensembl_gff.xml
+++ b/display_applications/ensembl/ensembl_gff.xml
@@ -1,7 +1,7 @@
<display id="ensembl_gff" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ensembl/ensembl_interval_as_bed.xml
--- a/display_applications/ensembl/ensembl_interval_as_bed.xml
+++ b/display_applications/ensembl/ensembl_interval_as_bed.xml
@@ -1,7 +1,7 @@
<display id="ensembl_interval" version="1.0.0" name="display at Ensembl"><!-- Current Ensembl method of attaching user data via URL; archives older than ~November 2008 will use a different method --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -41,7 +41,7 @@
<!-- Old Ensembl method of attaching user data via URL --><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ensembl/ensembl_sites_data_URL.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="ensembl_data_url" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/gbrowse/gbrowse_gff.xml
--- a/display_applications/gbrowse/gbrowse_gff.xml
+++ b/display_applications/gbrowse/gbrowse_gff.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_gff" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/gbrowse/gbrowse_interval_as_bed.xml
--- a/display_applications/gbrowse/gbrowse_interval_as_bed.xml
+++ b/display_applications/gbrowse/gbrowse_interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_interval_as_bed" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/gbrowse/gbrowse_wig.xml
--- a/display_applications/gbrowse/gbrowse_wig.xml
+++ b/display_applications/gbrowse/gbrowse_wig.xml
@@ -1,6 +1,6 @@
<display id="gbrowse_wig" version="1.0.0" name="display at GBrowse"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/gbrowse/gbrowse_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="gbrowse" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -10,7 +10,7 @@
<dynamic_param name="site_organisms" value="4" split="True" separator="," /><!-- Filter out some of the links based upon matching site_id to a Galaxy application configuration parameter and by dataset dbkey -->
- <filter>${site_id in $APP.config.gbrowse_display_sites}</filter>
+ <filter>${site_id in $APP.datatypes_registry.get_display_sites('gbrowse')}</filter><filter>${dataset.dbkey in $site_dbkeys}</filter><!-- We define url and params as normal, but values defined in dynamic_param are available by specified name -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/igv/bam.xml
--- a/display_applications/igv/bam.xml
+++ b/display_applications/igv/bam.xml
@@ -2,7 +2,7 @@
<display id="igv_bam" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/igv/vcf.xml
--- a/display_applications/igv/vcf.xml
+++ b/display_applications/igv/vcf.xml
@@ -2,7 +2,7 @@
<display id="igv_vcf" version="1.0.0" name="display with IGV"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/igv/igv_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="igv" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
@@ -92,4 +92,4 @@
</display>
-<!-- Dan Blankenberg -->
\ No newline at end of file
+<!-- Dan Blankenberg -->
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/rviewer/bed.xml
--- a/display_applications/rviewer/bed.xml
+++ b/display_applications/rviewer/bed.xml
@@ -2,7 +2,7 @@
<display id="rviewer_interval" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/rviewer/vcf.xml
--- a/display_applications/rviewer/vcf.xml
+++ b/display_applications/rviewer/vcf.xml
@@ -2,7 +2,7 @@
<display id="rviewer_vcf" version="1.0.0" name="display at RViewer"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/rviewer/rviewer_build_sites.txt" skip_startswith="#" id="0" name="1">
+ <dynamic_links site_type="rviewer" skip_startswith="#" id="0" name="1"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/bam.xml
--- a/display_applications/ucsc/bam.xml
+++ b/display_applications/ucsc/bam.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bam" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/bigbed.xml
--- a/display_applications/ucsc/bigbed.xml
+++ b/display_applications/ucsc/bigbed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigbed" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/bigwig.xml
--- a/display_applications/ucsc/bigwig.xml
+++ b/display_applications/ucsc/bigwig.xml
@@ -1,6 +1,6 @@
<display id="ucsc_bigwig" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/interval_as_bed.xml
--- a/display_applications/ucsc/interval_as_bed.xml
+++ b/display_applications/ucsc/interval_as_bed.xml
@@ -1,6 +1,6 @@
<display id="ucsc_interval_as_bed" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/maf_customtrack.xml
--- a/display_applications/ucsc/maf_customtrack.xml
+++ b/display_applications/ucsc/maf_customtrack.xml
@@ -1,6 +1,6 @@
<display id="ucsc_maf_customtrack" version="1.0.0" name="display at UCSC" inherit="True"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 display_applications/ucsc/vcf.xml
--- a/display_applications/ucsc/vcf.xml
+++ b/display_applications/ucsc/vcf.xml
@@ -1,6 +1,6 @@
<display id="ucsc_vcf" version="1.0.0" name="display at UCSC"><!-- Load links from file: one line to one link -->
- <dynamic_links from_file="tool-data/shared/ucsc/ucsc_build_sites.txt" skip_startswith="#" id="0" name="0">
+ <dynamic_links site_type="ucsc" skip_startswith="#" id="0" name="0"><!-- Define parameters by column from file, allow splitting on builds --><dynamic_param name="site_id" value="0"/><dynamic_param name="ucsc_link" value="1"/>
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/app.py
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -64,9 +64,6 @@
# Load dbkey / genome build manager
self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
- # Load build sites (old-style)
- self._configure_build_sites()
-
# Genomes
self.genomes = Genomes( self )
# Data providers registry.
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -16,7 +16,6 @@
from galaxy.util import string_as_bool
from galaxy.util import listify
from galaxy.util.dbkeys import GenomeBuilds
-from galaxy.util.build_sites import BuildSites
from galaxy import eggs
log = logging.getLogger( __name__ )
@@ -224,10 +223,6 @@
self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
- self.ucsc_build_sites = resolve_path( kwargs.get( 'ucsc_build_sites', os.path.join( self.tool_data_path, 'shared', 'ucsc', 'ucsc_build_sites.txt') ), self.root )
- self.ucsc_display_sites = kwargs.get( 'ucsc_display_sites', "main,test,archaea,ucla" ).lower().split(",")
- self.gbrowse_build_sites = resolve_path( kwargs.get( 'gbrowse_build_sites', os.path.join( self.tool_data_path, 'shared', 'gbrowse', 'gbrowse_build_sites.txt') ), self.root )
- self.gbrowse_display_sites = kwargs.get( 'gbrowse_display_sites', "modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225" ).lower().split(",")
self.brand = kwargs.get( 'brand', None )
self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
# Configuration for the message box directly below the masthead.
@@ -575,9 +570,6 @@
def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
- def _configure_build_sites( self ):
- self.build_sites = BuildSites( self )
-
def _configure_toolbox( self ):
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/display_applications/application.py
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -81,9 +81,13 @@
class DynamicDisplayApplicationBuilder( object ):
@classmethod
- def __init__( self, elem, display_application ):
+ def __init__( self, elem, display_application, build_sites ):
rval = []
- filename = elem.get( 'from_file', None )
+ filename = None
+ if elem.get( 'site_type', None ) is not None:
+ filename = build_sites.get( elem.get( 'site_type' ) )
+ else:
+ filename = elem.get( 'from_file', None )
assert filename is not None, 'Filename and id attributes required for dynamic_links'
skip_startswith = elem.get( 'skip_startswith', None )
separator = elem.get( 'separator', '\t' )
@@ -176,7 +180,7 @@
if link:
rval.links[ link.id ] = link
for dynamic_links in elem.findall( 'dynamic_links' ):
- for link in DynamicDisplayApplicationBuilder( dynamic_links, rval ):
+ for link in DynamicDisplayApplicationBuilder( dynamic_links, rval, datatypes_registry.build_sites ):
rval.links[ link.id ] = link
return rval
def __init__( self, display_id, name, datatypes_registry, version = None ):
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -83,29 +83,31 @@
ret_val = []
ggtail = 'hgGenome_doSubmitUpload=submit'
if not dataset.dbkey:
- dataset.dbkey = 'hg18' # punt!
+ dataset.dbkey = 'hg18' # punt!
if dataset.has_data():
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
- site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
- internal_url = "%s" % url_for( controller='dataset',
- dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
- display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
- display_url = urllib.quote_plus( display_url )
- # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
- #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
- sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
- #sl.append("&hgt.customText=%s")
- sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
- sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
- sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
- sl.append("&hgGenome_doSubmitUpload=submit")
- sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
- s = ''.join(sl)
- s = urllib.quote_plus(s)
- redirect_url = s
- link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
- ret_val.append( (site_name, link) )
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey):
+ if site_name in datatypes_registry.get_display_sites('ucsc'):
+ site_url = site_url.replace('/hgTracks?','/hgGenome?') # for genome graphs
+ internal_url = "%s" % url_for( controller='dataset',
+ dataset_id=dataset.id,
+ action='display_at',
+ filename='ucsc_' + site_name )
+ display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type)
+ display_url = urllib.quote_plus( display_url )
+ # was display_url = urllib.quote_plus( "%s/display_as?id=%i&display_app=%s" % (base_url, dataset.id, type) )
+ #redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop) )
+ sl = ["%sdb=%s" % (site_url,dataset.dbkey ),]
+ #sl.append("&hgt.customText=%s")
+ sl.append("&hgGenome_dataSetName=%s&hgGenome_dataSetDescription=%s" % (dataset.name, 'GalaxyGG_data'))
+ sl.append("&hgGenome_formatType=best guess&hgGenome_markerType=best guess")
+ sl.append("&hgGenome_columnLabels=first row&hgGenome_maxVal=&hgGenome_labelVals=")
+ sl.append("&hgGenome_doSubmitUpload=submit")
+ sl.append("&hgGenome_maxGapToFill=25000000&hgGenome_uploadFile=%s" % display_url)
+ s = ''.join(sl)
+ s = urllib.quote_plus(s)
+ redirect_url = s
+ link = '%s?redirect_url=%s&display_url=%s' % ( internal_url, redirect_url, display_url )
+ ret_val.append( (site_name, link) )
return ret_val
def make_html_table( self, dataset, skipchars=[] ):
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -234,8 +234,8 @@
# Filter UCSC sites to only those that are supported by this build and
# enabled.
valid_sites = [ ( name, url )
- for name, url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey )
- if name in app.config.ucsc_display_sites ]
+ for name, url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey )
+ if name in app.datatypes_registry.get_display_sites('ucsc') ]
if not valid_sites:
return []
# If there are any valid sites, we need to generate the estimated
@@ -750,8 +750,8 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" %
( site_url, dataset.dbkey, seqid, start, stop ) )
@@ -762,8 +762,8 @@
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport( dataset )
if seqid is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if seqid.startswith( 'chr' ) and len ( seqid ) > 3:
seqid = seqid[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, seqid, start, stop ) )
@@ -1091,8 +1091,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_gbrowse_sites_by_build( dataset.dbkey ):
- if site_name in app.config.gbrowse_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('gbrowse', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('gbrowse'):
if chrom.startswith( 'chr' ) and len ( chrom ) > 3:
chrom = chrom[3:]
redirect_url = urllib.quote_plus( "%s/?q=%s:%s..%s&eurl=%%s" % ( site_url, chrom, start, stop ) )
@@ -1103,8 +1103,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport( dataset )
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build( dataset.dbkey ):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc', dataset.dbkey ):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % ( site_url, dataset.dbkey, chrom, start, stop ) )
link = self._get_remote_call_url( redirect_url, site_name, dataset, type, app, base_url )
ret_val.append( ( site_name, link ) )
@@ -1285,8 +1285,8 @@
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
- for site_name, site_url in app.build_sites.get_ucsc_sites_by_build(dataset.dbkey):
- if site_name in app.config.ucsc_display_sites:
+ for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build('ucsc',dataset.dbkey):
+ if site_name in app.datatypes_registry.get_display_sites('ucsc'):
internal_url = "%s" % url_for( controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name )
display_url = urllib.quote_plus( "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (base_url, url_for( controller='root' ), dataset.id, type) )
redirect_url = urllib.quote_plus( "%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop ) )
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -69,6 +69,10 @@
self.datatype_elems = []
self.sniffer_elems = []
self.xml_filename = None
+ # Build sites
+ self.build_sites = {}
+ self.display_sites = {}
+ self.legacy_build_sites = {}
def load_datatypes( self, root_dir=None, config=None, deactivate=False, override=True ):
"""
@@ -285,6 +289,8 @@
handling_proprietary_datatypes=handling_proprietary_datatypes,
override=override )
self.upload_file_formats.sort()
+ # Load build sites
+ self.load_build_sites( root )
# Persist the xml form of the registry into a temporary file so that it can be loaded from the command line by tools and
# set_metadata processing.
self.to_xml_file()
@@ -303,6 +309,34 @@
self.sniff_order.append( datatype )
append_to_sniff_order()
+ def load_build_sites( self, root ):
+ if root.find( 'build_sites' ):
+ for elem in root.find( 'build_sites' ).findall( 'site' ):
+ if not (elem.get( 'type' ) and elem.get( 'file' )):
+ self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
+ else:
+ site_type = elem.get( 'type' )
+ file = elem.get( 'file' )
+ self.build_sites[site_type] = file
+ if site_type in ('ucsc', 'gbrowse'):
+ self.legacy_build_sites[site_type] = galaxy.util.read_build_sites( file )
+ if elem.get( 'display', None ):
+ display = elem.get( 'display' )
+ self.display_sites[site_type] = [ x.strip() for x in display.lower().split( ',' ) ]
+ self.log.debug( "Loaded build site '%s': %s with display sites: %s", site_type, file, display )
+ else:
+ self.log.debug( "Loaded build site '%s': %s", site_type, file )
+
+ def get_legacy_sites_by_build( self, site_type, build ):
+ sites = []
+ for site in self.legacy_build_sites.get(site_type, []):
+ if build in site['builds']:
+ sites.append((site['name'], site['url']))
+ return sites
+
+ def get_display_sites( self, site_type ):
+ return self.display_sites.get( site_type, [] )
+
def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_datatypes=False, override=False ):
"""
Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
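The registry now owns the build-site configuration. Below is a standalone sketch of the XML shape load_build_sites() expects and of what it stores; the element and attribute names (build_sites, site, type, file, display) come from the code above, while the file path and display list are examples only. Note that the guard `if root.find( 'build_sites' ):` relies on ElementTree element truthiness, so an empty build_sites element is skipped just like a missing one.

from xml.etree import ElementTree

DATATYPES_CONF = """
<datatypes>
  <build_sites>
    <site type="ucsc" file="tool-data/shared/ucsc/ucsc_build_sites.txt"
          display="main,test,archaea,ucla"/>
  </build_sites>
</datatypes>
"""

root = ElementTree.fromstring(DATATYPES_CONF)
build_sites, display_sites = {}, {}
for elem in root.find('build_sites').findall('site'):
    site_type = elem.get('type')
    build_sites[site_type] = elem.get('file')
    if elem.get('display'):
        display_sites[site_type] = [x.strip() for x in elem.get('display').lower().split(',')]

# build_sites   -> {'ucsc': 'tool-data/shared/ucsc/ucsc_build_sites.txt'}
# display_sites -> {'ucsc': ['main', 'test', 'archaea', 'ucla']}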
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -844,6 +844,30 @@
return db_names
+def read_build_sites( filename, check_builds=True ):
+ """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
+ build_sites = []
+ try:
+ for line in open(filename):
+ try:
+ if line[0:1] == "#":
+ continue
+ fields = line.replace("\r", "").replace("\n", "").split("\t")
+ site_name = fields[0]
+ site = fields[1]
+ if check_builds:
+ site_builds = fields[2].split(",")
+ site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
+ else:
+ site_dict = {'name': site_name, 'url': site}
+ build_sites.append( site_dict )
+ except:
+ continue
+ except:
+ print "ERROR: Unable to read builds for site file %s" % filename
+ return build_sites
+
+
def relativize_symlinks( path, start=None, followlinks=False):
for root, dirs, files in os.walk( path, followlinks=followlinks ):
rel_start = None
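read_build_sites() moves into galaxy.util (the BuildSites class it came from is deleted below). A small sketch of the tab-separated layout it parses and of the reduction get_legacy_sites_by_build() then applies; the rows are illustrative, not Galaxy's shipped site files.

# <site name>\t<base URL>\t<comma-separated builds>; lines starting with '#' are comments
sample_lines = [
    "#name\turl\tbuilds",
    "main\thttp://genome.ucsc.edu/cgi-bin/hgTracks?\thg18,hg19,mm9",
    "test\thttp://genome-test.cse.ucsc.edu/cgi-bin/hgTracks?\thg19",
]

sites = []
for line in sample_lines:
    if line.startswith("#"):
        continue
    name, url, builds = line.split("\t")
    sites.append({'name': name, 'url': url, 'builds': builds.split(",")})

# get_legacy_sites_by_build('ucsc', 'hg19') reduces this to (name, url) pairs:
hg19_sites = [(s['name'], s['url']) for s in sites if 'hg19' in s['builds']]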
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/util/build_sites.py
--- a/lib/galaxy/util/build_sites.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Functionality for dealing with build sites for legacy display applications.
-"""
-import os.path
-
-
-class BuildSites( object ):
-
- def __init__( self, app ):
- self._app = app
- self._build_sites = {}
- self.load_build_sites()
-
- def read_build_sites( self, filename, check_builds=True ):
- """ read db names to ucsc mappings from file, this file should probably be merged with the one above """
- build_sites = []
- try:
- for line in open(filename):
- try:
- if line[0:1] == "#":
- continue
- fields = line.replace("\r", "").replace("\n", "").split("\t")
- site_name = fields[0]
- site = fields[1]
- if check_builds:
- site_builds = fields[2].split(",")
- site_dict = {'name': site_name, 'url': site, 'builds': site_builds}
- else:
- site_dict = {'name': site_name, 'url': site}
- build_sites.append( site_dict )
- except:
- continue
- except:
- print "ERROR: Unable to read builds for site file %s" % filename
- return build_sites
-
- def load_build_sites( self ):
- self._build_sites['ucsc'] = self.read_build_sites( self._app.config.ucsc_build_sites )
- self._build_sites['gbrowse'] = self.read_build_sites( self._app.config.gbrowse_build_sites )
-
- def _get_site_by_build( self, site_type, build ):
- sites = []
- for site in self._build_sites[site_type]:
- if build in site['builds']:
- sites.append((site['name'], site['url']))
- return sites
-
- def get_ucsc_sites_by_build( self, build ):
- return self._get_site_by_build( 'ucsc', build )
-
- def get_gbrowse_sites_by_build( self, build ):
- return self._get_site_by_build( 'gbrowse', build )
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -926,7 +926,7 @@
url_for( controller='dataset', action='list' )
)
display_as = url_for( controller='root', action='display_as' )
- if self.app.config.ucsc_display_sites and self.request.path == display_as:
+ if self.app.datatypes_registry.get_display_sites('ucsc') and self.request.path == display_as:
try:
host = socket.gethostbyaddr( self.environ[ 'REMOTE_ADDR' ] )[0]
except( socket.error, socket.herror, socket.gaierror, socket.timeout ):
diff -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf -r 9b6cccb3af2d6b13a1a1da7def87dcc19172be88 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -364,14 +364,6 @@
# which browsers should be available. URLs and builds available at these
 # browsers are defined in the specified files.
-# UCSC browsers:
-#ucsc_build_sites = tool-data/shared/ucsc/ucsc_build_sites.txt
-#ucsc_display_sites = main,test,archaea,ucla
-
-# GBrowse servers:
-#gbrowse_build_sites = tool-data/shared/gbrowse/gbrowse_build_sites.txt
-#gbrowse_display_sites = modencode,sgd_yeast,tair,wormbase,wormbase_ws120,wormbase_ws140,wormbase_ws170,wormbase_ws180,wormbase_ws190,wormbase_ws200,wormbase_ws204,wormbase_ws210,wormbase_ws220,wormbase_ws225
-
# If use_remote_user = True, display application servers will be denied access
# to Galaxy and so displaying datasets in these sites will fail.
# display_servers contains a list of hostnames which should be allowed to

Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/edc187004292/
Changeset: edc187004292
User: dannon
Date: 2014-09-04 01:38:44
Summary: Add is_uuid to lib/galaxy/util
Affected #: 1 file
diff -r 6070f4fbb38f88eb98cf65f0477924ffea9bc10c -r edc1870042923f32dfeb9dadcc13e1915edff825 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -98,6 +98,21 @@
return False
+def is_uuid( value ):
+ """
+ This method returns True if value is a UUID, otherwise False.
+ >>> is_uuid( "123e4567-e89b-12d3-a456-426655440000" )
+ True
+ >>> is_uuid( "0x3242340298902834" )
+ False
+ """
+ uuid_re = re.compile( "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" )
+ if re.match( uuid_re, str( value ) ):
+ return True
+ else:
+ return False
+
+
def get_charset_from_http_headers( headers, default=None ):
rval = headers.get('content-type', None )
if rval and 'charset=' in rval:
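One aside on the helper above, not part of the commit: re.match() anchors only at the start of the string, so any value that merely begins with a UUID also passes. A stricter variant would anchor both ends, as in this sketch:

import re

STRICT_UUID_RE = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$")

def is_strict_uuid(value):
    """True only when the whole string is a lowercase hyphenated UUID."""
    return bool(STRICT_UUID_RE.match(str(value)))

assert is_strict_uuid("123e4567-e89b-12d3-a456-426655440000")
assert not is_strict_uuid("123e4567-e89b-12d3-a456-426655440000ff")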
https://bitbucket.org/galaxy/galaxy-central/commits/8a93d99fccfa/
Changeset: 8a93d99fccfa
User: dannon
Date: 2014-09-04 01:49:05
Summary: pep8 prior to modifications / whitespace and comment spacing.
Affected #: 1 file
diff -r edc1870042923f32dfeb9dadcc13e1915edff825 -r 8a93d99fccfa452786836a568976b5ebb50cddf7 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -439,17 +439,17 @@
stored_workflow = query.get( workflow_id )
except Exception:
try:
- #see if they have passed in the UUID for a workflow that is attached to a stored workflow
+ # see if they have passed in the UUID for a workflow that is attached to a stored workflow
workflow_uuid = uuid.UUID(workflow_id)
stored_workflow = trans.sa_session.query(trans.app.model.StoredWorkflow).filter( and_(
trans.app.model.StoredWorkflow.latest_workflow_id == trans.app.model.Workflow.id,
trans.app.model.Workflow.uuid == workflow_uuid
- )).first()
+ )).first()
if stored_workflow is None:
raise exceptions.ObjectNotFound( "Workflow not found: %s" % workflow_id )
return stored_workflow
except:
- pass #let the outer raise exception happen
+ pass # let the outer raise exception happen
raise exceptions.ObjectNotFound( "No such workflow found - invalid workflow identifier." )
if stored_workflow is None:
raise exceptions.ObjectNotFound( "No such workflow found." )
https://bitbucket.org/galaxy/galaxy-central/commits/75709a2361b1/
Changeset: 75709a2361b1
User: dannon
Date: 2014-09-04 16:59:44
Summary: Adjust __get_stored_workflow (by uuid) logic to detect UUIDs and switch accordingly.
Affected #: 1 file
diff -r 8a93d99fccfa452786836a568976b5ebb50cddf7 -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 lib/galaxy/webapps/galaxy/api/workflows.py
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -433,24 +433,19 @@
return stored_workflow
def __get_stored_workflow( self, trans, workflow_id ):
- try:
+ if util.is_uuid(workflow_id):
+ # see if they have passed in the UUID for a workflow that is attached to a stored workflow
+ workflow_uuid = uuid.UUID(workflow_id)
+ stored_workflow = trans.sa_session.query(trans.app.model.StoredWorkflow).filter( and_(
+ trans.app.model.StoredWorkflow.latest_workflow_id == trans.app.model.Workflow.id,
+ trans.app.model.Workflow.uuid == workflow_uuid
+ )).first()
+ if stored_workflow is None:
+ raise exceptions.ObjectNotFound( "Workflow not found: %s" % workflow_id )
+ else:
workflow_id = self.__decode_id( trans, workflow_id )
query = trans.sa_session.query( trans.app.model.StoredWorkflow )
stored_workflow = query.get( workflow_id )
- except Exception:
- try:
- # see if they have passed in the UUID for a workflow that is attached to a stored workflow
- workflow_uuid = uuid.UUID(workflow_id)
- stored_workflow = trans.sa_session.query(trans.app.model.StoredWorkflow).filter( and_(
- trans.app.model.StoredWorkflow.latest_workflow_id == trans.app.model.Workflow.id,
- trans.app.model.Workflow.uuid == workflow_uuid
- )).first()
- if stored_workflow is None:
- raise exceptions.ObjectNotFound( "Workflow not found: %s" % workflow_id )
- return stored_workflow
- except:
- pass # let the outer raise exception happen
- raise exceptions.ObjectNotFound( "No such workflow found - invalid workflow identifier." )
if stored_workflow is None:
raise exceptions.ObjectNotFound( "No such workflow found." )
return stored_workflow
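The net effect of the rewrite is a straight dispatch on the identifier's shape rather than a try/except fallthrough. A simplified sketch of that control flow; the query callables stand in for the SQLAlchemy lookups and the ObjectNotFound handling is omitted.

import re
import uuid

UUID_RE = re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}")

def get_stored_workflow(workflow_id, query_by_uuid, query_by_decoded_id):
    """Route a workflow identifier to the matching lookup strategy."""
    if UUID_RE.match(str(workflow_id)):
        return query_by_uuid(uuid.UUID(workflow_id))
    return query_by_decoded_id(workflow_id)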
https://bitbucket.org/galaxy/galaxy-central/commits/9fdca7477f44/
Changeset: 9fdca7477f44
User: dannon
Date: 2014-09-04 16:59:59
Summary: Merge.
Affected #: 11 files
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf .hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -18,4 +18,4 @@
81fbe25bd02edcd53065e8e4476dd1dfb5a72cf2 latest_2013.11.04
2a756ca2cb1826db7796018e77d12e2dd7b67603 latest_2014.02.10
ca45b78adb4152fc6e7395514d46eba6b7d0b838 release_2014.08.11
-ea12550fbc34260ae70bde38db59a4024f35f988 latest_2014.08.11
+20f4fdf1735aeff23a6e7ab00389781fe3f3142c latest_2014.08.11
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2466,6 +2466,7 @@
val = getattr( ldda.datatype, name )
rval['metadata_' + name] = val
return rval
+
def get_template_widgets( self, trans, get_contents=True ):
 # See if we have any associated templates. The get_contents
# param is passed by callers that are inheriting a template - these
@@ -2492,6 +2493,7 @@
else:
return template.get_widgets( trans.user )
return []
+
def templates_dict( self, use_name=False ):
"""
Returns a dict of template info
@@ -2510,6 +2512,7 @@
tmp_dict[ name ] = content.get( field[ 'name' ] )
template_data[template.name] = tmp_dict
return template_data
+
def templates_json( self, use_name=False ):
return json.dumps( self.templates_dict( use_name=use_name ) )
@@ -2533,6 +2536,7 @@
self.info = info
self.inheritable = inheritable
+
class LibraryFolderInfoAssociation( object ):
def __init__( self, folder, form_definition, info, inheritable=False ):
self.folder = folder
@@ -2540,15 +2544,18 @@
self.info = info
self.inheritable = inheritable
+
class LibraryDatasetDatasetInfoAssociation( object ):
def __init__( self, library_dataset_dataset_association, form_definition, info ):
# TODO: need to figure out if this should be inheritable to the associated LibraryDataset
self.library_dataset_dataset_association = library_dataset_dataset_association
self.template = form_definition
self.info = info
+
@property
def inheritable( self ):
- return True #always allow inheriting, used for replacement
+ return True # always allow inheriting, used for replacement
+
class ValidationError( object ):
def __init__( self, message=None, err_type=None, attributes=None ):
@@ -2556,42 +2563,47 @@
self.err_type = err_type
self.attributes = attributes
+
class DatasetToValidationErrorAssociation( object ):
def __init__( self, dataset, validation_error ):
self.dataset = dataset
self.validation_error = validation_error
+
class ImplicitlyConvertedDatasetAssociation( object ):
- def __init__( self, id = None, parent = None, dataset = None, file_type = None, deleted = False, purged = False, metadata_safe = True ):
+
+ def __init__( self, id=None, parent=None, dataset=None, file_type=None, deleted=False, purged=False, metadata_safe=True ):
self.id = id
if isinstance(dataset, HistoryDatasetAssociation):
self.dataset = dataset
elif isinstance(dataset, LibraryDatasetDatasetAssociation):
self.dataset_ldda = dataset
else:
- raise AttributeError, 'Unknown dataset type provided for dataset: %s' % type( dataset )
+ raise AttributeError( 'Unknown dataset type provided for dataset: %s' % type( dataset ) )
if isinstance(parent, HistoryDatasetAssociation):
self.parent_hda = parent
elif isinstance(parent, LibraryDatasetDatasetAssociation):
self.parent_ldda = parent
else:
- raise AttributeError, 'Unknown dataset type provided for parent: %s' % type( parent )
+ raise AttributeError( 'Unknown dataset type provided for parent: %s' % type( parent ) )
self.type = file_type
self.deleted = deleted
self.purged = purged
self.metadata_safe = metadata_safe
- def clear( self, purge = False, delete_dataset = True ):
+ def clear( self, purge=False, delete_dataset=True ):
self.deleted = True
if self.dataset:
if delete_dataset:
self.dataset.deleted = True
if purge:
self.dataset.purged = True
- if purge and self.dataset.deleted: #do something with purging
+ if purge and self.dataset.deleted: # do something with purging
self.purged = True
- try: os.unlink( self.file_name )
- except Exception, e: print "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e )
+ try:
+ os.unlink( self.file_name )
+ except Exception, e:
+ print "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e )
DEFAULT_COLLECTION_NAME = "Unnamed Collection"
@@ -2903,6 +2915,7 @@
self.tool_id = None
self.message = message
+
class GalaxySession( object ):
def __init__( self,
id=None,
@@ -2924,24 +2937,29 @@
self.is_valid = is_valid
self.prev_session_id = prev_session_id
self.histories = []
+
def add_history( self, history, association=None ):
if association is None:
self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
else:
self.histories.append( association )
+
def get_disk_usage( self ):
if self.disk_usage is None:
return 0
return self.disk_usage
+
def set_disk_usage( self, bytes ):
self.disk_usage = bytes
total_disk_usage = property( get_disk_usage, set_disk_usage )
+
class GalaxySessionToHistoryAssociation( object ):
def __init__( self, galaxy_session, history ):
self.galaxy_session = galaxy_session
self.history = history
+
class UCI( object ):
def __init__( self ):
self.id = None
@@ -2962,14 +2980,14 @@
self.latest_workflow_id = None
self.workflows = []
- def copy_tags_from(self,target_user,source_workflow):
+ def copy_tags_from(self, target_user, source_workflow):
for src_swta in source_workflow.owner_tags:
new_swta = src_swta.copy()
new_swta.user = target_user
self.tags.append(new_swta)
- def to_dict( self, view='collection', value_mapper = None ):
- rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper = value_mapper )
+ def to_dict( self, view='collection', value_mapper=None ):
+ rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper=value_mapper )
tags_str_list = []
for tag in self.tags:
tag_str = tag.user_tname
@@ -3007,12 +3025,11 @@
return False
def to_dict( self, view='collection', value_mapper=None):
- rval = super( Workflow, self ).to_dict( view=view, value_mapper = value_mapper )
- rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
+ rval = super( Workflow, self ).to_dict( view=view, value_mapper=value_mapper )
+ rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
return rval
-
class WorkflowStep( object ):
def __init__( self ):
@@ -3061,7 +3078,7 @@
dict_collection_visible_keys = ( 'id', 'update_time', 'workflow_id' )
dict_element_visible_keys = ( 'id', 'update_time', 'workflow_id' )
- def to_dict( self, view='collection', value_mapper = None ):
+ def to_dict( self, view='collection', value_mapper=None ):
rval = super( WorkflowInvocation, self ).to_dict( view=view, value_mapper=value_mapper )
if view == 'element':
steps = {}
@@ -3072,12 +3089,12 @@
inputs = {}
for step in self.steps:
- if step.workflow_step.type =='tool':
+ if step.workflow_step.type == 'tool':
for step_input in step.workflow_step.input_connections:
if step_input.output_step.type == 'data_input':
for job_input in step.job.input_datasets:
if job_input.name == step_input.input_name:
- inputs[str(step_input.output_step.order_index)] = { "id" : job_input.dataset_id, "src" : "hda"}
+ inputs[str(step_input.output_step.order_index)] = { "id": job_input.dataset_id, "src": "hda"}
rval['inputs'] = inputs
return rval
@@ -3086,19 +3103,20 @@
dict_collection_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id' )
dict_element_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id' )
- def to_dict( self, view='collection', value_mapper = None ):
+ def to_dict( self, view='collection', value_mapper=None ):
rval = super( WorkflowInvocationStep, self ).to_dict( view=view, value_mapper=value_mapper )
rval['order_index'] = self.workflow_step.order_index
return rval
class MetadataFile( object ):
- def __init__( self, dataset = None, name = None ):
+ def __init__( self, dataset=None, name=None ):
if isinstance( dataset, HistoryDatasetAssociation ):
self.history_dataset = dataset
elif isinstance( dataset, LibraryDatasetDatasetAssociation ):
self.library_dataset = dataset
self.name = name
+
@property
def file_name( self ):
assert self.id is not None, "ID must be set before filename used (commit the object)"
@@ -3129,14 +3147,15 @@
class FormDefinition( object, Dictifiable ):
# The following form_builder classes are supported by the FormDefinition class.
supported_field_types = [ AddressField, CheckboxField, PasswordField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
- types = Bunch( REQUEST = 'Sequencing Request Form',
- SAMPLE = 'Sequencing Sample Form',
- EXTERNAL_SERVICE = 'External Service Information Form',
- RUN_DETAILS_TEMPLATE = 'Sample run details template',
- LIBRARY_INFO_TEMPLATE = 'Library information template',
- USER_INFO = 'User Information' )
+ types = Bunch( REQUEST='Sequencing Request Form',
+ SAMPLE='Sequencing Sample Form',
+ EXTERNAL_SERVICE='External Service Information Form',
+ RUN_DETAILS_TEMPLATE='Sample run details template',
+ LIBRARY_INFO_TEMPLATE='Library information template',
+ USER_INFO='User Information' )
dict_collection_visible_keys = ( 'id', 'name' )
dict_element_visible_keys = ( 'id', 'name', 'desc', 'form_definition_current_id', 'fields', 'layout' )
+
def __init__( self, name=None, desc=None, fields=[], form_definition_current=None, form_type=None, layout=None ):
self.name = name
self.desc = desc
@@ -3144,6 +3163,7 @@
self.form_definition_current = form_definition_current
self.type = form_type
self.layout = layout
+
def grid_fields( self, grid_index ):
# Returns a dictionary whose keys are integers corresponding to field positions
# on the grid and whose values are the field.
@@ -3152,6 +3172,7 @@
if str( f[ 'layout' ] ) == str( grid_index ):
gridfields[i] = f
return gridfields
+
def get_widgets( self, user, contents={}, **kwd ):
'''
Return the list of widgets that comprise a form definition,
@@ -3213,24 +3234,28 @@
field_widget.params = params
elif field_type == 'SelectField':
for option in field[ 'selectlist' ]:
+
if option == value:
field_widget.add_option( option, option, selected=True )
else:
field_widget.add_option( option, option )
elif field_type == 'CheckboxField':
+
field_widget.set_checked( value )
if field[ 'required' ] == 'required':
req = 'Required'
else:
req = 'Optional'
if field[ 'helptext' ]:
- helptext='%s (%s)' % ( field[ 'helptext' ], req )
+ helptext = '%s (%s)' % ( field[ 'helptext' ], req )
else:
helptext = '(%s)' % req
widgets.append( dict( label=field[ 'label' ],
+
widget=field_widget,
helptext=helptext ) )
return widgets
+
def field_as_html( self, field ):
"""Generates disabled html for a field"""
type = field[ 'type' ]
@@ -3245,21 +3270,25 @@
# Return None if unsupported field type
return None
+
class FormDefinitionCurrent( object ):
def __init__(self, form_definition=None):
self.latest_form = form_definition
+
class FormValues( object ):
def __init__(self, form_def=None, content=None):
self.form_definition = form_def
self.content = content
+
class Request( object, Dictifiable ):
- states = Bunch( NEW = 'New',
- SUBMITTED = 'In Progress',
- REJECTED = 'Rejected',
- COMPLETE = 'Complete' )
+ states = Bunch( NEW='New',
+ SUBMITTED='In Progress',
+ REJECTED='Rejected',
+ COMPLETE='Complete' )
dict_collection_visible_keys = ( 'id', 'name', 'state' )
+
def __init__( self, name=None, desc=None, request_type=None, user=None, form_values=None, notification=None ):
self.name = name
self.desc = desc
@@ -3268,17 +3297,20 @@
self.user = user
self.notification = notification
self.samples_list = []
+
@property
def state( self ):
latest_event = self.latest_event
if latest_event:
return latest_event.state
return None
+
@property
def latest_event( self ):
if self.events:
return self.events[0]
return None
+
@property
def samples_have_common_state( self ):
"""
@@ -3294,6 +3326,7 @@
if s.state.id != state_for_comparison.id:
return False
return state_for_comparison
+
@property
def last_comment( self ):
latest_event = self.latest_event
@@ -3302,26 +3335,34 @@
return latest_event.comment
return ''
return 'No comment'
+
def get_sample( self, sample_name ):
for sample in self.samples:
if sample.name == sample_name:
return sample
return None
+
@property
def is_unsubmitted( self ):
return self.state in [ self.states.REJECTED, self.states.NEW ]
+
@property
def is_rejected( self ):
return self.state == self.states.REJECTED
+
@property
def is_submitted( self ):
return self.state == self.states.SUBMITTED
+
@property
def is_new( self ):
+
return self.state == self.states.NEW
+
@property
def is_complete( self ):
return self.state == self.states.COMPLETE
+
@property
def samples_without_library_destinations( self ):
# Return all samples that are not associated with a library
@@ -3330,6 +3371,7 @@
if not sample.library:
samples.append( sample )
return samples
+
@property
def samples_with_bar_code( self ):
# Return all samples that have associated bar code
@@ -3338,6 +3380,7 @@
if sample.bar_code:
samples.append( sample )
return samples
+
def send_email_notification( self, trans, common_state, final_state=False ):
# Check if an email notification is configured to be sent when the samples
# are in this state
@@ -3390,7 +3433,7 @@
try:
send_mail( frm, to, subject, body, trans.app.config )
comments = "Email notification sent to %s." % ", ".join( to ).strip().strip( ',' )
- except Exception,e:
+ except Exception, e:
comments = "Email notification failed. (%s)" % str(e)
# update the request history with the email notification event
elif not trans.app.config.smtp_server:
@@ -3401,16 +3444,19 @@
trans.sa_session.flush()
return comments
+
class RequestEvent( object ):
def __init__(self, request=None, request_state=None, comment=''):
self.request = request
self.state = request_state
self.comment = comment
+
class ExternalService( object ):
- data_transfer_protocol = Bunch( HTTP = 'http',
- HTTPS = 'https',
- SCP = 'scp' )
+ data_transfer_protocol = Bunch( HTTP='http',
+ HTTPS='https',
+ SCP='scp' )
+
def __init__( self, name=None, description=None, external_service_type_id=None, version=None, form_definition_id=None, form_values_id=None, deleted=None ):
self.name = name
self.description = description
@@ -3419,9 +3465,11 @@
self.form_definition_id = form_definition_id
self.form_values_id = form_values_id
self.deleted = deleted
- self.label = None # Used in the request_type controller's __build_external_service_select_field() method
+ self.label = None # Used in the request_type controller's __build_external_service_select_field() method
+
def get_external_service_type( self, trans ):
return trans.app.external_service_types.all_external_service_types[ self.external_service_type_id ]
+
def load_data_transfer_settings( self, trans ):
trans.app.external_service_types.reload( self.external_service_type_id )
self.data_transfer = {}
@@ -3442,33 +3490,39 @@
automatic_transfer = data_transfer_obj.config.get( 'automatic_transfer', 'false' )
http_configs[ 'automatic_transfer' ] = galaxy.util.string_as_bool( automatic_transfer )
self.data_transfer[ self.data_transfer_protocol.HTTP ] = http_configs
+
def populate_actions( self, trans, item, param_dict=None ):
return self.get_external_service_type( trans ).actions.populate( self, item, param_dict=param_dict )
+
class RequestType( object, Dictifiable ):
dict_collection_visible_keys = ( 'id', 'name', 'desc' )
dict_element_visible_keys = ( 'id', 'name', 'desc', 'request_form_id', 'sample_form_id' )
- rename_dataset_options = Bunch( NO = 'Do not rename',
- SAMPLE_NAME = 'Preprend sample name',
- EXPERIMENT_NAME = 'Prepend experiment name',
- EXPERIMENT_AND_SAMPLE_NAME = 'Prepend experiment and sample name')
+ rename_dataset_options = Bunch( NO='Do not rename',
+ SAMPLE_NAME='Preprend sample name',
+ EXPERIMENT_NAME='Prepend experiment name',
+ EXPERIMENT_AND_SAMPLE_NAME='Prepend experiment and sample name')
permitted_actions = get_permitted_actions( filter='REQUEST_TYPE' )
+
def __init__( self, name=None, desc=None, request_form=None, sample_form=None ):
self.name = name
self.desc = desc
self.request_form = request_form
self.sample_form = sample_form
+
@property
def external_services( self ):
external_services = []
for rtesa in self.external_service_associations:
external_services.append( rtesa.external_service )
return external_services
+
def get_external_service( self, external_service_type_id ):
for rtesa in self.external_service_associations:
if rtesa.external_service.external_service_type_id == external_service_type_id:
return rtesa.external_service
return None
+
def get_external_services_for_manual_data_transfer( self, trans ):
'''Returns all external services that use manual data transfer'''
external_services = []
@@ -3481,6 +3535,7 @@
if not transfer_type_settings[ 'automatic_transfer' ]:
external_services.append( external_service )
return external_services
+
def delete_external_service_associations( self, trans ):
'''Deletes all external service associations.'''
flush_needed = False
@@ -3489,20 +3544,24 @@
flush_needed = True
if flush_needed:
trans.sa_session.flush()
+
def add_external_service_association( self, trans, external_service ):
rtesa = trans.model.RequestTypeExternalServiceAssociation( self, external_service )
trans.sa_session.add( rtesa )
trans.sa_session.flush()
+
@property
def final_sample_state( self ):
# The states mapper for this object orders ascending
return self.states[-1]
+
@property
def run_details( self ):
if self.run:
# self.run[0] is [RequestTypeRunAssociation]
return self.run[0]
return None
+
def get_template_widgets( self, trans, get_contents=True ):
# See if we have any associated templates. The get_contents param
# is passed by callers that are inheriting a template - these are
@@ -3520,23 +3579,27 @@
return template.get_widgets( trans.user )
return []
+
class RequestTypeExternalServiceAssociation( object ):
def __init__( self, request_type, external_service ):
self.request_type = request_type
self.external_service = external_service
+
class RequestTypePermissions( object ):
def __init__( self, action, request_type, role ):
self.action = action
self.request_type = request_type
self.role = role
+
class Sample( object, Dictifiable ):
# The following form_builder classes are supported by the Sample class.
supported_field_types = [ CheckboxField, SelectField, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
- bulk_operations = Bunch( CHANGE_STATE = 'Change state',
- SELECT_LIBRARY = 'Select data library and folder' )
+ bulk_operations = Bunch( CHANGE_STATE='Change state',
+ SELECT_LIBRARY='Select data library and folder' )
dict_collection_visible_keys = ( 'id', 'name' )
+
def __init__(self, name=None, desc=None, request=None, form_values=None, bar_code=None, library=None, folder=None, workflow=None, history=None):
self.name = name
self.desc = desc
@@ -3547,17 +3610,20 @@
self.folder = folder
self.history = history
self.workflow = workflow
+
@property
def state( self ):
latest_event = self.latest_event
if latest_event:
return latest_event.state
return None
+
@property
def latest_event( self ):
if self.events:
return self.events[0]
return None
+
@property
def adding_to_library_dataset_files( self ):
adding_to_library_datasets = []
@@ -3565,6 +3631,7 @@
if dataset.status == SampleDataset.transfer_status.ADD_TO_LIBRARY:
adding_to_library_datasets.append( dataset )
return adding_to_library_datasets
+
@property
def inprogress_dataset_files( self ):
inprogress_datasets = []
@@ -3572,6 +3639,7 @@
if dataset.status not in [ SampleDataset.transfer_status.NOT_STARTED, SampleDataset.transfer_status.COMPLETE ]:
inprogress_datasets.append( dataset )
return inprogress_datasets
+
@property
def queued_dataset_files( self ):
queued_datasets = []
@@ -3579,6 +3647,7 @@
if dataset.status == SampleDataset.transfer_status.IN_QUEUE:
queued_datasets.append( dataset )
return queued_datasets
+
@property
def transfer_error_dataset_files( self ):
transfer_error_datasets = []
@@ -3586,6 +3655,7 @@
if dataset.status == SampleDataset.transfer_status.ERROR:
transfer_error_datasets.append( dataset )
return transfer_error_datasets
+
@property
def transferred_dataset_files( self ):
transferred_datasets = []
@@ -3593,6 +3663,7 @@
if dataset.status == SampleDataset.transfer_status.COMPLETE:
transferred_datasets.append( dataset )
return transferred_datasets
+
@property
def transferring_dataset_files( self ):
transferring_datasets = []
@@ -3600,6 +3671,7 @@
if dataset.status == SampleDataset.transfer_status.TRANSFERRING:
transferring_datasets.append( dataset )
return transferring_datasets
+
@property
def untransferred_dataset_files( self ):
untransferred_datasets = []
@@ -3607,6 +3679,7 @@
if dataset.status != SampleDataset.transfer_status.COMPLETE:
untransferred_datasets.append( dataset )
return untransferred_datasets
+
def get_untransferred_dataset_size( self, filepath, scp_configs ):
def print_ticks( d ):
pass
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -2130,6 +2130,8 @@
elif isinstance( value, basestring ):
if value.startswith( "dce:" ):
rval = trans.sa_session.query( trans.app.model.DatasetCollectionElement ).get( value[ len( "dce:"): ] )
+ elif value.startswith( "hdca:" ):
+ rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( value[ len( "hdca:"): ] )
else:
rval = trans.sa_session.query( trans.app.model.HistoryDatasetCollectionAssociation ).get( value )
if rval and isinstance( rval, trans.app.model.HistoryDatasetCollectionAssociation ):
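This hunk extends the prefix convention for collection parameter values posted as strings: "dce:<id>" resolves to a DatasetCollectionElement, while the new "hdca:<id>" (like a bare id) resolves to a HistoryDatasetCollectionAssociation. A reduced sketch of that branching; the session and model classes are passed in as stand-ins, the real logic lives on the collection parameter class.

def resolve_collection_value(value, sa_session, DatasetCollectionElement, HistoryDatasetCollectionAssociation):
    # mirrors the string-prefix branching added above
    if value.startswith("dce:"):
        return sa_session.query(DatasetCollectionElement).get(value[len("dce:"):])
    if value.startswith("hdca:"):
        return sa_session.query(HistoryDatasetCollectionAssociation).get(value[len("hdca:"):])
    return sa_session.query(HistoryDatasetCollectionAssociation).get(value)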
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/scripts/mvc/tools/tools-section.js
--- a/static/scripts/mvc/tools/tools-section.js
+++ b/static/scripts/mvc/tools/tools-section.js
@@ -336,7 +336,14 @@
id : 'field-' + id,
data : options,
value : options[0].value,
+ multiple : input_def.multiple,
onchange : function(value) {
+ // pick the first dataset if multiple might be selected
+ // TODO: iterate over all datasets and filter common/consistent columns
+ if (input_def.multiple) {
+ value = value[0];
+ }
+
// get referenced columns
var column_list = self.app.tree.findReferences(id, 'data_column');
@@ -437,18 +444,16 @@
// column selection field
_field_column : function (input_def) {
return new Ui.Select.View({
- id : 'field-' + input_def.id
+ id : 'field-' + input_def.id,
+ multiple: input_def.multiple
});
},
// text input field
_field_text : function(input_def) {
- var TextClass = Ui.Input;
- if (input_def.area) {
- TextClass = Ui.Textarea;
- }
- return new TextClass({
- id : 'field-' + input_def.id
+ return new Ui.Input({
+ id : 'field-' + input_def.id,
+ area : input_def.area
});
},
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/scripts/mvc/ui/ui-misc.js
--- a/static/scripts/mvc/ui/ui-misc.js
+++ b/static/scripts/mvc/ui/ui-misc.js
@@ -312,7 +312,8 @@
placeholder : '',
disabled : false,
visible : true,
- cls : ''
+ cls : '',
+ area : false
},
// initialize
@@ -352,60 +353,11 @@
// element
_template: function(options) {
- return '<input id="' + options.id + '" type="' + options.type + '" value="' + options.value + '" placeholder="' + options.placeholder + '" class="ui-input ' + options.cls + '">';
- }
-});
-
-// plugin
-var Textarea = Backbone.View.extend({
- // options
- optionsDefault: {
- value : '',
- type : 'text',
- placeholder : '',
- disabled : false,
- visible : true,
- cls : ''
- },
-
- // initialize
- initialize : function(options) {
- // configure options
- this.options = Utils.merge(options, this.optionsDefault);
-
- // create new element
- this.setElement(this._template(this.options));
-
- // disable input field
- if (this.options.disabled) {
- this.$el.prop('disabled', true);
+ if (options.area) {
+ return '<textarea id="' + options.id + '" class="ui-textarea ' + options.cls + '"></textarea>';
+ } else {
+ return '<input id="' + options.id + '" type="' + options.type + '" value="' + options.value + '" placeholder="' + options.placeholder + '" class="ui-input ' + options.cls + '">';
}
-
- // hide input field
- if (!this.options.visible) {
- this.$el.hide();
- }
-
- // onchange event handler. fires on user activity.
- var self = this;
- this.$el.on('input', function() {
- if (self.options.onchange) {
- self.options.onchange(self.$el.val());
- }
- });
- },
-
- // value
- value : function (new_val) {
- if (new_val !== undefined) {
- this.$el.val(new_val);
- }
- return this.$el.val();
- },
-
- // element
- _template: function(options) {
- return '<textarea id="' + options.id + '" class="ui-textarea ' + options.cls + '"></textarea>';
}
});
@@ -455,7 +407,6 @@
Checkbox : Checkbox,
Searchbox : Searchbox,
Select : Select,
- Textarea : Textarea,
Hidden : Hidden,
Slider : Slider
}
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/scripts/mvc/ui/ui-select-default.js
--- a/static/scripts/mvc/ui/ui-select-default.js
+++ b/static/scripts/mvc/ui/ui-select-default.js
@@ -7,24 +7,19 @@
var View = Backbone.View.extend({
// options
optionsDefault : {
- id : '',
- cls : '',
- empty : 'No data available',
- visible : true,
- wait : false
+ id : '',
+ cls : '',
+ empty : 'No data available',
+ visible : true,
+ wait : false,
+ multiple : false
},
- // value
- selected : null,
-
// initialize
initialize : function(options) {
// configure options
this.options = Utils.merge(options, this.optionsDefault);
- // initial value
- this.selected = this.options.value;
-
// create new element
this.setElement(this._template(this.options));
@@ -32,21 +27,17 @@
this.$select = this.$el.find('#select');
this.$icon = this.$el.find('#icon');
- // add change event. fires only on user activity
- var self = this;
- this.$select.on('change', function() {
- self.value(self.$select.val());
- });
-
- // add change event. fires on trigger
- this.on('change', function() {
- if (self.options.onchange) {
- self.options.onchange(self.value());
- }
- });
+ // configure multiple
+ if (this.options.multiple) {
+ this.$select.prop('multiple', true);
+ this.$select.addClass('ui-select-multiple');
+ this.$icon.remove();
+ } else {
+ this.$el.addClass('ui-select');
+ }
// refresh
- this._refresh();
+ this.update(this.options.data);
// show/hide
if (!this.options.visible) {
@@ -59,31 +50,25 @@
} else {
this.show();
}
+
+ // add change event. fires only on user activity
+ var self = this;
+ this.$select.on('change', function() {
+ self._change();
+ });
+
+ // add change event. fires on trigger
+ this.on('change', function() {
+ self._change();
+ });
},
// value
value : function (new_value) {
-
- // get current id/value
- var before = this.selected;
-
- // check if new_value is defined
if (new_value !== undefined) {
- this.selected = new_value;
this.$select.val(new_value);
}
-
- // get current id/value
- var after = this.selected;
- if (after) {
- // fire onchange
- if (after != before && this.options.onchange) {
- this.options.onchange(after);
- }
- }
-
- // return
- return after;
+ return this.$select.val();
},
// first
@@ -157,6 +142,9 @@
// render
update: function(options) {
+ // backup current value
+ var current = this.$select.val();
+
// remove all options
this.$select.find('option').remove();
@@ -167,6 +155,14 @@
// refresh
this._refresh();
+
+ // set previous value
+ this.$select.val(current);
+
+ // check if any value was set
+ if (!this.$select.val()) {
+ this.$select.val(this.first());
+ }
},
// set on change event
@@ -179,6 +175,13 @@
return this.$select.find('option[value=' + value + ']').length > 0;
},
+ // change
+ _change: function() {
+ if (this.options.onchange) {
+ this.options.onchange(this.$select.val());
+ }
+ },
+
// refresh
_refresh: function() {
// remove placeholder
@@ -196,41 +199,21 @@
// enable select field
this.enable();
}
-
- // update value
- if (this.selected) {
- this.$select.val(this.selected);
- }
},
- // option
+ // template option
_templateOption: function(options) {
return '<option value="' + options.value + '">' + options.label + '</option>';
},
- // element
+ // template
_template: function(options) {
- var tmpl = '<div id="' + options.id + '" class="ui-select">' +
- '<div class="button">' +
- '<i id="icon"/>' +
- '</div>' +
- '<select id="select" class="select ' + options.cls + ' ' + options.id + '">';
- for (key in options.data) {
- // options
- var item = options.data[key];
-
- // identify selected value
- var tag = '';
- if (item.value == options.value || item.value == '') {
- tag = 'selected';
- }
-
- // add template string
- tmpl += '<option value="' + item.value + '" ' + tag + '>' + item.label + '</option>';
- }
- tmpl += '</select>' +
- '</div>';
- return tmpl;
+ return '<div id="' + options.id + '">' +
+ '<div class="button">' +
+ '<i id="icon"/>' +
+ '</div>' +
+ '<select id="select" class="select ' + options.cls + ' ' + options.id + '"></select>' +
+ '</div>';
}
});
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/scripts/packed/mvc/tools/tools-section.js
--- a/static/scripts/packed/mvc/tools/tools-section.js
+++ b/static/scripts/packed/mvc/tools/tools-section.js
@@ -1,1 +1,1 @@
-define(["utils/utils","mvc/ui/ui-table","mvc/ui/ui-misc","mvc/ui/ui-tabs"],function(c,b,e,a){var d=Backbone.View.extend({initialize:function(g,f){this.app=g;this.inputs=f.inputs;f.cls_tr="section-row";this.table=new b.View(f);this.setElement(this.table.$el);this.render()},render:function(){this.table.delAll();for(var f in this.inputs){this._add(this.inputs[f])}},_add:function(h){var g=this;var f=jQuery.extend(true,{},h);f.id=c.uuid();this.app.input_list[f.id]=f;var i=f.type;switch(i){case"conditional":this._addConditional(f);break;case"repeat":this._addRepeat(f);break;default:this._addRow(i,f)}},_addConditional:function(f){f.label=f.test_param.label;f.value=f.test_param.value;this._addRow("conditional",f);for(var h in f.cases){var g=f.id+"-section-"+h;var j=new d(this.app,{inputs:f.cases[h].inputs,cls:"ui-table-plain"});this.table.add("");this.table.add(j.$el);this.table.append(g)}},_addRepeat:function(f){var g=this;var k=new a.View({title_new:"Add "+f.title,max:f.max,onnew:function(){var i=f.id+"-section-"+c.uuid();var m=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:i,title:f.title,$el:m.$el,ondel:function(){k.del(i);k.retitle(f.title);g.app.refresh()}});k.retitle(f.title);k.show(i);g.app.refresh()}});for(var j=0;j<f.min;j++){var h=f.id+"-section-"+c.uuid();var l=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:h,title:f.title,$el:l.$el})}k.retitle(f.title);this.table.add("");this.table.add(k.$el);this.table.append(f.id)},_addRow:function(h,f){var j=f.id;var g=null;switch(h){case"text":g=this._field_text(f);break;case"select":g=this._field_select(f);break;case"data":g=this._field_data(f);break;case"data_column":g=this._field_column(f);break;case"conditional":g=this._field_conditional(f);break;case"hidden":g=this._field_hidden(f);break;case"integer":g=this._field_slider(f);break;case"float":g=this._field_slider(f);break;case"boolean":g=this._field_boolean(f);break;default:g=this._field_text(f);console.debug("tools-form::_addRow() : Unmatched field type ("+h+").")}if(f.value!==undefined){g.value(f.value)}this.app.field_list[j]=g;var i=$("<div/>");i.append(g.$el);if(f.help){i.append('<div class="ui-table-form-info">'+f.help+"</div>")}this.table.add('<span class="ui-table-form-title">'+f.label+"</span>","20%");this.table.add(i);this.table.append(j)},_field_conditional:function(f){var g=this;var h=[];for(var j in f.test_param.options){var k=f.test_param.options[j];h.push({label:k[0],value:k[1]})}return new e.Select.View({id:"field-"+f.id,data:h,onchange:function(s){for(var q in f.cases){var m=f.cases[q];var p=f.id+"-section-"+q;var l=g.table.get(p);var o=false;for(var n in m.inputs){var r=m.inputs[n].type;if(r&&r!=="hidden"){o=true;break}}if(m.value==s&&o){l.fadeIn("fast")}else{l.hide()}}}})},_field_data:function(f){var g=this;var l=f.id;var k=this.app.datasets.filterType();var h=[];for(var j in k){h.push({label:k[j].get("name"),value:k[j].get("id")})}return new e.Select.View({id:"field-"+l,data:h,value:h[0].value,onchange:function(u){var s=g.app.tree.findReferences(l,"data_column");var n=g.app.datasets.filter(u);if(n&&s.length>0){console.debug("tool-form::field_data() - Selected dataset "+u+".");var w=n.get("metadata_column_types");if(!w){console.debug("tool-form::field_data() - FAILED: Could not find metadata for dataset "+u+".")}for(var p in s){var q=g.app.input_list[s[p]];var r=g.app.field_list[s[p]];if(!q||!r){console.debug("tool-form::field_data() - FAILED: Column not found.")}var o=q.numerical;var m=[];for(var v in w){var 
t=w[v];if(t=="int"||t=="float"||!o){m.push({label:"Column: "+(parseInt(v)+1)+" ["+w[v]+"]",value:v})}}if(r){r.update(m);if(!r.exists(r.value())){r.value(r.first())}}}}else{console.debug("tool-form::field_data() - FAILED: Could not find dataset "+u+".")}}})},_field_select:function(f){var g=[];for(var h in f.options){var j=f.options[h];g.push({label:j[0],value:j[1]})}var k=e.Select;switch(f.display){case"checkboxes":k=e.Checkbox;break;case"radio":k=e.RadioButton;break}if(f.multiple){k=e.Checkbox}return new k.View({id:"field-"+f.id,data:g})},_field_column:function(f){return new e.Select.View({id:"field-"+f.id})},_field_text:function(g){var f=e.Input;if(g.area){f=e.Textarea}return new f({id:"field-"+g.id})},_field_slider:function(f){return new e.Slider.View({id:"field-"+f.id,min:f.min||0,max:f.max||1000,decimal:f.type=="float"})},_field_hidden:function(f){return new e.Hidden({id:"field-"+f.id})},_field_boolean:function(f){return new e.RadioButton.View({id:"field-"+f.id,data:[{label:"Yes",value:true},{label:"No",value:false}]})}});return{View:d}});
\ No newline at end of file
+define(["utils/utils","mvc/ui/ui-table","mvc/ui/ui-misc","mvc/ui/ui-tabs"],function(c,b,e,a){var d=Backbone.View.extend({initialize:function(g,f){this.app=g;this.inputs=f.inputs;f.cls_tr="section-row";this.table=new b.View(f);this.setElement(this.table.$el);this.render()},render:function(){this.table.delAll();for(var f in this.inputs){this._add(this.inputs[f])}},_add:function(h){var g=this;var f=jQuery.extend(true,{},h);f.id=c.uuid();this.app.input_list[f.id]=f;var i=f.type;switch(i){case"conditional":this._addConditional(f);break;case"repeat":this._addRepeat(f);break;default:this._addRow(i,f)}},_addConditional:function(f){f.label=f.test_param.label;f.value=f.test_param.value;this._addRow("conditional",f);for(var h in f.cases){var g=f.id+"-section-"+h;var j=new d(this.app,{inputs:f.cases[h].inputs,cls:"ui-table-plain"});this.table.add("");this.table.add(j.$el);this.table.append(g)}},_addRepeat:function(f){var g=this;var k=new a.View({title_new:"Add "+f.title,max:f.max,onnew:function(){var i=f.id+"-section-"+c.uuid();var m=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:i,title:f.title,$el:m.$el,ondel:function(){k.del(i);k.retitle(f.title);g.app.refresh()}});k.retitle(f.title);k.show(i);g.app.refresh()}});for(var j=0;j<f.min;j++){var h=f.id+"-section-"+c.uuid();var l=new d(g.app,{inputs:f.inputs,cls:"ui-table-plain"});k.add({id:h,title:f.title,$el:l.$el})}k.retitle(f.title);this.table.add("");this.table.add(k.$el);this.table.append(f.id)},_addRow:function(h,f){var j=f.id;var g=null;switch(h){case"text":g=this._field_text(f);break;case"select":g=this._field_select(f);break;case"data":g=this._field_data(f);break;case"data_column":g=this._field_column(f);break;case"conditional":g=this._field_conditional(f);break;case"hidden":g=this._field_hidden(f);break;case"integer":g=this._field_slider(f);break;case"float":g=this._field_slider(f);break;case"boolean":g=this._field_boolean(f);break;default:g=this._field_text(f);console.debug("tools-form::_addRow() : Unmatched field type ("+h+").")}if(f.value!==undefined){g.value(f.value)}this.app.field_list[j]=g;var i=$("<div/>");i.append(g.$el);if(f.help){i.append('<div class="ui-table-form-info">'+f.help+"</div>")}this.table.add('<span class="ui-table-form-title">'+f.label+"</span>","20%");this.table.add(i);this.table.append(j)},_field_conditional:function(f){var g=this;var h=[];for(var j in f.test_param.options){var k=f.test_param.options[j];h.push({label:k[0],value:k[1]})}return new e.Select.View({id:"field-"+f.id,data:h,onchange:function(s){for(var q in f.cases){var m=f.cases[q];var p=f.id+"-section-"+q;var l=g.table.get(p);var o=false;for(var n in m.inputs){var r=m.inputs[n].type;if(r&&r!=="hidden"){o=true;break}}if(m.value==s&&o){l.fadeIn("fast")}else{l.hide()}}}})},_field_data:function(f){var g=this;var l=f.id;var k=this.app.datasets.filterType();var h=[];for(var j in k){h.push({label:k[j].get("name"),value:k[j].get("id")})}return new e.Select.View({id:"field-"+l,data:h,value:h[0].value,onchange:function(u){var s=g.app.tree.findReferences(l,"data_column");var n=g.app.datasets.filter(u);if(n&&s.length>0){console.debug("tool-form::field_data() - Selected dataset "+u+".");var w=n.get("metadata_column_types");if(!w){console.debug("tool-form::field_data() - FAILED: Could not find metadata for dataset "+u+".")}for(var p in s){var q=g.app.input_list[s[p]];var r=g.app.field_list[s[p]];if(!q||!r){console.debug("tool-form::field_data() - FAILED: Column not found.")}var o=q.numerical;var m=[];for(var v in w){var 
t=w[v];if(t=="int"||t=="float"||!o){m.push({label:"Column: "+(parseInt(v)+1)+" ["+w[v]+"]",value:v})}}if(r){r.update(m);if(!r.exists(r.value())){r.value(r.first())}}}}else{console.debug("tool-form::field_data() - FAILED: Could not find dataset "+u+".")}}})},_field_select:function(f){var g=[];for(var h in f.options){var j=f.options[h];g.push({label:j[0],value:j[1]})}var k=e.Select;switch(f.display){case"checkboxes":k=e.Checkbox;break;case"radio":k=e.RadioButton;break}if(f.multiple){k=e.Checkbox}return new k.View({id:"field-"+f.id,data:g})},_field_column:function(f){return new e.Select.View({id:"field-"+f.id,multiple:f.multiple})},_field_text:function(f){return new e.Input({id:"field-"+f.id,area:f.area})},_field_slider:function(f){return new e.Slider.View({id:"field-"+f.id,min:f.min||0,max:f.max||1000,decimal:f.type=="float"})},_field_hidden:function(f){return new e.Hidden({id:"field-"+f.id})},_field_boolean:function(f){return new e.RadioButton.View({id:"field-"+f.id,data:[{label:"Yes",value:true},{label:"No",value:false}]})}});return{View:d}});
\ No newline at end of file
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/scripts/packed/mvc/ui/ui-misc.js
--- a/static/scripts/packed/mvc/ui/ui-misc.js
+++ b/static/scripts/packed/mvc/ui/ui-misc.js
@@ -1,1 +1,1 @@
-define(["utils/utils","mvc/ui/ui-select-default","mvc/ui/ui-slider","mvc/ui/ui-checkbox","mvc/ui/ui-radiobutton","mvc/ui/ui-button-menu","mvc/ui/ui-modal"],function(e,h,k,i,b,r,c){var q=Backbone.View.extend({optionsDefault:{url:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options))},_template:function(s){return'<img class="ui-image '+s.cls+'" src="'+s.url+'"/>'}});var l=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options))},title:function(s){this.$el.html(s)},_template:function(s){return'<label class="ui-label '+s.cls+'">'+s.title+"</label>"},value:function(){return options.title}});var d=Backbone.View.extend({optionsDefault:{floating:"right",icon:"",tooltip:"",placement:"bottom",title:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).tooltip({title:s.tooltip,placement:"bottom"})},_template:function(s){return'<div><span class="fa '+s.icon+'" class="ui-icon"/> '+s.title+"</div>"}});var g=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"btn btn-default",icon:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",s.onclick);$(this.el).tooltip({title:s.tooltip,placement:"bottom"})},_template:function(s){var t='<button id="'+s.id+'" type="submit" style="float: '+s.floating+';" type="button" class="ui-button '+s.cls+'">';if(s.icon){t+='<i class="icon fa '+s.icon+'"></i> '}t+=s.title+"</button>";return t}});var o=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"icon-btn",icon:"",tooltip:"",onclick:null},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",s.onclick);$(this.el).tooltip({title:s.tooltip,placement:"bottom"})},_template:function(s){var t="";if(s.title){t="width: auto;"}var u='<div id="'+s.id+'" style="float: '+s.floating+"; "+t+'" class="ui-button-icon '+s.cls+'">';if(s.title){u+='<div class="button"><i class="icon fa '+s.icon+'"/> <span class="title">'+s.title+"</span></div>"}else{u+='<i class="icon fa '+s.icon+'"/>'}u+="</div>";return u}});var p=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",s.onclick)},_template:function(s){return'<div><a href="javascript:void(0)" class="ui-anchor '+s.cls+'">'+s.title+"</a></div>"}});var a=Backbone.View.extend({optionsDefault:{message:"",status:"info",persistent:false},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement("<div></div>")},update:function(t){this.options=e.merge(t,this.optionsDefault);if(t.message!=""){this.$el.html(this._template(this.options));this.$el.find(".alert").append(t.message);this.$el.fadeIn();if(!t.persistent){var s=this;window.setTimeout(function(){if(s.$el.is(":visible")){s.$el.fadeOut()}else{s.$el.hide()}},3000)}}else{this.$el.fadeOut()}},_template:function(s){return'<div class="ui-message alert alert-'+s.status+'"/>'}});var f=Backbone.View.extend({optionsDefault:{onclick:null,searchword:""},initialize:function(t){this.options=e.merge(t,this.optionsDefault);this.setElement(this._template(this.options));var 
s=this;if(this.options.onclick){this.$el.on("submit",function(v){var u=s.$el.find("#search");s.options.onclick(u.val())})}},_template:function(s){return'<div class="ui-search"><form onsubmit="return false;"><input id="search" class="form-control input-sm" type="text" name="search" placeholder="Search..." value="'+s.searchword+'"><button type="submit" class="btn search-btn"><i class="fa fa-search"></i></button></form></div>'}});var n=Backbone.View.extend({optionsDefault:{value:"",type:"text",placeholder:"",disabled:false,visible:true,cls:""},initialize:function(t){this.options=e.merge(t,this.optionsDefault);this.setElement(this._template(this.options));if(this.options.disabled){this.$el.prop("disabled",true)}if(!this.options.visible){this.$el.hide()}var s=this;this.$el.on("input",function(){if(s.options.onchange){s.options.onchange(s.$el.val())}})},value:function(s){if(s!==undefined){this.$el.val(s)}return this.$el.val()},_template:function(s){return'<input id="'+s.id+'" type="'+s.type+'" value="'+s.value+'" placeholder="'+s.placeholder+'" class="ui-input '+s.cls+'">'}});var j=Backbone.View.extend({optionsDefault:{value:"",type:"text",placeholder:"",disabled:false,visible:true,cls:""},initialize:function(t){this.options=e.merge(t,this.optionsDefault);this.setElement(this._template(this.options));if(this.options.disabled){this.$el.prop("disabled",true)}if(!this.options.visible){this.$el.hide()}var s=this;this.$el.on("input",function(){if(s.options.onchange){s.options.onchange(s.$el.val())}})},value:function(s){if(s!==undefined){this.$el.val(s)}return this.$el.val()},_template:function(s){return'<textarea id="'+s.id+'" class="ui-textarea '+s.cls+'"></textarea>'}});var m=Backbone.View.extend({optionsDefault:{value:""},initialize:function(s){this.options=e.merge(s,this.optionsDefault);this.setElement(this._template(this.options))},value:function(s){if(s!==undefined){this.$el.val(s)}return this.$el.val()},_template:function(s){return'<hidden id="'+s.id+'" value="'+s.value+'"/>'}});return{Anchor:p,Button:g,ButtonIcon:o,ButtonMenu:r,Icon:d,Image:q,Input:n,Label:l,Message:a,Modal:c,RadioButton:b,Checkbox:i,Searchbox:f,Select:h,Textarea:j,Hidden:m,Slider:k}});
\ No newline at end of file
+define(["utils/utils","mvc/ui/ui-select-default","mvc/ui/ui-slider","mvc/ui/ui-checkbox","mvc/ui/ui-radiobutton","mvc/ui/ui-button-menu","mvc/ui/ui-modal"],function(l,b,f,e,m,q,n){var d=Backbone.View.extend({optionsDefault:{url:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options))},_template:function(r){return'<img class="ui-image '+r.cls+'" src="'+r.url+'"/>'}});var k=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options))},title:function(r){this.$el.html(r)},_template:function(r){return'<label class="ui-label '+r.cls+'">'+r.title+"</label>"},value:function(){return options.title}});var c=Backbone.View.extend({optionsDefault:{floating:"right",icon:"",tooltip:"",placement:"bottom",title:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).tooltip({title:r.tooltip,placement:"bottom"})},_template:function(r){return'<div><span class="fa '+r.icon+'" class="ui-icon"/> '+r.title+"</div>"}});var h=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"btn btn-default",icon:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",r.onclick);$(this.el).tooltip({title:r.tooltip,placement:"bottom"})},_template:function(r){var s='<button id="'+r.id+'" type="submit" style="float: '+r.floating+';" type="button" class="ui-button '+r.cls+'">';if(r.icon){s+='<i class="icon fa '+r.icon+'"></i> '}s+=r.title+"</button>";return s}});var i=Backbone.View.extend({optionsDefault:{id:null,title:"",floating:"right",cls:"icon-btn",icon:"",tooltip:"",onclick:null},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",r.onclick);$(this.el).tooltip({title:r.tooltip,placement:"bottom"})},_template:function(r){var s="";if(r.title){s="width: auto;"}var t='<div id="'+r.id+'" style="float: '+r.floating+"; "+s+'" class="ui-button-icon '+r.cls+'">';if(r.title){t+='<div class="button"><i class="icon fa '+r.icon+'"/> <span class="title">'+r.title+"</span></div>"}else{t+='<i class="icon fa '+r.icon+'"/>'}t+="</div>";return t}});var g=Backbone.View.extend({optionsDefault:{title:"",cls:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options));$(this.el).on("click",r.onclick)},_template:function(r){return'<div><a href="javascript:void(0)" class="ui-anchor '+r.cls+'">'+r.title+"</a></div>"}});var o=Backbone.View.extend({optionsDefault:{message:"",status:"info",persistent:false},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement("<div></div>")},update:function(s){this.options=l.merge(s,this.optionsDefault);if(s.message!=""){this.$el.html(this._template(this.options));this.$el.find(".alert").append(s.message);this.$el.fadeIn();if(!s.persistent){var r=this;window.setTimeout(function(){if(r.$el.is(":visible")){r.$el.fadeOut()}else{r.$el.hide()}},3000)}}else{this.$el.fadeOut()}},_template:function(r){return'<div class="ui-message alert alert-'+r.status+'"/>'}});var a=Backbone.View.extend({optionsDefault:{onclick:null,searchword:""},initialize:function(s){this.options=l.merge(s,this.optionsDefault);this.setElement(this._template(this.options));var 
r=this;if(this.options.onclick){this.$el.on("submit",function(u){var t=r.$el.find("#search");r.options.onclick(t.val())})}},_template:function(r){return'<div class="ui-search"><form onsubmit="return false;"><input id="search" class="form-control input-sm" type="text" name="search" placeholder="Search..." value="'+r.searchword+'"><button type="submit" class="btn search-btn"><i class="fa fa-search"></i></button></form></div>'}});var j=Backbone.View.extend({optionsDefault:{value:"",type:"text",placeholder:"",disabled:false,visible:true,cls:"",area:false},initialize:function(s){this.options=l.merge(s,this.optionsDefault);this.setElement(this._template(this.options));if(this.options.disabled){this.$el.prop("disabled",true)}if(!this.options.visible){this.$el.hide()}var r=this;this.$el.on("input",function(){if(r.options.onchange){r.options.onchange(r.$el.val())}})},value:function(r){if(r!==undefined){this.$el.val(r)}return this.$el.val()},_template:function(r){if(r.area){return'<textarea id="'+r.id+'" class="ui-textarea '+r.cls+'"></textarea>'}else{return'<input id="'+r.id+'" type="'+r.type+'" value="'+r.value+'" placeholder="'+r.placeholder+'" class="ui-input '+r.cls+'">'}}});var p=Backbone.View.extend({optionsDefault:{value:""},initialize:function(r){this.options=l.merge(r,this.optionsDefault);this.setElement(this._template(this.options))},value:function(r){if(r!==undefined){this.$el.val(r)}return this.$el.val()},_template:function(r){return'<hidden id="'+r.id+'" value="'+r.value+'"/>'}});return{Anchor:g,Button:h,ButtonIcon:i,ButtonMenu:q,Icon:c,Image:d,Input:j,Label:k,Message:o,Modal:n,RadioButton:m,Checkbox:e,Searchbox:a,Select:b,Hidden:p,Slider:f}});
\ No newline at end of file
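For readers skimming the packed diff above: the standalone Textarea view is folded into Input behind a new 'area' flag, and Textarea is dropped from the returned widget map. A minimal usage sketch follows (not part of the changeset; it assumes an AMD loader, that the file above is the packed mvc/ui/ui-misc module, and that the '#form' container and ids are made up for illustration):

// usage sketch only, not part of the commit above
require( ['mvc/ui/ui-misc'], function( Ui ) {
    // a plain text input, as before
    var name = new Ui.Input({
        id          : 'name-input',
        placeholder : 'Enter a name',
        onchange    : function( value ) { console.log( 'name is now', value ); }
    });
    // the same view renders a <textarea> when 'area' is set,
    // replacing the removed Textarea view
    var notes = new Ui.Input({ id: 'notes-input', area: true, cls: 'my-notes' });
    $( '#form' ).append( name.$el ).append( notes.$el );
    name.value( 'Unnamed' );    // value() acts as getter and setter for both variants
});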
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/scripts/packed/mvc/ui/ui-select-default.js
--- a/static/scripts/packed/mvc/ui/ui-select-default.js
+++ b/static/scripts/packed/mvc/ui/ui-select-default.js
@@ -1,1 +1,1 @@
-define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{id:"",cls:"",empty:"No data available",visible:true,wait:false},selected:null,initialize:function(d){this.options=a.merge(d,this.optionsDefault);this.selected=this.options.value;this.setElement(this._template(this.options));this.$select=this.$el.find("#select");this.$icon=this.$el.find("#icon");var c=this;this.$select.on("change",function(){c.value(c.$select.val())});this.on("change",function(){if(c.options.onchange){c.options.onchange(c.value())}});this._refresh();if(!this.options.visible){this.hide()}if(this.options.wait){this.wait()}else{this.show()}},value:function(c){var d=this.selected;if(c!==undefined){this.selected=c;this.$select.val(c)}var e=this.selected;if(e){if(e!=d&&this.options.onchange){this.options.onchange(e)}}return e},first:function(){var c=this.$select.find("option");if(c.length>0){return c.val()}else{return undefined}},text:function(){return this.$select.find("option:selected").text()},show:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-caret-down");this.$select.show();this.$el.show()},hide:function(){this.$el.hide()},wait:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-spinner fa-spin");this.$select.hide()},disabled:function(){return this.$select.is(":disabled")},enable:function(){this.$select.prop("disabled",false)},disable:function(){this.$select.prop("disabled",true)},add:function(c){this.$select.append(this._templateOption(c));this._refresh()},del:function(c){this.$select.find("option[value="+c+"]").remove();this.$select.trigger("change");this._refresh()},update:function(c){this.$select.find("option").remove();for(var d in c){this.$select.append(this._templateOption(c[d]))}this._refresh()},setOnChange:function(c){this.options.onchange=c},exists:function(c){return this.$select.find("option[value="+c+"]").length>0},_refresh:function(){this.$select.find("option[value=null]").remove();var c=this.$select.find("option").length;if(c==0){this.disable();this.$select.append(this._templateOption({value:"null",label:this.options.empty}))}else{this.enable()}if(this.selected){this.$select.val(this.selected)}},_templateOption:function(c){return'<option value="'+c.value+'">'+c.label+"</option>"},_template:function(e){var d='<div id="'+e.id+'" class="ui-select"><div class="button"><i id="icon"/></div><select id="select" class="select '+e.cls+" "+e.id+'">';for(key in e.data){var f=e.data[key];var c="";if(f.value==e.value||f.value==""){c="selected"}d+='<option value="'+f.value+'" '+c+">"+f.label+"</option>"}d+="</select></div>";return d}});return{View:b}});
\ No newline at end of file
+define(["utils/utils"],function(a){var b=Backbone.View.extend({optionsDefault:{id:"",cls:"",empty:"No data available",visible:true,wait:false,multiple:false},initialize:function(d){this.options=a.merge(d,this.optionsDefault);this.setElement(this._template(this.options));this.$select=this.$el.find("#select");this.$icon=this.$el.find("#icon");if(this.options.multiple){this.$select.prop("multiple",true);this.$select.addClass("ui-select-multiple");this.$icon.remove()}else{this.$el.addClass("ui-select")}this.update(this.options.data);if(!this.options.visible){this.hide()}if(this.options.wait){this.wait()}else{this.show()}var c=this;this.$select.on("change",function(){c._change()});this.on("change",function(){c._change()})},value:function(c){if(c!==undefined){this.$select.val(c)}return this.$select.val()},first:function(){var c=this.$select.find("option");if(c.length>0){return c.val()}else{return undefined}},text:function(){return this.$select.find("option:selected").text()},show:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-caret-down");this.$select.show();this.$el.show()},hide:function(){this.$el.hide()},wait:function(){this.$icon.removeClass();this.$icon.addClass("fa fa-spinner fa-spin");this.$select.hide()},disabled:function(){return this.$select.is(":disabled")},enable:function(){this.$select.prop("disabled",false)},disable:function(){this.$select.prop("disabled",true)},add:function(c){this.$select.append(this._templateOption(c));this._refresh()},del:function(c){this.$select.find("option[value="+c+"]").remove();this.$select.trigger("change");this._refresh()},update:function(c){var e=this.$select.val();this.$select.find("option").remove();for(var d in c){this.$select.append(this._templateOption(c[d]))}this._refresh();this.$select.val(e);if(!this.$select.val()){this.$select.val(this.first())}},setOnChange:function(c){this.options.onchange=c},exists:function(c){return this.$select.find("option[value="+c+"]").length>0},_change:function(){if(this.options.onchange){this.options.onchange(this.$select.val())}},_refresh:function(){this.$select.find("option[value=null]").remove();var c=this.$select.find("option").length;if(c==0){this.disable();this.$select.append(this._templateOption({value:"null",label:this.options.empty}))}else{this.enable()}},_templateOption:function(c){return'<option value="'+c.value+'">'+c.label+"</option>"},_template:function(c){return'<div id="'+c.id+'"><div class="button"><i id="icon"/></div><select id="select" class="select '+c.cls+" "+c.id+'"></select></div>'}});return{View:b}});
\ No newline at end of file
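Similarly, the rewritten select wrapper above gains multi-select support and now rebuilds its options through update() while keeping the current selection when possible. A minimal usage sketch (not part of the changeset; the '#form' container and the option data are made up for illustration):

// usage sketch only, not part of the commit above
require( ['mvc/ui/ui-select-default'], function( Select ) {
    var select = new Select.View({
        multiple : true,    // renders a multi-select and removes the caret icon
        data     : [ { value: 'hg19', label: 'Human (hg19)' },
                     { value: 'mm10', label: 'Mouse (mm10)' } ],
        onchange : function( value ) { console.log( 'selection:', value ); }
    });
    $( '#form' ).append( select.$el );
    // update() keeps the current selection if it is still present,
    // otherwise it falls back to the first option
    select.update([ { value: 'hg19', label: 'Human (hg19)' },
                    { value: 'dm3',  label: 'Fly (dm3)' } ]);
    console.log( select.value() );    // an array of values when 'multiple' is enabled
});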
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/style/blue/base.css
--- a/static/style/blue/base.css
+++ b/static/style/blue/base.css
@@ -1282,6 +1282,7 @@
.ui-table tbody{cursor:pointer}
.ui-table-plain tbody td{padding:5px 0px 5px 5px !important;border:none !important}
.ui-table-plain tbody{cursor:auto !important}
+.ui-table-form-info{clear:both !important}
.ui-table-form-separator{font-weight:bold;font-size:0.9em}
.ui-label{font-weight:bold}
.ui-message{padding:2px 2px 2px 10px}
@@ -1299,8 +1300,9 @@
.ui-portlet .no-scroll{height:calc(100% - 80px)}
.ui-popover{max-width:700px;display:none}.ui-popover .popover-close{position:absolute;right:10px;top:7px;font-size:1.2em;cursor:pointer}
.ui-popover .popover-title{padding:4px 10px}
-.ui-select{position:relative;height:27px;overflow:hidden;border:1px solid #bfbfbf;-moz-border-radius:3px;border-radius:3px}.ui-select .button{position:relative;width:25px;height:100%;float:right;border-left:1px solid #bfbfbf;padding-left:9px;padding-top:4px;background:#f2f2f2}
+.ui-select,.ui-select-multiple{position:relative;height:27px;overflow:hidden;border:1px solid #bfbfbf;-moz-border-radius:3px;border-radius:3px}.ui-select .button{position:relative;width:25px;height:100%;float:right;border-left:1px solid #bfbfbf;padding-left:9px;padding-top:4px;background:#f2f2f2}
.ui-select select{position:absolute;top:0px;height:100%;width:100%;padding-left:5px;cursor:pointer;background:transparent;border:0;border-radius:0;-webkit-appearance:none}
+.ui-select-multiple{height:100% !important;width:100% !important}
.libraryRow{background-color:#ebd9b2}
.datasetHighlighted{background-color:#f9f9f9}
.libraryItemDeleted-True{font-style:italic}
diff -r 75709a2361b15ba5fd393b0ff2333a81b6ddff87 -r 9fdca7477f44165ae894177a2fef78a4ff9dc5bf static/style/src/less/ui.less
--- a/static/style/src/less/ui.less
+++ b/static/style/src/less/ui.less
@@ -32,6 +32,7 @@
.ui-table-form-info {
&:extend(.toolParamHelp);
+ clear: both !important;
}
.ui-table-form-separator {
@@ -208,4 +209,10 @@
border-radius: 0;
-webkit-appearance: none;
}
+}
+
+.ui-select-multiple {
+ &:extend(.ui-select);
+ height: 100% !important;
+ width: 100% !important;
}
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jmchilton: Some PEP-8 fixes for lib/galaxy/model/__init__.py.
by commits-noreply@bitbucket.org 04 Sep '14
by commits-noreply@bitbucket.org 04 Sep '14
04 Sep '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a90ec4d4066f/
Changeset: a90ec4d4066f
User: jmchilton
Date: 2014-09-04 16:31:05
Summary: Some PEP-8 fixes for lib/galaxy/model/__init__.py.
Affected #: 1 file
diff -r dc5e8d1d7a77f9a87a6a825fd8fa9236c9e9a229 -r a90ec4d4066f67149322f68d2dc5c8b98fe4dbcb lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2466,6 +2466,7 @@
val = getattr( ldda.datatype, name )
rval['metadata_' + name] = val
return rval
+
def get_template_widgets( self, trans, get_contents=True ):
# See if we have any associated templates. The get_contents
# param is passed by callers that are inheriting a template - these
@@ -2492,6 +2493,7 @@
else:
return template.get_widgets( trans.user )
return []
+
def templates_dict( self, use_name=False ):
"""
Returns a dict of template info
@@ -2510,6 +2512,7 @@
tmp_dict[ name ] = content.get( field[ 'name' ] )
template_data[template.name] = tmp_dict
return template_data
+
def templates_json( self, use_name=False ):
return json.dumps( self.templates_dict( use_name=use_name ) )
@@ -2533,6 +2536,7 @@
self.info = info
self.inheritable = inheritable
+
class LibraryFolderInfoAssociation( object ):
def __init__( self, folder, form_definition, info, inheritable=False ):
self.folder = folder
@@ -2540,15 +2544,18 @@
self.info = info
self.inheritable = inheritable
+
class LibraryDatasetDatasetInfoAssociation( object ):
def __init__( self, library_dataset_dataset_association, form_definition, info ):
# TODO: need to figure out if this should be inheritable to the associated LibraryDataset
self.library_dataset_dataset_association = library_dataset_dataset_association
self.template = form_definition
self.info = info
+
@property
def inheritable( self ):
- return True #always allow inheriting, used for replacement
+ return True # always allow inheriting, used for replacement
+
class ValidationError( object ):
def __init__( self, message=None, err_type=None, attributes=None ):
@@ -2556,42 +2563,47 @@
self.err_type = err_type
self.attributes = attributes
+
class DatasetToValidationErrorAssociation( object ):
def __init__( self, dataset, validation_error ):
self.dataset = dataset
self.validation_error = validation_error
+
class ImplicitlyConvertedDatasetAssociation( object ):
- def __init__( self, id = None, parent = None, dataset = None, file_type = None, deleted = False, purged = False, metadata_safe = True ):
+
+ def __init__( self, id=None, parent=None, dataset=None, file_type=None, deleted=False, purged=False, metadata_safe=True ):
self.id = id
if isinstance(dataset, HistoryDatasetAssociation):
self.dataset = dataset
elif isinstance(dataset, LibraryDatasetDatasetAssociation):
self.dataset_ldda = dataset
else:
- raise AttributeError, 'Unknown dataset type provided for dataset: %s' % type( dataset )
+ raise AttributeError( 'Unknown dataset type provided for dataset: %s' % type( dataset ) )
if isinstance(parent, HistoryDatasetAssociation):
self.parent_hda = parent
elif isinstance(parent, LibraryDatasetDatasetAssociation):
self.parent_ldda = parent
else:
- raise AttributeError, 'Unknown dataset type provided for parent: %s' % type( parent )
+ raise AttributeError( 'Unknown dataset type provided for parent: %s' % type( parent ) )
self.type = file_type
self.deleted = deleted
self.purged = purged
self.metadata_safe = metadata_safe
- def clear( self, purge = False, delete_dataset = True ):
+ def clear( self, purge=False, delete_dataset=True ):
self.deleted = True
if self.dataset:
if delete_dataset:
self.dataset.deleted = True
if purge:
self.dataset.purged = True
- if purge and self.dataset.deleted: #do something with purging
+ if purge and self.dataset.deleted: # do something with purging
self.purged = True
- try: os.unlink( self.file_name )
- except Exception, e: print "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e )
+ try:
+ os.unlink( self.file_name )
+ except Exception, e:
+ print "Failed to purge associated file (%s) from disk: %s" % ( self.file_name, e )
DEFAULT_COLLECTION_NAME = "Unnamed Collection"
@@ -2903,6 +2915,7 @@
self.tool_id = None
self.message = message
+
class GalaxySession( object ):
def __init__( self,
id=None,
@@ -2924,24 +2937,29 @@
self.is_valid = is_valid
self.prev_session_id = prev_session_id
self.histories = []
+
def add_history( self, history, association=None ):
if association is None:
self.histories.append( GalaxySessionToHistoryAssociation( self, history ) )
else:
self.histories.append( association )
+
def get_disk_usage( self ):
if self.disk_usage is None:
return 0
return self.disk_usage
+
def set_disk_usage( self, bytes ):
self.disk_usage = bytes
total_disk_usage = property( get_disk_usage, set_disk_usage )
+
class GalaxySessionToHistoryAssociation( object ):
def __init__( self, galaxy_session, history ):
self.galaxy_session = galaxy_session
self.history = history
+
class UCI( object ):
def __init__( self ):
self.id = None
@@ -2962,14 +2980,14 @@
self.latest_workflow_id = None
self.workflows = []
- def copy_tags_from(self,target_user,source_workflow):
+ def copy_tags_from(self, target_user, source_workflow):
for src_swta in source_workflow.owner_tags:
new_swta = src_swta.copy()
new_swta.user = target_user
self.tags.append(new_swta)
- def to_dict( self, view='collection', value_mapper = None ):
- rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper = value_mapper )
+ def to_dict( self, view='collection', value_mapper=None ):
+ rval = super( StoredWorkflow, self ).to_dict( view=view, value_mapper=value_mapper )
tags_str_list = []
for tag in self.tags:
tag_str = tag.user_tname
@@ -3007,12 +3025,11 @@
return False
def to_dict( self, view='collection', value_mapper=None):
- rval = super( Workflow, self ).to_dict( view=view, value_mapper = value_mapper )
- rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
+ rval = super( Workflow, self ).to_dict( view=view, value_mapper=value_mapper )
+ rval['uuid'] = ( lambda uuid: str( uuid ) if uuid else None )( self.uuid )
return rval
-
class WorkflowStep( object ):
def __init__( self ):
@@ -3061,7 +3078,7 @@
dict_collection_visible_keys = ( 'id', 'update_time', 'workflow_id' )
dict_element_visible_keys = ( 'id', 'update_time', 'workflow_id' )
- def to_dict( self, view='collection', value_mapper = None ):
+ def to_dict( self, view='collection', value_mapper=None ):
rval = super( WorkflowInvocation, self ).to_dict( view=view, value_mapper=value_mapper )
if view == 'element':
steps = {}
@@ -3072,12 +3089,12 @@
inputs = {}
for step in self.steps:
- if step.workflow_step.type =='tool':
+ if step.workflow_step.type == 'tool':
for step_input in step.workflow_step.input_connections:
if step_input.output_step.type == 'data_input':
for job_input in step.job.input_datasets:
if job_input.name == step_input.input_name:
- inputs[str(step_input.output_step.order_index)] = { "id" : job_input.dataset_id, "src" : "hda"}
+ inputs[str(step_input.output_step.order_index)] = { "id": job_input.dataset_id, "src": "hda"}
rval['inputs'] = inputs
return rval
@@ -3086,19 +3103,20 @@
dict_collection_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id' )
dict_element_visible_keys = ( 'id', 'update_time', 'job_id', 'workflow_step_id' )
- def to_dict( self, view='collection', value_mapper = None ):
+ def to_dict( self, view='collection', value_mapper=None ):
rval = super( WorkflowInvocationStep, self ).to_dict( view=view, value_mapper=value_mapper )
rval['order_index'] = self.workflow_step.order_index
return rval
class MetadataFile( object ):
- def __init__( self, dataset = None, name = None ):
+ def __init__( self, dataset=None, name=None ):
if isinstance( dataset, HistoryDatasetAssociation ):
self.history_dataset = dataset
elif isinstance( dataset, LibraryDatasetDatasetAssociation ):
self.library_dataset = dataset
self.name = name
+
@property
def file_name( self ):
assert self.id is not None, "ID must be set before filename used (commit the object)"
@@ -3129,14 +3147,15 @@
class FormDefinition( object, Dictifiable ):
# The following form_builder classes are supported by the FormDefinition class.
supported_field_types = [ AddressField, CheckboxField, PasswordField, SelectField, TextArea, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
- types = Bunch( REQUEST = 'Sequencing Request Form',
- SAMPLE = 'Sequencing Sample Form',
- EXTERNAL_SERVICE = 'External Service Information Form',
- RUN_DETAILS_TEMPLATE = 'Sample run details template',
- LIBRARY_INFO_TEMPLATE = 'Library information template',
- USER_INFO = 'User Information' )
+ types = Bunch( REQUEST='Sequencing Request Form',
+ SAMPLE='Sequencing Sample Form',
+ EXTERNAL_SERVICE='External Service Information Form',
+ RUN_DETAILS_TEMPLATE='Sample run details template',
+ LIBRARY_INFO_TEMPLATE='Library information template',
+ USER_INFO='User Information' )
dict_collection_visible_keys = ( 'id', 'name' )
dict_element_visible_keys = ( 'id', 'name', 'desc', 'form_definition_current_id', 'fields', 'layout' )
+
def __init__( self, name=None, desc=None, fields=[], form_definition_current=None, form_type=None, layout=None ):
self.name = name
self.desc = desc
@@ -3144,6 +3163,7 @@
self.form_definition_current = form_definition_current
self.type = form_type
self.layout = layout
+
def grid_fields( self, grid_index ):
# Returns a dictionary whose keys are integers corresponding to field positions
# on the grid and whose values are the field.
@@ -3152,6 +3172,7 @@
if str( f[ 'layout' ] ) == str( grid_index ):
gridfields[i] = f
return gridfields
+
def get_widgets( self, user, contents={}, **kwd ):
'''
Return the list of widgets that comprise a form definition,
@@ -3213,24 +3234,28 @@
field_widget.params = params
elif field_type == 'SelectField':
for option in field[ 'selectlist' ]:
+
if option == value:
field_widget.add_option( option, option, selected=True )
else:
field_widget.add_option( option, option )
elif field_type == 'CheckboxField':
+
field_widget.set_checked( value )
if field[ 'required' ] == 'required':
req = 'Required'
else:
req = 'Optional'
if field[ 'helptext' ]:
- helptext='%s (%s)' % ( field[ 'helptext' ], req )
+ helptext = '%s (%s)' % ( field[ 'helptext' ], req )
else:
helptext = '(%s)' % req
widgets.append( dict( label=field[ 'label' ],
+
widget=field_widget,
helptext=helptext ) )
return widgets
+
def field_as_html( self, field ):
"""Generates disabled html for a field"""
type = field[ 'type' ]
@@ -3245,21 +3270,25 @@
# Return None if unsupported field type
return None
+
class FormDefinitionCurrent( object ):
def __init__(self, form_definition=None):
self.latest_form = form_definition
+
class FormValues( object ):
def __init__(self, form_def=None, content=None):
self.form_definition = form_def
self.content = content
+
class Request( object, Dictifiable ):
- states = Bunch( NEW = 'New',
- SUBMITTED = 'In Progress',
- REJECTED = 'Rejected',
- COMPLETE = 'Complete' )
+ states = Bunch( NEW='New',
+ SUBMITTED='In Progress',
+ REJECTED='Rejected',
+ COMPLETE='Complete' )
dict_collection_visible_keys = ( 'id', 'name', 'state' )
+
def __init__( self, name=None, desc=None, request_type=None, user=None, form_values=None, notification=None ):
self.name = name
self.desc = desc
@@ -3268,17 +3297,20 @@
self.user = user
self.notification = notification
self.samples_list = []
+
@property
def state( self ):
latest_event = self.latest_event
if latest_event:
return latest_event.state
return None
+
@property
def latest_event( self ):
if self.events:
return self.events[0]
return None
+
@property
def samples_have_common_state( self ):
"""
@@ -3294,6 +3326,7 @@
if s.state.id != state_for_comparison.id:
return False
return state_for_comparison
+
@property
def last_comment( self ):
latest_event = self.latest_event
@@ -3302,26 +3335,34 @@
return latest_event.comment
return ''
return 'No comment'
+
def get_sample( self, sample_name ):
for sample in self.samples:
if sample.name == sample_name:
return sample
return None
+
@property
def is_unsubmitted( self ):
return self.state in [ self.states.REJECTED, self.states.NEW ]
+
@property
def is_rejected( self ):
return self.state == self.states.REJECTED
+
@property
def is_submitted( self ):
return self.state == self.states.SUBMITTED
+
@property
def is_new( self ):
+
return self.state == self.states.NEW
+
@property
def is_complete( self ):
return self.state == self.states.COMPLETE
+
@property
def samples_without_library_destinations( self ):
# Return all samples that are not associated with a library
@@ -3330,6 +3371,7 @@
if not sample.library:
samples.append( sample )
return samples
+
@property
def samples_with_bar_code( self ):
# Return all samples that have associated bar code
@@ -3338,6 +3380,7 @@
if sample.bar_code:
samples.append( sample )
return samples
+
def send_email_notification( self, trans, common_state, final_state=False ):
# Check if an email notification is configured to be sent when the samples
# are in this state
@@ -3390,7 +3433,7 @@
try:
send_mail( frm, to, subject, body, trans.app.config )
comments = "Email notification sent to %s." % ", ".join( to ).strip().strip( ',' )
- except Exception,e:
+ except Exception, e:
comments = "Email notification failed. (%s)" % str(e)
# update the request history with the email notification event
elif not trans.app.config.smtp_server:
@@ -3401,16 +3444,19 @@
trans.sa_session.flush()
return comments
+
class RequestEvent( object ):
def __init__(self, request=None, request_state=None, comment=''):
self.request = request
self.state = request_state
self.comment = comment
+
class ExternalService( object ):
- data_transfer_protocol = Bunch( HTTP = 'http',
- HTTPS = 'https',
- SCP = 'scp' )
+ data_transfer_protocol = Bunch( HTTP='http',
+ HTTPS='https',
+ SCP='scp' )
+
def __init__( self, name=None, description=None, external_service_type_id=None, version=None, form_definition_id=None, form_values_id=None, deleted=None ):
self.name = name
self.description = description
@@ -3419,9 +3465,11 @@
self.form_definition_id = form_definition_id
self.form_values_id = form_values_id
self.deleted = deleted
- self.label = None # Used in the request_type controller's __build_external_service_select_field() method
+ self.label = None # Used in the request_type controller's __build_external_service_select_field() method
+
def get_external_service_type( self, trans ):
return trans.app.external_service_types.all_external_service_types[ self.external_service_type_id ]
+
def load_data_transfer_settings( self, trans ):
trans.app.external_service_types.reload( self.external_service_type_id )
self.data_transfer = {}
@@ -3442,33 +3490,39 @@
automatic_transfer = data_transfer_obj.config.get( 'automatic_transfer', 'false' )
http_configs[ 'automatic_transfer' ] = galaxy.util.string_as_bool( automatic_transfer )
self.data_transfer[ self.data_transfer_protocol.HTTP ] = http_configs
+
def populate_actions( self, trans, item, param_dict=None ):
return self.get_external_service_type( trans ).actions.populate( self, item, param_dict=param_dict )
+
class RequestType( object, Dictifiable ):
dict_collection_visible_keys = ( 'id', 'name', 'desc' )
dict_element_visible_keys = ( 'id', 'name', 'desc', 'request_form_id', 'sample_form_id' )
- rename_dataset_options = Bunch( NO = 'Do not rename',
- SAMPLE_NAME = 'Preprend sample name',
- EXPERIMENT_NAME = 'Prepend experiment name',
- EXPERIMENT_AND_SAMPLE_NAME = 'Prepend experiment and sample name')
+ rename_dataset_options = Bunch( NO='Do not rename',
+ SAMPLE_NAME='Preprend sample name',
+ EXPERIMENT_NAME='Prepend experiment name',
+ EXPERIMENT_AND_SAMPLE_NAME='Prepend experiment and sample name')
permitted_actions = get_permitted_actions( filter='REQUEST_TYPE' )
+
def __init__( self, name=None, desc=None, request_form=None, sample_form=None ):
self.name = name
self.desc = desc
self.request_form = request_form
self.sample_form = sample_form
+
@property
def external_services( self ):
external_services = []
for rtesa in self.external_service_associations:
external_services.append( rtesa.external_service )
return external_services
+
def get_external_service( self, external_service_type_id ):
for rtesa in self.external_service_associations:
if rtesa.external_service.external_service_type_id == external_service_type_id:
return rtesa.external_service
return None
+
def get_external_services_for_manual_data_transfer( self, trans ):
'''Returns all external services that use manual data transfer'''
external_services = []
@@ -3481,6 +3535,7 @@
if not transfer_type_settings[ 'automatic_transfer' ]:
external_services.append( external_service )
return external_services
+
def delete_external_service_associations( self, trans ):
'''Deletes all external service associations.'''
flush_needed = False
@@ -3489,20 +3544,24 @@
flush_needed = True
if flush_needed:
trans.sa_session.flush()
+
def add_external_service_association( self, trans, external_service ):
rtesa = trans.model.RequestTypeExternalServiceAssociation( self, external_service )
trans.sa_session.add( rtesa )
trans.sa_session.flush()
+
@property
def final_sample_state( self ):
# The states mapper for this object orders ascending
return self.states[-1]
+
@property
def run_details( self ):
if self.run:
# self.run[0] is [RequestTypeRunAssociation]
return self.run[0]
return None
+
def get_template_widgets( self, trans, get_contents=True ):
# See if we have any associated templates. The get_contents param
# is passed by callers that are inheriting a template - these are
@@ -3520,23 +3579,27 @@
return template.get_widgets( trans.user )
return []
+
class RequestTypeExternalServiceAssociation( object ):
def __init__( self, request_type, external_service ):
self.request_type = request_type
self.external_service = external_service
+
class RequestTypePermissions( object ):
def __init__( self, action, request_type, role ):
self.action = action
self.request_type = request_type
self.role = role
+
class Sample( object, Dictifiable ):
# The following form_builder classes are supported by the Sample class.
supported_field_types = [ CheckboxField, SelectField, TextField, WorkflowField, WorkflowMappingField, HistoryField ]
- bulk_operations = Bunch( CHANGE_STATE = 'Change state',
- SELECT_LIBRARY = 'Select data library and folder' )
+ bulk_operations = Bunch( CHANGE_STATE='Change state',
+ SELECT_LIBRARY='Select data library and folder' )
dict_collection_visible_keys = ( 'id', 'name' )
+
def __init__(self, name=None, desc=None, request=None, form_values=None, bar_code=None, library=None, folder=None, workflow=None, history=None):
self.name = name
self.desc = desc
@@ -3547,17 +3610,20 @@
self.folder = folder
self.history = history
self.workflow = workflow
+
@property
def state( self ):
latest_event = self.latest_event
if latest_event:
return latest_event.state
return None
+
@property
def latest_event( self ):
if self.events:
return self.events[0]
return None
+
@property
def adding_to_library_dataset_files( self ):
adding_to_library_datasets = []
@@ -3565,6 +3631,7 @@
if dataset.status == SampleDataset.transfer_status.ADD_TO_LIBRARY:
adding_to_library_datasets.append( dataset )
return adding_to_library_datasets
+
@property
def inprogress_dataset_files( self ):
inprogress_datasets = []
@@ -3572,6 +3639,7 @@
if dataset.status not in [ SampleDataset.transfer_status.NOT_STARTED, SampleDataset.transfer_status.COMPLETE ]:
inprogress_datasets.append( dataset )
return inprogress_datasets
+
@property
def queued_dataset_files( self ):
queued_datasets = []
@@ -3579,6 +3647,7 @@
if dataset.status == SampleDataset.transfer_status.IN_QUEUE:
queued_datasets.append( dataset )
return queued_datasets
+
@property
def transfer_error_dataset_files( self ):
transfer_error_datasets = []
@@ -3586,6 +3655,7 @@
if dataset.status == SampleDataset.transfer_status.ERROR:
transfer_error_datasets.append( dataset )
return transfer_error_datasets
+
@property
def transferred_dataset_files( self ):
transferred_datasets = []
@@ -3593,6 +3663,7 @@
if dataset.status == SampleDataset.transfer_status.COMPLETE:
transferred_datasets.append( dataset )
return transferred_datasets
+
@property
def transferring_dataset_files( self ):
transferring_datasets = []
@@ -3600,6 +3671,7 @@
if dataset.status == SampleDataset.transfer_status.TRANSFERRING:
transferring_datasets.append( dataset )
return transferring_datasets
+
@property
def untransferred_dataset_files( self ):
untransferred_datasets = []
@@ -3607,6 +3679,7 @@
if dataset.status != SampleDataset.transfer_status.COMPLETE:
untransferred_datasets.append( dataset )
return untransferred_datasets
+
def get_untransferred_dataset_size( self, filepath, scp_configs ):
def print_ticks( d ):
pass
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.