galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
January 2014
- 1 participants
- 280 discussions
5 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3cafaba9aaf2/
Changeset: 3cafaba9aaf2
User: jmchilton
Date: 2014-01-10 22:10:52
Summary: Pull extract_payload_from_request out of expose_api.
Needs to be used in new version of API decorator and resulted in too much nesting in there the way it was.
Affected #: 1 file
diff -r 4ab2017348e25958bcd8babd8e110f574dff24df -r 3cafaba9aaf288d0abf257032db225fc52ca7287 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -103,6 +103,35 @@
return decorator
return argcatcher
+
+def __extract_payload_from_request(trans, func, kwargs):
+ content_type = trans.request.headers['content-type']
+ if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
+ # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
+ # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
+ # are not a part of the request body. This is a problem because it's not possible to differentiate between values
+ # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
+ # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
+ # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
+ payload = kwargs.copy()
+ named_args, _, _, _ = inspect.getargspec(func)
+ for arg in named_args:
+ payload.pop(arg, None)
+ for k, v in payload.iteritems():
+ if isinstance(v, (str, unicode)):
+ try:
+ payload[k] = from_json_string(v)
+ except:
+ # may not actually be json, just continue
+ pass
+ payload = util.recursively_stringify_dictionary_keys( payload )
+ else:
+ # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
+ # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
+ # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
+ payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
+ return payload
+
def expose_api_raw( func ):
"""
Expose this function via the API but don't dump the results
@@ -140,35 +169,8 @@
error_message = "API Authentication Required for this request"
return error
if trans.request.body:
- def extract_payload_from_request(trans, func, kwargs):
- content_type = trans.request.headers['content-type']
- if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
- # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
- # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
- # are not a part of the request body. This is a problem because it's not possible to differentiate between values
- # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
- # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
- # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
- payload = kwargs.copy()
- named_args, _, _, _ = inspect.getargspec(func)
- for arg in named_args:
- payload.pop(arg, None)
- for k, v in payload.iteritems():
- if isinstance(v, (str, unicode)):
- try:
- payload[k] = from_json_string(v)
- except:
- # may not actually be json, just continue
- pass
- payload = util.recursively_stringify_dictionary_keys( payload )
- else:
- # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
- # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
- # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
- payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
- return payload
try:
- kwargs['payload'] = extract_payload_from_request(trans, func, kwargs)
+ kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
except ValueError:
error_status = '400 Bad Request'
error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
https://bitbucket.org/galaxy/galaxy-central/commits/39b2ae68b153/
Changeset: 39b2ae68b153
User: jmchilton
Date: 2014-01-10 22:10:52
Summary: Introduce new generation of API decorator...
Add improved error handling. Introduce error code.
Use new decorator with tested methods in histories API.
Affected #: 5 files
diff -r 3cafaba9aaf288d0abf257032db225fc52ca7287 -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 lib/galaxy/exceptions/__init__.py
--- a/lib/galaxy/exceptions/__init__.py
+++ b/lib/galaxy/exceptions/__init__.py
@@ -6,33 +6,66 @@
eggs.require( "Paste" )
from paste import httpexceptions
+from ..exceptions import error_codes
+
class MessageException( Exception ):
"""
- Exception to make throwing errors from deep in controllers easier
+ Exception to make throwing errors from deep in controllers easier.
"""
- def __init__( self, err_msg, type="info" ):
- self.err_msg = err_msg
+ # status code to be set when used with API.
+ status_code = 400
+ # Error code information embedded into API json responses.
+ err_code = error_codes.UNKNOWN
+
+ def __init__( self, err_msg=None, type="info", **extra_error_info ):
+ self.err_msg = err_msg or self.err_code.default_error_message
self.type = type
+ self.extra_error_info = extra_error_info
+
def __str__( self ):
return self.err_msg
+
class ItemDeletionException( MessageException ):
pass
+
class ItemAccessibilityException( MessageException ):
- pass
+ status_code = 403
+ err_code = error_codes.USER_CANNOT_ACCESS_ITEM
+
class ItemOwnershipException( MessageException ):
- pass
+ status_code = 403
+ err_code = error_codes.USER_DOES_NOT_OWN_ITEM
+
+
+class DuplicatedSlugException( MessageException ):
+ status_code = 400
+ err_code = error_codes.USER_SLUG_DUPLICATE
+
+
+class ObjectAttributeInvalidException( MessageException ):
+ status_code = 400
+ err_code = error_codes.USER_OBJECT_ATTRIBUTE_INVALID
+
+
+class ObjectAttributeMissingException( MessageException ):
+ status_code = 400
+ err_code = error_codes.USER_OBJECT_ATTRIBUTE_MISSING
+
class ActionInputError( MessageException ):
def __init__( self, err_msg, type="error" ):
super( ActionInputError, self ).__init__( err_msg, type )
-class ObjectNotFound( Exception ):
+
+class ObjectNotFound( MessageException ):
""" Accessed object was not found """
- pass
+ status_code = 404
+ err_code = error_codes.USER_OBJECT_NOT_FOUND
+
class ObjectInvalid( Exception ):
""" Accessed object store ID is invalid """
diff -r 3cafaba9aaf288d0abf257032db225fc52ca7287 -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 lib/galaxy/exceptions/error_codes.py
--- /dev/null
+++ b/lib/galaxy/exceptions/error_codes.py
@@ -0,0 +1,33 @@
+# Error codes are provided as a convenience to Galaxy API clients, but at this
+# time they do not represent part of the more stable interface. They can change
+# without warning between releases.
+UNKNOWN_ERROR_MESSAGE = "Unknown error occurred while processing request."
+
+
+class ErrorCode( object ):
+
+ def __init__( self, code, default_error_message ):
+ self.code = code
+ self.default_error_message = default_error_message or UNKNOWN_ERROR_MESSAGE
+
+ def __str__( self ):
+ return str( self.default_error_message )
+
+ def __int__( self ):
+ return int( self.code )
+
+# TODO: Guidelines for error message language?
+UNKNOWN = ErrorCode(0, UNKNOWN_ERROR_MESSAGE)
+
+USER_CANNOT_RUN_AS = ErrorCode(400001, "User does not have permissions to run jobs as another user.")
+USER_INVALID_RUN_AS = ErrorCode(400002, "Invalid run_as request - run_as user does not exist.")
+USER_INVALID_JSON = ErrorCode(400003, "Your request did not appear to be valid JSON, please consult the API documentation.")
+USER_OBJECT_ATTRIBUTE_INVALID = ErrorCode(400004, "Attempted to create or update object with invalid attribute value.")
+USER_OBJECT_ATTRIBUTE_MISSING = ErrorCode(400005, "Attempted to create object without required attribute.")
+USER_SLUG_DUPLICATE = ErrorCode(400006, "Slug must be unique per user.")
+
+USER_NO_API_KEY = ErrorCode(403001, "API Authentication Required for this request")
+USER_CANNOT_ACCESS_ITEM = ErrorCode(403002, "User cannot access specified item.")
+USER_DOES_NOT_OWN_ITEM = ErrorCode(403003, "User does not own specified item.")
+
+USER_OBJECT_NOT_FOUND = ErrorCode(404001, "No such object found.")
diff -r 3cafaba9aaf288d0abf257032db225fc52ca7287 -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -15,3 +15,7 @@
from framework import expose_api_raw
from framework import expose_api_raw_anonymous
from framework.base import httpexceptions
+
+# TODO: Drop and make these the default.
+from framework import _future_expose_api
+from framework import _future_expose_api_anonymous
diff -r 3cafaba9aaf288d0abf257032db225fc52ca7287 -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -10,9 +10,9 @@
import socket
import string
import time
-
+from traceback import format_exc
+from Cookie import CookieError
from functools import wraps
-from Cookie import CookieError
pkg_resources.require( "Cheetah" )
from Cheetah.Template import Template
@@ -23,6 +23,7 @@
from galaxy import util
from galaxy.exceptions import MessageException
+from galaxy.exceptions import error_codes
from galaxy.util import asbool
from galaxy.util import safe_str_cmp
from galaxy.util.backports.importlib import import_module
@@ -212,6 +213,143 @@
decorator.exposed = True
return decorator
+API_RESPONSE_CONTENT_TYPE = "application/json"
+
+
+def __api_error_message( trans, **kwds ):
+ exception = kwds.get( "exception", None )
+ if exception:
+ # If we are passed a MessageException use err_msg.
+ default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
+ default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
+ extra_error_info = getattr( exception, 'extra_error_info', {} )
+ if not isinstance( extra_error_info, dict ):
+ extra_error_info = {}
+ else:
+ default_error_message = "Error processing API request."
+ default_error_code = error_codes.UNKNOWN
+ extra_error_info = {}
+ traceback_string = kwds.get( "traceback", "No traceback available." )
+ err_msg = kwds.get( "err_msg", default_error_message )
+ error_code_object = kwds.get( "err_code", default_error_code )
+ try:
+ error_code = error_code_object.code
+ except AttributeError:
+ # Some sort of bad error code sent in, logic failure on part of
+ # Galaxy developer.
+ error_code = error_codes.UNKNOWN.code
+ # Would prefer the terminology of error_code and error_message, but
+ # err_msg used a good number of places already. Might as well not change
+ # it?
+ error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
+ if trans.debug: # TODO: Should admins get to see traceback as well?
+ error_response[ "traceback" ] = traceback_string
+ return error_response
+
+
+def __api_error_response( trans, **kwds ):
+ error_dict = __api_error_message( trans, **kwds )
+ exception = kwds.get( "exception", None )
+ # If we are given an status code directly - use it - otherwise check
+ # the exception for a status_code attribute.
+ if "status_code" in kwds:
+ status_code = int( kwds.get( "status_code" ) )
+ elif hasattr( exception, "status_code" ):
+ status_code = int( exception.status_code )
+ else:
+ status_code = 500
+ response = trans.response
+ if not response.status or str(response.status).startswith("20"):
+ # Unset status code appears to be string '200 OK', if anything
+ # non-success (i.e. not 200 or 201) has been set, do not override
+ # underlying controller.
+ response.status = status_code
+ return to_json_string( error_dict )
+
+
+# TODO: rename as expose_api and make default.
+def _future_expose_api_anonymous( func, to_json=True ):
+ """
+ Expose this function via the API but don't require a set user.
+ """
+ return _future_expose_api( func, to_json=to_json, user_required=False )
+
+
+# TODO: rename as expose_api and make default.
+def _future_expose_api( func, to_json=True, user_required=True ):
+ """
+ Expose this function via the API.
+ """
+ @wraps(func)
+ def decorator( self, trans, *args, **kwargs ):
+ if trans.error_message:
+ # TODO: Document this branch, when can this happen,
+ # I don't understand it.
+ return __api_error_response( trans, err_msg=trans.error_message )
+ if user_required and trans.anonymous:
+ error_code = error_codes.USER_NO_API_KEY
+ # Use error codes default error message.
+ return __api_error_response( trans, err_code=error_code, status_code=403 )
+ if trans.request.body:
+ try:
+ kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
+ except ValueError:
+ error_code = error_codes.USER_INVALID_JSON
+ return __api_error_response( trans, status_code=400, err_code=error_code )
+
+ trans.response.set_content_type( API_RESPONSE_CONTENT_TYPE )
+ # send 'do not cache' headers to handle IE's caching of ajax get responses
+ trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+ # TODO: Refactor next block out into a helper procedure.
+ # Perform api_run_as processing, possibly changing identity
+ if 'payload' in kwargs and 'run_as' in kwargs['payload']:
+ if not trans.user_can_do_run_as():
+ error_code = error_codes.USER_CANNOT_RUN_AS
+ return __api_error_response( trans, err_code=error_code, status_code=403 )
+ try:
+ decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
+ except TypeError:
+ error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
+ error_code = error_codes.USER_INVALID_RUN_AS
+ return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
+ try:
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.api_inherit_admin = trans.user_is_admin()
+ trans.set_user(user)
+ except:
+ error_code = error_codes.USER_INVALID_RUN_AS
+ return __api_error_response( trans, err_code=error_code, status_code=400 )
+ try:
+ rval = func( self, trans, *args, **kwargs)
+ if to_json and trans.debug:
+ rval = to_json_string( rval, indent=4, sort_keys=True )
+ elif to_json:
+ rval = to_json_string( rval )
+ return rval
+ except MessageException as e:
+ traceback_string = format_exc()
+ return __api_error_response( trans, exception=e, traceback=traceback_string )
+ except paste.httpexceptions.HTTPException:
+ # TODO: Allow to pass or format for the API???
+ raise # handled
+ except Exception as e:
+ traceback_string = format_exc()
+ error_message = 'Uncaught exception in exposed API method:'
+ log.exception( error_message )
+ return __api_error_response(
+ trans,
+ status_code=500,
+ exception=e,
+ traceback=traceback_string,
+ err_msg=error_message,
+ err_code=error_codes.UNKNOWN
+ )
+ if not hasattr(func, '_orig'):
+ decorator._orig = func
+ decorator.exposed = True
+ return decorator
+
+
def require_admin( func ):
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
diff -r 3cafaba9aaf288d0abf257032db225fc52ca7287 -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -9,6 +9,8 @@
from paste.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPInternalServerError, HTTPException
from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
from galaxy.util import string_as_bool, restore_text
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web.base.controller import BaseAPIController, UsesHistoryMixin, UsesTagsMixin
@@ -20,7 +22,7 @@
class HistoriesController( BaseAPIController, UsesHistoryMixin, UsesTagsMixin ):
- @web.expose_api_anonymous
+ @expose_api_anonymous
def index( self, trans, deleted='False', **kwd ):
"""
index( trans, deleted='False' )
@@ -152,7 +154,7 @@
return history_data
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwd ):
"""
create( trans, payload )
https://bitbucket.org/galaxy/galaxy-central/commits/b41d51baaae1/
Changeset: b41d51baaae1
User: jmchilton
Date: 2014-01-10 22:10:52
Summary: Add functional tests for pages API.
Touch up pages API in response to test cases. Update pages API to use newer API decorator.
Affected #: 5 files
diff -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 lib/galaxy/webapps/galaxy/api/page_revisions.py
--- a/lib/galaxy/webapps/galaxy/api/page_revisions.py
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -2,8 +2,9 @@
API for updating Galaxy Pages
"""
import logging
-from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy import exceptions
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.sanitize_html import sanitize_html
@@ -12,7 +13,7 @@
class PageRevisionsController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
- @web.expose_api
+ @expose_api
def index( self, trans, page_id, **kwd ):
"""
index( self, trans, page_id, **kwd )
@@ -24,14 +25,16 @@
:rtype: list
:returns: dictionaries containing different revisions of the page
"""
+ page = self._get_page( trans, page_id )
+ self._verify_page_ownership( trans, page )
+
r = trans.sa_session.query( trans.app.model.PageRevision ).filter_by( page_id=trans.security.decode_id(page_id) )
out = []
for page in r:
- if self.security_check( trans, page, True, True ):
- out.append( self.encode_all_ids( trans, page.to_dict(), True) )
+ out.append( self.encode_all_ids( trans, page.to_dict(), True) )
return out
- @web.expose_api
+ @expose_api
def create( self, trans, page_id, payload, **kwd ):
"""
create( self, trans, page_id, payload **kwd )
@@ -46,39 +49,42 @@
:rtype: dictionary
:returns: Dictionary with 'success' or 'error' element to indicate the result of the request
"""
- error_str = ""
+ content = payload.get("content", None)
+ if not content:
+ raise exceptions.ObjectAttributeMissingException("content undefined or empty")
- if not page_id:
- error_str = "page_id is required"
- elif not payload.get("content", None):
- error_str = "content is required"
+ page = self._get_page( trans, page_id )
+ self._verify_page_ownership( trans, page )
+
+ if 'title' in payload:
+ title = payload['title']
else:
+ title = page.title
- # Create the new stored page
+ content = sanitize_html( content, 'utf-8', 'text/html' )
+
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = title
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = content
+
+ # Persist
+ session = trans.sa_session
+ session.flush()
+
+ return page_revision.to_dict( view="element" )
+
+ def _get_page( self, trans, page_id ):
+ page = None
+ try:
page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id(page_id) )
- if page is None:
- return { "error" : "page not found"}
+ except Exception:
+ pass
+ if not page:
+ raise exceptions.ObjectNotFound()
+ return page
- if not self.security_check( trans, page, True, True ):
- return { "error" : "page not found"}
-
- if 'title' in payload:
- title = payload['title']
- else:
- title = page.title
-
- content = payload.get("content", "")
- content = sanitize_html( content, 'utf-8', 'text/html' )
-
- page_revision = trans.app.model.PageRevision()
- page_revision.title = title
- page_revision.page = page
- page.latest_revision = page_revision
- page_revision.content = content
- # Persist
- session = trans.sa_session
- session.flush()
-
- return { "success" : "revision posted" }
-
- return { "error" : error_str }
+ def _verify_page_ownership( self, trans, page ):
+ if not self.security_check( trans, page, True, True ):
+ raise exceptions.ItemOwnershipException()
diff -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 lib/galaxy/webapps/galaxy/api/pages.py
--- a/lib/galaxy/webapps/galaxy/api/pages.py
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -2,8 +2,9 @@
API for updating Galaxy Pages
"""
import logging
-from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy import exceptions
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.sanitize_html import sanitize_html
@@ -12,7 +13,7 @@
class PagesController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
- @web.expose_api
+ @expose_api
def index( self, trans, deleted=False, **kwd ):
"""
index( self, trans, deleted=False, **kwd )
@@ -47,7 +48,7 @@
return out
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwd ):
"""
create( self, trans, payload, **kwd )
@@ -64,45 +65,41 @@
:returns: Dictionary return of the Page.to_dict call
"""
user = trans.get_user()
- error_str = ""
if not payload.get("title", None):
- error_str = "Page name is required"
+ raise exceptions.ObjectAttributeMissingException( "Page name is required" )
elif not payload.get("slug", None):
- error_str = "Page id is required"
+ raise exceptions.ObjectAttributeMissingException( "Page id is required" )
elif not self._is_valid_slug( payload["slug"] ):
- error_str = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
+ raise exceptions.ObjectAttributeInvalidException( "Page identifier must consist of only lowercase letters, numbers, and the '-' character" )
elif trans.sa_session.query( trans.app.model.Page ).filter_by( user=user, slug=payload["slug"], deleted=False ).first():
- error_str = "Page id must be unique"
- else:
+ raise exceptions.DuplicatedSlugException( "Page slug must be unique" )
- content = payload.get("content", "")
- content = sanitize_html( content, 'utf-8', 'text/html' )
+ content = payload.get("content", "")
+ content = sanitize_html( content, 'utf-8', 'text/html' )
- # Create the new stored page
- page = trans.app.model.Page()
- page.title = payload['title']
- page.slug = payload['slug']
- page_annotation = sanitize_html( payload.get( "annotation", "" ), 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
- page.user = user
- # And the first (empty) page revision
- page_revision = trans.app.model.PageRevision()
- page_revision.title = payload['title']
- page_revision.page = page
- page.latest_revision = page_revision
- page_revision.content = content
- # Persist
- session = trans.sa_session
- session.add( page )
- session.flush()
+ # Create the new stored page
+ page = trans.app.model.Page()
+ page.title = payload['title']
+ page.slug = payload['slug']
+ page_annotation = sanitize_html( payload.get( "annotation", "" ), 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+ page.user = user
+ # And the first (empty) page revision
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = payload['title']
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = content
+ # Persist
+ session = trans.sa_session
+ session.add( page )
+ session.flush()
- rval = self.encode_all_ids( trans, page.to_dict(), True )
- return rval
+ rval = self.encode_all_ids( trans, page.to_dict(), True )
+ return rval
- return { "error" : error_str }
-
- @web.expose_api
+ @expose_api
def delete( self, trans, id, **kwd ):
"""
delete( self, trans, id, **kwd )
@@ -114,22 +111,14 @@
:rtype: dict
:returns: Dictionary with 'success' or 'error' element to indicate the result of the request
"""
- page_id = id
- try:
- page = trans.sa_session.query(self.app.model.Page).get(trans.security.decode_id(page_id))
- except Exception, e:
- return { "error" : "Page with ID='%s' can not be found\n Exception: %s" % (page_id, str( e )) }
+ page = self._get_page( trans, id )
- # check to see if user has permissions to selected workflow
- if page.user != trans.user and not trans.user_is_admin():
- return { "error" : "Workflow is not owned by or shared with current user" }
-
- #Mark a workflow as deleted
+ #Mark a page as deleted
page.deleted = True
trans.sa_session.flush()
- return { "success" : "Deleted", "id" : page_id }
+ return '' # TODO: Figure out what to return on DELETE, document in guidelines!
- @web.expose_api
+ @expose_api
def show( self, trans, id, **kwd ):
"""
show( self, trans, id, **kwd )
@@ -141,8 +130,22 @@
:rtype: dict
:returns: Dictionary return of the Page.to_dict call with the 'content' field populated by the most recent revision
"""
- page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ page = self._get_page( trans, id )
self.security_check( trans, page, check_ownership=False, check_accessible=True)
rval = self.encode_all_ids( trans, page.to_dict(), True )
rval['content'] = page.latest_revision.content
return rval
+
+ def _get_page( self, trans, id ): # Fetches page object and verifies security.
+ try:
+ page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ except Exception:
+ page = None
+
+ if not page:
+ raise exceptions.ObjectNotFound()
+
+ if page.user != trans.user and not trans.user_is_admin():
+ raise exceptions.ItemOwnershipException()
+
+ return page
diff -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 test/base/api.py
--- a/test/base/api.py
+++ b/test/base/api.py
@@ -60,6 +60,12 @@
for key in keys:
assert key in response, "Response [%s] does not contain key [%s]" % ( response, key )
+ def _assert_error_code_is( self, response, error_code ):
+ if hasattr( response, "json" ):
+ response = response.json()
+ self._assert_has_keys( response, "err_code" )
+ self.assertEquals( response[ "err_code" ], int( error_code ) )
+
def _random_key( self ): # Used for invalid request testing...
return "1234567890123456"
diff -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 test/functional/api/test_page_revisions.py
--- /dev/null
+++ b/test/functional/api/test_page_revisions.py
@@ -0,0 +1,35 @@
+from galaxy.exceptions import error_codes
+from functional.api.pages import BasePageApiTestCase
+
+
+class PageRevisionsApiTestCase( BasePageApiTestCase ):
+
+ def test_create( self ):
+ page_json = self._create_valid_page_with_slug( "pr1" )
+ revision_data = dict( content="<p>NewContent!</p>" )
+ page_revision_response = self._post( "pages/%s/revisions" % page_json[ 'id' ], data=revision_data )
+ self._assert_status_code_is( page_revision_response, 200 )
+ page_revision_json = page_revision_response.json()
+ self._assert_has_keys( page_revision_json, 'id', 'content' )
+
+ def test_403_if_create_revision_on_unowned_page( self ):
+ page_json = self._create_valid_page_as( "pr2(a)bx.psu.edu", "pr2" )
+ revision_data = dict( content="<p>NewContent!</p>" )
+ page_revision_response = self._post( "pages/%s/revisions" % page_json[ 'id' ], data=revision_data )
+ self._assert_status_code_is( page_revision_response, 403 )
+
+ def test_revision_index( self ):
+ page_json = self._create_valid_page_with_slug( "pr3" )
+ revision_data = dict( content="<p>NewContent!</p>" )
+ revisions_url = "pages/%s/revisions" % page_json[ 'id' ]
+ self._post( revisions_url, data=revision_data )
+ revisions_response = self._get( revisions_url )
+ self._assert_status_code_is( revisions_response, 200 )
+ revisions_json = revisions_response.json()
+ assert len( revisions_json ) == 2 # Original revision and new one
+
+ def test_404_if_index_unknown_page( self ):
+ revisions_url = "pages/%s/revisions" % self._random_key()
+ revisions_response = self._get( revisions_url )
+ self._assert_status_code_is( revisions_response, 404 )
+ self._assert_error_code_is( revisions_response, error_codes.USER_OBJECT_NOT_FOUND )
diff -r 39b2ae68b153ce701e821b0f4609c8ba8f0f5ad5 -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 test/functional/api/test_pages.py
--- /dev/null
+++ b/test/functional/api/test_pages.py
@@ -0,0 +1,105 @@
+from galaxy.exceptions import error_codes
+from base import api
+from base.interactor import delete_request
+
+from operator import itemgetter
+
+
+class BasePageApiTestCase( api.ApiTestCase ):
+
+ def _create_valid_page_with_slug( self, slug ):
+ page_request = self._test_page_payload( slug=slug )
+ page_response = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response, 200 )
+ return page_response.json()
+
+ def _create_valid_page_as( self, other_email, slug ):
+ run_as_user = self._setup_user( other_email )
+ page_request = self._test_page_payload( slug=slug )
+ page_request[ "run_as" ] = run_as_user[ "id" ]
+ page_response = self._post( "pages", page_request, admin=True )
+ self._assert_status_code_is( page_response, 200 )
+ return page_response.json()
+
+ def _test_page_payload( self, **kwds ):
+ request = dict(
+ slug="mypage",
+ title="MY PAGE",
+ content="<p>Page!</p>",
+ )
+ request.update( **kwds )
+ return request
+
+
+class PageApiTestCase( BasePageApiTestCase ):
+
+ def test_create( self ):
+ response_json = self._create_valid_page_with_slug( "mypage" )
+ self._assert_has_keys( response_json, "slug", "title", "id" )
+
+ def test_index( self ):
+ create_response_json = self._create_valid_page_with_slug( "indexpage" )
+ assert self._users_index_has_page_with_id( create_response_json[ "id" ] )
+
+ def test_index_doesnt_show_unavailable_pages( self ):
+ create_response_json = self._create_valid_page_as( "others_page_index(a)bx.psu.edu", "otherspageindex" )
+ assert not self._users_index_has_page_with_id( create_response_json[ "id" ] )
+
+ def test_cannot_create_pages_with_same_slug( self ):
+ page_request = self._test_page_payload( slug="mypage1" )
+ page_response_1 = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response_1, 200 )
+ page_response_2 = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response_2, 400 )
+ self._assert_error_code_is( page_response_2, error_codes.USER_SLUG_DUPLICATE )
+
+ def test_page_requires_name( self ):
+ page_request = self._test_page_payload()
+ del page_request[ 'title' ]
+ page_response = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response, 400 )
+ self._assert_error_code_is( page_response, error_codes.USER_OBJECT_ATTRIBUTE_MISSING )
+
+ def test_page_requires_slug( self ):
+ page_request = self._test_page_payload()
+ del page_request[ 'slug' ]
+ page_response = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response, 400 )
+
+ def test_delete( self ):
+ response_json = self._create_valid_page_with_slug( "testdelete" )
+ delete_response = delete_request( self._api_url( "pages/%s" % response_json[ 'id' ], use_key=True ) )
+ self._assert_status_code_is( delete_response, 200 )
+
+ def test_404_on_delete_unknown_page( self ):
+ delete_response = delete_request( self._api_url( "pages/%s" % self._random_key(), use_key=True ) )
+ self._assert_status_code_is( delete_response, 404 )
+ self._assert_error_code_is( delete_response, error_codes.USER_OBJECT_NOT_FOUND )
+
+ def test_403_on_delete_unowned_page( self ):
+ page_response = self._create_valid_page_as( "others_page(a)bx.psu.edu", "otherspage" )
+ delete_response = delete_request( self._api_url( "pages/%s" % page_response[ "id" ], use_key=True ) )
+ self._assert_status_code_is( delete_response, 403 )
+ self._assert_error_code_is( delete_response, error_codes.USER_DOES_NOT_OWN_ITEM )
+
+ def test_show( self ):
+ response_json = self._create_valid_page_with_slug( "pagetoshow" )
+ show_response = self._get( "pages/%s" % response_json['id'] )
+ self._assert_status_code_is( show_response, 200 )
+ show_json = show_response.json()
+ self._assert_has_keys( show_json, "slug", "title", "id" )
+ self.assertEquals( show_json["slug"], "pagetoshow" )
+ self.assertEquals( show_json["title"], "MY PAGE" )
+ self.assertEquals( show_json["content"], "<p>Page!</p>" )
+
+ def test_403_on_unowner_show( self ):
+ response_json = self._create_valid_page_as( "others_page_show(a)bx.psu.edu", "otherspageshow" )
+ show_response = self._get( "pages/%s" % response_json['id'] )
+ self._assert_status_code_is( show_response, 403 )
+ self._assert_error_code_is( show_response, error_codes.USER_DOES_NOT_OWN_ITEM )
+
+ def _users_index_has_page_with_id( self, id ):
+ index_response = self._get( "pages" )
+ self._assert_status_code_is( index_response, 200 )
+ pages = index_response.json()
+ return id in map( itemgetter( "id" ), pages )
https://bitbucket.org/galaxy/galaxy-central/commits/503836d4cea3/
Changeset: 503836d4cea3
User: jmchilton
Date: 2014-01-10 22:10:52
Summary: Parse error codes/messages from new JSON file.
Same python interface to this data, but the data can now be reused by clients in other languages (e.g. potentially automating the creation of typed Java exceptions).
Affected #: 2 files
diff -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 -r 503836d4cea399f366a02f7e48b75095a8bb876f lib/galaxy/exceptions/error_codes.json
--- /dev/null
+++ b/lib/galaxy/exceptions/error_codes.json
@@ -0,0 +1,57 @@
+[
+ {
+ "name": "UNKNOWN",
+ "code": 0,
+ "message": "Unknown error occurred while processing request."
+ },
+ {
+ "name": "USER_CANNOT_RUN_AS",
+ "code": 400001,
+ "message": "User does not have permissions to run jobs as another user."
+ },
+ {
+ "name": "USER_INVALID_RUN_AS",
+ "code": 400002,
+ "message": "Invalid run_as request - run_as user does not exist."
+ },
+ {
+ "name": "USER_INVALID_JSON",
+ "code": 400003,
+ "message": "Your request did not appear to be valid JSON, please consult the API documentation."
+ },
+ {
+ "name": "USER_OBJECT_ATTRIBUTE_INVALID",
+ "code": 400004,
+ "message": "Attempted to create or update object with invalid attribute value."
+ },
+ {
+ "name": "USER_OBJECT_ATTRIBUTE_MISSING",
+ "code": 400005,
+ "message": "Attempted to create object without required attribute."
+ },
+ {
+ "name": "USER_SLUG_DUPLICATE",
+ "code": 400006,
+ "message": "Slug must be unique per user."
+ },
+ {
+ "name": "USER_NO_API_KEY",
+ "code": 403001,
+ "message": "API Authentication Required for this request"
+ },
+ {
+ "name": "USER_CANNOT_ACCESS_ITEM",
+ "code": 403002,
+ "message": "User cannot access specified item."
+ },
+ {
+ "name": "USER_DOES_NOT_OWN_ITEM",
+ "code": 403003,
+ "message": "User does not own specified item."
+ },
+ {
+ "name": "USER_OBJECT_NOT_FOUND",
+ "code": 404001,
+        "message": "No such object found."
+ }
+]
diff -r b41d51baaae16c4d6e7c4fb20a28cf9aeb5f1f43 -r 503836d4cea399f366a02f7e48b75095a8bb876f lib/galaxy/exceptions/error_codes.py
--- a/lib/galaxy/exceptions/error_codes.py
+++ b/lib/galaxy/exceptions/error_codes.py
@@ -1,3 +1,6 @@
+from pkg_resources import resource_string
+from json import loads
+
# Error codes are provided as a convenience to Galaxy API clients, but at this
# time they do not represent part of the more stable interface. They can change
# without warning between releases.
@@ -16,18 +19,14 @@
def __int__( self ):
return int( self.code )
-# TODO: Guidelines for error message langauge?
-UNKNOWN = ErrorCode(0, UNKNOWN_ERROR_MESSAGE)
+ @staticmethod
+ def from_dict( entry ):
+ name = entry.get("name")
+ code = entry.get("code")
+ message = entry.get("message")
+ return ( name, ErrorCode( code, message ) )
-USER_CANNOT_RUN_AS = ErrorCode(400001, "User does not have permissions to run jobs as another user.")
-USER_INVALID_RUN_AS = ErrorCode(400002, "Invalid run_as request - run_as user does not exist.")
-USER_INVALID_JSON = ErrorCode(400003, "Your request did not appear to be valid JSON, please consult the API documentation.")
-USER_OBJECT_ATTRIBUTE_INVALID = ErrorCode(400004, "Attempted to create or update object with invalid attribute value.")
-USER_OBJECT_ATTRIBUTE_MISSING = ErrorCode(400005, "Attempted to create object without required attribute.")
-USER_SLUG_DUPLICATE = ErrorCode(400006, "Slug must be unique per user.")
-
-USER_NO_API_KEY = ErrorCode(403001, "API Authentication Required for this request")
-USER_CANNOT_ACCESS_ITEM = ErrorCode(403002, "User cannot access specified item.")
-USER_DOES_NOT_OWN_ITEM = ErrorCode(403003, "User does not own specified item.")
-
-USER_OBJECT_NOT_FOUND = ErrorCode(404001, "No such object not found.")
+error_codes_json = resource_string( __name__, 'error_codes.json' )
+for entry in loads( error_codes_json ):
+ name, error_code_obj = ErrorCode.from_dict( entry )
+ globals()[ name ] = error_code_obj
https://bitbucket.org/galaxy/galaxy-central/commits/fa698b544051/
Changeset: fa698b544051
User: dannon
Date: 2014-01-15 14:30:38
Summary: Merged in jmchilton/galaxy-central-fork-1 (pull request #294)
New API Decorator
Affected #: 11 files
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/exceptions/__init__.py
--- a/lib/galaxy/exceptions/__init__.py
+++ b/lib/galaxy/exceptions/__init__.py
@@ -6,33 +6,66 @@
eggs.require( "Paste" )
from paste import httpexceptions
+from ..exceptions import error_codes
+
class MessageException( Exception ):
"""
- Exception to make throwing errors from deep in controllers easier
+ Exception to make throwing errors from deep in controllers easier.
"""
- def __init__( self, err_msg, type="info" ):
- self.err_msg = err_msg
+ # status code to be set when used with API.
+ status_code = 400
+ # Error code information embedded into API json responses.
+ err_code = error_codes.UNKNOWN
+
+ def __init__( self, err_msg=None, type="info", **extra_error_info ):
+ self.err_msg = err_msg or self.err_code.default_error_message
self.type = type
+ self.extra_error_info = extra_error_info
+
def __str__( self ):
return self.err_msg
+
class ItemDeletionException( MessageException ):
pass
+
class ItemAccessibilityException( MessageException ):
- pass
+ status_code = 403
+ err_code = error_codes.USER_CANNOT_ACCESS_ITEM
+
class ItemOwnershipException( MessageException ):
- pass
+ status_code = 403
+ err_code = error_codes.USER_DOES_NOT_OWN_ITEM
+
+
+class DuplicatedSlugException( MessageException ):
+ status_code = 400
+ err_code = error_codes.USER_SLUG_DUPLICATE
+
+
+class ObjectAttributeInvalidException( MessageException ):
+ status_code = 400
+ err_code = error_codes.USER_OBJECT_ATTRIBUTE_INVALID
+
+
+class ObjectAttributeMissingException( MessageException ):
+ status_code = 400
+ err_code = error_codes.USER_OBJECT_ATTRIBUTE_MISSING
+
class ActionInputError( MessageException ):
def __init__( self, err_msg, type="error" ):
super( ActionInputError, self ).__init__( err_msg, type )
-class ObjectNotFound( Exception ):
+
+class ObjectNotFound( MessageException ):
""" Accessed object was not found """
- pass
+ status_code = 404
+ err_code = error_codes.USER_OBJECT_NOT_FOUND
+
class ObjectInvalid( Exception ):
""" Accessed object store ID is invalid """
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/exceptions/error_codes.json
--- /dev/null
+++ b/lib/galaxy/exceptions/error_codes.json
@@ -0,0 +1,57 @@
+[
+ {
+ "name": "UNKNOWN",
+ "code": 0,
+ "message": "Unknown error occurred while processing request."
+ },
+ {
+ "name": "USER_CANNOT_RUN_AS",
+ "code": 400001,
+ "message": "User does not have permissions to run jobs as another user."
+ },
+ {
+ "name": "USER_INVALID_RUN_AS",
+ "code": 400002,
+ "message": "Invalid run_as request - run_as user does not exist."
+ },
+ {
+ "name": "USER_INVALID_JSON",
+ "code": 400003,
+ "message": "Your request did not appear to be valid JSON, please consult the API documentation."
+ },
+ {
+ "name": "USER_OBJECT_ATTRIBUTE_INVALID",
+ "code": 400004,
+ "message": "Attempted to create or update object with invalid attribute value."
+ },
+ {
+ "name": "USER_OBJECT_ATTRIBUTE_MISSING",
+ "code": 400005,
+ "message": "Attempted to create object without required attribute."
+ },
+ {
+ "name": "USER_SLUG_DUPLICATE",
+ "code": 400006,
+ "message": "Slug must be unique per user."
+ },
+ {
+ "name": "USER_NO_API_KEY",
+ "code": 403001,
+ "message": "API Authentication Required for this request"
+ },
+ {
+ "name": "USER_CANNOT_ACCESS_ITEM",
+ "code": 403002,
+ "message": "User cannot access specified item."
+ },
+ {
+ "name": "USER_DOES_NOT_OWN_ITEM",
+ "code": 403003,
+ "message": "User does not own specified item."
+ },
+ {
+ "name": "USER_OBJECT_NOT_FOUND",
+ "code": 404001,
+        "message": "No such object found."
+ }
+]
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/exceptions/error_codes.py
--- /dev/null
+++ b/lib/galaxy/exceptions/error_codes.py
@@ -0,0 +1,32 @@
+from pkg_resources import resource_string
+from json import loads
+
+# Error codes are provided as a convenience to Galaxy API clients, but at this
+# time they do not represent part of the more stable interface. They can change
+# without warning between releases.
+UNKNOWN_ERROR_MESSAGE = "Unknown error occurred while processing request."
+
+
+class ErrorCode( object ):
+
+ def __init__( self, code, default_error_message ):
+ self.code = code
+ self.default_error_message = default_error_message or UNKNOWN_ERROR_MESSAGE
+
+ def __str__( self ):
+ return str( self.default_error_message )
+
+ def __int__( self ):
+ return int( self.code )
+
+ @staticmethod
+ def from_dict( entry ):
+ name = entry.get("name")
+ code = entry.get("code")
+ message = entry.get("message")
+ return ( name, ErrorCode( code, message ) )
+
+error_codes_json = resource_string( __name__, 'error_codes.json' )
+for entry in loads( error_codes_json ):
+ name, error_code_obj = ErrorCode.from_dict( entry )
+ globals()[ name ] = error_code_obj
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -15,3 +15,7 @@
from framework import expose_api_raw
from framework import expose_api_raw_anonymous
from framework.base import httpexceptions
+
+# TODO: Drop and make these the default.
+from framework import _future_expose_api
+from framework import _future_expose_api_anonymous
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -10,9 +10,9 @@
import socket
import string
import time
-
+from traceback import format_exc
+from Cookie import CookieError
from functools import wraps
-from Cookie import CookieError
pkg_resources.require( "Cheetah" )
from Cheetah.Template import Template
@@ -23,6 +23,7 @@
from galaxy import util
from galaxy.exceptions import MessageException
+from galaxy.exceptions import error_codes
from galaxy.util import asbool
from galaxy.util import safe_str_cmp
from galaxy.util.backports.importlib import import_module
@@ -103,6 +104,35 @@
return decorator
return argcatcher
+
+def __extract_payload_from_request(trans, func, kwargs):
+ content_type = trans.request.headers['content-type']
+ if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
+ # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
+ # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
+ # are not a part of the request body. This is a problem because it's not possible to differentiate between values
+ # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
+ # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
+ # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
+ payload = kwargs.copy()
+ named_args, _, _, _ = inspect.getargspec(func)
+ for arg in named_args:
+ payload.pop(arg, None)
+ for k, v in payload.iteritems():
+ if isinstance(v, (str, unicode)):
+ try:
+ payload[k] = from_json_string(v)
+ except:
+ # may not actually be json, just continue
+ pass
+ payload = util.recursively_stringify_dictionary_keys( payload )
+ else:
+ # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
+ # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
+ # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
+ payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
+ return payload
+
def expose_api_raw( func ):
"""
Expose this function via the API but don't dump the results
@@ -140,35 +170,8 @@
error_message = "API Authentication Required for this request"
return error
if trans.request.body:
- def extract_payload_from_request(trans, func, kwargs):
- content_type = trans.request.headers['content-type']
- if content_type.startswith('application/x-www-form-urlencoded') or content_type.startswith('multipart/form-data'):
- # If the content type is a standard type such as multipart/form-data, the wsgi framework parses the request body
- # and loads all field values into kwargs. However, kwargs also contains formal method parameters etc. which
- # are not a part of the request body. This is a problem because it's not possible to differentiate between values
- # which are a part of the request body, and therefore should be a part of the payload, and values which should not be
- # in the payload. Therefore, the decorated method's formal arguments are discovered through reflection and removed from
- # the payload dictionary. This helps to prevent duplicate argument conflicts in downstream methods.
- payload = kwargs.copy()
- named_args, _, _, _ = inspect.getargspec(func)
- for arg in named_args:
- payload.pop(arg, None)
- for k, v in payload.iteritems():
- if isinstance(v, (str, unicode)):
- try:
- payload[k] = from_json_string(v)
- except:
- # may not actually be json, just continue
- pass
- payload = util.recursively_stringify_dictionary_keys( payload )
- else:
- # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check
- # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding
- # such as multipart/form-data. Leaving it as is for backward compatibility, just in case.
- payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) )
- return payload
try:
- kwargs['payload'] = extract_payload_from_request(trans, func, kwargs)
+ kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
except ValueError:
error_status = '400 Bad Request'
error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
@@ -210,6 +213,143 @@
decorator.exposed = True
return decorator
+API_RESPONSE_CONTENT_TYPE = "application/json"
+
+
+def __api_error_message( trans, **kwds ):
+ exception = kwds.get( "exception", None )
+ if exception:
+ # If we are passed a MessageException use err_msg.
+ default_error_code = getattr( exception, "err_code", error_codes.UNKNOWN )
+ default_error_message = getattr( exception, "err_msg", default_error_code.default_error_message )
+ extra_error_info = getattr( exception, 'extra_error_info', {} )
+ if not isinstance( extra_error_info, dict ):
+ extra_error_info = {}
+ else:
+ default_error_message = "Error processing API request."
+ default_error_code = error_codes.UNKNOWN
+ extra_error_info = {}
+ traceback_string = kwds.get( "traceback", "No traceback available." )
+ err_msg = kwds.get( "err_msg", default_error_message )
+ error_code_object = kwds.get( "err_code", default_error_code )
+ try:
+ error_code = error_code_object.code
+ except AttributeError:
+ # Some sort of bad error code sent in, logic failure on part of
+ # Galaxy developer.
+ error_code = error_codes.UNKNOWN.code
+ # Would prefer the terminology of error_code and error_message, but
+ # err_msg used a good number of places already. Might as well not change
+ # it?
+ error_response = dict( err_msg=err_msg, err_code=error_code, **extra_error_info )
+ if trans.debug: # TODO: Should admins get to see traceback as well?
+ error_response[ "traceback" ] = traceback_string
+ return error_response
+
+
+def __api_error_response( trans, **kwds ):
+ error_dict = __api_error_message( trans, **kwds )
+ exception = kwds.get( "exception", None )
+ # If we are given an status code directly - use it - otherwise check
+ # the exception for a status_code attribute.
+ if "status_code" in kwds:
+ status_code = int( kwds.get( "status_code" ) )
+ elif hasattr( exception, "status_code" ):
+ status_code = int( exception.status_code )
+ else:
+ status_code = 500
+ response = trans.response
+ if not response.status or str(response.status).startswith("20"):
+ # Unset status code appears to be string '200 OK', if anything
+ # non-success (i.e. not 200 or 201) has been set, do not override
+ # underlying controller.
+ response.status = status_code
+ return to_json_string( error_dict )
+
+
+# TODO: rename as expose_api and make default.
+def _future_expose_api_anonymous( func, to_json=True ):
+ """
+ Expose this function via the API but don't require a set user.
+ """
+ return _future_expose_api( func, to_json=to_json, user_required=False )
+
+
+# TODO: rename as expose_api and make default.
+def _future_expose_api( func, to_json=True, user_required=True ):
+ """
+ Expose this function via the API.
+ """
+ @wraps(func)
+ def decorator( self, trans, *args, **kwargs ):
+ if trans.error_message:
+ # TODO: Document this branch, when can this happen,
+ # I don't understand it.
+ return __api_error_response( trans, err_msg=trans.error_message )
+ if user_required and trans.anonymous:
+ error_code = error_codes.USER_NO_API_KEY
+ # Use error codes default error message.
+ return __api_error_response( trans, err_code=error_code, status_code=403 )
+ if trans.request.body:
+ try:
+ kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
+ except ValueError:
+ error_code = error_codes.USER_INVALID_JSON
+ return __api_error_response( trans, status_code=400, err_code=error_code )
+
+ trans.response.set_content_type( API_RESPONSE_CONTENT_TYPE )
+ # send 'do not cache' headers to handle IE's caching of ajax get responses
+ trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+ # TODO: Refactor next block out into a helper procedure.
+ # Perform api_run_as processing, possibly changing identity
+ if 'payload' in kwargs and 'run_as' in kwargs['payload']:
+ if not trans.user_can_do_run_as():
+ error_code = error_codes.USER_CANNOT_RUN_AS
+ return __api_error_response( trans, err_code=error_code, status_code=403 )
+ try:
+ decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
+ except TypeError:
+ error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
+ error_code = error_codes.USER_INVALID_RUN_AS
+ return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
+ try:
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ trans.api_inherit_admin = trans.user_is_admin()
+ trans.set_user(user)
+ except:
+ error_code = error_codes.USER_INVALID_RUN_AS
+ return __api_error_response( trans, err_code=error_code, status_code=400 )
+ try:
+ rval = func( self, trans, *args, **kwargs)
+ if to_json and trans.debug:
+ rval = to_json_string( rval, indent=4, sort_keys=True )
+ elif to_json:
+ rval = to_json_string( rval )
+ return rval
+ except MessageException as e:
+ traceback_string = format_exc()
+ return __api_error_response( trans, exception=e, traceback=traceback_string )
+ except paste.httpexceptions.HTTPException:
+ # TODO: Allow to pass or format for the API???
+ raise # handled
+ except Exception as e:
+ traceback_string = format_exc()
+ error_message = 'Uncaught exception in exposed API method:'
+ log.exception( error_message )
+ return __api_error_response(
+ trans,
+ status_code=500,
+ exception=e,
+ traceback=traceback_string,
+ err_msg=error_message,
+ err_code=error_codes.UNKNOWN
+ )
+ if not hasattr(func, '_orig'):
+ decorator._orig = func
+ decorator.exposed = True
+ return decorator
+
+
def require_admin( func ):
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -9,6 +9,8 @@
from paste.httpexceptions import HTTPBadRequest, HTTPForbidden, HTTPInternalServerError, HTTPException
from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
from galaxy.util import string_as_bool, restore_text
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web.base.controller import BaseAPIController, UsesHistoryMixin, UsesTagsMixin
@@ -20,7 +22,7 @@
class HistoriesController( BaseAPIController, UsesHistoryMixin, UsesTagsMixin ):
- @web.expose_api_anonymous
+ @expose_api_anonymous
def index( self, trans, deleted='False', **kwd ):
"""
index( trans, deleted='False' )
@@ -152,7 +154,7 @@
return history_data
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwd ):
"""
create( trans, payload )
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/webapps/galaxy/api/page_revisions.py
--- a/lib/galaxy/webapps/galaxy/api/page_revisions.py
+++ b/lib/galaxy/webapps/galaxy/api/page_revisions.py
@@ -2,8 +2,9 @@
API for updating Galaxy Pages
"""
import logging
-from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy import exceptions
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.sanitize_html import sanitize_html
@@ -12,7 +13,7 @@
class PageRevisionsController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
- @web.expose_api
+ @expose_api
def index( self, trans, page_id, **kwd ):
"""
index( self, trans, page_id, **kwd )
@@ -24,14 +25,16 @@
:rtype: list
:returns: dictionaries containing different revisions of the page
"""
+ page = self._get_page( trans, page_id )
+ self._verify_page_ownership( trans, page )
+
r = trans.sa_session.query( trans.app.model.PageRevision ).filter_by( page_id=trans.security.decode_id(page_id) )
out = []
for page in r:
- if self.security_check( trans, page, True, True ):
- out.append( self.encode_all_ids( trans, page.to_dict(), True) )
+ out.append( self.encode_all_ids( trans, page.to_dict(), True) )
return out
- @web.expose_api
+ @expose_api
def create( self, trans, page_id, payload, **kwd ):
"""
create( self, trans, page_id, payload **kwd )
@@ -46,39 +49,42 @@
:rtype: dictionary
:returns: Dictionary with 'success' or 'error' element to indicate the result of the request
"""
- error_str = ""
+ content = payload.get("content", None)
+ if not content:
+ raise exceptions.ObjectAttributeMissingException("content undefined or empty")
- if not page_id:
- error_str = "page_id is required"
- elif not payload.get("content", None):
- error_str = "content is required"
+ page = self._get_page( trans, page_id )
+ self._verify_page_ownership( trans, page )
+
+ if 'title' in payload:
+ title = payload['title']
else:
+ title = page.title
- # Create the new stored page
+ content = sanitize_html( content, 'utf-8', 'text/html' )
+
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = title
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = content
+
+ # Persist
+ session = trans.sa_session
+ session.flush()
+
+ return page_revision.to_dict( view="element" )
+
+ def _get_page( self, trans, page_id ):
+ page = None
+ try:
page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id(page_id) )
- if page is None:
- return { "error" : "page not found"}
+ except Exception:
+ pass
+ if not page:
+ raise exceptions.ObjectNotFound()
+ return page
- if not self.security_check( trans, page, True, True ):
- return { "error" : "page not found"}
-
- if 'title' in payload:
- title = payload['title']
- else:
- title = page.title
-
- content = payload.get("content", "")
- content = sanitize_html( content, 'utf-8', 'text/html' )
-
- page_revision = trans.app.model.PageRevision()
- page_revision.title = title
- page_revision.page = page
- page.latest_revision = page_revision
- page_revision.content = content
- # Persist
- session = trans.sa_session
- session.flush()
-
- return { "success" : "revision posted" }
-
- return { "error" : error_str }
+ def _verify_page_ownership( self, trans, page ):
+ if not self.security_check( trans, page, True, True ):
+ raise exceptions.ItemOwnershipException()
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 lib/galaxy/webapps/galaxy/api/pages.py
--- a/lib/galaxy/webapps/galaxy/api/pages.py
+++ b/lib/galaxy/webapps/galaxy/api/pages.py
@@ -2,8 +2,9 @@
API for updating Galaxy Pages
"""
import logging
-from galaxy import web
+from galaxy.web import _future_expose_api as expose_api
from galaxy.web.base.controller import SharableItemSecurityMixin, BaseAPIController, SharableMixin
+from galaxy import exceptions
from galaxy.model.item_attrs import UsesAnnotations
from galaxy.util.sanitize_html import sanitize_html
@@ -12,7 +13,7 @@
class PagesController( BaseAPIController, SharableItemSecurityMixin, UsesAnnotations, SharableMixin ):
- @web.expose_api
+ @expose_api
def index( self, trans, deleted=False, **kwd ):
"""
index( self, trans, deleted=False, **kwd )
@@ -47,7 +48,7 @@
return out
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwd ):
"""
create( self, trans, payload, **kwd )
@@ -64,45 +65,41 @@
:returns: Dictionary return of the Page.to_dict call
"""
user = trans.get_user()
- error_str = ""
if not payload.get("title", None):
- error_str = "Page name is required"
+ raise exceptions.ObjectAttributeMissingException( "Page name is required" )
elif not payload.get("slug", None):
- error_str = "Page id is required"
+ raise exceptions.ObjectAttributeMissingException( "Page id is required" )
elif not self._is_valid_slug( payload["slug"] ):
- error_str = "Page identifier must consist of only lowercase letters, numbers, and the '-' character"
+ raise exceptions.ObjectAttributeInvalidException( "Page identifier must consist of only lowercase letters, numbers, and the '-' character" )
elif trans.sa_session.query( trans.app.model.Page ).filter_by( user=user, slug=payload["slug"], deleted=False ).first():
- error_str = "Page id must be unique"
- else:
+ raise exceptions.DuplicatedSlugException( "Page slug must be unique" )
- content = payload.get("content", "")
- content = sanitize_html( content, 'utf-8', 'text/html' )
+ content = payload.get("content", "")
+ content = sanitize_html( content, 'utf-8', 'text/html' )
- # Create the new stored page
- page = trans.app.model.Page()
- page.title = payload['title']
- page.slug = payload['slug']
- page_annotation = sanitize_html( payload.get( "annotation", "" ), 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
- page.user = user
- # And the first (empty) page revision
- page_revision = trans.app.model.PageRevision()
- page_revision.title = payload['title']
- page_revision.page = page
- page.latest_revision = page_revision
- page_revision.content = content
- # Persist
- session = trans.sa_session
- session.add( page )
- session.flush()
+ # Create the new stored page
+ page = trans.app.model.Page()
+ page.title = payload['title']
+ page.slug = payload['slug']
+ page_annotation = sanitize_html( payload.get( "annotation", "" ), 'utf-8', 'text/html' )
+ self.add_item_annotation( trans.sa_session, trans.get_user(), page, page_annotation )
+ page.user = user
+ # And the first (empty) page revision
+ page_revision = trans.app.model.PageRevision()
+ page_revision.title = payload['title']
+ page_revision.page = page
+ page.latest_revision = page_revision
+ page_revision.content = content
+ # Persist
+ session = trans.sa_session
+ session.add( page )
+ session.flush()
- rval = self.encode_all_ids( trans, page.to_dict(), True )
- return rval
+ rval = self.encode_all_ids( trans, page.to_dict(), True )
+ return rval
- return { "error" : error_str }
-
- @web.expose_api
+ @expose_api
def delete( self, trans, id, **kwd ):
"""
delete( self, trans, id, **kwd )
@@ -114,22 +111,14 @@
:rtype: dict
:returns: Dictionary with 'success' or 'error' element to indicate the result of the request
"""
- page_id = id
- try:
- page = trans.sa_session.query(self.app.model.Page).get(trans.security.decode_id(page_id))
- except Exception, e:
- return { "error" : "Page with ID='%s' can not be found\n Exception: %s" % (page_id, str( e )) }
+ page = self._get_page( trans, id )
- # check to see if user has permissions to selected workflow
- if page.user != trans.user and not trans.user_is_admin():
- return { "error" : "Workflow is not owned by or shared with current user" }
-
- #Mark a workflow as deleted
+ #Mark a page as deleted
page.deleted = True
trans.sa_session.flush()
- return { "success" : "Deleted", "id" : page_id }
+ return '' # TODO: Figure out what to return on DELETE, document in guidelines!
- @web.expose_api
+ @expose_api
def show( self, trans, id, **kwd ):
"""
show( self, trans, id, **kwd )
@@ -141,8 +130,22 @@
:rtype: dict
:returns: Dictionary return of the Page.to_dict call with the 'content' field populated by the most recent revision
"""
- page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ page = self._get_page( trans, id )
self.security_check( trans, page, check_ownership=False, check_accessible=True)
rval = self.encode_all_ids( trans, page.to_dict(), True )
rval['content'] = page.latest_revision.content
return rval
+
+ def _get_page( self, trans, id ): # Fetches page object and verifies security.
+ try:
+ page = trans.sa_session.query( trans.app.model.Page ).get( trans.security.decode_id( id ) )
+ except Exception:
+ page = None
+
+ if not page:
+ raise exceptions.ObjectNotFound()
+
+ if page.user != trans.user and not trans.user_is_admin():
+ raise exceptions.ItemOwnershipException()
+
+ return page
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 test/base/api.py
--- a/test/base/api.py
+++ b/test/base/api.py
@@ -60,6 +60,12 @@
for key in keys:
assert key in response, "Response [%s] does not contain key [%s]" % ( response, key )
+ def _assert_error_code_is( self, response, error_code ):
+ if hasattr( response, "json" ):
+ response = response.json()
+ self._assert_has_keys( response, "err_code" )
+ self.assertEquals( response[ "err_code" ], int( error_code ) )
+
def _random_key( self ): # Used for invalid request testing...
return "1234567890123456"
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 test/functional/api/test_page_revisions.py
--- /dev/null
+++ b/test/functional/api/test_page_revisions.py
@@ -0,0 +1,35 @@
+from galaxy.exceptions import error_codes
+from functional.api.pages import BasePageApiTestCase
+
+
+class PageRevisionsApiTestCase( BasePageApiTestCase ):
+
+ def test_create( self ):
+ page_json = self._create_valid_page_with_slug( "pr1" )
+ revision_data = dict( content="<p>NewContent!</p>" )
+ page_revision_response = self._post( "pages/%s/revisions" % page_json[ 'id' ], data=revision_data )
+ self._assert_status_code_is( page_revision_response, 200 )
+ page_revision_json = page_revision_response.json()
+ self._assert_has_keys( page_revision_json, 'id', 'content' )
+
+ def test_403_if_create_revision_on_unowned_page( self ):
+ page_json = self._create_valid_page_as( "pr2(a)bx.psu.edu", "pr2" )
+ revision_data = dict( content="<p>NewContent!</p>" )
+ page_revision_response = self._post( "pages/%s/revisions" % page_json[ 'id' ], data=revision_data )
+ self._assert_status_code_is( page_revision_response, 403 )
+
+ def test_revision_index( self ):
+ page_json = self._create_valid_page_with_slug( "pr3" )
+ revision_data = dict( content="<p>NewContent!</p>" )
+ revisions_url = "pages/%s/revisions" % page_json[ 'id' ]
+ self._post( revisions_url, data=revision_data )
+ revisions_response = self._get( revisions_url )
+ self._assert_status_code_is( revisions_response, 200 )
+ revisions_json = revisions_response.json()
+ assert len( revisions_json ) == 2 # Original revision and new one
+
+ def test_404_if_index_unknown_page( self ):
+ revisions_url = "pages/%s/revisions" % self._random_key()
+ revisions_response = self._get( revisions_url )
+ self._assert_status_code_is( revisions_response, 404 )
+ self._assert_error_code_is( revisions_response, error_codes.USER_OBJECT_NOT_FOUND )
diff -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 -r fa698b544051c61a6b895f04840309a7807ce5a7 test/functional/api/test_pages.py
--- /dev/null
+++ b/test/functional/api/test_pages.py
@@ -0,0 +1,105 @@
+from galaxy.exceptions import error_codes
+from base import api
+from base.interactor import delete_request
+
+from operator import itemgetter
+
+
+class BasePageApiTestCase( api.ApiTestCase ):
+
+ def _create_valid_page_with_slug( self, slug ):
+ page_request = self._test_page_payload( slug=slug )
+ page_response = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response, 200 )
+ return page_response.json()
+
+ def _create_valid_page_as( self, other_email, slug ):
+ run_as_user = self._setup_user( other_email )
+ page_request = self._test_page_payload( slug=slug )
+ page_request[ "run_as" ] = run_as_user[ "id" ]
+ page_response = self._post( "pages", page_request, admin=True )
+ self._assert_status_code_is( page_response, 200 )
+ return page_response.json()
+
+ def _test_page_payload( self, **kwds ):
+ request = dict(
+ slug="mypage",
+ title="MY PAGE",
+ content="<p>Page!</p>",
+ )
+ request.update( **kwds )
+ return request
+
+
+class PageApiTestCase( BasePageApiTestCase ):
+
+ def test_create( self ):
+ response_json = self._create_valid_page_with_slug( "mypage" )
+ self._assert_has_keys( response_json, "slug", "title", "id" )
+
+ def test_index( self ):
+ create_response_json = self._create_valid_page_with_slug( "indexpage" )
+ assert self._users_index_has_page_with_id( create_response_json[ "id" ] )
+
+ def test_index_doesnt_show_unavailable_pages( self ):
+ create_response_json = self._create_valid_page_as( "others_page_index(a)bx.psu.edu", "otherspageindex" )
+ assert not self._users_index_has_page_with_id( create_response_json[ "id" ] )
+
+ def test_cannot_create_pages_with_same_slug( self ):
+ page_request = self._test_page_payload( slug="mypage1" )
+ page_response_1 = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response_1, 200 )
+ page_response_2 = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response_2, 400 )
+ self._assert_error_code_is( page_response_2, error_codes.USER_SLUG_DUPLICATE )
+
+ def test_page_requires_name( self ):
+ page_request = self._test_page_payload()
+ del page_request[ 'title' ]
+ page_response = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response, 400 )
+ self._assert_error_code_is( page_response, error_codes.USER_OBJECT_ATTRIBUTE_MISSING )
+
+ def test_page_requires_slug( self ):
+ page_request = self._test_page_payload()
+ del page_request[ 'slug' ]
+ page_response = self._post( "pages", page_request )
+ self._assert_status_code_is( page_response, 400 )
+
+ def test_delete( self ):
+ response_json = self._create_valid_page_with_slug( "testdelete" )
+ delete_response = delete_request( self._api_url( "pages/%s" % response_json[ 'id' ], use_key=True ) )
+ self._assert_status_code_is( delete_response, 200 )
+
+ def test_404_on_delete_unknown_page( self ):
+ delete_response = delete_request( self._api_url( "pages/%s" % self._random_key(), use_key=True ) )
+ self._assert_status_code_is( delete_response, 404 )
+ self._assert_error_code_is( delete_response, error_codes.USER_OBJECT_NOT_FOUND )
+
+ def test_403_on_delete_unowned_page( self ):
+ page_response = self._create_valid_page_as( "others_page(a)bx.psu.edu", "otherspage" )
+ delete_response = delete_request( self._api_url( "pages/%s" % page_response[ "id" ], use_key=True ) )
+ self._assert_status_code_is( delete_response, 403 )
+ self._assert_error_code_is( delete_response, error_codes.USER_DOES_NOT_OWN_ITEM )
+
+ def test_show( self ):
+ response_json = self._create_valid_page_with_slug( "pagetoshow" )
+ show_response = self._get( "pages/%s" % response_json['id'] )
+ self._assert_status_code_is( show_response, 200 )
+ show_json = show_response.json()
+ self._assert_has_keys( show_json, "slug", "title", "id" )
+ self.assertEquals( show_json["slug"], "pagetoshow" )
+ self.assertEquals( show_json["title"], "MY PAGE" )
+ self.assertEquals( show_json["content"], "<p>Page!</p>" )
+
+ def test_403_on_unowner_show( self ):
+ response_json = self._create_valid_page_as( "others_page_show(a)bx.psu.edu", "otherspageshow" )
+ show_response = self._get( "pages/%s" % response_json['id'] )
+ self._assert_status_code_is( show_response, 403 )
+ self._assert_error_code_is( show_response, error_codes.USER_DOES_NOT_OWN_ITEM )
+
+ def _users_index_has_page_with_id( self, id ):
+ index_response = self._get( "pages" )
+ self._assert_status_code_is( index_response, 200 )
+ pages = index_response.json()
+ return id in map( itemgetter( "id" ), pages )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix messaging broken in my last commit - thanks to Nicola Soranzo!
by commits-noreply@bitbucket.org 14 Jan '14
by commits-noreply@bitbucket.org 14 Jan '14
14 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/89dc1fd43e7b/
Changeset: 89dc1fd43e7b
User: greg
Date: 2014-01-15 02:05:28
Summary: Fix messaging broken in my last commit - thanks to Nicola Soranzo!
Affected #: 1 file
diff -r 55a2d1cced87aa2401a99ff3bc33d759f9b6e82c -r 89dc1fd43e7ba1156580e59bb310d09d95ccdb94 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2002,8 +2002,8 @@
','.join( util.listify( changeset_revisions ) ) ) )
return trans.response.send_redirect( url )
else:
- message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. '
- message += 'You may need to enable cookies in your browser. ' % galaxy_url
+ message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url
+ message += 'You may need to enable cookies in your browser. '
status = 'error'
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_valid_categories',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/77e5cbd3bcfe/
Changeset: 77e5cbd3bcfe
User: jmchilton
Date: 2014-01-14 23:02:11
Summary: Fix R env handling in fabric_util broken with 864f8f4.
Thanks BjoernGruening.
Affected #: 1 file
diff -r 027eb5827babc5a6b07b6c3e098bacd09185adf4 -r 77e5cbd3bcfebf50ce463c0b320a51e288f00a1e lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -409,7 +409,7 @@
with lcd( current_dir ):
with settings( warn_only=True ):
for tarball_name in tarball_names:
- cmd = '''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR; export R_LIBS; &&
+ cmd = '''PATH=$PATH:$R_HOME/bin; export PATH; R_LIBS=$INSTALL_DIR; export R_LIBS;
Rscript -e "install.packages(c('%s'),lib='$INSTALL_DIR', repos=NULL, dependencies=FALSE)"''' % ( str( tarball_name ) )
cmd = install_environment.build_command( td_common_util.evaluate_template( cmd, install_dir ) )
return_code = handle_command( app, tool_dependency, install_dir, cmd )
https://bitbucket.org/galaxy/galaxy-central/commits/55a2d1cced87/
Changeset: 55a2d1cced87
User: jmchilton
Date: 2014-01-14 23:05:01
Summary: Merge latest.
Affected #: 1 file
diff -r 77e5cbd3bcfebf50ce463c0b320a51e288f00a1e -r 55a2d1cced87aa2401a99ff3bc33d759f9b6e82c lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -497,9 +497,10 @@
suc.repository_was_previously_installed( trans, tool_shed_url, name, tmp_repo_info_tuple )
if installed_repository:
current_changeset_revision = str( installed_repository.changeset_revision )
- message = 'Revision %s of repository %s owned by %s has already been installed.'
+ message = 'Revision <b>%s</b> of repository <b>%s</b> owned by <b>%s</b> has already been installed.' % \
+ ( latest_downloadable_revision, name, owner )
if current_changeset_revision != latest_downloadable_revision:
- message += ' The current changeset revision is %s.' % current_changeset_revision
+ message += ' The current changeset revision is <b>%s</b>.' % current_changeset_revision
status = 'error'
else:
# Install the latest downloadable revision of the repository.
@@ -508,10 +509,10 @@
( name, owner, latest_downloadable_revision, web.url_for( '/', qualified=True ) ) )
return trans.response.send_redirect( url )
else:
- message = 'Cannot locate installed tool shed repository with encoded id %s.' % str( repository_id )
+ message = 'Cannot locate installed tool shed repository with encoded id <b>%s</b>.' % str( repository_id )
status = 'error'
else:
- message = 'The request parameters did not include the required encoded id of installed repository.'
+ message = 'The request parameters did not include the required encoded <b>id</b> of installed repository.'
status = 'error'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Improved messaging when upgrading an installed repository in Galaxy.
by commits-noreply@bitbucket.org 14 Jan '14
by commits-noreply@bitbucket.org 14 Jan '14
14 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9165ed69a678/
Changeset: 9165ed69a678
User: greg
Date: 2014-01-14 22:54:09
Summary: Improved messaging when upgrading an installed repository in Galaxy.
Affected #: 1 file
diff -r 027eb5827babc5a6b07b6c3e098bacd09185adf4 -r 9165ed69a678010aff37fdc93f3a25661aa44e79 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -497,9 +497,10 @@
suc.repository_was_previously_installed( trans, tool_shed_url, name, tmp_repo_info_tuple )
if installed_repository:
current_changeset_revision = str( installed_repository.changeset_revision )
- message = 'Revision %s of repository %s owned by %s has already been installed.'
+ message = 'Revision <b>%s</b> of repository <b>%s</b> owned by <b>%s</b> has already been installed.' % \
+ ( latest_downloadable_revision, name, owner )
if current_changeset_revision != latest_downloadable_revision:
- message += ' The current changeset revision is %s.' % current_changeset_revision
+ message += ' The current changeset revision is <b>%s</b>.' % current_changeset_revision
status = 'error'
else:
# Install the latest downloadable revision of the repository.
@@ -508,10 +509,10 @@
( name, owner, latest_downloadable_revision, web.url_for( '/', qualified=True ) ) )
return trans.response.send_redirect( url )
else:
- message = 'Cannot locate installed tool shed repository with encoded id %s.' % str( repository_id )
+ message = 'Cannot locate installed tool shed repository with encoded id <b>%s</b>.' % str( repository_id )
status = 'error'
else:
- message = 'The request parameters did not include the required encoded id of installed repository.'
+ message = 'The request parameters did not include the required encoded <b>id</b> of installed repository.'
status = 'error'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/6cba7c5f3635/
Changeset: 6cba7c5f3635
User: carlfeberhard
Date: 2014-01-14 22:41:20
Summary: Histories API: apply None rule from 5c5e543 to history update
Affected #: 1 file
diff -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 -r 6cba7c5f36358f7e71168cff6118e15642d01cef lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -344,6 +344,8 @@
)
validated_payload = {}
for key, val in payload.items():
+ if val is None:
+ continue
if key in ( 'name', 'genome_build', 'annotation' ):
validated_payload[ key ] = self.validate_and_sanitize_basestring( key, val )
if key in ( 'deleted', 'published' ):
https://bitbucket.org/galaxy/galaxy-central/commits/027eb5827bab/
Changeset: 027eb5827bab
User: carlfeberhard
Date: 2014-01-14 22:41:37
Summary: merge
Affected #: 1 file
diff -r 6cba7c5f36358f7e71168cff6118e15642d01cef -r 027eb5827babc5a6b07b6c3e098bacd09185adf4 templates/webapps/galaxy/tool_form.mako
--- a/templates/webapps/galaxy/tool_form.mako
+++ b/templates/webapps/galaxy/tool_form.mako
@@ -268,12 +268,12 @@
<%def name="row_for_rerun()">
%if trans.app.config.track_jobs_in_database and tool_state.rerun_remap_job_id is not None:
- <div class="form-row">
+ <div id="remap-row" class="form-row"><input type="checkbox" name="rerun_remap_job_id" value="${tool_state.rerun_remap_job_id}"> Resume dependencies from this job
<div class="toolParamHelp" style="clear: both;">
The previous run of this tool failed and other tools were waiting for it to finish successfully, use this option to resume those tools using the outputs of this tool run.
</div>
- <div>
+ </div><div style="clear: both;"></div>
%endif
</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Close remap div in tool_form.
by commits-noreply@bitbucket.org 14 Jan '14
by commits-noreply@bitbucket.org 14 Jan '14
14 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9eb10fd69c98/
Changeset: 9eb10fd69c98
User: jmchilton
Date: 2014-01-14 22:40:27
Summary: Close remap div in tool_form.
Fixes up the GUI slightly and is needed for me to selectively disable feature downstream.
Affected #: 1 file
diff -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 -r 9eb10fd69c986fcd01a4ca0393e28ac18b1c3518 templates/webapps/galaxy/tool_form.mako
--- a/templates/webapps/galaxy/tool_form.mako
+++ b/templates/webapps/galaxy/tool_form.mako
@@ -268,12 +268,12 @@
<%def name="row_for_rerun()">
%if trans.app.config.track_jobs_in_database and tool_state.rerun_remap_job_id is not None:
- <div class="form-row">
+ <div id="remap-row" class="form-row"><input type="checkbox" name="rerun_remap_job_id" value="${tool_state.rerun_remap_job_id}"> Resume dependencies from this job
<div class="toolParamHelp" style="clear: both;">
The previous run of this tool failed and other tools were waiting for it to finish successfully, use this option to resume those tools using the outputs of this tool run.
</div>
- <div>
+ </div><div style="clear: both;"></div>
%endif
</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add the ability to install the latest installable revision of a repository that is already installed into Galaxy via the installed repository's pop-up menu on the Manage installed Tool Shed repositories grid in Galaxy. This is a new grid operation that is available only for those repositories that have a revision upgrade available in the Tool Shed.
by commits-noreply@bitbucket.org 14 Jan '14
by commits-noreply@bitbucket.org 14 Jan '14
14 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/be5f0c5df96f/
Changeset: be5f0c5df96f
User: greg
Date: 2014-01-14 22:37:56
Summary: Add the ability to install the latest installable revision of a repository that is already installed into Galaxy via the installed repository's pop-up menu on the Manage installed Tool Shed repositories grid in Galaxy. This is a new grid operation that is available only for those repositories that have a revision upgrade available in the Tool Shed.
Affected #: 5 files
diff -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -106,10 +106,11 @@
if operation == "activate or reinstall":
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if repository.uninstalled:
- # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated so that we
- # can reset the metadata if necessary. This will ensure that information about repository dependencies and tool dependencies
- # will be current. Only allow selecting a different section in the tool panel if the repository was uninstalled and it contained
- # tools that should be displayed in the tool panel.
+ # Since we're reinstalling the repository we need to find the latest changeset revision to which it can
+ # be updated so that we can reset the metadata if necessary. This will ensure that information about
+ # repository dependencies and tool dependencies will be current. Only allow selecting a different section
+ # in the tool panel if the repository was uninstalled and it contained tools that should be displayed in
+ # the tool panel.
changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, repository )
current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
@@ -121,11 +122,12 @@
**kwd ) )
else:
# The uninstalled repository has updates available in the tool shed.
- updated_repo_info_dict = self.get_updated_repository_information( trans=trans,
- repository_id=trans.security.encode_id( repository.id ),
- repository_name=repository.name,
- repository_owner=repository.owner,
- changeset_revision=current_changeset_revision )
+ updated_repo_info_dict = \
+ self.get_updated_repository_information( trans=trans,
+ repository_id=trans.security.encode_id( repository.id ),
+ repository_name=repository.name,
+ repository_owner=repository.owner,
+ changeset_revision=current_changeset_revision )
json_repo_info_dict = json.to_json_string( updated_repo_info_dict )
encoded_repo_info_dict = encoding_util.tool_shed_encode( json_repo_info_dict )
kwd[ 'latest_changeset_revision' ] = current_changeset_revision
@@ -150,6 +152,10 @@
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='deactivate_or_uninstall_repository',
**kwd ) )
+ if operation == "install latest revision":
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='install_latest_repository_revision',
+ **kwd ) )
return self.installed_repository_grid( trans, **kwd )
@web.expose
@@ -460,6 +466,61 @@
@web.expose
@web.require_admin
+ def install_latest_repository_revision( self, trans, **kwd ):
+ """Install the latest installable revision of a repository that has been previously installed."""
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ repository_id = kwd.get( 'id', None )
+ if repository_id is not None:
+ repository = suc.get_installed_tool_shed_repository( trans, repository_id )
+ if repository is not None:
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
+ name = str( repository.name )
+ owner = str( repository.owner )
+ url = suc.url_join( tool_shed_url,
+ 'repository/get_latest_downloadable_changeset_revision?galaxy_url=%s&name=%s&owner=%s' % \
+ ( web.url_for( '/', qualified=True ), name, owner ) )
+ raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ latest_downloadable_revision = json.from_json_string( raw_text )
+ if latest_downloadable_revision == suc.INITIAL_CHANGELOG_HASH:
+ message = 'Error retrieving the latest downloadable revision for this repository via the url <b>%s</b>.' % url
+ status = 'error'
+ else:
+ # Make sure the latest changeset_revision of the repository has not already been installed.
+ # Updates to installed repository revisions may have occurred, so make sure to locate the
+ # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple
+ # with the following entries to handle this.
+ # ( description, clone_url, changeset_revision, ctx_rev, owner, repository_dependencies, tool_dependencies )
+ tmp_clone_url = suc.url_join( tool_shed_url, 'repos', owner, name )
+ tmp_repo_info_tuple = ( None, tmp_clone_url, latest_downloadable_revision, None, owner, None, None )
+ installed_repository, installed_changeset_revision = \
+ suc.repository_was_previously_installed( trans, tool_shed_url, name, tmp_repo_info_tuple )
+ if installed_repository:
+ current_changeset_revision = str( installed_repository.changeset_revision )
+ message = 'Revision %s of repository %s owned by %s has already been installed.'
+ if current_changeset_revision != latest_downloadable_revision:
+ message += ' The current changeset revision is %s.' % current_changeset_revision
+ status = 'error'
+ else:
+ # Install the latest downloadable revision of the repository.
+ url = suc.url_join( tool_shed_url,
+ 'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
+ ( name, owner, latest_downloadable_revision, web.url_for( '/', qualified=True ) ) )
+ return trans.response.send_redirect( url )
+ else:
+ message = 'Cannot locate installed tool shed repository with encoded id %s.' % str( repository_id )
+ status = 'error'
+ else:
+ message = 'The request parameters did not include the required encoded id of installed repository.'
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='browse_repositories',
+ message=message,
+ status=status ) )
+
+
+ @web.expose
+ @web.require_admin
def install_tool_dependencies( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
@@ -600,12 +661,17 @@
def manage_repository( self, trans, **kwd ):
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
- repository_id = kwd[ 'id' ]
+ repository_id = kwd.get( 'id', None )
+ if repository_id is None:
+ return trans.show_error_message( 'Missing required encoded repository id.' )
operation = kwd.get( 'operation', None )
repository = suc.get_installed_tool_shed_repository( trans, repository_id )
- if not repository:
+ if repository is None:
return trans.show_error_message( 'Invalid repository specified.' )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, repository.tool_shed )
+ name = str( repository.name )
+ owner = str( repository.owner )
+ installed_changeset_revision = str( repository.installed_changeset_revision )
if repository.status in [ trans.install_model.ToolShedRepository.installation_status.CLONING ]:
tool_shed_repository_ids = [ repository_id ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
@@ -615,12 +681,12 @@
# Send a request to the tool shed to install the repository.
url = suc.url_join( tool_shed_url,
'repository/install_repositories_by_revision?name=%s&owner=%s&changeset_revisions=%s&galaxy_url=%s' % \
- ( repository.name, repository.owner, repository.installed_changeset_revision, ( web.url_for( '/', qualified=True ) ) ) )
+ ( name, owner, installed_changeset_revision, ( web.url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = kwd.get( 'description', repository.description )
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
+ repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
else:
repo_files_dir = None
if repository.in_error_state:
@@ -841,7 +907,7 @@
# Get the information necessary to install each repository.
url = suc.url_join( tool_shed_url,
'repository/get_repository_information?repository_ids=%s&changeset_revisions=%s' % \
- ( repository_ids, changeset_revisions ) )
+ ( str( repository_ids ), str( changeset_revisions ) ) )
raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
repo_information_dict = json.from_json_string( raw_text )
for encoded_repo_info_dict in repo_information_dict.get( 'repo_info_dicts', [] ):
diff -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1581,28 +1581,52 @@
items=functional_test_results )
@web.json
+ def get_latest_downloadable_changeset_revision( self, trans, **kwd ):
+ """
+ Return the latest installable changeset revision for the repository associated with the received
+ name and owner. This method is called from Galaxy when attempting to install the latest revision
+ of an installed repository.
+ """
+ repository_name = kwd.get( 'name', None )
+ repository_owner = kwd.get( 'owner', None )
+ if repository_name is not None and repository_owner is not None:
+ repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+ if repository:
+ repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+ return suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
+ return suc.INITIAL_CHANGELOG_HASH
+
+ @web.json
def get_readme_files( self, trans, **kwd ):
"""
- This method is called when installing or re-installing a single repository into a Galaxy instance. If the received changeset_revision
- includes one or more readme files, return them in a dictionary.
+ This method is called when installing or re-installing a single repository into a Galaxy instance.
+ If the received changeset_revision includes one or more readme files, return them in a dictionary.
"""
- repository_name = kwd[ 'name' ]
- repository_owner = kwd[ 'owner' ]
- changeset_revision = kwd[ 'changeset_revision' ]
- repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
- if repository:
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- return readme_util.build_readme_files_dict( trans, repository, changeset_revision, repository_metadata.metadata )
+ repository_name = kwd.get( 'name', None )
+ repository_owner = kwd.get( 'owner', None )
+ changeset_revision = kwd.get( 'changeset_revision', None )
+ if repository_name is not None and repository_owner is not None and changeset_revision is not None:
+ repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
+ if repository:
+ repository_metadata = \
+ suc.get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ return readme_util.build_readme_files_dict( trans,
+ repository,
+ changeset_revision,
+ repository_metadata.metadata )
return {}
@web.json
def get_repository_dependencies( self, trans, **kwd ):
- """Return an encoded dictionary of all repositories upon which the contents of the received repository depends."""
+ """
+ Return an encoded dictionary of all repositories upon which the contents of the received repository
+ depends.
+ """
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
changeset_revision = kwd.get( 'changeset_revision', None )
@@ -1630,8 +1654,8 @@
@web.json
def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
"""
- Generate a list of dictionaries, each of which contains the information about a repository that will be necessary for installing it into
- a local Galaxy instance.
+ Generate a list of dictionaries, each of which contains the information about a repository that will
+ be necessary for installing it into a local Galaxy instance.
"""
includes_tools = False
includes_tools_for_display_in_tool_panel = False
@@ -1665,8 +1689,8 @@
@web.json
def get_required_repo_info_dict( self, trans, encoded_str=None ):
"""
- Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
- repositories defined by the received encoded_str.
+ Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the
+ information needed to install the list of repositories defined by the received encoded_str.
"""
repo_info_dict = {}
if encoded_str:
@@ -1689,7 +1713,10 @@
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
- """Handle a request from a Galaxy instance to get the tool_dependencies entry from the metadata for a specified changeset revision."""
+ """
+ Handle a request from a Galaxy instance to get the tool_dependencies entry from the metadata
+ for a specified changeset revision.
+ """
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
changeset_revision = kwd.get( 'changeset_revision', None )
@@ -1705,7 +1732,10 @@
@web.expose
def get_tool_dependencies_config_contents( self, trans, **kwd ):
- """Handle a request from a Galaxy instance to get the tool_dependencies.xml file contents for a specified changeset revision."""
+ """
+ Handle a request from a Galaxy instance to get the tool_dependencies.xml file contents for a
+ specified changeset revision.
+ """
name = kwd.get( 'name', None )
owner = kwd.get( 'owner', None )
changeset_revision = kwd.get( 'changeset_revision', None )
@@ -1726,8 +1756,8 @@
@web.expose
def get_tool_versions( self, trans, **kwd ):
"""
- For each valid /downloadable change set (up to the received changeset_revision) in the repository's change log, append the change
- set's tool_versions dictionary to the list that will be returned.
+ For each valid /downloadable change set (up to the received changeset_revision) in the repository's
+        change log, append the changeset's tool_versions dictionary to the list that will be returned.
"""
name = kwd[ 'name' ]
owner = kwd[ 'owner' ]
@@ -1751,7 +1781,10 @@
@web.json
def get_updated_repository_information( self, trans, name, owner, changeset_revision, **kwd ):
- """Generate a dictionary that contains the information about a repository that is necessary for installing it into a local Galaxy instance."""
+ """
+ Generate a dictionary that contains the information about a repository that is necessary for installing
+ it into a local Galaxy instance.
+ """
repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
repository_id = trans.security.encode_id( repository.id )
repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
@@ -1828,7 +1861,10 @@
repo_info_dict=repo_info_dict )
def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
- """Return the tool lineage in descendant order for the received guid contained in the received repsitory_metadata.tool_versions."""
+ """
+ Return the tool lineage in descendant order for the received guid contained in the received
+        repository_metadata.tool_versions.
+ """
encoded_id = trans.security.encode_id( repository.id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
@@ -1846,7 +1882,9 @@
current_child_guid = parent_guid
# Get all descendant guids of the received guid.
current_parent_guid = guid
- for changeset in suc.reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip( trans.app ) ):
+ for changeset in suc.reversed_lower_upper_bounded_changelog( repo,
+ repository_metadata.changeset_revision,
+ repository.tip( trans.app ) ):
ctx = repo.changectx( changeset )
rm = suc.get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
if rm:
@@ -1943,8 +1981,9 @@
@web.expose
def install_repositories_by_revision( self, trans, **kwd ):
"""
- Send the list of repository_ids and changeset_revisions to Galaxy so it can begin the installation process. If the value of
- repository_ids is not received, then the name and owner of a single repository must be received to install a single repository.
+ Send the list of repository_ids and changeset_revisions to Galaxy so it can begin the installation
+ process. If the value of repository_ids is not received, then the name and owner of a single repository
+ must be received to install a single repository.
"""
repository_ids = kwd.get( 'repository_ids', None )
changeset_revisions = kwd.get( 'changeset_revisions', None )
@@ -1958,10 +1997,13 @@
# Redirect back to local Galaxy to perform install.
url = suc.url_join( galaxy_url,
'admin_toolshed/prepare_for_install?tool_shed_url=%s&repository_ids=%s&changeset_revisions=%s' % \
- ( web.url_for( '/', qualified=True ), ','.join( util.listify( repository_ids ) ), ','.join( util.listify( changeset_revisions ) ) ) )
+ ( web.url_for( '/', qualified=True ),
+ ','.join( util.listify( repository_ids ) ),
+ ','.join( util.listify( changeset_revisions ) ) ) )
return trans.response.send_redirect( url )
else:
- message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. You may need to enable cookies in your browser. ' % galaxy_url
+            message = 'Repository installation is not possible due to an invalid Galaxy URL: <b>%s</b>. ' % galaxy_url
+            message += 'You may need to enable cookies in your browser. '
status = 'error'
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_valid_categories',
diff -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
--- a/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
+++ b/lib/tool_shed/galaxy_install/grids/admin_toolshed_grids.py
@@ -131,73 +131,99 @@
args = { self.key: val }
accepted_filters.append( grids.GridColumnFilter( label, args) )
return accepted_filters
+
# Grid definition
title = "Installed tool shed repositories"
model_class = tool_shed_install.ToolShedRepository
template='/admin/tool_shed_repository/grid.mako'
default_sort_key = "name"
columns = [
- ToolShedStatusColumn( "",
- attach_popup=False ),
- NameColumn( "Name",
+ ToolShedStatusColumn( label="" ),
+ NameColumn( label="Name",
key="name",
link=( lambda item: iff( item.status in [ tool_shed_install.ToolShedRepository.installation_status.CLONING ],
None,
dict( operation="manage_repository", id=item.id ) ) ),
attach_popup=True ),
- DescriptionColumn( "Description" ),
- OwnerColumn( "Owner" ),
- RevisionColumn( "Revision" ),
- StatusColumn( "Installation Status",
+ DescriptionColumn( label="Description" ),
+ OwnerColumn( label="Owner" ),
+ RevisionColumn( label="Revision" ),
+ StatusColumn( label="Installation Status",
filterable="advanced" ),
- ToolShedColumn( "Tool shed" ),
+ ToolShedColumn( label="Tool shed" ),
# Columns that are valid for filtering but are not visible.
- DeletedColumn( "Status",
+ DeletedColumn( label="Status",
key="deleted",
visible=False,
filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name",
- cols_to_filter=[ columns[0] ],
+ cols_to_filter=[ columns[ 1 ] ],
key="free-text-search",
visible=False,
filterable="standard" ) )
global_actions = [
- grids.GridAction( "Update tool shed status",
- dict( controller='admin_toolshed', action='update_tool_shed_status_for_installed_repository', all_installed_repositories=True ) )
+ grids.GridAction( label="Update tool shed status",
+ url_args=dict( controller='admin_toolshed',
+ action='update_tool_shed_status_for_installed_repository',
+ all_installed_repositories=True ),
+ inbound=False )
]
- operations = [ grids.GridOperation( "Update tool shed status",
+ operations = [ grids.GridOperation( label="Update tool shed status",
+ condition=( lambda item: not item.deleted ),
allow_multiple=False,
- condition=( lambda item: not item.deleted ),
- async_compatible=False,
- url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='update tool shed status' ) ),
- grids.GridOperation( "Get updates",
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repositories',
+ operation='update tool shed status' ) ),
+ grids.GridOperation( label="Get updates",
+ condition=( lambda item: \
+ not item.deleted and \
+ item.revision_update_available and \
+ item.status not in \
+ [ tool_shed_install.ToolShedRepository.installation_status.ERROR,
+ tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
allow_multiple=False,
- condition=( lambda item: not item.deleted and item.revision_update_available and item.status not in \
- [ tool_shed_install.ToolShedRepository.installation_status.ERROR, tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
- async_compatible=False,
- url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='get updates' ) ),
- grids.GridOperation( "Install",
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repositories',
+ operation='get updates' ) ),
+ grids.GridOperation( label="Install latest revision",
+ condition=( lambda item: item.upgrade_available ),
allow_multiple=False,
- condition=( lambda item: not item.deleted and item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ),
- async_compatible=False,
- url_args=dict( controller='admin_toolshed', action='manage_repository', operation='install' ) ),
- grids.GridOperation( "Deactivate or uninstall",
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repositories',
+ operation='install latest revision' ) ),
+ grids.GridOperation( label="Install",
+ condition=( lambda item: \
+ not item.deleted and \
+ item.status == tool_shed_install.ToolShedRepository.installation_status.NEW ),
allow_multiple=False,
- condition=( lambda item: not item.deleted and item.status not in \
- [ tool_shed_install.ToolShedRepository.installation_status.ERROR, tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
- async_compatible=False,
- url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='deactivate or uninstall' ) ),
- grids.GridOperation( "Reset to install",
+ url_args=dict( controller='admin_toolshed',
+ action='manage_repository',
+ operation='install' ) ),
+ grids.GridOperation( label="Deactivate or uninstall",
+ condition=( lambda item: \
+ not item.deleted and \
+ item.status not in \
+ [ tool_shed_install.ToolShedRepository.installation_status.ERROR,
+ tool_shed_install.ToolShedRepository.installation_status.NEW ] ),
allow_multiple=False,
- condition=( lambda item: ( item.status == tool_shed_install.ToolShedRepository.installation_status.ERROR ) ),
- async_compatible=False,
- url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='reset to install' ) ),
- grids.GridOperation( "Activate or reinstall",
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repositories',
+ operation='deactivate or uninstall' ) ),
+ grids.GridOperation( label="Reset to install",
+ condition=( lambda item: \
+ ( item.status == tool_shed_install.ToolShedRepository.installation_status.ERROR ) ),
allow_multiple=False,
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repositories',
+ operation='reset to install' ) ),
+ grids.GridOperation( label="Activate or reinstall",
condition=( lambda item: item.deleted ),
- async_compatible=False,
- url_args=dict( controller='admin_toolshed', action='browse_repositories', operation='activate or reinstall' ) ) ]
+ allow_multiple=False,
+ target=None,
+ url_args=dict( controller='admin_toolshed',
+ action='browse_repositories',
+ operation='activate or reinstall' ) ) ]
standard_filters = []
default_filter = dict( deleted="False" )
num_rows_per_page = 50
diff -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -224,8 +224,9 @@
def get_installed_and_missing_repository_dependencies_for_new_install( trans, repo_info_tuple ):
"""
- Parse the received repository_dependencies dictionary that is associated with a repository being installed into Galaxy for the first time
- and attempt to determine repository dependencies that are already installed and those that are not.
+ Parse the received repository_dependencies dictionary that is associated with a repository being
+ installed into Galaxy for the first time and attempt to determine repository dependencies that are
+ already installed and those that are not.
"""
missing_repository_dependencies = {}
installed_repository_dependencies = {}
@@ -236,20 +237,25 @@
if repository_dependencies:
description = repository_dependencies[ 'description' ]
root_key = repository_dependencies[ 'root_key' ]
- # The repository dependencies container will include only the immediate repository dependencies of this repository, so the container will be
- # only a single level in depth.
+ # The repository dependencies container will include only the immediate repository dependencies of
+ # this repository, so the container will be only a single level in depth.
for key, rd_tups in repository_dependencies.items():
if key in [ 'description', 'root_key' ]:
continue
for rd_tup in rd_tups:
tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
common_util.parse_repository_dependency_tuple( rd_tup )
- # Updates to installed repository revisions may have occurred, so make sure to locate the appropriate repository revision if one exists.
- # We need to create a temporary repo_info_tuple that includes the correct repository owner which we get from the current rd_tup. The current
- # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td )
+ # Updates to installed repository revisions may have occurred, so make sure to locate the
+ # appropriate repository revision if one exists. We need to create a temporary repo_info_tuple
+ # that includes the correct repository owner which we get from the current rd_tup. The current
+ # tuple looks like: ( description, repository_clone_url, changeset_revision, ctx_rev, repository_owner,
+ # repository_dependencies, installed_td )
tmp_clone_url = suc.generate_clone_url_from_repo_info_tup( rd_tup )
tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
- repository, installed_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed, name, tmp_repo_info_tuple )
+ repository, installed_changeset_revision = suc.repository_was_previously_installed( trans,
+ tool_shed,
+ name,
+ tmp_repo_info_tuple )
if repository:
new_rd_tup = [ tool_shed,
name,
@@ -263,10 +269,11 @@
if new_rd_tup not in installed_rd_tups:
installed_rd_tups.append( new_rd_tup )
else:
- # A repository dependency that is not installed will not be considered missing if it's value for only_if_compiling_contained_td is
- # True This is because this type of repository dependency will only be considered at the time that the specified tool dependency
- # is being installed, and even then only if the compiled binary of the tool dependency could not be installed due to the unsupported
- # installation environment.
+                        # A repository dependency that is not installed will not be considered missing if its value
+                        # for only_if_compiling_contained_td is True. This is because this type of repository dependency
+ # will only be considered at the time that the specified tool dependency is being installed, and
+ # even then only if the compiled binary of the tool dependency could not be installed due to the
+ # unsupported installation environment.
if not util.asbool( only_if_compiling_contained_td ):
if new_rd_tup not in missing_rd_tups:
missing_rd_tups.append( new_rd_tup )
@@ -280,8 +287,8 @@
None,
'Never installed' ]
if not util.asbool( only_if_compiling_contained_td ):
+                    # A repository dependency that is not installed will not be considered missing if its value for
- # True - see above...
+ # A repository dependency that is not installed will not be considered missing if it's value for
+ # only_if_compiling_contained_td is True - see above...
if new_rd_tup not in missing_rd_tups:
missing_rd_tups.append( new_rd_tup )
if installed_rd_tups:
@@ -295,7 +302,10 @@
return installed_repository_dependencies, missing_repository_dependencies
def get_installed_and_missing_tool_dependencies_for_installing_repository( trans, tool_shed_url, tool_dependencies_dict ):
- """Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy."""
+ """
+ Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories
+ being installed into Galaxy.
+ """
installed_tool_dependencies = {}
missing_tool_dependencies = {}
if tool_dependencies_dict:
diff -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e -r be5f0c5df96fba1a12e3e96fdb6f1805703f0b73 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1622,10 +1622,12 @@
def repository_was_previously_installed( trans, tool_shed_url, repository_name, repo_info_tuple ):
"""
- Find out if a repository is already installed into Galaxy - there are several scenarios where this is necessary. For example, this method
- will handle the case where the repository was previously installed using an older changeset_revsion, but later the repository was updated
- in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one that was
- previously installed. We'll look in the database instead of on disk since the repository may be currently uninstalled.
+ Find out if a repository is already installed into Galaxy - there are several scenarios where this
+ is necessary. For example, this method will handle the case where the repository was previously
+    installed using an older changeset_revision, but later the repository was updated in the tool shed
+ and now we're trying to install the latest changeset revision of the same repository instead of
+ updating the one that was previously installed. We'll look in the database instead of on disk since
+ the repository may be currently uninstalled.
"""
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
get_repo_info_tuple_contents( repo_info_tuple )
@@ -1638,8 +1640,8 @@
changeset_revision )
if tool_shed_repository:
return tool_shed_repository, changeset_revision
- # Get all previous changeset revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
- # revision to see if it was previously installed using one of them.
+ # Get all previous changeset revisions from the tool shed for the repository back to, but excluding,
+ # the previous valid changeset revision to see if it was previously installed using one of them.
url = url_join( tool_shed_url,
'repository/previous_changeset_revisions?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
( url_for( '/', qualified=True ), repository_name, repository_owner, changeset_revision ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: Visualizations Registry: update unit test to remove expected visualizations, small fixes
by commits-noreply@bitbucket.org 14 Jan '14
by commits-noreply@bitbucket.org 14 Jan '14
14 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8ac00a14c7fd/
Changeset: 8ac00a14c7fd
User: carlfeberhard
Date: 2014-01-14 21:52:01
Summary: Visualizations Registry: update unit test to remove expected visualizations, small fixes
Affected #: 4 files
diff -r c5fbdfd31b387056d1dfdad5e9e356c0020c5cec -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e lib/galaxy/visualization/registry.py
--- a/lib/galaxy/visualization/registry.py
+++ b/lib/galaxy/visualization/registry.py
@@ -19,8 +19,6 @@
__TODO__ = """
BUGS:
- anon users clicking a viz link gets 'must be' msg in galaxy_main (w/ masthead)
- should not show visualizations (no icon)?
newick files aren't being sniffed prop? - datatype is txt
have parsers create objects instead of dicts
@@ -29,12 +27,9 @@
some confused vocabulary in docs, var names
tests:
anding, grouping, not
- has_dataprovider
user is admin
data_sources:
lists of
-add description element to visualization.
-
user_pref for ordering/ex/inclusion of particular visualizations
"""
@@ -258,6 +253,7 @@
if test_fn( target_object, test_result ):
#log.debug( '\t test passed' )
return True
+
return False
def get_visualization_url( self, trans, target_object, visualization_name, param_data ):
diff -r c5fbdfd31b387056d1dfdad5e9e356c0020c5cec -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e static/scripts/mvc/dataset/hda-edit.js
--- a/static/scripts/mvc/dataset/hda-edit.js
+++ b/static/scripts/mvc/dataset/hda-edit.js
@@ -250,7 +250,7 @@
});
// No need for popup menu because there's a single visualization.
- if( _.keys( visualizations ).length === 1 ) {
+ if( visualizations.length === 1 ) {
var onlyVisualization = visualizations[0];
$icon.attr( 'data-original-title', _l( 'Visualize in ' ) + onlyVisualization.html );
$icon.attr( 'href', onlyVisualization.href );
diff -r c5fbdfd31b387056d1dfdad5e9e356c0020c5cec -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e static/scripts/packed/mvc/dataset/hda-edit.js
--- a/static/scripts/packed/mvc/dataset/hda-edit.js
+++ b/static/scripts/packed/mvc/dataset/hda-edit.js
@@ -1,1 +1,1 @@
-define(["mvc/dataset/hda-model","mvc/dataset/hda-base"],function(d,a){var f=a.HDABaseView.extend(LoggableMixin).extend({initialize:function(g){a.HDABaseView.prototype.initialize.call(this,g);this.hasUser=g.hasUser;this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton];this.tagsEditorShown=g.tagsEditorShown||false;this.annotationEditorShown=g.annotationEditorShown||false},_render_titleButtons:function(){return a.HDABaseView.prototype._render_titleButtons.call(this).concat([this._render_editButton(),this._render_deleteButton()])},_render_editButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.DISCARDED)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var i=this.model.get("purged"),g=this.model.get("deleted"),h={title:_l("Edit attributes"),href:this.urls.edit,target:this.linkTarget,classes:"dataset-edit"};if(g||i){h.disabled=true;if(i){h.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(g){h.title=_l("Undelete dataset to edit attributes")}}}else{if(this.model.get("state")===d.HistoryDatasetAssociation.STATES.UPLOAD){h.disabled=true;h.title=_l("This dataset must finish uploading before it can be edited")}}h.faIcon="fa-pencil";return faIconButton(h)},_render_deleteButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var g=this,h={title:_l("Delete"),classes:"dataset-delete",onclick:function(){g.$el.find(".icon-btn.dataset-delete").trigger("mouseout");g.model["delete"]()}};if(this.model.get("deleted")||this.model.get("purged")){h={title:_l("Dataset is already deleted"),disabled:true}}h.faIcon="fa-times";return 
faIconButton(h)},_render_errButton:function(){if(this.model.get("state")!==d.HistoryDatasetAssociation.STATES.ERROR){return null}return faIconButton({title:_l("View or report this error"),href:this.urls.report_error,classes:"dataset-report-error-btn",target:this.linkTarget,faIcon:"fa-bug"})},_render_rerunButton:function(){return faIconButton({title:_l("Run this job again"),href:this.urls.rerun,classes:"dataset-rerun-btn",target:this.linkTarget,faIcon:"fa-refresh"})},_render_visualizationsButton:function(){var n=this.model.get("visualizations");if((!this.hasUser)||(!this.model.hasData())||(_.isEmpty(n))){return null}if(_.isObject(n[0])){return this._render_visualizationsFrameworkButton(n)}if(!this.urls.visualization){return null}var k=this.model.get("dbkey"),g=this.urls.visualization,j={},h={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(k){h.dbkey=k}var l=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});var m=this;function i(p){switch(p){case"trackster":return b(g,h,k);case"scatterplot":return e(g,h,m.linkTarget);default:return function(){Galaxy.frame.add({title:"Visualization",type:"url",content:g+"/"+p+"?"+$.param(h)})}}}function o(p){return p.charAt(0).toUpperCase()+p.slice(1)}if(n.length===1){l.attr("data-original-title",_l("Visualize in ")+_l(o(n[0])));l.click(i(n[0]))}else{_.each(n,function(p){j[_l(o(p))]=i(p)});make_popupmenu(l,j)}return l},_render_visualizationsFrameworkButton:function(g){if(!(this.model.hasData())||!(g&&!_.isEmpty(g))){return null}var i=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});if(_.keys(g).length===1){var h=g[0];i.attr("data-original-title",_l("Visualize in ")+h.html);i.attr("href",h.href)}else{var j=[];_.each(g,function(k){k.func=function(){if(Galaxy.frame.active){Galaxy.frame.add({title:"Visualization",type:"url",content:k.href});return false}return true};j.push(k);return false});PopupMenu.create(i,j)}return 
i},_buildNewRender:function(){var g=a.HDABaseView.prototype._buildNewRender.call(this);g.find(".dataset-deleted-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-undelete">here</a> to undelete it or <a href="javascript:void(0);" class="dataset-purge">here</a> to immediately remove it from disk'));g.find(".dataset-hidden-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-unhide">here</a> to unhide it'));return g},_render_body_failed_metadata:function(){var h=$("<a/>").attr({href:this.urls.edit,target:this.linkTarget}).text(_l("set it manually or retry auto-detection")),g=$("<span/>").text(". "+_l("You may be able to")+" ").append(h),i=a.HDABaseView.prototype._render_body_failed_metadata.call(this);i.find(".warningmessagesmall strong").append(g);return i},_render_body_error:function(){var g=a.HDABaseView.prototype._render_body_error.call(this);g.find(".dataset-actions .left").prepend(this._render_errButton());return g},_render_body_ok:function(){var g=a.HDABaseView.prototype._render_body_ok.call(this);if(this.model.isDeletedOrPurged()){return g}this.makeDbkeyEditLink(g);if(this.hasUser){g.find(".dataset-actions .left").append(this._render_visualizationsButton());this._renderTags(g);this._renderAnnotation(g)}return g},_renderTags:function(g){var h=this;this.tagsEditor=new TagsEditor({model:this.model,el:g.find(".tags-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.tagsEditorShown=true},onhide:function(){h.tagsEditorShown=false},$activator:faIconButton({title:_l("Edit dataset tags"),classes:"dataset-tag-btn",faIcon:"fa-tags"}).appendTo(g.find(".dataset-actions .right"))});if(this.tagsEditorShown){this.tagsEditor.toggle(true)}},_renderAnnotation:function(g){var h=this;this.annotationEditor=new 
AnnotationEditor({model:this.model,el:g.find(".annotation-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.annotationEditorShown=true},onhide:function(){h.annotationEditorShown=false},$activator:faIconButton({title:_l("Edit dataset annotation"),classes:"dataset-annotate-btn",faIcon:"fa-comment"}).appendTo(g.find(".dataset-actions .right"))});if(this.annotationEditorShown){this.annotationEditor.toggle(true)}},makeDbkeyEditLink:function(h){if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){var g=$('<a class="value">?</a>').attr("href",this.urls.edit).attr("target",this.linkTarget);h.find(".dataset-dbkey .value").replaceWith(g)}},events:_.extend(_.clone(a.HDABaseView.prototype.events),{"click .dataset-undelete":function(g){this.model.undelete();return false},"click .dataset-unhide":function(g){this.model.unhide();return false},"click .dataset-purge":"confirmPurge"}),confirmPurge:function c(g){this.model.purge();return false},toString:function(){var g=(this.model)?(this.model+""):("(no model)");return"HDAView("+g+")"}});function e(g,i,h){action=function(){Galaxy.frame.add({title:"Scatterplot",type:"url",content:g+"/scatterplot?"+$.param(i),target:h,scratchbook:true});$("div.popmenu-wrapper").remove();return false};return action}function b(g,i,h){return function(){var j={};if(h){j["f-dbkey"]=h}$.ajax({url:g+"/list_tracks?"+$.param(j),dataType:"html",error:function(){alert(("Could not add this dataset to browser")+".")},success:function(k){var l=window.parent;l.Galaxy.modal.show({title:"View Data in a New or Saved Visualization",buttons:{Cancel:function(){l.Galaxy.modal.hide()},"View in saved visualization":function(){l.Galaxy.modal.show({title:"Add Data to Saved Visualization",body:k,buttons:{Cancel:function(){l.Galaxy.modal.hide()},"Add to visualization":function(){$(l.document).find("input[name=id]:checked").each(function(){l.Galaxy.modal.hide();var 
m=$(this).val();i.id=m;l.Galaxy.frame.add({title:"Trackster",type:"url",content:g+"/trackster?"+$.param(i),scratchbook:true})})}}})},"View in new visualization":function(){l.Galaxy.modal.hide();var m=g+"/trackster?"+$.param(i);l.Galaxy.frame.add({title:"Trackster",type:"url",content:m,scratchbook:true})}}})}});return false}}return{HDAEditView:f}});
\ No newline at end of file
+define(["mvc/dataset/hda-model","mvc/dataset/hda-base"],function(d,a){var f=a.HDABaseView.extend(LoggableMixin).extend({initialize:function(g){a.HDABaseView.prototype.initialize.call(this,g);this.hasUser=g.hasUser;this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton];this.tagsEditorShown=g.tagsEditorShown||false;this.annotationEditorShown=g.annotationEditorShown||false},_render_titleButtons:function(){return a.HDABaseView.prototype._render_titleButtons.call(this).concat([this._render_editButton(),this._render_deleteButton()])},_render_editButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.DISCARDED)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var i=this.model.get("purged"),g=this.model.get("deleted"),h={title:_l("Edit attributes"),href:this.urls.edit,target:this.linkTarget,classes:"dataset-edit"};if(g||i){h.disabled=true;if(i){h.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(g){h.title=_l("Undelete dataset to edit attributes")}}}else{if(this.model.get("state")===d.HistoryDatasetAssociation.STATES.UPLOAD){h.disabled=true;h.title=_l("This dataset must finish uploading before it can be edited")}}h.faIcon="fa-pencil";return faIconButton(h)},_render_deleteButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var g=this,h={title:_l("Delete"),classes:"dataset-delete",onclick:function(){g.$el.find(".icon-btn.dataset-delete").trigger("mouseout");g.model["delete"]()}};if(this.model.get("deleted")||this.model.get("purged")){h={title:_l("Dataset is already deleted"),disabled:true}}h.faIcon="fa-times";return 
faIconButton(h)},_render_errButton:function(){if(this.model.get("state")!==d.HistoryDatasetAssociation.STATES.ERROR){return null}return faIconButton({title:_l("View or report this error"),href:this.urls.report_error,classes:"dataset-report-error-btn",target:this.linkTarget,faIcon:"fa-bug"})},_render_rerunButton:function(){return faIconButton({title:_l("Run this job again"),href:this.urls.rerun,classes:"dataset-rerun-btn",target:this.linkTarget,faIcon:"fa-refresh"})},_render_visualizationsButton:function(){var n=this.model.get("visualizations");if((!this.hasUser)||(!this.model.hasData())||(_.isEmpty(n))){return null}if(_.isObject(n[0])){return this._render_visualizationsFrameworkButton(n)}if(!this.urls.visualization){return null}var k=this.model.get("dbkey"),g=this.urls.visualization,j={},h={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(k){h.dbkey=k}var l=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});var m=this;function i(p){switch(p){case"trackster":return b(g,h,k);case"scatterplot":return e(g,h,m.linkTarget);default:return function(){Galaxy.frame.add({title:"Visualization",type:"url",content:g+"/"+p+"?"+$.param(h)})}}}function o(p){return p.charAt(0).toUpperCase()+p.slice(1)}if(n.length===1){l.attr("data-original-title",_l("Visualize in ")+_l(o(n[0])));l.click(i(n[0]))}else{_.each(n,function(p){j[_l(o(p))]=i(p)});make_popupmenu(l,j)}return l},_render_visualizationsFrameworkButton:function(g){if(!(this.model.hasData())||!(g&&!_.isEmpty(g))){return null}var i=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});if(g.length===1){var h=g[0];i.attr("data-original-title",_l("Visualize in ")+h.html);i.attr("href",h.href)}else{var j=[];_.each(g,function(k){k.func=function(){if(Galaxy.frame.active){Galaxy.frame.add({title:"Visualization",type:"url",content:k.href});return false}return true};j.push(k);return false});PopupMenu.create(i,j)}return i},_buildNewRender:function(){var 
g=a.HDABaseView.prototype._buildNewRender.call(this);g.find(".dataset-deleted-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-undelete">here</a> to undelete it or <a href="javascript:void(0);" class="dataset-purge">here</a> to immediately remove it from disk'));g.find(".dataset-hidden-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-unhide">here</a> to unhide it'));return g},_render_body_failed_metadata:function(){var h=$("<a/>").attr({href:this.urls.edit,target:this.linkTarget}).text(_l("set it manually or retry auto-detection")),g=$("<span/>").text(". "+_l("You may be able to")+" ").append(h),i=a.HDABaseView.prototype._render_body_failed_metadata.call(this);i.find(".warningmessagesmall strong").append(g);return i},_render_body_error:function(){var g=a.HDABaseView.prototype._render_body_error.call(this);g.find(".dataset-actions .left").prepend(this._render_errButton());return g},_render_body_ok:function(){var g=a.HDABaseView.prototype._render_body_ok.call(this);if(this.model.isDeletedOrPurged()){return g}this.makeDbkeyEditLink(g);if(this.hasUser){g.find(".dataset-actions .left").append(this._render_visualizationsButton());this._renderTags(g);this._renderAnnotation(g)}return g},_renderTags:function(g){var h=this;this.tagsEditor=new TagsEditor({model:this.model,el:g.find(".tags-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.tagsEditorShown=true},onhide:function(){h.tagsEditorShown=false},$activator:faIconButton({title:_l("Edit dataset tags"),classes:"dataset-tag-btn",faIcon:"fa-tags"}).appendTo(g.find(".dataset-actions .right"))});if(this.tagsEditorShown){this.tagsEditor.toggle(true)}},_renderAnnotation:function(g){var h=this;this.annotationEditor=new AnnotationEditor({model:this.model,el:g.find(".annotation-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.annotationEditorShown=true},onhide:function(){h.annotationEditorShown=false},$activator:faIconButton({title:_l("Edit dataset 
annotation"),classes:"dataset-annotate-btn",faIcon:"fa-comment"}).appendTo(g.find(".dataset-actions .right"))});if(this.annotationEditorShown){this.annotationEditor.toggle(true)}},makeDbkeyEditLink:function(h){if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){var g=$('<a class="value">?</a>').attr("href",this.urls.edit).attr("target",this.linkTarget);h.find(".dataset-dbkey .value").replaceWith(g)}},events:_.extend(_.clone(a.HDABaseView.prototype.events),{"click .dataset-undelete":function(g){this.model.undelete();return false},"click .dataset-unhide":function(g){this.model.unhide();return false},"click .dataset-purge":"confirmPurge"}),confirmPurge:function c(g){this.model.purge();return false},toString:function(){var g=(this.model)?(this.model+""):("(no model)");return"HDAView("+g+")"}});function e(g,i,h){action=function(){Galaxy.frame.add({title:"Scatterplot",type:"url",content:g+"/scatterplot?"+$.param(i),target:h,scratchbook:true});$("div.popmenu-wrapper").remove();return false};return action}function b(g,i,h){return function(){var j={};if(h){j["f-dbkey"]=h}$.ajax({url:g+"/list_tracks?"+$.param(j),dataType:"html",error:function(){alert(("Could not add this dataset to browser")+".")},success:function(k){var l=window.parent;l.Galaxy.modal.show({title:"View Data in a New or Saved Visualization",buttons:{Cancel:function(){l.Galaxy.modal.hide()},"View in saved visualization":function(){l.Galaxy.modal.show({title:"Add Data to Saved Visualization",body:k,buttons:{Cancel:function(){l.Galaxy.modal.hide()},"Add to visualization":function(){$(l.document).find("input[name=id]:checked").each(function(){l.Galaxy.modal.hide();var m=$(this).val();i.id=m;l.Galaxy.frame.add({title:"Trackster",type:"url",content:g+"/trackster?"+$.param(i),scratchbook:true})})}}})},"View in new visualization":function(){l.Galaxy.modal.hide();var m=g+"/trackster?"+$.param(i);l.Galaxy.frame.add({title:"Trackster",type:"url",content:m,scratchbook:true})}}})}});return 
false}}return{HDAEditView:f}});
\ No newline at end of file
diff -r c5fbdfd31b387056d1dfdad5e9e356c0020c5cec -r 8ac00a14c7fd8fcf263184aeab4786a66cdb4b0e test/unit/visualizations/registry/test_VisualizationsRegistry.py
--- a/test/unit/visualizations/registry/test_VisualizationsRegistry.py
+++ b/test/unit/visualizations/registry/test_VisualizationsRegistry.py
@@ -49,18 +49,8 @@
template_cache_dir=template_cache_dir )
expected_plugins_path = os.path.join( glx_dir, vis_reg_path )
- expected_plugin_names = [
- 'circster',
- 'graphview',
- 'phyloviz',
- 'scatterplot',
- 'sweepster',
- 'trackster',
- ]
-
self.assertEqual( plugin_mgr.base_url, 'visualizations' )
self.assertItemsEqual( plugin_mgr.directories, [ expected_plugins_path ] )
- self.assertItemsEqual( plugin_mgr.plugins.keys(), expected_plugin_names )
scatterplot = plugin_mgr.plugins[ 'scatterplot' ]
self.assertEqual( scatterplot.name, 'scatterplot' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8244a5142f90/
Changeset: 8244a5142f90
User: jmchilton
Date: 2014-01-14 21:43:39
Summary: Add tool execution unit test for rerun_remapping.
Affected #: 2 files
diff -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 -r 8244a5142f90a0d7f1c8b2f2f8886e7d30e0240d test/unit/tools/test_execution.py
--- a/test/unit/tools/test_execution.py
+++ b/test/unit/tools/test_execution.py
@@ -89,6 +89,18 @@
runtool_btn="dummy",
)
assert template == "tool_executed.mako"
+ # Didn't specify a rerun_remap_id so this should be None
+ assert self.tool_action.execution_call_args[ 0 ][ "rerun_remap_job_id" ] is None
+
+ def test_remap_job( self ):
+ self.__init_tool( SIMPLE_TOOL_CONTENTS )
+ template, template_vars = self.__handle_with_incoming(
+ param1="moo",
+ rerun_remap_job_id=self.app.security.encode_id(123),
+ runtool_btn="dummy",
+ )
+ assert template == "tool_executed.mako"
+ assert self.tool_action.execution_call_args[ 0 ][ "rerun_remap_job_id" ] == 123
def test_repeat_state_updates( self ):
self.__init_tool( REPEAT_TOOL_CONTENTS )
diff -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 -r 8244a5142f90a0d7f1c8b2f2f8886e7d30e0240d test/unit/tools_support.py
--- a/test/unit/tools_support.py
+++ b/test/unit/tools_support.py
@@ -9,6 +9,7 @@
import shutil
from galaxy.util.bunch import Bunch
+from galaxy.web.security import SecurityHelper
import galaxy.model
from galaxy.model import mapping
@@ -54,6 +55,7 @@
self.model[ module_member_name ] = module_member
self.toolbox = None
self.object_store = None
+ self.security = SecurityHelper(id_secret="testing")
class MockContext(object):
https://bitbucket.org/galaxy/galaxy-central/commits/c5fbdfd31b38/
Changeset: c5fbdfd31b38
User: jmchilton
Date: 2014-01-14 21:43:39
Summary: Refactor tool so incoming dict isn't passed to __handle_tool_execute.
Slightly confusing that state params and raw incoming were both passed to that method, so pull out rerun_remap_job_id sooner and just pass that along (it was the only thing incoming was used for). Use the opportunity to isolate potential errors with decoding rerun_remap_job_id and include a more informative error message.
Add unit test to test invalid rerun_remap_job_ids.
Affected #: 2 files
diff -r 8244a5142f90a0d7f1c8b2f2f8886e7d30e0240d -r c5fbdfd31b387056d1dfdad5e9e356c0020c5cec lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1847,6 +1847,14 @@
that is why this is not just called for_api.
"""
all_pages = ( process_state == "populate" ) # If process_state = update, handle all pages at once.
+ rerun_remap_job_id = None
+ if 'rerun_remap_job_id' in incoming:
+ try:
+ rerun_remap_job_id = trans.app.security.decode_id( incoming[ 'rerun_remap_job_id' ] )
+ except Exception:
+ message = 'Failure executing tool (attempting to rerun invalid job).'
+ return 'message.mako', dict( status='error', message=message, refresh_frames=[] )
+
state, state_new = self.__fetch_state( trans, incoming, history, all_pages=all_pages )
if state_new:
# This feels a bit like a hack. It allows forcing full processing
@@ -1874,7 +1882,7 @@
return "tool_form.mako", dict( errors=errors, tool_state=state, incoming=incoming, error_message=error_message )
# If we've completed the last page we can execute the tool
elif all_pages or state.page == self.last_page:
- return self.__handle_tool_execute( trans, incoming, params, history )
+ return self.__handle_tool_execute( trans, rerun_remap_job_id, params, history )
# Otherwise move on to the next page
else:
return self.__handle_page_advance( trans, state, errors )
@@ -1882,11 +1890,8 @@
def __should_refresh_state( self, incoming ):
return not( 'runtool_btn' in incoming or 'URL' in incoming or 'ajax_upload' in incoming )
- def __handle_tool_execute( self, trans, incoming, params, history ):
+ def __handle_tool_execute( self, trans, rerun_remap_job_id, params, history ):
try:
- rerun_remap_job_id = None
- if 'rerun_remap_job_id' in incoming:
- rerun_remap_job_id = trans.app.security.decode_id(incoming['rerun_remap_job_id'])
_, out_data = self.execute( trans, incoming=params, history=history, rerun_remap_job_id=rerun_remap_job_id )
except httpexceptions.HTTPFound, e:
#if it's a paste redirect exception, pass it up the stack
diff -r 8244a5142f90a0d7f1c8b2f2f8886e7d30e0240d -r c5fbdfd31b387056d1dfdad5e9e356c0020c5cec test/unit/tools/test_execution.py
--- a/test/unit/tools/test_execution.py
+++ b/test/unit/tools/test_execution.py
@@ -102,6 +102,17 @@
assert template == "tool_executed.mako"
assert self.tool_action.execution_call_args[ 0 ][ "rerun_remap_job_id" ] == 123
+ def test_invalid_remap_job( self ):
+ self.__init_tool( SIMPLE_TOOL_CONTENTS )
+ template, template_vars = self.__handle_with_incoming(
+ param1="moo",
+ rerun_remap_job_id='123', # Not encoded
+ runtool_btn="dummy",
+ )
+ assert template == "message.mako"
+ assert template_vars[ "status" ] == "error"
+ assert "invalid job" in template_vars[ "message" ]
+
def test_repeat_state_updates( self ):
self.__init_tool( REPEAT_TOOL_CONTENTS )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: carlfeberhard: Visualizations Registry: remove linkText config val (use visualization name attribute), change render_location to render_target, disable circster; fix rendering of visualization button in HDA when only one visualization
by commits-noreply@bitbucket.org 14 Jan '14
by commits-noreply@bitbucket.org 14 Jan '14
14 Jan '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bb0b6a7e73d7/
Changeset: bb0b6a7e73d7
User: carlfeberhard
Date: 2014-01-14 21:26:44
Summary: Visualizations Registry: remove linkText config val (use visualization name attribute), change render_location to render_target, disable circster; fix rendering of visualization button in HDA when only one visualization
Affected #: 9 files
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 config/plugins/visualizations/circster/config/circster.xml
--- a/config/plugins/visualizations/circster/config/circster.xml
+++ b/config/plugins/visualizations/circster/config/circster.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE visualization SYSTEM "../../visualization.dtd">
-<visualization name="circster">
+<visualization name="Circster" disabled="true"><data_sources><data_source><model_class>HistoryDatasetAssociation</model_class>
@@ -24,5 +24,5 @@
<!-- template_root and template are currently ignored for the 'built-in' visualizations --><template_root>webapps/galaxy/visualization</template_root><template>circster.mako</template>
- <render_location>_top</render_location>
+ <render_target>_top</render_target></visualization>
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 config/plugins/visualizations/phyloviz/config/phyloviz.xml
--- a/config/plugins/visualizations/phyloviz/config/phyloviz.xml
+++ b/config/plugins/visualizations/phyloviz/config/phyloviz.xml
@@ -1,11 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE visualization SYSTEM "../../visualization.dtd">
-<visualization name="phyloviz">
+<visualization name="Phyloviz"><data_sources><data_source><model_class>HistoryDatasetAssociation</model_class><test type="isinstance" test_attr="datatype" result_type="datatype">data.Newick</test><test type="isinstance" test_attr="datatype" result_type="datatype">data.Nexus</test>
+ <test type="isinstance" test_attr="datatype" result_type="datatype">xml.Phyloxml</test><to_param param_attr="id">dataset_id</to_param></data_source></data_sources>
@@ -16,5 +17,5 @@
<!-- template_root and template are currently ignored for the 'built-in' visualizations --><template_root>webapps/galaxy/visualization</template_root><template>phyloviz.mako</template>
- <render_location>_top</render_location>
+ <render_target>_top</render_target></visualization>
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 config/plugins/visualizations/scatterplot/config/scatterplot.xml
--- a/config/plugins/visualizations/scatterplot/config/scatterplot.xml
+++ b/config/plugins/visualizations/scatterplot/config/scatterplot.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE visualization SYSTEM "../../visualization.dtd">
-<visualization name="scatterplot">
+<visualization name="Scatterplot"><data_sources><data_source><model_class>HistoryDatasetAssociation</model_class>
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 config/plugins/visualizations/sweepster/config/sweepster.xml
--- a/config/plugins/visualizations/sweepster/config/sweepster.xml
+++ b/config/plugins/visualizations/sweepster/config/sweepster.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE visualization SYSTEM "../../visualization.dtd">
-<visualization name="sweepster" disabled="true">
+<visualization name="Sweepster" disabled="true"><data_sources><data_source><model_class>HistoryDatasetAssociation</model_class>
@@ -23,5 +23,5 @@
<!-- template_root and template are currently ignored for the 'built-in' visualizations --><template_root>webapps/galaxy/visualization</template_root><template>sweepster.mako</template>
- <render_location>_top</render_location>
+ <render_target>_top</render_target></visualization>
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 config/plugins/visualizations/trackster/config/trackster.xml
--- a/config/plugins/visualizations/trackster/config/trackster.xml
+++ b/config/plugins/visualizations/trackster/config/trackster.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE visualization SYSTEM "../../visualization.dtd">
-<visualization name="trackster">
+<visualization name="Trackster"><!--not tested yet --><data_sources><data_source>
@@ -26,5 +26,5 @@
<!-- template_root and template are currently ignored for the 'built-in' visualizations --><template_root>webapps/galaxy/visualization/tracks</template_root><template>browser.mako</template>
- <render_location>_top</render_location>
+ <render_target>_top</render_target></visualization>
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 config/plugins/visualizations/visualization.dtd
--- a/config/plugins/visualizations/visualization.dtd
+++ b/config/plugins/visualizations/visualization.dtd
@@ -1,7 +1,7 @@
<!-- each visualization must have a template (all other elements are optional) -->
-<!ELEMENT visualization (description*,data_sources*,params*,template_root*,template,link_text*,render_location*)>
+<!ELEMENT visualization (description*,data_sources*,params*,template_root*,template,render_target*)><!-- visualization
- name: some name for the visualization (e.g. 'trackster', 'scatterplot', etc.) REQUIRED
+ name: the title/display name of the visualization (e.g. 'Trackster', 'Fastq Stats', etc.) REQUIRED
disabled: if included (value does not matter), this attribute will prevent the visualization being loaded
--><!ATTLIST visualization
@@ -128,10 +128,8 @@
<!ELEMENT template_root (#PCDATA)><!-- template: the template used to render the visualization. REQUIRED --><!ELEMENT template (#PCDATA)>
-<!-- link_text: the text component of an html anchor displayed when the registry builds the link information -->
-<!ELEMENT link_text (#PCDATA)>
-<!-- render_location: used as the target attribute of the link to the visualization.
+<!-- render_target: used as the target attribute of the link to the visualization.
Can be 'galaxy_main', '_top', '_blank'. DEFAULT: 'galaxy_main'
--><!-- TODO: rename -> render_target -->
-<!ELEMENT render_location (#PCDATA)>
+<!ELEMENT render_target (#PCDATA)>
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 lib/galaxy/visualization/registry.py
--- a/lib/galaxy/visualization/registry.py
+++ b/lib/galaxy/visualization/registry.py
@@ -219,16 +219,13 @@
param_data = data_source[ 'to_params' ]
url = self.get_visualization_url( trans, target_object, visualization_name, param_data )
- link_text = visualization.config.get( 'link_text', None )
- if not link_text:
- # default to visualization name, titlecase, and replace underscores
- link_text = visualization_name.title().replace( '_', ' ' )
- render_location = visualization.config.get( 'render_location', 'galaxy_main' )
+ display_name = visualization.config.get( 'name', None )
+ render_target = visualization.config.get( 'render_target', 'galaxy_main' )
# remap some of these vars for direct use in ui.js, PopupMenu (e.g. text->html)
return {
'href' : url,
- 'html' : link_text,
- 'target': render_location
+ 'html' : display_name,
+ 'target': render_target
}
return None
@@ -362,7 +359,7 @@
-- what provides the data
-- what information needs to be added to the query string
"""
- VALID_RENDER_LOCATIONS = [ 'galaxy_main', '_top', '_blank' ]
+ VALID_RENDER_TARGETS = [ 'galaxy_main', '_top', '_blank' ]
def __init__( self, debug=False ):
self.debug = debug
@@ -397,6 +394,12 @@
if 'disabled' in xml_tree.attrib:
return None
+ # a text display name for end user links
+ returned[ 'name' ] = xml_tree.attrib.get( 'name', None )
+ if not returned[ 'name' ]:
+ raise ParsingException( 'visualization needs a name attribute' )
+ print returned[ 'name' ]
+
# a (for now) text description of what the visualization does
description = xml_tree.find( 'description' )
returned[ 'description' ] = description.text.strip() if description is not None else None
@@ -458,14 +461,14 @@
if link_text != None and link_text.text:
returned[ 'link_text' ] = link_text
- # render_location: where in the browser to open the rendered visualization
+ # render_target: where in the browser to open the rendered visualization
# defaults to: galaxy_main
- render_location = xml_tree.find( 'render_location' )
- if( ( render_location != None and render_location.text )
- and ( render_location.text in self.VALID_RENDER_LOCATIONS ) ):
- returned[ 'render_location' ] = render_location.text
+ render_target = xml_tree.find( 'render_target' )
+ if( ( render_target != None and render_target.text )
+ and ( render_target.text in self.VALID_RENDER_TARGETS ) ):
+ returned[ 'render_target' ] = render_target.text
else:
- returned[ 'render_location' ] = 'galaxy_main'
+ returned[ 'render_target' ] = 'galaxy_main'
# consider unifying the above into it's own element and parsing method
return returned
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 static/scripts/mvc/dataset/hda-edit.js
--- a/static/scripts/mvc/dataset/hda-edit.js
+++ b/static/scripts/mvc/dataset/hda-edit.js
@@ -251,8 +251,9 @@
// No need for popup menu because there's a single visualization.
if( _.keys( visualizations ).length === 1 ) {
- $icon.attr( 'title', _.keys( visualizations )[0] );
- $icon.attr( 'href', _.values( visualizations )[0] );
+ var onlyVisualization = visualizations[0];
+ $icon.attr( 'data-original-title', _l( 'Visualize in ' ) + onlyVisualization.html );
+ $icon.attr( 'href', onlyVisualization.href );
// >1: Populate menu dict with visualization fns, make the popupmenu
} else {
diff -r fb1c758d0eb990f2ce59de422d1184647f051cc8 -r bb0b6a7e73d7154377b414939b9fa44da1fb6306 static/scripts/packed/mvc/dataset/hda-edit.js
--- a/static/scripts/packed/mvc/dataset/hda-edit.js
+++ b/static/scripts/packed/mvc/dataset/hda-edit.js
@@ -1,1 +1,1 @@
-define(["mvc/dataset/hda-model","mvc/dataset/hda-base"],function(d,a){var f=a.HDABaseView.extend(LoggableMixin).extend({initialize:function(g){a.HDABaseView.prototype.initialize.call(this,g);this.hasUser=g.hasUser;this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton];this.tagsEditorShown=g.tagsEditorShown||false;this.annotationEditorShown=g.annotationEditorShown||false},_render_titleButtons:function(){return a.HDABaseView.prototype._render_titleButtons.call(this).concat([this._render_editButton(),this._render_deleteButton()])},_render_editButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.DISCARDED)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var i=this.model.get("purged"),g=this.model.get("deleted"),h={title:_l("Edit attributes"),href:this.urls.edit,target:this.linkTarget,classes:"dataset-edit"};if(g||i){h.disabled=true;if(i){h.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(g){h.title=_l("Undelete dataset to edit attributes")}}}else{if(this.model.get("state")===d.HistoryDatasetAssociation.STATES.UPLOAD){h.disabled=true;h.title=_l("This dataset must finish uploading before it can be edited")}}h.faIcon="fa-pencil";return faIconButton(h)},_render_deleteButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var g=this,h={title:_l("Delete"),classes:"dataset-delete",onclick:function(){g.$el.find(".icon-btn.dataset-delete").trigger("mouseout");g.model["delete"]()}};if(this.model.get("deleted")||this.model.get("purged")){h={title:_l("Dataset is already deleted"),disabled:true}}h.faIcon="fa-times";return 
faIconButton(h)},_render_errButton:function(){if(this.model.get("state")!==d.HistoryDatasetAssociation.STATES.ERROR){return null}return faIconButton({title:_l("View or report this error"),href:this.urls.report_error,classes:"dataset-report-error-btn",target:this.linkTarget,faIcon:"fa-bug"})},_render_rerunButton:function(){return faIconButton({title:_l("Run this job again"),href:this.urls.rerun,classes:"dataset-rerun-btn",target:this.linkTarget,faIcon:"fa-refresh"})},_render_visualizationsButton:function(){var n=this.model.get("visualizations");if((!this.hasUser)||(!this.model.hasData())||(_.isEmpty(n))){return null}if(_.isObject(n[0])){return this._render_visualizationsFrameworkButton(n)}if(!this.urls.visualization){return null}var k=this.model.get("dbkey"),g=this.urls.visualization,j={},h={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(k){h.dbkey=k}var l=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});var m=this;function i(p){switch(p){case"trackster":return b(g,h,k);case"scatterplot":return e(g,h,m.linkTarget);default:return function(){Galaxy.frame.add({title:"Visualization",type:"url",content:g+"/"+p+"?"+$.param(h)})}}}function o(p){return p.charAt(0).toUpperCase()+p.slice(1)}if(n.length===1){l.attr("data-original-title",_l("Visualize in ")+_l(o(n[0])));l.click(i(n[0]))}else{_.each(n,function(p){j[_l(o(p))]=i(p)});make_popupmenu(l,j)}return l},_render_visualizationsFrameworkButton:function(g){if(!(this.model.hasData())||!(g&&!_.isEmpty(g))){return null}var h=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});if(_.keys(g).length===1){h.attr("title",_.keys(g)[0]);h.attr("href",_.values(g)[0])}else{var i=[];_.each(g,function(j){j.func=function(){if(Galaxy.frame.active){Galaxy.frame.add({title:"Visualization",type:"url",content:j.href});return false}return true};i.push(j);return false});PopupMenu.create(h,i)}return h},_buildNewRender:function(){var 
g=a.HDABaseView.prototype._buildNewRender.call(this);g.find(".dataset-deleted-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-undelete">here</a> to undelete it or <a href="javascript:void(0);" class="dataset-purge">here</a> to immediately remove it from disk'));g.find(".dataset-hidden-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-unhide">here</a> to unhide it'));return g},_render_body_failed_metadata:function(){var h=$("<a/>").attr({href:this.urls.edit,target:this.linkTarget}).text(_l("set it manually or retry auto-detection")),g=$("<span/>").text(". "+_l("You may be able to")+" ").append(h),i=a.HDABaseView.prototype._render_body_failed_metadata.call(this);i.find(".warningmessagesmall strong").append(g);return i},_render_body_error:function(){var g=a.HDABaseView.prototype._render_body_error.call(this);g.find(".dataset-actions .left").prepend(this._render_errButton());return g},_render_body_ok:function(){var g=a.HDABaseView.prototype._render_body_ok.call(this);if(this.model.isDeletedOrPurged()){return g}this.makeDbkeyEditLink(g);if(this.hasUser){g.find(".dataset-actions .left").append(this._render_visualizationsButton());this._renderTags(g);this._renderAnnotation(g)}return g},_renderTags:function(g){var h=this;this.tagsEditor=new TagsEditor({model:this.model,el:g.find(".tags-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.tagsEditorShown=true},onhide:function(){h.tagsEditorShown=false},$activator:faIconButton({title:_l("Edit dataset tags"),classes:"dataset-tag-btn",faIcon:"fa-tags"}).appendTo(g.find(".dataset-actions .right"))});if(this.tagsEditorShown){this.tagsEditor.toggle(true)}},_renderAnnotation:function(g){var h=this;this.annotationEditor=new AnnotationEditor({model:this.model,el:g.find(".annotation-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.annotationEditorShown=true},onhide:function(){h.annotationEditorShown=false},$activator:faIconButton({title:_l("Edit dataset 
annotation"),classes:"dataset-annotate-btn",faIcon:"fa-comment"}).appendTo(g.find(".dataset-actions .right"))});if(this.annotationEditorShown){this.annotationEditor.toggle(true)}},makeDbkeyEditLink:function(h){if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){var g=$('<a class="value">?</a>').attr("href",this.urls.edit).attr("target",this.linkTarget);h.find(".dataset-dbkey .value").replaceWith(g)}},events:_.extend(_.clone(a.HDABaseView.prototype.events),{"click .dataset-undelete":function(g){this.model.undelete();return false},"click .dataset-unhide":function(g){this.model.unhide();return false},"click .dataset-purge":"confirmPurge"}),confirmPurge:function c(g){this.model.purge();return false},toString:function(){var g=(this.model)?(this.model+""):("(no model)");return"HDAView("+g+")"}});function e(g,i,h){action=function(){Galaxy.frame.add({title:"Scatterplot",type:"url",content:g+"/scatterplot?"+$.param(i),target:h,scratchbook:true});$("div.popmenu-wrapper").remove();return false};return action}function b(g,i,h){return function(){var j={};if(h){j["f-dbkey"]=h}$.ajax({url:g+"/list_tracks?"+$.param(j),dataType:"html",error:function(){alert(("Could not add this dataset to browser")+".")},success:function(k){var l=window.parent;l.Galaxy.modal.show({title:"View Data in a New or Saved Visualization",buttons:{Cancel:function(){l.Galaxy.modal.hide()},"View in saved visualization":function(){l.Galaxy.modal.show({title:"Add Data to Saved Visualization",body:k,buttons:{Cancel:function(){l.Galaxy.modal.hide()},"Add to visualization":function(){$(l.document).find("input[name=id]:checked").each(function(){l.Galaxy.modal.hide();var m=$(this).val();i.id=m;l.Galaxy.frame.add({title:"Trackster",type:"url",content:g+"/trackster?"+$.param(i),scratchbook:true})})}}})},"View in new visualization":function(){l.Galaxy.modal.hide();var m=g+"/trackster?"+$.param(i);l.Galaxy.frame.add({title:"Trackster",type:"url",content:m,scratchbook:true})}}})}});return 
false}}return{HDAEditView:f}});
\ No newline at end of file
+define(["mvc/dataset/hda-model","mvc/dataset/hda-base"],function(d,a){var f=a.HDABaseView.extend(LoggableMixin).extend({initialize:function(g){a.HDABaseView.prototype.initialize.call(this,g);this.hasUser=g.hasUser;this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton];this.tagsEditorShown=g.tagsEditorShown||false;this.annotationEditorShown=g.annotationEditorShown||false},_render_titleButtons:function(){return a.HDABaseView.prototype._render_titleButtons.call(this).concat([this._render_editButton(),this._render_deleteButton()])},_render_editButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.DISCARDED)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var i=this.model.get("purged"),g=this.model.get("deleted"),h={title:_l("Edit attributes"),href:this.urls.edit,target:this.linkTarget,classes:"dataset-edit"};if(g||i){h.disabled=true;if(i){h.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(g){h.title=_l("Undelete dataset to edit attributes")}}}else{if(this.model.get("state")===d.HistoryDatasetAssociation.STATES.UPLOAD){h.disabled=true;h.title=_l("This dataset must finish uploading before it can be edited")}}h.faIcon="fa-pencil";return faIconButton(h)},_render_deleteButton:function(){if((this.model.get("state")===d.HistoryDatasetAssociation.STATES.NEW)||(this.model.get("state")===d.HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){return null}var g=this,h={title:_l("Delete"),classes:"dataset-delete",onclick:function(){g.$el.find(".icon-btn.dataset-delete").trigger("mouseout");g.model["delete"]()}};if(this.model.get("deleted")||this.model.get("purged")){h={title:_l("Dataset is already deleted"),disabled:true}}h.faIcon="fa-times";return 
faIconButton(h)},_render_errButton:function(){if(this.model.get("state")!==d.HistoryDatasetAssociation.STATES.ERROR){return null}return faIconButton({title:_l("View or report this error"),href:this.urls.report_error,classes:"dataset-report-error-btn",target:this.linkTarget,faIcon:"fa-bug"})},_render_rerunButton:function(){return faIconButton({title:_l("Run this job again"),href:this.urls.rerun,classes:"dataset-rerun-btn",target:this.linkTarget,faIcon:"fa-refresh"})},_render_visualizationsButton:function(){var n=this.model.get("visualizations");if((!this.hasUser)||(!this.model.hasData())||(_.isEmpty(n))){return null}if(_.isObject(n[0])){return this._render_visualizationsFrameworkButton(n)}if(!this.urls.visualization){return null}var k=this.model.get("dbkey"),g=this.urls.visualization,j={},h={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(k){h.dbkey=k}var l=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});var m=this;function i(p){switch(p){case"trackster":return b(g,h,k);case"scatterplot":return e(g,h,m.linkTarget);default:return function(){Galaxy.frame.add({title:"Visualization",type:"url",content:g+"/"+p+"?"+$.param(h)})}}}function o(p){return p.charAt(0).toUpperCase()+p.slice(1)}if(n.length===1){l.attr("data-original-title",_l("Visualize in ")+_l(o(n[0])));l.click(i(n[0]))}else{_.each(n,function(p){j[_l(o(p))]=i(p)});make_popupmenu(l,j)}return l},_render_visualizationsFrameworkButton:function(g){if(!(this.model.hasData())||!(g&&!_.isEmpty(g))){return null}var i=faIconButton({title:_l("Visualize"),classes:"dataset-visualize-btn",faIcon:"fa-bar-chart-o"});if(_.keys(g).length===1){var h=g[0];i.attr("data-original-title",_l("Visualize in ")+h.html);i.attr("href",h.href)}else{var j=[];_.each(g,function(k){k.func=function(){if(Galaxy.frame.active){Galaxy.frame.add({title:"Visualization",type:"url",content:k.href});return false}return true};j.push(k);return false});PopupMenu.create(i,j)}return 
i},_buildNewRender:function(){var g=a.HDABaseView.prototype._buildNewRender.call(this);g.find(".dataset-deleted-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-undelete">here</a> to undelete it or <a href="javascript:void(0);" class="dataset-purge">here</a> to immediately remove it from disk'));g.find(".dataset-hidden-msg").append(_l('Click <a href="javascript:void(0);" class="dataset-unhide">here</a> to unhide it'));return g},_render_body_failed_metadata:function(){var h=$("<a/>").attr({href:this.urls.edit,target:this.linkTarget}).text(_l("set it manually or retry auto-detection")),g=$("<span/>").text(". "+_l("You may be able to")+" ").append(h),i=a.HDABaseView.prototype._render_body_failed_metadata.call(this);i.find(".warningmessagesmall strong").append(g);return i},_render_body_error:function(){var g=a.HDABaseView.prototype._render_body_error.call(this);g.find(".dataset-actions .left").prepend(this._render_errButton());return g},_render_body_ok:function(){var g=a.HDABaseView.prototype._render_body_ok.call(this);if(this.model.isDeletedOrPurged()){return g}this.makeDbkeyEditLink(g);if(this.hasUser){g.find(".dataset-actions .left").append(this._render_visualizationsButton());this._renderTags(g);this._renderAnnotation(g)}return g},_renderTags:function(g){var h=this;this.tagsEditor=new TagsEditor({model:this.model,el:g.find(".tags-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.tagsEditorShown=true},onhide:function(){h.tagsEditorShown=false},$activator:faIconButton({title:_l("Edit dataset tags"),classes:"dataset-tag-btn",faIcon:"fa-tags"}).appendTo(g.find(".dataset-actions .right"))});if(this.tagsEditorShown){this.tagsEditor.toggle(true)}},_renderAnnotation:function(g){var h=this;this.annotationEditor=new 
AnnotationEditor({model:this.model,el:g.find(".annotation-display"),onshowFirstTime:function(){this.render()},onshow:function(){h.annotationEditorShown=true},onhide:function(){h.annotationEditorShown=false},$activator:faIconButton({title:_l("Edit dataset annotation"),classes:"dataset-annotate-btn",faIcon:"fa-comment"}).appendTo(g.find(".dataset-actions .right"))});if(this.annotationEditorShown){this.annotationEditor.toggle(true)}},makeDbkeyEditLink:function(h){if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){var g=$('<a class="value">?</a>').attr("href",this.urls.edit).attr("target",this.linkTarget);h.find(".dataset-dbkey .value").replaceWith(g)}},events:_.extend(_.clone(a.HDABaseView.prototype.events),{"click .dataset-undelete":function(g){this.model.undelete();return false},"click .dataset-unhide":function(g){this.model.unhide();return false},"click .dataset-purge":"confirmPurge"}),confirmPurge:function c(g){this.model.purge();return false},toString:function(){var g=(this.model)?(this.model+""):("(no model)");return"HDAView("+g+")"}});function e(g,i,h){action=function(){Galaxy.frame.add({title:"Scatterplot",type:"url",content:g+"/scatterplot?"+$.param(i),target:h,scratchbook:true});$("div.popmenu-wrapper").remove();return false};return action}function b(g,i,h){return function(){var j={};if(h){j["f-dbkey"]=h}$.ajax({url:g+"/list_tracks?"+$.param(j),dataType:"html",error:function(){alert(("Could not add this dataset to browser")+".")},success:function(k){var l=window.parent;l.Galaxy.modal.show({title:"View Data in a New or Saved Visualization",buttons:{Cancel:function(){l.Galaxy.modal.hide()},"View in saved visualization":function(){l.Galaxy.modal.show({title:"Add Data to Saved Visualization",body:k,buttons:{Cancel:function(){l.Galaxy.modal.hide()},"Add to visualization":function(){$(l.document).find("input[name=id]:checked").each(function(){l.Galaxy.modal.hide();var 
m=$(this).val();i.id=m;l.Galaxy.frame.add({title:"Trackster",type:"url",content:g+"/trackster?"+$.param(i),scratchbook:true})})}}})},"View in new visualization":function(){l.Galaxy.modal.hide();var m=g+"/trackster?"+$.param(i);l.Galaxy.frame.add({title:"Trackster",type:"url",content:m,scratchbook:true})}}})}});return false}}return{HDAEditView:f}});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit notification service enabled
for this repository.
1
0