galaxy-commits
February 2015: 2 participants, 305 discussions
commit/galaxy-central: carlfeberhard: API, users: use _future_expose_api and minor clean up
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1d57664c6d9c/
Changeset: 1d57664c6d9c
User: carlfeberhard
Date: 2015-02-02 22:48:31+00:00
Summary: API, users: use _future_expose_api and minor clean up
Affected #: 1 file
diff -r 83414d1a3943a6902fe63509b07ed46cba874951 -r 1d57664c6d9c4875e5c818220fa8199cf3967de0 lib/galaxy/webapps/galaxy/api/users.py
--- a/lib/galaxy/webapps/galaxy/api/users.py
+++ b/lib/galaxy/webapps/galaxy/api/users.py
@@ -1,23 +1,29 @@
"""
API operations on User objects.
"""
-import logging
-from paste.httpexceptions import HTTPBadRequest, HTTPNotImplemented
-from galaxy import util, web, exceptions
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesTagsMixin
+from galaxy.web.base.controller import CreatesApiKeysMixin
+from galaxy.web.base.controller import CreatesUsersMixin
+
from galaxy.security.validate_user_input import validate_email
from galaxy.security.validate_user_input import validate_password
from galaxy.security.validate_user_input import validate_publicname
-from galaxy.web.base.controller import BaseAPIController, UsesTagsMixin
-from galaxy.web.base.controller import CreatesApiKeysMixin
-from galaxy.web.base.controller import CreatesUsersMixin
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from galaxy import web
+from galaxy import util
+from galaxy import exceptions
+
+import logging
log = logging.getLogger( __name__ )
class UserAPIController( BaseAPIController, UsesTagsMixin, CreatesUsersMixin, CreatesApiKeysMixin ):
- @web.expose_api
+ @expose_api
def index( self, trans, deleted='False', f_email=None, **kwd ):
"""
GET /api/users
@@ -46,7 +52,7 @@
rval.append( item )
return rval
- @web.expose_api_anonymous
+ @expose_api_anonymous
def show( self, trans, id, deleted='False', **kwd ):
"""
GET /api/users/{encoded_user_id}
@@ -73,30 +79,28 @@
assert trans.user == user
assert not user.deleted
except:
- if trans.user_is_admin():
- raise
- else:
- raise HTTPBadRequest( detail='Invalid user id ( %s ) specified' % id )
+ raise exceptions.RequestParameterInvalidException( 'Invalid user id specified', id=id )
+
item = user.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id,
'total_disk_usage': float } )
# add a list of tags used by the user (as strings)
item[ 'tags_used' ] = self.get_user_tags_used( trans, user=user )
# TODO: move into api_values (needs trans, tho - can we do that with api_keys/@property??)
# TODO: works with other users (from admin)??
- item['quota_percent'] = trans.app.quota_agent.get_percent( trans=trans )
- item['is_admin'] = trans.user_is_admin()
+ item[ 'quota_percent' ] = trans.app.quota_agent.get_percent( trans=trans )
+ item[ 'is_admin' ] = trans.user_is_admin()
return item
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwd ):
"""
POST /api/users
Creates a new Galaxy user.
"""
if not trans.app.config.allow_user_creation:
- raise HTTPNotImplemented( detail='User creation is not allowed in this Galaxy instance' )
+ raise exceptions.ConfigDoesNotAllowException( 'User creation is not allowed in this Galaxy instance' )
if trans.app.config.use_remote_user and trans.user_is_admin():
- user = trans.get_or_create_remote_user(remote_user_email=payload['remote_user_email'])
+ user = trans.get_or_create_remote_user( remote_user_email=payload['remote_user_email'] )
elif trans.user_is_admin():
username = payload[ 'username' ]
email = payload[ 'email' ]
@@ -109,12 +113,12 @@
else:
user = self.create_user( trans=trans, email=email, username=username, password=password )
else:
- raise HTTPNotImplemented()
+ raise exceptions.NotImplemented()
item = user.to_dict( view='element', value_mapper={ 'id': trans.security.encode_id,
'total_disk_usage': float } )
return item
- @web.expose_api
+ @expose_api
@web.require_admin
def api_key( self, trans, user_id, **kwd ):
"""
@@ -125,17 +129,17 @@
key = self.create_api_key( trans, user )
return key
- @web.expose_api
+ @expose_api
def update( self, trans, **kwd ):
- raise HTTPNotImplemented()
+ raise exceptions.NotImplemented()
- @web.expose_api
+ @expose_api
def delete( self, trans, **kwd ):
- raise HTTPNotImplemented()
+ raise exceptions.NotImplemented()
- @web.expose_api
+ @expose_api
def undelete( self, trans, **kwd ):
- raise HTTPNotImplemented()
+ raise exceptions.NotImplemented()
# TODO: move to more basal, common resource than this
def anon_user_api_value( self, trans ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
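The recurring change in this and the following changesets is the swap from @web.expose_api plus paste.httpexceptions to the _future_expose_api decorator plus typed galaxy.exceptions. A minimal sketch of the resulting controller style (the method body and its _lookup helper are hypothetical, not from the commit; the decorator is assumed to serialize return values and map exception types to HTTP statuses, as the diff above implies):

    from galaxy import exceptions
    from galaxy.web import _future_expose_api as expose_api
    from galaxy.web.base.controller import BaseAPIController

    class ExampleAPIController( BaseAPIController ):

        @expose_api
        def show( self, trans, id, **kwd ):
            item = self._lookup( trans, id )  # hypothetical helper
            if item is None:
                # surfaced by the decorator as a structured 4xx error response
                raise exceptions.RequestParameterInvalidException( 'Invalid id specified', id=id )
            return item.to_dict( view='element' )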
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/119898e03506/
Changeset: 119898e03506
User: carlfeberhard
Date: 2015-02-02 22:08:17+00:00
Summary: API, tags & annotations: use _future_expose_api and minor clean up
Affected #: 2 files
diff -r 117db343f853b815b6f584679c23d0f44277c07b -r 119898e035065e3ffd1ca9b3e84f23f380faf023 lib/galaxy/webapps/galaxy/api/annotations.py
--- a/lib/galaxy/webapps/galaxy/api/annotations.py
+++ b/lib/galaxy/webapps/galaxy/api/annotations.py
@@ -1,27 +1,31 @@
"""
API operations on annotations.
"""
-import logging
-from galaxy import web
+
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesStoredWorkflowMixin
from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.web.base.controller import BaseAPIController, HTTPNotImplemented, UsesStoredWorkflowMixin
from galaxy import managers
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.util import sanitize_html
+
+import logging
log = logging.getLogger( __name__ )
class BaseAnnotationsController( BaseAPIController, UsesStoredWorkflowMixin, UsesAnnotations ):
- @web.expose_api
+ @expose_api
def index( self, trans, **kwd ):
idnum = kwd[self.tagged_item_id]
item = self._get_item_from_id(trans, idnum)
if item is not None:
return self.get_item_annotation_str( trans.sa_session, trans.get_user(), item )
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwd ):
if "text" not in payload:
return ""
@@ -29,24 +33,24 @@
item = self._get_item_from_id(trans, idnum)
if item is not None:
new_annotation = payload.get("text")
- # Sanitize annotation before adding it.
- new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
+ #TODO: sanitize on display not entry
+ new_annotation = sanitize_html.sanitize_html( new_annotation, 'utf-8', 'text/html' )
self.add_item_annotation( trans.sa_session, trans.get_user(), item, new_annotation )
trans.sa_session.flush()
return new_annotation
return ""
- @web.expose_api
+ @expose_api
def delete( self, trans, **kwd ):
idnum = kwd[self.tagged_item_id]
item = self._get_item_from_id(trans, idnum)
if item is not None:
return self.delete_item_annotation( trans.sa_session, trans.get_user(), item )
- @web.expose_api
+ @expose_api
def undelete( self, trans, **kwd ):
- raise HTTPNotImplemented()
+ raise exceptions.NotImplemented()
class HistoryAnnotationsController(BaseAnnotationsController):
@@ -63,7 +67,7 @@
return history
-class HistoryContentAnnotationsController(BaseAnnotationsController):
+class HistoryContentAnnotationsController( BaseAnnotationsController ):
controller_name = "history_content_annotations"
tagged_item_id = "history_content_id"
@@ -78,10 +82,10 @@
return hda
-class WorkflowAnnotationsController(BaseAnnotationsController):
+class WorkflowAnnotationsController( BaseAnnotationsController ):
controller_name = "workflow_annotations"
tagged_item_id = "workflow_id"
def _get_item_from_id(self, trans, idstr):
- hda = self.get_stored_workflow(trans, idstr)
+ hda = self.get_stored_workflow( trans, idstr )
return hda
diff -r 117db343f853b815b6f584679c23d0f44277c07b -r 119898e035065e3ffd1ca9b3e84f23f380faf023 lib/galaxy/webapps/galaxy/api/item_tags.py
--- a/lib/galaxy/webapps/galaxy/api/item_tags.py
+++ b/lib/galaxy/webapps/galaxy/api/item_tags.py
@@ -1,52 +1,55 @@
"""
API operations related to tagging items.
"""
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesTagsMixin
+
+from galaxy.web import _future_expose_api as expose_api
+from galaxy import exceptions
+
import logging
-from galaxy import web
-from galaxy.web.base.controller import BaseAPIController, UsesTagsMixin
-from paste.httpexceptions import HTTPBadRequest
-
log = logging.getLogger( __name__ )
class BaseItemTagsController( BaseAPIController, UsesTagsMixin ):
"""
"""
- @web.expose_api
+ @expose_api
def index( self, trans, **kwd ):
"""
"""
- tags = self._get_user_tags(trans, self.tagged_item_class, kwd[self.tagged_item_id])
+ tags = self._get_user_tags(trans, self.tagged_item_class, kwd[ self.tagged_item_id ])
return [ self._api_value( tag, trans, view='collection' ) for tag in tags ]
- @web.expose_api
+ @expose_api
def show( self, trans, tag_name, **kwd ):
"""
"""
- tag = self._get_item_tag_assoc( trans, self.tagged_item_class, kwd[self.tagged_item_id], tag_name )
+ tag = self._get_item_tag_assoc( trans, self.tagged_item_class, kwd[ self.tagged_item_id ], tag_name )
if not tag:
- raise HTTPBadRequest("Failed to retrieve specified tag.")
+ raise exceptions.ObjectNotFound( "Failed to retrieve specified tag." )
return self._api_value( tag, trans )
- @web.expose_api
+ @expose_api
def create( self, trans, tag_name, payload=None, **kwd ):
"""
"""
payload = payload or {}
- value = payload.get("value", None)
- tag = self._apply_item_tag( trans, self.tagged_item_class, kwd[self.tagged_item_id], tag_name, value )
+ value = payload.get( "value", None )
+ tag = self._apply_item_tag( trans, self.tagged_item_class, kwd[ self.tagged_item_id ], tag_name, value )
return self._api_value( tag, trans )
# Not handling these differently at this time
update = create
- @web.expose_api
+ @expose_api
def delete( self, trans, tag_name, **kwd ):
"""
"""
- deleted = self._remove_items_tag( trans, self.tagged_item_class, kwd[self.tagged_item_id], tag_name )
+ deleted = self._remove_items_tag( trans, self.tagged_item_class, kwd[ self.tagged_item_id ], tag_name )
if not deleted:
- raise HTTPBadRequest("Failed to delete specified tag.")
+ raise exceptions.RequestParameterInvalidException( "Failed to delete specified tag." )
+ #TODO: ugh - 204 would be better
return 'OK'
def _api_value( self, tag, trans, view='element' ):
https://bitbucket.org/galaxy/galaxy-central/commits/83414d1a3943/
Changeset: 83414d1a3943
User: carlfeberhard
Date: 2015-02-02 22:09:12+00:00
Summary: Core, startup: add deprecation log.warn if using 'pack_scripts' option
Affected #: 1 file
diff -r 119898e035065e3ffd1ca9b3e84f23f380faf023 -r 83414d1a3943a6902fe63509b07ed46cba874951 lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -90,6 +90,7 @@
webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=[ app.visualizations_registry ], **kwargs )
#webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=None, **kwargs )
if asbool(kwargs.get('pack_scripts', False)):
+ log.warn( "The 'pack_scripts' option is deprecated" )
pack_scripts()
# Close any pooled database connections before forking
try:
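The typed exceptions used above (ObjectNotFound, RequestParameterInvalidException, and so on) let the decorator pick the response status instead of each controller setting trans.response.status by hand. A toy sketch of that idea (standalone classes; the 400/404 values are illustrative assumptions, not copied from galaxy.exceptions):

    class MessageException( Exception ):
        # base class for API errors; the decorator reads status_code
        status_code = 400

    class ObjectNotFound( MessageException ):
        # missing resources surface as 404 rather than a generic 400
        status_code = 404

    try:
        raise ObjectNotFound( "Failed to retrieve specified tag." )
    except MessageException, e:
        print e.status_code, str( e )  # 404 Failed to retrieve specified tag.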
commit/galaxy-central: carlfeberhard: API, visualizations: use _future_expose_api, some clean-up
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/117db343f853/
Changeset: 117db343f853
User: carlfeberhard
Date: 2015-02-02 21:38:30+00:00
Summary: API, visualizations: use _future_expose_api, some clean-up
Affected #: 1 file
diff -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd -r 117db343f853b815b6f584679c23d0f44277c07b lib/galaxy/webapps/galaxy/api/visualizations.py
--- a/lib/galaxy/webapps/galaxy/api/visualizations.py
+++ b/lib/galaxy/webapps/galaxy/api/visualizations.py
@@ -5,68 +5,52 @@
may change often.
"""
-import pkg_resources
-pkg_resources.require( 'SQLAlchemy >= 0.4' )
-from sqlalchemy import or_
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.web.base.controller import UsesVisualizationMixin
+from galaxy.web.base.controller import SharableMixin
+from galaxy.model.item_attrs import UsesAnnotations
-from galaxy import web, util
-from galaxy.web.base.controller import BaseAPIController, UsesVisualizationMixin, SharableMixin
-from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.exceptions import ( ItemAccessibilityException, ItemDeletionException, ItemOwnershipException,
- MessageException )
-
-from galaxy.web import url_for
+from galaxy.web import _future_expose_api as expose_api
+from galaxy import web
+from galaxy import util
+from galaxy import exceptions
import logging
log = logging.getLogger( __name__ )
+
class VisualizationsController( BaseAPIController, UsesVisualizationMixin, SharableMixin, UsesAnnotations ):
"""
RESTful controller for interactions with visualizations.
"""
- @web.expose_api
+ @expose_api
def index( self, trans, **kwargs ):
"""
GET /api/visualizations:
"""
- #TODO: search for vizsesses that apply to an object (sending model class and id? - how to do this?)
rval = []
- try:
- if not trans.user:
- raise ItemAccessibilityException( 'You must be logged in to access visualizations' )
- user = trans.user
+ user = trans.user
- #TODO: search for: title, made by user, creation time range, type (vis name), dbkey, etc.
- #TODO: limit, offset, order_by
- #TODO: deleted
+ #TODO: search for: title, made by user, creation time range, type (vis name), dbkey, etc.
+ #TODO: limit, offset, order_by
+ #TODO: deleted
- # this is the default search - user's vis, vis shared with user, published vis
- visualizations = self.get_visualizations_by_user( trans, user )
- visualizations += self.get_visualizations_shared_with_user( trans, user )
- visualizations += self.get_published_visualizations( trans, exclude_user=user )
- #TODO: the admin case - everything
+ # this is the default search - user's vis, vis shared with user, published vis
+ visualizations = self.get_visualizations_by_user( trans, user )
+ visualizations += self.get_visualizations_shared_with_user( trans, user )
+ visualizations += self.get_published_visualizations( trans, exclude_user=user )
+ #TODO: the admin case - everything
- for visualization in visualizations:
- item = self.get_visualization_summary_dict( visualization )
- item = trans.security.encode_dict_ids( item )
- item[ 'url' ] = web.url_for( 'visualization', id=item[ 'id' ] )
- rval.append( item )
-
- except ItemAccessibilityException, exception:
- trans.response.status = 403
- rval = { 'error': str( exception ) }
- if trans.debug:
- log.exception( str( exception ) )
-
- except Exception, exception:
- trans.response.status = 500
- rval = { 'error': str( exception ) }
- log.exception( 'visualizations index failed: %s' %( str( exception ) ) )
+ for visualization in visualizations:
+ item = self.get_visualization_summary_dict( visualization )
+ item = trans.security.encode_dict_ids( item )
+ item[ 'url' ] = web.url_for( 'visualization', id=item[ 'id' ] )
+ rval.append( item )
return rval
- @web.json
+ @expose_api
def show( self, trans, id, **kwargs ):
"""
GET /api/visualizations/{viz_id}
@@ -75,37 +59,25 @@
# the important thing is the config
rval = {}
#TODO:?? /api/visualizations/registry -> json of registry.listings?
- try:
- visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
- dictionary = trans.security.encode_dict_ids( self.get_visualization_dict( visualization ) )
- dictionary[ 'url' ] = url_for( controller='visualization',
- action="display_by_username_and_slug", username=visualization.user.username, slug=visualization.slug )
- dictionary[ 'annotation' ] = self.get_item_annotation_str( trans.sa_session, trans.user, visualization )
- # need to encode ids in revisions as well
- encoded_revisions = []
- for revision in dictionary[ 'revisions' ]:
- #NOTE: does not encode ids inside the configs
- encoded_revisions.append( trans.security.encode_id( revision ) )
- dictionary[ 'revisions' ] = encoded_revisions
- dictionary[ 'latest_revision' ] = trans.security.encode_dict_ids( dictionary[ 'latest_revision' ] )
+ visualization = self.get_visualization( trans, id, check_ownership=False, check_accessible=True )
+ dictionary = trans.security.encode_dict_ids( self.get_visualization_dict( visualization ) )
+ dictionary[ 'url' ] = web.url_for( controller='visualization',
+ action="display_by_username_and_slug", username=visualization.user.username, slug=visualization.slug )
+ dictionary[ 'annotation' ] = self.get_item_annotation_str( trans.sa_session, trans.user, visualization )
- rval = dictionary
+ # need to encode ids in revisions as well
+ encoded_revisions = []
+ for revision in dictionary[ 'revisions' ]:
+ #NOTE: does not encode ids inside the configs
+ encoded_revisions.append( trans.security.encode_id( revision ) )
+ dictionary[ 'revisions' ] = encoded_revisions
+ dictionary[ 'latest_revision' ] = trans.security.encode_dict_ids( dictionary[ 'latest_revision' ] )
- except ( ItemAccessibilityException, ItemDeletionException ), exception:
- trans.response.status = 403
- rval = { 'error': str( exception ) }
- if trans.debug:
- log.exception( 'visualization show forbidden (%s): %s' %( id, str( exception ) ) )
-
- except Exception, exception:
- trans.response.status = 500
- rval = { 'error': str( exception ) }
- log.exception( 'visualization show failed (%s): %s' %( id, str( exception ) ) )
-
+ rval = dictionary
return rval
- @web.expose_api
+ @expose_api
def create( self, trans, payload, **kwargs ):
"""
POST /api/visualizations
@@ -115,93 +87,61 @@
imports a copy of an existing visualization into the user's workspace
"""
rval = None
- try:
- if 'import_id' in payload:
- import_id = payload( 'import_id' )
- visualization = self.import_visualization( trans, import_id, user=trans.user )
- else:
- payload = self._validate_and_parse_payload( payload )
- # must have a type (I've taken this to be the visualization name)
- if 'type' not in payload:
- raise ValueError( "key/value 'type' is required" )
- vis_type = payload.pop( 'type', False )
+ if 'import_id' in payload:
+ import_id = payload( 'import_id' )
+ visualization = self.import_visualization( trans, import_id, user=trans.user )
- payload[ 'save' ] = True
+ else:
+ payload = self._validate_and_parse_payload( payload )
+ # must have a type (I've taken this to be the visualization name)
+ if 'type' not in payload:
+ raise exceptions.RequestParameterMissingException( "key/value 'type' is required" )
+ vis_type = payload.pop( 'type', False )
+
+ payload[ 'save' ] = True
+ try:
# generate defaults - this will err if given a weird key?
visualization = self.create_visualization( trans, vis_type, **payload )
+ except ValueError, val_err:
+ raise exceptions.RequestParameterMissingException( str( val_err ) )
- rval = { 'id' : trans.security.encode_id( visualization.id ) }
-
- #TODO: exception boilerplate
- except ( ItemAccessibilityException, ItemDeletionException ), exception:
- trans.response.status = 403
- rval = { 'error': str( exception ) }
- if trans.debug:
- log.exception( str( exception ) )
-
- except ( ValueError, AttributeError ), exception:
- trans.response.status = 400
- rval = { 'error': str( exception ) }
- if trans.debug:
- log.exception( str( exception ) )
-
- except Exception, exception:
- trans.response.status = 500
- rval = { 'error': str( exception ) }
- log.exception( 'creation of visualization failed: %s' %( str( exception ) ) )
+ rval = { 'id' : trans.security.encode_id( visualization.id ) }
return rval
- @web.expose_api
+ @expose_api
def update( self, trans, id, payload, **kwargs ):
"""
PUT /api/visualizations/{encoded_visualization_id}
"""
rval = None
- try:
- payload = self._validate_and_parse_payload( payload )
- # there's a differentiation here between updating the visualiztion and creating a new revision
- # that needs to be handled clearly here
- # or alternately, using a different controller like PUT /api/visualizations/{id}/r/{id}
+ payload = self._validate_and_parse_payload( payload )
- #TODO: consider allowing direct alteration of revisions title (without a new revision)
- # only create a new revsion on a different config
+ # there's a differentiation here between updating the visualiztion and creating a new revision
+ # that needs to be handled clearly here
+ # or alternately, using a different controller like PUT /api/visualizations/{id}/r/{id}
- # only update owned visualizations
- visualization = self.get_visualization( trans, id, check_ownership=True )
- title = payload.get( 'title', visualization.latest_revision.title )
- dbkey = payload.get( 'dbkey', visualization.latest_revision.dbkey )
- config = payload.get( 'config', visualization.latest_revision.config )
+ #TODO: consider allowing direct alteration of revisions title (without a new revision)
+ # only create a new revsion on a different config
- latest_config = visualization.latest_revision.config
- if( ( title != visualization.latest_revision.title )
- or ( dbkey != visualization.latest_revision.dbkey )
- or ( util.json.dumps( config ) != util.json.dumps( latest_config ) ) ):
- revision = self.add_visualization_revision( trans, visualization, config, title, dbkey )
- rval = { 'id' : id, 'revision' : revision.id }
+ # only update owned visualizations
+ visualization = self.get_visualization( trans, id, check_ownership=True )
+ title = payload.get( 'title', visualization.latest_revision.title )
+ dbkey = payload.get( 'dbkey', visualization.latest_revision.dbkey )
+ config = payload.get( 'config', visualization.latest_revision.config )
- # allow updating vis title
- visualization.title = title
- trans.sa_session.flush()
+ latest_config = visualization.latest_revision.config
+ if( ( title != visualization.latest_revision.title )
+ or ( dbkey != visualization.latest_revision.dbkey )
+ or ( util.json.dumps( config ) != util.json.dumps( latest_config ) ) ):
+ revision = self.add_visualization_revision( trans, visualization, config, title, dbkey )
+ rval = { 'id' : id, 'revision' : revision.id }
- except ( ItemAccessibilityException, ItemDeletionException ), exception:
- trans.response.status = 403
- rval = { 'error': str( exception ) }
- if trans.debug:
- log.exception( str( exception ) )
-
- except ( ValueError, AttributeError ), exception:
- trans.response.status = 400
- rval = { 'error': str( exception ) }
- if trans.debug:
- log.exception( str( exception ) )
-
- except Exception, exception:
- trans.response.status = 500
- rval = { 'error': str( exception ) }
- log.exception( 'update of visualization (%s) failed: %s' %( id, str( exception ) ) )
+ # allow updating vis title
+ visualization.title = title
+ trans.sa_session.flush()
return rval
@@ -224,36 +164,37 @@
)
#TODO: deleted
#TODO: importable
+ ValidationError = exceptions.RequestParameterInvalidException
validated_payload = {}
for key, val in payload.items():
#TODO: validate types in VALID_TYPES/registry names at the mixin/model level?
if key == 'type':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
+ raise ValidationError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
val = util.sanitize_html.sanitize_html( val, 'utf-8' )
elif key == 'config':
if not isinstance( val, dict ):
- raise ValueError( '%s must be a dictionary: %s' %( key, str( type( val ) ) ) )
+ raise ValidationError( '%s must be a dictionary: %s' %( key, str( type( val ) ) ) )
elif key == 'annotation':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
+ raise ValidationError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
val = util.sanitize_html.sanitize_html( val, 'utf-8' )
# these are keys that actually only be *updated* at the revision level and not here
# (they are still valid for create, tho)
elif key == 'title':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
+ raise ValidationError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
val = util.sanitize_html.sanitize_html( val, 'utf-8' )
elif key == 'slug':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( '%s must be a string: %s' %( key, str( type( val ) ) ) )
+ raise ValidationError( '%s must be a string: %s' %( key, str( type( val ) ) ) )
val = util.sanitize_html.sanitize_html( val, 'utf-8' )
elif key == 'dbkey':
if not ( isinstance( val, str ) or isinstance( val, unicode ) ):
- raise ValueError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
+ raise ValidationError( '%s must be a string or unicode: %s' %( key, str( type( val ) ) ) )
val = util.sanitize_html.sanitize_html( val, 'utf-8' )
elif key not in valid_but_uneditable_keys:
@@ -262,11 +203,3 @@
validated_payload[ key ] = val
return validated_payload
-
- #(a)web.expose_api
- #def delete( self, trans, id, **kwd ):
- # """
- # DELETE /api/visualizations/{encoded_history_id}
- # Deletes a visualization from the database
- # """
- # pass
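The _validate_and_parse_payload hunk above also shows a small pattern worth noting: aliasing the API exception to a local ValidationError name so every type check raises a consistently typed, 400-mapped error. A condensed, hypothetical version (key handling trimmed; not the commit's code):

    from galaxy import exceptions

    def validate_payload( payload ):
        ValidationError = exceptions.RequestParameterInvalidException
        validated = {}
        for key, val in payload.items():
            if key in ( 'type', 'title', 'slug', 'dbkey', 'annotation' ):
                if not isinstance( val, basestring ):
                    raise ValidationError( '%s must be a string or unicode: %s' % ( key, type( val ) ) )
            elif key == 'config':
                if not isinstance( val, dict ):
                    raise ValidationError( '%s must be a dictionary: %s' % ( key, type( val ) ) )
            validated[ key ] = val
        return validated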
commit/galaxy-central: jmchilton: Merged in dan/galaxy-central-prs/stable (pull request #657)
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5e4060f5ac7a/
Changeset: 5e4060f5ac7a
Branch: stable
User: jmchilton
Date: 2015-02-02 21:30:49+00:00
Summary: Merged in dan/galaxy-central-prs/stable (pull request #657)
[STABLE] Fix for preventing non-admins from running data managers via the api.
Affected #: 4 files
diff -r 96aabc4e66f193d4bb3b7b37d8ad4dce671f1b04 -r 5e4060f5ac7a3c11d4681bc80f94c620df9d0479 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1388,6 +1388,12 @@
return section_id, section_name
return None, None
+ def allow_user_access( self, user ):
+ """
+ :returns: bool -- Whether the user is allowed to access the tool.
+ """
+ return True
+
def parse( self, root, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -3392,6 +3398,7 @@
self.data_manager_id = self.id
def exec_after_process( self, app, inp_data, out_data, param_dict, job=None, **kwds ):
+ assert self.allow_user_access( job.user ), "You must be an admin to access this tool."
#run original exec_after_process
super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job=job, **kwds )
#process results of tool
@@ -3415,6 +3422,7 @@
return history
user = trans.user
assert user, 'You must be logged in to use this tool.'
+ assert self.allow_user_access( user ), "You must be an admin to access this tool."
history = user.data_manager_histories
if not history:
#create
@@ -3434,6 +3442,17 @@
history = None
return history
+ def allow_user_access( self, user ):
+ """
+ :returns: bool -- Whether the user is allowed to access the tool.
+ Data Manager tools are only accessible to admins.
+ """
+ if super( DataManagerTool, self ).allow_user_access( user ) and self.app.config.is_admin_user( user ):
+ return True
+ if user:
+ user = user.id
+ log.debug( "User (%s) attempted to access a data manager tool (%s), but is not an admin.", user, self.id )
+ return False
# Populate tool_type to ToolClass mappings
tool_types = {}
diff -r 96aabc4e66f193d4bb3b7b37d8ad4dce671f1b04 -r 5e4060f5ac7a3c11d4681bc80f94c620df9d0479 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -155,6 +155,7 @@
submitting the job to the job queue. If history is not specified, use
trans.history as destination for tool's output datasets.
"""
+ assert tool.allow_user_access( trans.user ), "User (%s) is not allowed to access this tool." % ( trans.user )
# Set history.
if not history:
history = tool.get_default_history_by_trans( trans, create=True )
diff -r 96aabc4e66f193d4bb3b7b37d8ad4dce671f1b04 -r 5e4060f5ac7a3c11d4681bc80f94c620df9d0479 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -63,7 +63,7 @@
"""
io_details = util.string_as_bool( kwd.get( 'io_details', False ) )
link_details = util.string_as_bool( kwd.get( 'link_details', False ) )
- tool = self._get_tool( id )
+ tool = self._get_tool( id, user=trans.user )
return tool.to_dict( trans, io_details=io_details, link_details=link_details )
@_future_expose_api_anonymous
@@ -89,7 +89,7 @@
@_future_expose_api_anonymous
def citations( self, trans, id, **kwds ):
- tool = self._get_tool( id )
+ tool = self._get_tool( id, user=trans.user )
rval = []
for citation in tool.citations:
rval.append( citation.to_dict( 'bibtex' ) )
@@ -122,7 +122,7 @@
# Get tool.
tool = trans.app.toolbox.get_tool( payload[ 'tool_id' ] ) if 'tool_id' in payload else None
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
trans.response.status = 404
return { "message": { "type": "error", "text" : trans.app.model.Dataset.conversion_messages.NO_TOOL } }
@@ -212,10 +212,10 @@
#
# -- Helper methods --
#
- def _get_tool( self, id ):
+ def _get_tool( self, id, user=None ):
id = urllib.unquote_plus( id )
tool = self.app.toolbox.get_tool( id )
- if not tool:
+ if not tool or not tool.allow_user_access( user ):
raise exceptions.ObjectNotFound("Could not find tool with id '%s'" % id)
return tool
@@ -281,7 +281,7 @@
#
original_job = self.get_hda_job( original_dataset )
tool = trans.app.toolbox.get_tool( original_job.tool_id )
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
return trans.app.model.Dataset.conversion_messages.NO_TOOL
tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
diff -r 96aabc4e66f193d4bb3b7b37d8ad4dce671f1b04 -r 5e4060f5ac7a3c11d4681bc80f94c620df9d0479 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -65,7 +65,7 @@
get_loaded_tools_by_lineage=False,
set_selected=refreshed_on_change )
# No tool matching the tool id, display an error (shouldn't happen)
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
log.error( "index called with tool id '%s' but no such tool exists", tool_id )
trans.log_event( "Tool id '%s' does not exist" % tool_id )
trans.response.status = 404
@@ -191,6 +191,8 @@
# This is expected so not an exception.
tool_id_version_message = ''
error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
+ if not tool.allow_user_access( trans.user ):
+ error( "The requested tool is unknown." )
# Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
if not tool.is_workflow_compatible:
error( "The '%s' tool does not currently support rerunning." % tool.name )
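The fix follows a simple pattern: a permissive allow_user_access on the Tool base class, an admin-only override on DataManagerTool, and explicit checks at each entry point (tool execution, the tools API, and the tool runner). A condensed sketch of the class side (standalone toy classes, not the Galaxy ones):

    class Tool( object ):
        def allow_user_access( self, user ):
            """:returns: bool -- whether the user is allowed to access the tool."""
            return True

    class DataManagerTool( Tool ):
        def __init__( self, app ):
            self.app = app

        def allow_user_access( self, user ):
            # Data Manager tools are only accessible to admins.
            return ( super( DataManagerTool, self ).allow_user_access( user )
                     and self.app.config.is_admin_user( user ) )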
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0b9127ab708d/
Changeset: 0b9127ab708d
Branch: stable
User: dan
Date: 2015-02-02 21:21:00+00:00
Summary: Fix for preventing non-admins from running data managers via the api.
Affected #: 4 files
diff -r 1b96d3a4ff28da2bd547b65ad86c5ad5a9bbfb90 -r 0b9127ab708d7d8497e80169a3b8636aea1e37c6 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1388,6 +1388,12 @@
return section_id, section_name
return None, None
+ def allow_user_access( self, user ):
+ """
+ :returns: bool -- Whether the user is allowed to access the tool.
+ """
+ return True
+
def parse( self, root, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -3392,6 +3398,7 @@
self.data_manager_id = self.id
def exec_after_process( self, app, inp_data, out_data, param_dict, job=None, **kwds ):
+ assert self.allow_user_access( job.user ), "You must be an admin to access this tool."
#run original exec_after_process
super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job=job, **kwds )
#process results of tool
@@ -3415,6 +3422,7 @@
return history
user = trans.user
assert user, 'You must be logged in to use this tool.'
+ assert self.allow_user_access( user ), "You must be an admin to access this tool."
history = user.data_manager_histories
if not history:
#create
@@ -3434,6 +3442,17 @@
history = None
return history
+ def allow_user_access( self, user ):
+ """
+ :returns: bool -- Whether the user is allowed to access the tool.
+ Data Manager tools are only accessible to admins.
+ """
+ if super( DataManagerTool, self ).allow_user_access( user ) and self.app.config.is_admin_user( user ):
+ return True
+ if user:
+ user = user.id
+ log.debug( "User (%s) attempted to access a data manager tool (%s), but is not an admin.", user, self.id )
+ return False
# Populate tool_type to ToolClass mappings
tool_types = {}
diff -r 1b96d3a4ff28da2bd547b65ad86c5ad5a9bbfb90 -r 0b9127ab708d7d8497e80169a3b8636aea1e37c6 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -155,6 +155,7 @@
submitting the job to the job queue. If history is not specified, use
trans.history as destination for tool's output datasets.
"""
+ assert tool.allow_user_access( trans.user ), "User (%s) is not allowed to access this tool." % ( trans.user )
# Set history.
if not history:
history = tool.get_default_history_by_trans( trans, create=True )
diff -r 1b96d3a4ff28da2bd547b65ad86c5ad5a9bbfb90 -r 0b9127ab708d7d8497e80169a3b8636aea1e37c6 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -63,7 +63,7 @@
"""
io_details = util.string_as_bool( kwd.get( 'io_details', False ) )
link_details = util.string_as_bool( kwd.get( 'link_details', False ) )
- tool = self._get_tool( id )
+ tool = self._get_tool( id, user=trans.user )
return tool.to_dict( trans, io_details=io_details, link_details=link_details )
@_future_expose_api_anonymous
@@ -89,7 +89,7 @@
@_future_expose_api_anonymous
def citations( self, trans, id, **kwds ):
- tool = self._get_tool( id )
+ tool = self._get_tool( id, user=trans.user )
rval = []
for citation in tool.citations:
rval.append( citation.to_dict( 'bibtex' ) )
@@ -122,7 +122,7 @@
# Get tool.
tool = trans.app.toolbox.get_tool( payload[ 'tool_id' ] ) if 'tool_id' in payload else None
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
trans.response.status = 404
return { "message": { "type": "error", "text" : trans.app.model.Dataset.conversion_messages.NO_TOOL } }
@@ -212,10 +212,10 @@
#
# -- Helper methods --
#
- def _get_tool( self, id ):
+ def _get_tool( self, id, user=None ):
id = urllib.unquote_plus( id )
tool = self.app.toolbox.get_tool( id )
- if not tool:
+ if not tool or not tool.allow_user_access( user ):
raise exceptions.ObjectNotFound("Could not find tool with id '%s'" % id)
return tool
@@ -281,7 +281,7 @@
#
original_job = self.get_hda_job( original_dataset )
tool = trans.app.toolbox.get_tool( original_job.tool_id )
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
return trans.app.model.Dataset.conversion_messages.NO_TOOL
tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
diff -r 1b96d3a4ff28da2bd547b65ad86c5ad5a9bbfb90 -r 0b9127ab708d7d8497e80169a3b8636aea1e37c6 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -65,7 +65,7 @@
get_loaded_tools_by_lineage=False,
set_selected=refreshed_on_change )
# No tool matching the tool id, display an error (shouldn't happen)
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
log.error( "index called with tool id '%s' but no such tool exists", tool_id )
trans.log_event( "Tool id '%s' does not exist" % tool_id )
trans.response.status = 404
@@ -191,6 +191,8 @@
# This is expected so not an exception.
tool_id_version_message = ''
error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
+ if not tool.allow_user_access( trans.user ):
+ error( "The requested tool is unknown." )
# Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
if not tool.is_workflow_compatible:
error( "The '%s' tool does not currently support rerunning." % tool.name )
https://bitbucket.org/galaxy/galaxy-central/commits/5e4060f5ac7a/
Changeset: 5e4060f5ac7a
Branch: stable
User: jmchilton
Date: 2015-02-02 21:30:49+00:00
Summary: Merged in dan/galaxy-central-prs/stable (pull request #657)
[STABLE] Fix for preventing non-admins from running data managers via the api.
Affected #: 4 files
commit/galaxy-central: jmchilton: Expose improved sample tracking to tools for implicit map/reduce ops.
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b9ec027d4b1e/
Changeset: b9ec027d4b1e
User: jmchilton
Date: 2015-02-02 21:29:42+00:00
Summary: Expose improved sample tracking to tools for implicit map/reduce ops.
Tools may now use $input.element_identifier during tool evaluation for input 'data' parameters, with the following semantics:
- If the input was specified as a single dataset by the user - this just falls back to providing $input.name.
- If the input was mapped over a collection (to produce many jobs), or if the input is a 'multiple="true"' input that was provided a collection - $input.element_identifier will be the element identifier for the corresponding collection item (generally much more useful than the dataset name, since it is preserved throughout workflows).
'data_collection' parameters can already access this kind of information - but it is something of a best practice to use simple 'data' parameters since they are compatible with more traditional un-collected datasets.
This commit really needs more comments - but Philip Mabon has been patiently waiting for this functionality for a long time.
Affected #: 9 files
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2685,6 +2685,16 @@
return instances
@property
+ def dataset_elements( self ):
+ elements = []
+ for element in self.elements:
+ if element.is_collection:
+ elements.extend( element.child_collection.dataset_elements )
+ else:
+ elements.append( element )
+ return elements
+
+ @property
def state( self ):
# TODO: DatasetCollection state handling...
return 'ok'
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -191,6 +191,10 @@
if data.dbkey not in [None, '?']:
input_dbkey = data.dbkey
+ identifier = getattr( data, "element_identifier", None )
+ if identifier is not None:
+ incoming[ "%s|__identifier__" % name ] = identifier
+
# Collect chromInfo dataset and add as parameters to incoming
( chrom_info, db_dataset ) = trans.app.genome_builds.get_chrom_info( input_dbkey, trans=trans, custom_build_hack_get_len_from_fasta_conversion=tool.id != 'CONVERTER_fasta_to_len' )
if db_dataset:
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd lib/galaxy/tools/evaluation.py
--- a/lib/galaxy/tools/evaluation.py
+++ b/lib/galaxy/tools/evaluation.py
@@ -154,7 +154,7 @@
if isinstance( input, DataToolParameter ) and input.multiple:
dataset_instances = input_values[ input.name ]
if isinstance( dataset_instances, model.HistoryDatasetCollectionAssociation ):
- dataset_instances = dataset_instances.collection.dataset_instances[:]
+ dataset_instances = dataset_instances.collection.dataset_elements[:]
input_values[ input.name ] = \
DatasetListWrapper( dataset_instances,
dataset_paths=input_dataset_paths,
@@ -199,6 +199,9 @@
tool=self,
name=input.name
)
+ identifier_key = "%s|__identifier__" % input.name
+ if identifier_key in param_dict:
+ wrapper_kwds["identifier"] = param_dict[identifier_key]
if dataset:
#A None dataset does not have a filename
real_path = dataset.file_name
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd lib/galaxy/tools/parameters/meta.py
--- a/lib/galaxy/tools/parameters/meta.py
+++ b/lib/galaxy/tools/parameters/meta.py
@@ -14,6 +14,13 @@
execution).
"""
+ to_remove = []
+ for key in incoming.keys():
+ if key.endswith("|__identifier__"):
+ to_remove.append(key)
+ for key in to_remove:
+ incoming.pop(key)
+
def classify_unmodified_parameter( input_key ):
value = incoming[ input_key ]
if isinstance( value, dict ) and 'values' in value:
@@ -118,7 +125,11 @@
subcollection_elements = subcollections.split_dataset_collection_instance( hdc, subcollection_type )
return subcollection_elements
else:
- hdas = hdc.collection.dataset_instances
+ hdas = []
+ for element in hdc.collection.dataset_elements:
+ hda = element.dataset_instance
+ hda.element_identifier = element.element_identifier
+ hdas.append( hda )
return hdas
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd lib/galaxy/tools/wrappers.py
--- a/lib/galaxy/tools/wrappers.py
+++ b/lib/galaxy/tools/wrappers.py
@@ -187,7 +187,7 @@
def items( self ):
return iter( [ ( k, self.get( k ) ) for k, v in self.metadata.items() ] )
- def __init__( self, dataset, datatypes_registry=None, tool=None, name=None, dataset_path=None ):
+ def __init__( self, dataset, datatypes_registry=None, tool=None, name=None, dataset_path=None, identifier=None ):
if not dataset:
try:
# TODO: allow this to work when working with grouping
@@ -205,6 +205,14 @@
self.datatypes_registry = datatypes_registry
self.false_path = getattr( dataset_path, "false_path", None )
self.false_extra_files_path = getattr( dataset_path, "false_extra_files_path", None )
+ self._element_identifier = identifier
+
+ @property
+ def element_identifier( self ):
+ identifier = self._element_identifier
+ if identifier is None:
+ identifier = self.name
+ return identifier
@property
def is_collection( self ):
@@ -270,6 +278,10 @@
datasets = [datasets]
def to_wrapper( dataset ):
+ if hasattr(dataset, "element_identifier"):
+ element = dataset
+ dataset = element.dataset_instance
+ kwargs["identifier"] = element.element_identifier
return self._dataset_wrapper( dataset, dataset_paths, **kwargs )
list.__init__( self, map( to_wrapper, datasets ) )
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd test/api/test_tools.py
--- a/test/api/test_tools.py
+++ b/test/api/test_tools.py
@@ -553,6 +553,93 @@
self.assertEquals( output1_content.strip(), "123" )
self.assertEquals( output2_content.strip(), "456" )
+ @skip_without_tool( "identifier_single" )
+ def test_identifier_in_map( self ):
+ history_id = self.dataset_populator.new_history()
+ hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
+ inputs = {
+ "input1": { 'batch': True, 'values': [ { 'src': 'hdca', 'id': hdca_id } ] },
+ }
+ create_response = self._run( "identifier_single", history_id, inputs )
+ self._assert_status_code_is( create_response, 200 )
+ create = create_response.json()
+ outputs = create[ 'outputs' ]
+ jobs = create[ 'jobs' ]
+ implicit_collections = create[ 'implicit_collections' ]
+ self.assertEquals( len( jobs ), 2 )
+ self.assertEquals( len( outputs ), 2 )
+ self.assertEquals( len( implicit_collections ), 1 )
+ output1 = outputs[ 0 ]
+ output2 = outputs[ 1 ]
+ output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+ output2_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output2 )
+ self.assertEquals( output1_content.strip(), "forward" )
+ self.assertEquals( output2_content.strip(), "reverse" )
+
+ @skip_without_tool( "identifier_single" )
+ def test_identifier_outside_map( self ):
+ history_id = self.dataset_populator.new_history()
+ new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+ inputs = {
+ "input1": { 'src': 'hda', 'id': new_dataset1["id"] },
+ }
+ create_response = self._run( "identifier_single", history_id, inputs )
+ self._assert_status_code_is( create_response, 200 )
+ create = create_response.json()
+ outputs = create[ 'outputs' ]
+ jobs = create[ 'jobs' ]
+ implicit_collections = create[ 'implicit_collections' ]
+ self.assertEquals( len( jobs ), 1 )
+ self.assertEquals( len( outputs ), 1 )
+ self.assertEquals( len( implicit_collections ), 0 )
+ output1 = outputs[ 0 ]
+ output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+ self.assertEquals( output1_content.strip(), "Pasted Entry" )
+
+ @skip_without_tool( "identifier_multiple" )
+ def test_identifier_in_multiple_reduce( self ):
+ history_id = self.dataset_populator.new_history()
+ hdca_id = self.__build_pair( history_id, [ "123", "456" ] )
+ inputs = {
+ "input1": { 'src': 'hdca', 'id': hdca_id },
+ }
+ create_response = self._run( "identifier_multiple", history_id, inputs )
+ self._assert_status_code_is( create_response, 200 )
+ create = create_response.json()
+ outputs = create[ 'outputs' ]
+ jobs = create[ 'jobs' ]
+ implicit_collections = create[ 'implicit_collections' ]
+ self.assertEquals( len( jobs ), 1 )
+ self.assertEquals( len( outputs ), 1 )
+ self.assertEquals( len( implicit_collections ), 0 )
+ output1 = outputs[ 0 ]
+ output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+ self.assertEquals( output1_content.strip(), "forward\nreverse" )
+
+ @skip_without_tool( "identifier_multiple" )
+ def test_identifier_with_multiple_normal_datasets( self ):
+ history_id = self.dataset_populator.new_history()
+ new_dataset1 = self.dataset_populator.new_dataset( history_id, content='123' )
+ new_dataset2 = self.dataset_populator.new_dataset( history_id, content='456' )
+ inputs = {
+ "input1": [
+ { 'src': 'hda', 'id': new_dataset1["id"] },
+ { 'src': 'hda', 'id': new_dataset2["id"] }
+ ]
+ }
+ create_response = self._run( "identifier_multiple", history_id, inputs )
+ self._assert_status_code_is( create_response, 200 )
+ create = create_response.json()
+ outputs = create[ 'outputs' ]
+ jobs = create[ 'jobs' ]
+ implicit_collections = create[ 'implicit_collections' ]
+ self.assertEquals( len( jobs ), 1 )
+ self.assertEquals( len( outputs ), 1 )
+ self.assertEquals( len( implicit_collections ), 0 )
+ output1 = outputs[ 0 ]
+ output1_content = self.dataset_populator.get_history_dataset_content( history_id, dataset=output1 )
+ self.assertEquals( output1_content.strip(), "Pasted Entry\nPasted Entry" )
+
@skip_without_tool( "cat1" )
def test_map_over_nested_collections_legacy( self ):
history_id = self.dataset_populator.new_history()
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd test/functional/tools/identifier_multiple.xml
--- /dev/null
+++ b/test/functional/tools/identifier_multiple.xml
@@ -0,0 +1,15 @@
+<tool id="identifier_multiple" name="identifier_multiple">
+ <command>
+ #for $input in $input1#
+ echo '$input.element_identifier' >> 'output1';
+ #end for#
+ </command>
+ <inputs>
+ <param type="data" name="input1" label="Input 1" multiple="true" />
+ </inputs>
+ <outputs>
+ <data name="output1" type="tabular" from_work_dir="output1" />
+ </outputs>
+ <tests>
+ </tests>
+</tool>
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd test/functional/tools/identifier_single.xml
--- /dev/null
+++ b/test/functional/tools/identifier_single.xml
@@ -0,0 +1,13 @@
+<tool id="identifier_single" name="identifier_single">
+ <command>
+ echo '$input1.element_identifier' > 'output1'
+ </command>
+ <inputs>
+ <param type="data" name="input1" label="Input 1" />
+ </inputs>
+ <outputs>
+ <data name="output1" type="tabular" from_work_dir="output1" />
+ </outputs>
+ <tests>
+ </tests>
+</tool>
diff -r ff3c5721f74857cf689966abfdeb0dc9390bf605 -r b9ec027d4b1e1b67f26632a55076dbf53e3225bd test/functional/tools/samples_tool_conf.xml
--- a/test/functional/tools/samples_tool_conf.xml
+++ b/test/functional/tools/samples_tool_conf.xml
@@ -30,6 +30,8 @@
<tool file="validation_default.xml" /><tool file="validation_sanitizer.xml" /><tool file="validation_repeat.xml" />
+ <tool file="identifier_single.xml" />
+ <tool file="identifier_multiple.xml" /><tool file="collection_paired_test.xml" /><tool file="collection_nested_test.xml" /><tool file="collection_mixed_param.xml" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dan: Fix for preventing non-admins from running data managers via the api.
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ff3c5721f748/
Changeset: ff3c5721f748
User: dan
Date: 2015-02-02 20:57:18+00:00
Summary: Fix for preventing non-admins from running data managers via the api.
Affected #: 4 files
diff -r 8e5b9caba8ff493c25684ebdb6f69b819c34dca6 -r ff3c5721f74857cf689966abfdeb0dc9390bf605 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -550,6 +550,12 @@
def get_panel_section( self ):
return self.app.toolbox.get_integrated_section_for_tool( self )
+ def allow_user_access( self, user ):
+ """
+ :returns: bool -- Whether the user is allowed to access the tool.
+ """
+ return True
+
def parse( self, tool_source, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
@@ -2799,6 +2805,7 @@
self.data_manager_id = self.id
def exec_after_process( self, app, inp_data, out_data, param_dict, job=None, **kwds ):
+ assert self.allow_user_access( job.user ), "You must be an admin to access this tool."
#run original exec_after_process
super( DataManagerTool, self ).exec_after_process( app, inp_data, out_data, param_dict, job=job, **kwds )
#process results of tool
@@ -2822,6 +2829,7 @@
return history
user = trans.user
assert user, 'You must be logged in to use this tool.'
+ assert self.allow_user_access( user ), "You must be an admin to access this tool."
history = user.data_manager_histories
if not history:
#create
@@ -2841,6 +2849,17 @@
history = None
return history
+ def allow_user_access( self, user ):
+ """
+ :returns: bool -- Whether the user is allowed to access the tool.
+ Data Manager tools are only accessible to admins.
+ """
+ if super( DataManagerTool, self ).allow_user_access( user ) and self.app.config.is_admin_user( user ):
+ return True
+ if user:
+ user = user.id
+ log.debug( "User (%s) attempted to access a data manager tool (%s), but is not an admin.", user, self.id )
+ return False
# Populate tool_type to ToolClass mappings
tool_types = {}
diff -r 8e5b9caba8ff493c25684ebdb6f69b819c34dca6 -r ff3c5721f74857cf689966abfdeb0dc9390bf605 lib/galaxy/tools/actions/__init__.py
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -155,6 +155,7 @@
submitting the job to the job queue. If history is not specified, use
trans.history as destination for tool's output datasets.
"""
+ assert tool.allow_user_access( trans.user ), "User (%s) is not allowed to access this tool." % ( trans.user )
# Set history.
if not history:
history = tool.get_default_history_by_trans( trans, create=True )
diff -r 8e5b9caba8ff493c25684ebdb6f69b819c34dca6 -r ff3c5721f74857cf689966abfdeb0dc9390bf605 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -67,7 +67,7 @@
"""
io_details = util.string_as_bool( kwd.get( 'io_details', False ) )
link_details = util.string_as_bool( kwd.get( 'link_details', False ) )
- tool = self._get_tool( id )
+ tool = self._get_tool( id, user=trans.user )
return tool.to_dict( trans, io_details=io_details, link_details=link_details )
@_future_expose_api_anonymous
@@ -98,7 +98,7 @@
@_future_expose_api_anonymous
def citations( self, trans, id, **kwds ):
- tool = self._get_tool( id )
+ tool = self._get_tool( id, user=trans.user )
rval = []
for citation in tool.citations:
rval.append( citation.to_dict( 'bibtex' ) )
@@ -132,7 +132,7 @@
# Get tool.
tool_version = payload.get( 'tool_version', None )
tool = trans.app.toolbox.get_tool( payload[ 'tool_id' ] , tool_version ) if 'tool_id' in payload else None
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
trans.response.status = 404
return { "message": { "type": "error", "text" : trans.app.model.Dataset.conversion_messages.NO_TOOL } }
@@ -230,10 +230,10 @@
#
# -- Helper methods --
#
- def _get_tool( self, id, tool_version=None ):
+ def _get_tool( self, id, tool_version=None, user=None ):
id = urllib.unquote_plus( id )
tool = self.app.toolbox.get_tool( id, tool_version )
- if not tool:
+ if not tool or not tool.allow_user_access( user ):
raise exceptions.ObjectNotFound("Could not find tool with id '%s'" % id)
return tool
@@ -300,7 +300,7 @@
#
original_job = self.hda_manager.creating_job( original_dataset )
tool = trans.app.toolbox.get_tool( original_job.tool_id )
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
return trans.app.model.Dataset.conversion_messages.NO_TOOL
tool_params = dict( [ ( p.name, p.value ) for p in original_job.parameters ] )
diff -r 8e5b9caba8ff493c25684ebdb6f69b819c34dca6 -r ff3c5721f74857cf689966abfdeb0dc9390bf605 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -65,7 +65,7 @@
get_loaded_tools_by_lineage=False,
set_selected=refreshed_on_change )
# No tool matching the tool id, display an error (shouldn't happen)
- if not tool:
+ if not tool or not tool.allow_user_access( trans.user ):
log.error( "index called with tool id '%s' but no such tool exists", tool_id )
trans.log_event( "Tool id '%s' does not exist" % tool_id )
trans.response.status = 404
@@ -191,6 +191,8 @@
# This is expected so not an exception.
tool_id_version_message = ''
error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
+ if not tool.allow_user_access( trans.user ):
+ error( "The requested tool is unknown." )
# Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
if not tool.is_workflow_compatible:
error( "The '%s' tool does not currently support rerunning." % tool.name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Core: error with 403 if user is not logged in *and* there is no session (gen. api call with no api key)
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8e5b9caba8ff/
Changeset: 8e5b9caba8ff
User: carlfeberhard
Date: 2015-02-02 20:37:34+00:00
Summary: Core: error with 403 if user is not logged in *and* there is no session (gen. api call with no api key)
Affected #: 1 file
diff -r e56a78608c79097a6227f8c06cba8a45580a2a7d -r 8e5b9caba8ff493c25684ebdb6f69b819c34dca6 lib/galaxy/web/framework/decorators.py
--- a/lib/galaxy/web/framework/decorators.py
+++ b/lib/galaxy/web/framework/decorators.py
@@ -206,7 +206,10 @@
# TODO: Document this branch, when can this happen,
# I don't understand it.
return __api_error_response( trans, err_msg=trans.error_message )
- if user_required and trans.anonymous:
+ # error if user required and anon
+ # error if anon and no session
+ if ( ( trans.anonymous and user_required )
+ or ( trans.anonymous and not trans.galaxy_session ) ):
error_code = error_codes.USER_NO_API_KEY
# Use error codes default error message.
err_msg = "API authentication required for this request"
@@ -219,8 +222,10 @@
return __api_error_response( trans, status_code=400, err_code=error_code )
trans.response.set_content_type( JSON_CONTENT_TYPE )
+
# send 'do not cache' headers to handle IE's caching of ajax get responses
trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
+
# TODO: Refactor next block out into a helper procedure.
# Perform api_run_as processing, possibly changing identity
if 'payload' in kwargs and 'run_as' in kwargs['payload']:
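Since both clauses share trans.anonymous, the new condition reduces to "anonymous and (user required or no session)". A minimal sketch of the same guard, with a hypothetical trans object standing in for Galaxy's transaction:

    def api_authentication_error(trans, user_required):
        """Return an error message when the request must be rejected, else None.

        Mirrors the branch added above: an anonymous caller fails either
        when the endpoint requires a user, or when it carries no Galaxy
        session (the typical API call made without an API key).
        """
        if trans.anonymous and (user_required or not trans.galaxy_session):
            # Corresponds to error_codes.USER_NO_API_KEY in the diff.
            return "API authentication required for this request"
        return None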
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dannon: Merged in nsoranzo/galaxy-central (pull request #656)
by commits-noreply@bitbucket.org 02 Feb '15
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e56a78608c79/
Changeset: e56a78608c79
User: dannon
Date: 2015-02-02 20:22:32+00:00
Summary: Merged in nsoranzo/galaxy-central (pull request #656)
Merge rolling_restart.sh in run.sh by adding --wait parameter.
Affected #: 2 files
diff -r 8d74daa932a06f7bbd3e357f8216c8f58ae55965 -r e56a78608c79097a6227f8c06cba8a45580a2a7d rolling_restart.sh
--- a/rolling_restart.sh
+++ b/rolling_restart.sh
@@ -2,60 +2,4 @@
cd `dirname $0`
-# If there is a .venv/ directory, assume it contains a virtualenv that we
-# should run this instance in.
-if [ -d .venv ];
-then
- . .venv/bin/activate
-fi
-
-python ./scripts/check_python.py
-[ $? -ne 0 ] && exit 1
-
-./scripts/common_startup.sh
-
-if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
- python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
-fi
-
-if [ -z "$GALAXY_CONFIG_FILE" ]; then
- if [ -f universe_wsgi.ini ]; then
- GALAXY_CONFIG_FILE=universe_wsgi.ini
- elif [ -f config/galaxy.ini ]; then
- GALAXY_CONFIG_FILE=config/galaxy.ini
- else
- GALAXY_CONFIG_FILE=config/galaxy.ini.sample
- fi
- export GALAXY_CONFIG_FILE
-fi
-
-servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' $GALAXY_CONFIG_FILE | xargs echo`
-for server in $servers; do
- # If there's a pid
- if [ -e $server.pid ]; then
- # Then kill it
- echo "Killing $server"
- pid=`cat $server.pid`
- kill $pid
- else
- # Otherwise just continue
- echo "$server not running"
- fi
- # Start the server (and background) (should this be nohup'd?)
- python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log --daemon $@
- while true; do
- sleep 1
- printf "."
- # Grab the current pid from the pid file
- if ! current_pid_in_file=$(cat $server.pid); then
- echo "A Galaxy process died, interrupting" >&2
- exit 1
- fi
- # Search for all pids in the logs and tail for the last one
- latest_pid=`egrep '^Starting server in PID [0-9]+\.$' $server.log -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1`
- # If they're equivalent, then the current pid file agrees with our logs
- # and we've succesfully started
- [ -n "$latest_pid" ] && [ $latest_pid -eq $current_pid_in_file ] && break
- done
- echo
-done
+GALAXY_RUN_ALL=1 ./run.sh restart --wait
diff -r 8d74daa932a06f7bbd3e357f8216c8f58ae55965 -r e56a78608c79097a6227f8c06cba8a45580a2a7d run.sh
--- a/run.sh
+++ b/run.sh
@@ -44,15 +44,38 @@
if [ -n "$GALAXY_RUN_ALL" ]; then
servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' $GALAXY_CONFIG_FILE | xargs echo`
- daemon=`echo "$@" | grep -q daemon`
+ echo "$@" | grep -q 'daemon\|restart'
if [ $? -ne 0 ]; then
- echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon` or `--stop-daemon` arguments to run.sh'
+ echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon`, `--stop-daemon` or `restart` arguments to run.sh'
exit 1
fi
+ (echo "$@" | grep -q -e '--daemon\|restart') && (echo "$@" | grep -q -e '--wait')
+ WAIT=$?
+ ARGS=`echo "$@" | sed 's/--wait//'`
for server in $servers; do
- echo "Handling $server with log file $server.log..."
- python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
+ if [ $WAIT -eq 0 ]; then
+ python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $ARGS
+ while true; do
+ sleep 1
+ printf "."
+ # Grab the current pid from the pid file
+ if ! current_pid_in_file=$(cat $server.pid); then
+ echo "A Galaxy process died, interrupting" >&2
+ exit 1
+ fi
+ # Search for all pids in the logs and tail for the last one
+ latest_pid=`egrep '^Starting server in PID [0-9]+\.$' $server.log -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1`
+ # If they're equivalent, then the current pid file agrees with our logs
+ # and we've succesfully started
+ [ -n "$latest_pid" ] && [ $latest_pid -eq $current_pid_in_file ] && break
+ done
+ echo
+ else
+ echo "Handling $server with log file $server.log..."
+ python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
+ fi
done
else
+ # Handle only 1 server, whose name can be specified with --server-name parameter (defaults to "main")
python ./scripts/paster.py serve $GALAXY_CONFIG_FILE $@
fi
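After launching each daemon, the new loop polls until the last "Starting server in PID N." line in the server's log agrees with the pidfile, so with --wait the servers come up one at a time. A rough Python rendering of that wait logic (file names follow the script's <server>.pid / <server>.log convention; this is a sketch, not code from the commit):

    import re
    import time

    PID_LINE = re.compile(r"^Starting server in PID (\d+)\.$", re.MULTILINE)

    def wait_for_server(server):
        """Block until the daemon started for `server` reports itself up."""
        while True:
            time.sleep(1)
            try:
                with open("%s.pid" % server) as f:
                    current_pid = int(f.read().strip())
            except (IOError, ValueError):
                # The pidfile vanished or is empty: the process died.
                raise RuntimeError("A Galaxy process died, interrupting")
            with open("%s.log" % server) as f:
                logged_pids = PID_LINE.findall(f.read())
            # When the newest PID in the log matches the pidfile, the
            # server we just launched has finished starting.
            if logged_pids and int(logged_pids[-1]) == current_pid:
                return

Usage stays as in the diff: GALAXY_RUN_ALL=1 ./run.sh --daemon --wait starts the configured servers sequentially, and rolling_restart.sh is now a one-line wrapper around ./run.sh restart --wait.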
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1333f32a5063/
Changeset: 1333f32a5063
User: nsoranzo
Date: 2015-02-02 15:46:48+00:00
Summary: Allow "GALAXY_RUN_ALL=1 ./run.sh restart" since --daemon is implied.
Affected #: 1 file
diff -r 5aca28ab8cfbbc1769f68fcfacf3dfbf9352ccde -r 1333f32a50639a2d2dfaca327b20520da358721c run.sh
--- a/run.sh
+++ b/run.sh
@@ -44,9 +44,9 @@
if [ -n "$GALAXY_RUN_ALL" ]; then
servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' $GALAXY_CONFIG_FILE | xargs echo`
- daemon=`echo "$@" | grep -q daemon`
+ echo "$@" | grep -q 'daemon\|restart'
if [ $? -ne 0 ]; then
- echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon` or `--stop-daemon` arguments to run.sh'
+ echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon`, `--stop-daemon` or `restart` arguments to run.sh'
exit 1
fi
for server in $servers; do
@@ -54,5 +54,6 @@
python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
done
else
+ # Handle only 1 server, whose name can be specified with --server-name parameter (defaults to "main")
python ./scripts/paster.py serve $GALAXY_CONFIG_FILE $@
fi
https://bitbucket.org/galaxy/galaxy-central/commits/6d11484ceb7d/
Changeset: 6d11484ceb7d
User: nsoranzo
Date: 2015-02-02 16:08:13+00:00
Summary: Merge rolling_restart.sh in run.sh by adding --wait parameter.
Also add the following combination:
GALAXY_RUN_ALL=1 ./run.sh --daemon --wait
which, by preventing servers from starting simultaneously, is useful when
multiple servers are configured before creating the database, or when updating
to a new Galaxy release containing a database upgrade.
Affected #: 2 files
diff -r 1333f32a50639a2d2dfaca327b20520da358721c -r 6d11484ceb7d797f5571df24a147d3f59f8ee553 rolling_restart.sh
--- a/rolling_restart.sh
+++ b/rolling_restart.sh
@@ -1,61 +1,3 @@
#!/bin/sh
-cd `dirname $0`
-
-# If there is a .venv/ directory, assume it contains a virtualenv that we
-# should run this instance in.
-if [ -d .venv ];
-then
- . .venv/bin/activate
-fi
-
-python ./scripts/check_python.py
-[ $? -ne 0 ] && exit 1
-
-./scripts/common_startup.sh
-
-if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
- python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
-fi
-
-if [ -z "$GALAXY_CONFIG_FILE" ]; then
- if [ -f universe_wsgi.ini ]; then
- GALAXY_CONFIG_FILE=universe_wsgi.ini
- elif [ -f config/galaxy.ini ]; then
- GALAXY_CONFIG_FILE=config/galaxy.ini
- else
- GALAXY_CONFIG_FILE=config/galaxy.ini.sample
- fi
- export GALAXY_CONFIG_FILE
-fi
-
-servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' $GALAXY_CONFIG_FILE | xargs echo`
-for server in $servers; do
- # If there's a pid
- if [ -e $server.pid ]; then
- # Then kill it
- echo "Killing $server"
- pid=`cat $server.pid`
- kill $pid
- else
- # Otherwise just continue
- echo "$server not running"
- fi
- # Start the server (and background) (should this be nohup'd?)
- python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log --daemon $@
- while true; do
- sleep 1
- printf "."
- # Grab the current pid from the pid file
- if ! current_pid_in_file=$(cat $server.pid); then
- echo "A Galaxy process died, interrupting" >&2
- exit 1
- fi
- # Search for all pids in the logs and tail for the last one
- latest_pid=`egrep '^Starting server in PID [0-9]+\.$' $server.log -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1`
- # If they're equivalent, then the current pid file agrees with our logs
- # and we've succesfully started
- [ -n "$latest_pid" ] && [ $latest_pid -eq $current_pid_in_file ] && break
- done
- echo
-done
+GALAXY_RUN_ALL=1 ./run.sh restart --wait
diff -r 1333f32a50639a2d2dfaca327b20520da358721c -r 6d11484ceb7d797f5571df24a147d3f59f8ee553 run.sh
--- a/run.sh
+++ b/run.sh
@@ -49,9 +49,31 @@
echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon`, `--stop-daemon` or `restart` arguments to run.sh'
exit 1
fi
+ (echo "$@" | grep -q -e '--daemon\|restart') && (echo "$@" | grep -q -e '--wait')
+ WAIT=$?
+ ARGS=`echo "$@" | sed 's/--wait//'`
for server in $servers; do
- echo "Handling $server with log file $server.log..."
- python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
+ if [ $WAIT -eq 0 ]; then
+ python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $ARGS
+ while true; do
+ sleep 1
+ printf "."
+ # Grab the current pid from the pid file
+ if ! current_pid_in_file=$(cat $server.pid); then
+ echo "A Galaxy process died, interrupting" >&2
+ exit 1
+ fi
+ # Search for all pids in the logs and tail for the last one
+ latest_pid=`egrep '^Starting server in PID [0-9]+\.$' $server.log -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1`
+ # If they're equivalent, then the current pid file agrees with our logs
+ # and we've succesfully started
+ [ -n "$latest_pid" ] && [ $latest_pid -eq $current_pid_in_file ] && break
+ done
+ echo
+ else
+ echo "Handling $server with log file $server.log..."
+ python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
+ fi
done
else
# Handle only 1 server, whose name can be specified with --server-name parameter (defaults to "main")
https://bitbucket.org/galaxy/galaxy-central/commits/d9c2dce96ec4/
Changeset: d9c2dce96ec4
User: nsoranzo
Date: 2015-02-02 17:30:03+00:00
Summary: Restore a line deleted by mistake. Thanks John.
Affected #: 1 file
diff -r 6d11484ceb7d797f5571df24a147d3f59f8ee553 -r d9c2dce96ec4376816a0bf23b4ba956616b63a48 rolling_restart.sh
--- a/rolling_restart.sh
+++ b/rolling_restart.sh
@@ -1,3 +1,5 @@
#!/bin/sh
+cd `dirname $0`
+
GALAXY_RUN_ALL=1 ./run.sh restart --wait
https://bitbucket.org/galaxy/galaxy-central/commits/e56a78608c79/
Changeset: e56a78608c79
User: dannon
Date: 2015-02-02 20:22:32+00:00
Summary: Merged in nsoranzo/galaxy-central (pull request #656)
Merge rolling_restart.sh in run.sh by adding --wait parameter.
Affected #: 2 files
diff -r 8d74daa932a06f7bbd3e357f8216c8f58ae55965 -r e56a78608c79097a6227f8c06cba8a45580a2a7d rolling_restart.sh
--- a/rolling_restart.sh
+++ b/rolling_restart.sh
@@ -2,60 +2,4 @@
cd `dirname $0`
-# If there is a .venv/ directory, assume it contains a virtualenv that we
-# should run this instance in.
-if [ -d .venv ];
-then
- . .venv/bin/activate
-fi
-
-python ./scripts/check_python.py
-[ $? -ne 0 ] && exit 1
-
-./scripts/common_startup.sh
-
-if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
- python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
-fi
-
-if [ -z "$GALAXY_CONFIG_FILE" ]; then
- if [ -f universe_wsgi.ini ]; then
- GALAXY_CONFIG_FILE=universe_wsgi.ini
- elif [ -f config/galaxy.ini ]; then
- GALAXY_CONFIG_FILE=config/galaxy.ini
- else
- GALAXY_CONFIG_FILE=config/galaxy.ini.sample
- fi
- export GALAXY_CONFIG_FILE
-fi
-
-servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' $GALAXY_CONFIG_FILE | xargs echo`
-for server in $servers; do
- # If there's a pid
- if [ -e $server.pid ]; then
- # Then kill it
- echo "Killing $server"
- pid=`cat $server.pid`
- kill $pid
- else
- # Otherwise just continue
- echo "$server not running"
- fi
- # Start the server (and background) (should this be nohup'd?)
- python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log --daemon $@
- while true; do
- sleep 1
- printf "."
- # Grab the current pid from the pid file
- if ! current_pid_in_file=$(cat $server.pid); then
- echo "A Galaxy process died, interrupting" >&2
- exit 1
- fi
- # Search for all pids in the logs and tail for the last one
- latest_pid=`egrep '^Starting server in PID [0-9]+\.$' $server.log -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1`
- # If they're equivalent, then the current pid file agrees with our logs
- # and we've succesfully started
- [ -n "$latest_pid" ] && [ $latest_pid -eq $current_pid_in_file ] && break
- done
- echo
-done
+GALAXY_RUN_ALL=1 ./run.sh restart --wait
diff -r 8d74daa932a06f7bbd3e357f8216c8f58ae55965 -r e56a78608c79097a6227f8c06cba8a45580a2a7d run.sh
--- a/run.sh
+++ b/run.sh
@@ -44,15 +44,38 @@
if [ -n "$GALAXY_RUN_ALL" ]; then
servers=`sed -n 's/^\[server:\(.*\)\]/\1/ p' $GALAXY_CONFIG_FILE | xargs echo`
- daemon=`echo "$@" | grep -q daemon`
+ echo "$@" | grep -q 'daemon\|restart'
if [ $? -ne 0 ]; then
- echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon` or `--stop-daemon` arguments to run.sh'
+ echo 'ERROR: $GALAXY_RUN_ALL cannot be used without the `--daemon`, `--stop-daemon` or `restart` arguments to run.sh'
exit 1
fi
+ (echo "$@" | grep -q -e '--daemon\|restart') && (echo "$@" | grep -q -e '--wait')
+ WAIT=$?
+ ARGS=`echo "$@" | sed 's/--wait//'`
for server in $servers; do
- echo "Handling $server with log file $server.log..."
- python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
+ if [ $WAIT -eq 0 ]; then
+ python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $ARGS
+ while true; do
+ sleep 1
+ printf "."
+ # Grab the current pid from the pid file
+ if ! current_pid_in_file=$(cat $server.pid); then
+ echo "A Galaxy process died, interrupting" >&2
+ exit 1
+ fi
+ # Search for all pids in the logs and tail for the last one
+ latest_pid=`egrep '^Starting server in PID [0-9]+\.$' $server.log -o | sed 's/Starting server in PID //g;s/\.$//g' | tail -n 1`
+ # If they're equivalent, then the current pid file agrees with our logs
+ # and we've succesfully started
+ [ -n "$latest_pid" ] && [ $latest_pid -eq $current_pid_in_file ] && break
+ done
+ echo
+ else
+ echo "Handling $server with log file $server.log..."
+ python ./scripts/paster.py serve $GALAXY_CONFIG_FILE --server-name=$server --pid-file=$server.pid --log-file=$server.log $@
+ fi
done
else
+ # Handle only 1 server, whose name can be specified with --server-name parameter (defaults to "main")
python ./scripts/paster.py serve $GALAXY_CONFIG_FILE $@
fi
Repository URL: https://bitbucket.org/galaxy/galaxy-central/