galaxy-commits
April 2013
- 1 participant
- 197 discussions
commit/galaxy-central: dannon: Fix more broken tooltips from the tipsy->bootstrap.tooltip() swap
by commits-noreply@bitbucket.org 17 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/57b64ceac4b5/
Changeset: 57b64ceac4b5
User: dannon
Date: 2013-04-17 14:40:24
Summary: Fix more broken tooltips from the tipsy->bootstrap.tooltip() swap
Affected #: 1 file
diff -r ec36ff4ec0f6baf7c6dc7af436b70a711cab94aa -r 57b64ceac4b5eaef10eb0f6c3cf94a87646901c2 templates/webapps/galaxy/workflow/editor.mako
--- a/templates/webapps/galaxy/workflow/editor.mako
+++ b/templates/webapps/galaxy/workflow/editor.mako
@@ -1015,7 +1015,7 @@
## Workflow name.
<div id="workflow-name-area" class="form-row"><label>Name:</label>
- <span id="workflow-name" class="tooltip editable-text" original-title="Click to rename workflow">${h.to_unicode( stored.name ) | h}</span>
+ <span id="workflow-name" class="tooltip editable-text" title="Click to rename workflow">${h.to_unicode( stored.name ) | h}</span></div>
## Workflow tags.
<%namespace file="/tagging_common.mako" import="render_individual_tagging_element" />
@@ -1037,7 +1037,7 @@
## Annotation elt.
<div id="workflow-annotation-area" class="form-row"><label>Annotation / Notes:</label>
- <div id="workflow-annotation" class="tooltip editable-text" original-title="Click to edit annotation">
+ <div id="workflow-annotation" class="tooltip editable-text" title="Click to edit annotation">
%if annotation:
${h.to_unicode( annotation ) | h}
%else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
7 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/83b09f742d31/
Changeset: 83b09f742d31
User: dannon
Date: 2013-04-17 01:01:32
Summary: Refactor api auth logic, again. This should resolve remote_user API issues and correctly allow access by key or session. This also re-allows access to the API *without* a session in the case of key auth -- probably need to review and make sure we fail gracefully if we were counting on that anywhere
Affected #: 2 files
diff -r 46133ca43f322fe8b042ac0c0b393fe72579cecc -r 83b09f742d3134168ef3925c712e1c0e54f0aeb7 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -112,34 +112,25 @@
def expose_api_anonymous( func, to_json=True ):
"""
- Expose this function via the API but don't require an API key.
+ Expose this function via the API but don't require a set user.
"""
- return expose_api( func, to_json=to_json, key_required=False )
+ return expose_api( func, to_json=to_json, user_required=False )
-def expose_api( func, to_json=True, key_required=True ):
+def expose_api( func, to_json=True, user_required=True ):
+ """
+ Expose this function via the API.
+ """
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
def error( environ, start_response ):
start_response( error_status, [('Content-type', 'text/plain')] )
return error_message
error_status = '403 Forbidden'
- #If no key supplied, we use the existing session which may be an anonymous user.
- if key_required and not trans.user:
- try:
- if 'key' not in kwargs:
- raise NoResultFound( 'No key provided' )
- provided_key = trans.sa_session.query( trans.app.model.APIKeys ).filter( trans.app.model.APIKeys.table.c.key == kwargs['key'] ).one()
- except NoResultFound:
- error_message = 'Provided API key is not valid.'
- return error
- if provided_key.user.deleted:
- error_message = 'User account is deactivated, please contact an administrator.'
- return error
- newest_key = provided_key.user.api_keys[0]
- if newest_key.key != provided_key.key:
- error_message = 'Provided API key has expired.'
- return error
- trans.set_user( provided_key.user )
+ if trans.error_message:
+ return trans.error_message
+ if user_required and trans.user is None:
+ error_message = "API Authentication Required for this request"
+ return error
if trans.request.body:
def extract_payload_from_request(trans, func, kwargs):
content_type = trans.request.headers['content-type']
@@ -238,6 +229,7 @@
def form( *args, **kwargs ):
return FormBuilder( *args, **kwargs )
+
class WebApplication( base.WebApplication ):
def __init__( self, galaxy_app, session_cookie='galaxysession', name=None ):
@@ -326,6 +318,7 @@
controller_name = getattr( T, "controller_name", name )
self.add_api_controller( controller_name, T( app ) )
+
class GalaxyWebTransaction( base.DefaultWebTransaction ):
"""
Encapsulates web transaction specific state for the Galaxy application
@@ -346,13 +339,26 @@
# Flag indicating whether this is an API call and the API key user is an administrator
self.api_inherit_admin = False
self.__user = None
- # Always have a valid galaxy session
- self._ensure_valid_session( session_cookie )
- # Prevent deleted users from accessing Galaxy
- if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
- self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
- if self.app.config.require_login:
- self._ensure_logged_in_user( environ, session_cookie )
+ self.galaxy_session = None
+ self.error_message = None
+ if self.environ.get('is_api_request', False):
+ # With API requests, if there's a key, use it and associate the
+ # user with the transaction.
+ # If not, check for an active session but do not create one.
+ # If an error message is set here, it's sent back using
+ # trans.show_error in the response -- in expose_api.
+ self.error_message = self._authenticate_api( session_cookie )
+ else:
+ #This is a web request, get or create session.
+ self._ensure_valid_session( session_cookie )
+ if self.galaxy_session:
+ # When we've authenticated by session, we have to check the
+ # following.
+ # Prevent deleted users from accessing Galaxy
+ if self.app.config.use_remote_user and self.galaxy_session.user.deleted:
+ self.response.send_redirect( url_for( '/static/user_disabled.html' ) )
+ if self.app.config.require_login:
+ self._ensure_logged_in_user( environ, session_cookie )
def setup_i18n( self ):
locales = []
@@ -382,16 +388,17 @@
def get_user( self ):
"""Return the current user if logged in or None."""
- if self.__user:
+ if self.galaxy_session:
+ return self.galaxy_session.user
+ else:
return self.__user
- else:
- return self.galaxy_session.user
def set_user( self, user ):
"""Set the current user."""
- self.galaxy_session.user = user
- self.sa_session.add( self.galaxy_session )
- self.sa_session.flush()
+ if self.galaxy_session:
+ self.galaxy_session.user = user
+ self.sa_session.add( self.galaxy_session )
+ self.sa_session.flush()
self.__user = user
user = property( get_user, set_user )
@@ -473,6 +480,33 @@
except CookieError, e:
log.warning( "Error setting httponly attribute in cookie '%s': %s" % ( name, e ) )
+ def _authenticate_api( self, session_cookie ):
+ """
+ Authenticate for the API via key or session (if available).
+ """
+ api_key = self.request.params.get('key', None)
+ secure_id = self.get_cookie( name=session_cookie )
+ if self.environ.get('is_api_request', False) and api_key:
+ # Sessionless API transaction, we just need to associate a user.
+ try:
+ provided_key = self.sa_session.query( self.app.model.APIKeys ).filter( self.app.model.APIKeys.table.c.key == api_key ).one()
+ except NoResultFound:
+ return 'Provided API key is not valid.'
+ if provided_key.user.deleted:
+ return 'User account is deactivated, please contact an administrator.'
+ newest_key = provided_key.user.api_keys[0]
+ if newest_key.key != provided_key.key:
+ return 'Provided API key has expired.'
+ self.set_user( provided_key.user )
+ elif secure_id:
+ # API authentication via active session
+ # Associate user using existing session
+ self._ensure_valid_session( session_cookie )
+ else:
+ # Anonymous API interaction -- anything but @expose_api_anonymous will fail past here.
+ self.user = None
+ self.galaxy_session = None
+
def _ensure_valid_session( self, session_cookie, create=True):
"""
Ensure that a valid Galaxy session exists and is available as
@@ -505,26 +539,27 @@
.filter( and_( self.app.model.GalaxySession.table.c.session_key==session_key,
self.app.model.GalaxySession.table.c.is_valid==True ) ) \
.first()
- # If remote user is in use it can invalidate the session, so we need to to check some things now.
- if self.app.config.use_remote_user:
- assert "HTTP_REMOTE_USER" in self.environ, \
- "use_remote_user is set but no HTTP_REMOTE_USER variable"
- remote_user_email = self.environ[ 'HTTP_REMOTE_USER' ]
- if galaxy_session:
- # An existing session, make sure correct association exists
- if galaxy_session.user is None:
- # No user, associate
- galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
- galaxy_session_requires_flush = True
- elif galaxy_session.user.email != remote_user_email:
- # Session exists but is not associated with the correct remote user
- invalidate_existing_session = True
+ # If remote user is in use it can invalidate the session, so we need to to check some things now.
+ if self.app.config.use_remote_user:
+ #If this is an api request, and they've passed a key, we let this go.
+ assert "HTTP_REMOTE_USER" in self.environ, \
+ "use_remote_user is set but no HTTP_REMOTE_USER variable"
+ remote_user_email = self.environ[ 'HTTP_REMOTE_USER' ]
+ if galaxy_session:
+ # An existing session, make sure correct association exists
+ if galaxy_session.user is None:
+ # No user, associate
+ galaxy_session.user = self.get_or_create_remote_user( remote_user_email )
+ galaxy_session_requires_flush = True
+ elif galaxy_session.user.email != remote_user_email:
+ # Session exists but is not associated with the correct remote user
+ invalidate_existing_session = True
+ user_for_new_session = self.get_or_create_remote_user( remote_user_email )
+ log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
+ remote_user_email, galaxy_session.user.email )
+ else:
+ # No session exists, get/create user for new session
user_for_new_session = self.get_or_create_remote_user( remote_user_email )
- log.warning( "User logged in as '%s' externally, but has a cookie as '%s' invalidating session",
- remote_user_email, galaxy_session.user.email )
- else:
- # No session exists, get/create user for new session
- user_for_new_session = self.get_or_create_remote_user( remote_user_email )
else:
if galaxy_session is not None and galaxy_session.user and galaxy_session.user.external:
# Remote user support is not enabled, but there is an existing
@@ -605,6 +640,7 @@
pass
if self.request.path not in allowed_paths:
self.response.send_redirect( url_for( controller='root', action='index' ) )
+
def __create_new_session( self, prev_galaxy_session=None, user_for_new_session=None ):
"""
Create a new GalaxySession for this request, possibly with a connection
@@ -627,6 +663,7 @@
# The new session should be associated with the user
galaxy_session.user = user_for_new_session
return galaxy_session
+
def get_or_create_remote_user( self, remote_user_email ):
"""
Create a remote user with the email remote_user_email and return it
@@ -671,11 +708,13 @@
self.app.security_agent.user_set_default_permissions( user )
#self.log_event( "Automatically created account '%s'", user.email )
return user
+
def __update_session_cookie( self, name='galaxysession' ):
"""
Update the session cookie to match the current session.
"""
self.set_cookie( self.security.encode_guid( self.galaxy_session.session_key ), name=name, path=self.app.config.cookie_path )
+
def handle_user_login( self, user ):
"""
Login a new user (possibly newly created)
@@ -701,9 +740,9 @@
except:
users_last_session = None
last_accessed = False
- if prev_galaxy_session.current_history and \
- not prev_galaxy_session.current_history.deleted and \
- prev_galaxy_session.current_history.datasets:
+ if (prev_galaxy_session.current_history and not
+ prev_galaxy_session.current_history.deleted and
+ prev_galaxy_session.current_history.datasets):
if prev_galaxy_session.current_history.user is None or prev_galaxy_session.current_history.user == user:
# If the previous galaxy session had a history, associate it with the new
# session, but only if it didn't belong to a different user.
@@ -714,10 +753,9 @@
user.total_disk_usage += hda.quota_amount( user )
elif self.galaxy_session.current_history:
history = self.galaxy_session.current_history
- if not history and \
- users_last_session and \
- users_last_session.current_history and \
- not users_last_session.current_history.deleted:
+ if (not history and users_last_session and
+ users_last_session.current_history and not
+ users_last_session.current_history.deleted):
history = users_last_session.current_history
elif not history:
history = self.get_history( create=True )
@@ -736,6 +774,8 @@
self.sa_session.flush()
# This method is not called from the Galaxy reports, so the cookie will always be galaxysession
self.__update_session_cookie( name=cookie_name )
+
+
def handle_user_logout( self, logout_all=False ):
"""
Logout the current user:
@@ -988,6 +1028,7 @@
return True
return False
+
class FormBuilder( object ):
"""
Simple class describing an HTML form
@@ -999,17 +1040,22 @@
self.submit_text = submit_text
self.inputs = []
self.use_panels = use_panels
+
def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
return self
+
def add_text( self, name, label, value=None, error=None, help=None ):
return self.add_input( 'text', label, name, value, error, help )
+
def add_password( self, name, label, value=None, error=None, help=None ):
return self.add_input( 'password', label, name, value, error, help )
+
def add_select( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
self.inputs.append( SelectInput( name, label, value=value, options=options, error=error, help=help, use_label=use_label ) )
return self
+
class FormInput( object ):
"""
Simple class describing a form input element
@@ -1023,12 +1069,14 @@
self.help = help
self.use_label = use_label
+
class SelectInput( FormInput ):
""" A select form input. """
def __init__( self, name, label, value=None, options=[], error=None, help=None, use_label=True ):
FormInput.__init__( self, "select", name, label, value=value, error=error, help=help, use_label=use_label )
self.options = options
+
class FormData( object ):
"""
Class for passing data about a form to a template, very rudimentary, could
@@ -1038,6 +1086,7 @@
self.values = Bunch()
self.errors = Bunch()
+
class Bunch( dict ):
"""
Bunch based on a dict
@@ -1047,3 +1096,4 @@
return self[key]
def __setattr__( self, key, value ):
self[key] = value
+
diff -r 46133ca43f322fe8b042ac0c0b393fe72579cecc -r 83b09f742d3134168ef3925c712e1c0e54f0aeb7 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -138,6 +138,7 @@
environ[ 'is_api_request' ] = True
controllers = self.api_controllers
else:
+ environ[ 'is_api_request' ] = False
controllers = self.controllers
if map == None:
raise httpexceptions.HTTPNotFound( "No route for " + path_info )
https://bitbucket.org/galaxy/galaxy-central/commits/bf323c3cf3c2/
Changeset: bf323c3cf3c2
User: dannon
Date: 2013-04-17 01:03:39
Summary: Cleanup spacing in framework/base.py
Affected #: 1 file
diff -r 83b09f742d3134168ef3925c712e1c0e54f0aeb7 -r bf323c3cf3c28ebbecb7491abb87e58bf7f7a028 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -44,20 +44,21 @@
self.resource( member_name, collection_name, **kwargs )
routes.Mapper.resource_with_deleted = __resource_with_deleted
+
class WebApplication( object ):
"""
A simple web application which maps requests to objects using routes,
- and to methods on those objects in the CherryPy style. Thus simple
+ and to methods on those objects in the CherryPy style. Thus simple
argument mapping in the CherryPy style occurs automatically, but more
complicated encoding of arguments in the PATH_INFO can be performed
with routes.
"""
def __init__( self ):
"""
- Create a new web application object. To actually connect some
- controllers use `add_controller` and `add_route`. Call
+ Create a new web application object. To actually connect some
+ controllers use `add_controller` and `add_route`. Call
`finalize_config` when all controllers and routes have been added
- and `__call__` to handle a request (WSGI style).
+ and `__call__` to handle a request (WSGI style).
"""
self.controllers = dict()
self.api_controllers = dict()
@@ -69,36 +70,41 @@
self.transaction_factory = DefaultWebTransaction
# Set if trace logging is enabled
self.trace_logger = None
+
def add_ui_controller( self, controller_name, controller ):
"""
Add a controller class to this application. A controller class has
methods which handle web requests. To connect a URL to a controller's
method use `add_route`.
"""
- log.debug( "Enabling '%s' controller, class: %s",
+ log.debug( "Enabling '%s' controller, class: %s",
controller_name, controller.__class__.__name__ )
self.controllers[ controller_name ] = controller
+
def add_api_controller( self, controller_name, controller ):
log.debug( "Enabling '%s' API controller, class: %s",
controller_name, controller.__class__.__name__ )
self.api_controllers[ controller_name ] = controller
+
def add_route( self, route, **kwargs ):
"""
Add a route to match a URL with a method. Accepts all keyword
arguments of `routes.Mapper.connect`. Every route should result in
- at least a controller value which corresponds to one of the
- objects added with `add_controller`. It optionally may yield an
+ at least a controller value which corresponds to one of the
+ objects added with `add_controller`. It optionally may yield an
`action` argument which will be used to locate the method to call
on the controller. Additional arguments will be passed to the
- method as keyword args.
+ method as keyword args.
"""
self.mapper.connect( route, **kwargs )
+
def set_transaction_factory( self, transaction_factory ):
"""
Use the callable `transaction_factory` to create the transaction
which will be passed to requests.
"""
self.transaction_factory = transaction_factory
+
def finalize_config( self ):
"""
Call when application is completely configured and ready to serve
@@ -106,6 +112,7 @@
"""
# Create/compile the regular expressions for route mapping
self.mapper.create_regs( self.controllers.keys() )
+
def trace( self, **fields ):
if self.trace_logger:
self.trace_logger.log( "WebApplication", **fields )
@@ -173,7 +180,7 @@
raise httpexceptions.HTTPNotFound( "Action not exposed for " + path_info )
# Is the method callable
if not callable( method ):
- raise httpexceptions.HTTPNotFound( "Action not callable for " + path_info )
+ raise httpexceptions.HTTPNotFound( "Action not callable for " + path_info )
# Combine mapper args and query string / form args and call
kwargs = trans.request.params.mixed()
kwargs.update( map )
@@ -196,14 +203,14 @@
elif isinstance( body, tarfile.ExFileObject ):
# Stream the tarfile member back to the browser
body = iterate_file( body )
- start_response( trans.response.wsgi_status(),
+ start_response( trans.response.wsgi_status(),
trans.response.wsgi_headeritems() )
return body
else:
- start_response( trans.response.wsgi_status(),
+ start_response( trans.response.wsgi_status(),
trans.response.wsgi_headeritems() )
return self.make_body_iterable( trans, body )
-
+
def make_body_iterable( self, trans, body ):
if isinstance( body, ( types.GeneratorType, list, tuple ) ):
# Recursively stream the iterable
@@ -223,7 +230,8 @@
Allow handling of exceptions raised in controller methods.
"""
return False
-
+
+
class WSGIEnvironmentProperty( object ):
"""
Descriptor that delegates a property to a key in the environ member of the
@@ -237,6 +245,7 @@
if obj is None: return self
return obj.environ.get( self.key, self.default )
+
class LazyProperty( object ):
"""
Property that replaces itself with a calculated value the first time
@@ -250,12 +259,13 @@
setattr( obj, self.func.func_name, value )
return value
lazy_property = LazyProperty
-
+
+
class DefaultWebTransaction( object ):
"""
- Wraps the state of a single web transaction (request/response cycle).
+ Wraps the state of a single web transaction (request/response cycle).
- TODO: Provide hooks to allow application specific state to be included
+ TODO: Provide hooks to allow application specific state to be included
in here.
"""
def __init__( self, environ ):
@@ -274,7 +284,7 @@
return self.environ['beaker.session']
else:
return None
-
+
# For request.params, override cgi.FieldStorage.make_file to create persistent
# tempfiles. Necessary for externalizing the upload tool. It's a little hacky
# but for performance reasons it's way better to use Paste's tempfile than to
@@ -290,12 +300,13 @@
if self.outerboundary:
self.read_lines_to_outerboundary()
else:
- self.read_lines_to_eof()
+ self.read_lines_to_eof()
cgi.FieldStorage = FieldStorage
+
class Request( webob.Request ):
"""
- Encapsulates an HTTP request.
+ Encapsulates an HTTP request.
"""
def __init__( self, environ ):
"""
@@ -330,7 +341,7 @@
return self.environ['SCRIPT_NAME'] + self.environ['PATH_INFO']
@lazy_property
def browser_url( self ):
- return self.base + self.path
+ return self.base + self.path
# Descriptors that map properties to the associated environment
## scheme = WSGIEnvironmentProperty( 'wsgi.url_scheme' )
## remote_addr = WSGIEnvironmentProperty( 'REMOTE_ADDR' )
@@ -341,6 +352,7 @@
## query_string = WSGIEnvironmentProperty( 'QUERY_STRING' )
## path_info = WSGIEnvironmentProperty( 'PATH_INFO' )
+
class Response( object ):
"""
Describes an HTTP response. Currently very simple since the actual body
@@ -353,18 +365,22 @@
self.status = "200 OK"
self.headers = HeaderDict( { "content-type": "text/html" } )
self.cookies = SimpleCookie()
+
def set_content_type( self, type ):
"""
Sets the Content-Type header
"""
self.headers[ "content-type" ] = type
+
def get_content_type( self ):
return self.headers[ "content-type" ]
+
def send_redirect( self, url ):
"""
Send an HTTP redirect response to (target `url`)
"""
raise httpexceptions.HTTPFound( url, headers=self.wsgi_headeritems() )
+
def wsgi_headeritems( self ):
"""
Return headers in format appropriate for WSGI `start_response`
@@ -376,10 +392,11 @@
header, value = str( crumb ).split( ': ', 1 )
result.append( ( header, value ) )
return result
+
def wsgi_status( self ):
"""
Return status line in format appropriate for WSGI `start_response`
- """
+ """
if isinstance( self.status, int ):
exception = httpexceptions.get_exception( self.status )
return "%d %s" % ( exception.code, exception.title )
@@ -404,7 +421,7 @@
# Fall back on sending the file in chunks
else:
body = iterate_file( body )
- start_response( trans.response.wsgi_status(),
+ start_response( trans.response.wsgi_status(),
trans.response.wsgi_headeritems() )
return body
https://bitbucket.org/galaxy/galaxy-central/commits/356965791008/
Changeset: 356965791008
User: dannon
Date: 2013-04-17 01:06:42
Summary: Cleanup in framework/__init__.py
Affected #: 1 file
diff -r bf323c3cf3c28ebbecb7491abb87e58bf7f7a028 -r 356965791008f24aa26dc8043259765dcc0e7f7e lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -713,7 +713,8 @@
"""
Update the session cookie to match the current session.
"""
- self.set_cookie( self.security.encode_guid( self.galaxy_session.session_key ), name=name, path=self.app.config.cookie_path )
+ self.set_cookie( self.security.encode_guid(self.galaxy_session.session_key ),
+ name=name, path=self.app.config.cookie_path )
def handle_user_login( self, user ):
"""
@@ -800,11 +801,13 @@
self.__update_session_cookie( name='galaxysession' )
elif self.webapp.name == 'tool_shed':
self.__update_session_cookie( name='galaxycommunitysession' )
+
def get_galaxy_session( self ):
"""
Return the current galaxy session
"""
return self.galaxy_session
+
def get_history( self, create=False ):
"""
Load the current history, creating a new one only if there is not
@@ -819,12 +822,15 @@
log.debug( "This request returned None from get_history(): %s" % self.request.browser_url )
return None
return history
+
def set_history( self, history ):
if history and not history.deleted:
self.galaxy_session.current_history = history
self.sa_session.add( self.galaxy_session )
self.sa_session.flush()
+
history = property( get_history, set_history )
+
def new_history( self, name=None ):
"""
Create a new history and associate it with the current session and
@@ -849,6 +855,7 @@
self.sa_session.add_all( ( self.galaxy_session, history ) )
self.sa_session.flush()
return history
+
def get_current_user_roles( self ):
user = self.get_user()
if user:
@@ -856,27 +863,34 @@
else:
roles = []
return roles
+
def user_is_admin( self ):
if self.api_inherit_admin:
return True
admin_users = [ x.strip() for x in self.app.config.get( "admin_users", "" ).split( "," ) ]
return self.user and admin_users and self.user.email in admin_users
+
def user_can_do_run_as( self ):
run_as_users = self.app.config.get( "api_allow_run_as", "" ).split( "," )
return self.user and run_as_users and self.user.email in run_as_users
+
def get_toolbox(self):
"""Returns the application toolbox"""
return self.app.toolbox
+
@base.lazy_property
def template_context( self ):
return dict()
+
@property
def model( self ):
return self.app.model
+
def make_form_data( self, name, **kwargs ):
rval = self.template_context[name] = FormData()
rval.values.update( kwargs )
return rval
+
def set_message( self, message, type=None ):
"""
Convenience method for setting the 'message' and 'message_type'
@@ -885,12 +899,14 @@
self.template_context['message'] = message
if type:
self.template_context['status'] = type
+
def get_message( self ):
"""
Convenience method for getting the 'message' element of the template
context.
"""
return self.template_context['message']
+
def show_message( self, message, type='info', refresh_frames=[], cont=None, use_panels=False, active_view="" ):
"""
Convenience method for displaying a simple page with a single message.
@@ -902,21 +918,25 @@
refreshed when the message is displayed
"""
return self.fill_template( "message.mako", status=type, message=message, refresh_frames=refresh_frames, cont=cont, use_panels=use_panels, active_view=active_view )
+
def show_error_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an error message. See `show_message`.
"""
return self.show_message( message, 'error', refresh_frames, use_panels=use_panels, active_view=active_view )
+
def show_ok_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an ok message. See `show_message`.
"""
return self.show_message( message, 'done', refresh_frames, use_panels=use_panels, active_view=active_view )
+
def show_warn_message( self, message, refresh_frames=[], use_panels=False, active_view="" ):
"""
Convenience method for displaying an warn message. See `show_message`.
"""
return self.show_message( message, 'warning', refresh_frames, use_panels=use_panels, active_view=active_view )
+
def show_form( self, form, header=None, template="form.mako", use_panels=False, active_view="" ):
"""
Convenience method for displaying a simple page with a single HTML
@@ -924,6 +944,7 @@
"""
return self.fill_template( template, form=form, header=header, use_panels=( form.use_panels or use_panels ),
active_view=active_view )
+
def fill_template(self, filename, **kwargs):
"""
Fill in a template, putting any keyword arguments on the context.
@@ -937,6 +958,7 @@
template = Template( file=os.path.join(self.app.config.template_path, filename),
searchList=[kwargs, self.template_context, dict(caller=self, t=self, h=helpers, util=util, request=self.request, response=self.response, app=self.app)] )
return str( template )
+
def fill_template_mako( self, filename, **kwargs ):
template = self.webapp.mako_template_lookup.get_template( filename )
template.output_encoding = 'utf-8'
@@ -944,6 +966,7 @@
data.update( self.template_context )
data.update( kwargs )
return template.render( **data )
+
def stream_template_mako( self, filename, **kwargs ):
template = self.webapp.mako_template_lookup.get_template( filename )
template.output_encoding = 'utf-8'
@@ -961,6 +984,7 @@
template.render_context( context )
return []
return render
+
def fill_template_string(self, template_string, context=None, **kwargs):
"""
Fill in a template, putting any keyword arguments on the context.
https://bitbucket.org/galaxy/galaxy-central/commits/18c73eeb77fe/
Changeset: 18c73eeb77fe
User: dannon
Date: 2013-04-17 01:07:02
Summary: Merge with galaxy-central
Affected #: 1 file
diff -r 356965791008f24aa26dc8043259765dcc0e7f7e -r 18c73eeb77feba055b2240412006dc3ce2e49fb0 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -770,6 +770,10 @@
else:
pos = source.tell()
+ # If last line is a comment, there are no data lines.
+ if line.startswith( "#" ):
+ return []
+
# Match chrom naming format.
if line:
dataset_chrom = line.split()[0]
@@ -1272,7 +1276,7 @@
class IntervalIndexDataProvider( FilterableMixin, GenomeDataProvider ):
"""
- Interval index files used only for GFF files.
+ Interval index files used for GFF, Pileup files.
"""
col_name_data_attr_mapping = { 4 : { 'index': 4 , 'name' : 'Score' } }
@@ -1282,20 +1286,26 @@
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
out = open( filename, 'w' )
-
+
for region in regions:
# Write data from region.
chrom = region.chrom
start = region.start
end = region.end
- for start, end, offset in index.find(chrom, start, end):
+ for start, end, offset in index.find( chrom, start, end ):
source.seek( offset )
-
- reader = GFFReaderWrapper( source, fix_strand=True )
- feature = reader.next()
- for interval in feature.intervals:
- out.write( '\t'.join( interval.fields ) + '\n' )
-
+
+ # HACK: write differently depending on original dataset format.
+ if self.original_dataset.ext not in [ 'gff', 'gff3', 'gtf' ]:
+ line = source.readline()
+ out.write( line )
+ else:
+ reader = GFFReaderWrapper( source, fix_strand=True )
+ feature = reader.next()
+ for interval in feature.intervals:
+ out.write( '\t'.join( interval.fields ) + '\n' )
+
+ source.close()
out.close()
def get_iterator( self, chrom, start, end, **kwargs ):
https://bitbucket.org/galaxy/galaxy-central/commits/94caae7433a7/
Changeset: 94caae7433a7
User: dannon
Date: 2013-04-17 03:38:38
Summary: Disable select2 for Input Dataset steps on the run workflow page.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/60d7b1a9ebec/
Changeset: 60d7b1a9ebec
User: dannon
Date: 2013-04-17 03:47:17
Summary: Update multiselect tooltip, now functional. Was still using tipsy's 'original-title' attribute.
Affected #: 1 file
Diff not available.
https://bitbucket.org/galaxy/galaxy-central/commits/ec36ff4ec0f6/
Changeset: ec36ff4ec0f6
User: dannon
Date: 2013-04-17 04:27:07
Summary: Add tooltips for (un)linking icon on run workflow page.
Affected #: 1 file
Diff not available.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Enable IntervalIndex data provider to work with pileup datasets.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/98b6216e42f5/
Changeset: 98b6216e42f5
User: jgoecks
Date: 2013-04-17 01:01:50
Summary: Enable IntervalIndex data provider to work with pileup datasets.
Affected #: 1 file
diff -r 46133ca43f322fe8b042ac0c0b393fe72579cecc -r 98b6216e42f5beec89052cccf7c98820b978bf92 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -770,6 +770,10 @@
else:
pos = source.tell()
+ # If last line is a comment, there are no data lines.
+ if line.startswith( "#" ):
+ return []
+
# Match chrom naming format.
if line:
dataset_chrom = line.split()[0]
@@ -1272,7 +1276,7 @@
class IntervalIndexDataProvider( FilterableMixin, GenomeDataProvider ):
"""
- Interval index files used only for GFF files.
+ Interval index files used for GFF, Pileup files.
"""
col_name_data_attr_mapping = { 4 : { 'index': 4 , 'name' : 'Score' } }
@@ -1282,20 +1286,26 @@
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
out = open( filename, 'w' )
-
+
for region in regions:
# Write data from region.
chrom = region.chrom
start = region.start
end = region.end
- for start, end, offset in index.find(chrom, start, end):
+ for start, end, offset in index.find( chrom, start, end ):
source.seek( offset )
-
- reader = GFFReaderWrapper( source, fix_strand=True )
- feature = reader.next()
- for interval in feature.intervals:
- out.write( '\t'.join( interval.fields ) + '\n' )
-
+
+ # HACK: write differently depending on original dataset format.
+ if self.original_dataset.ext not in [ 'gff', 'gff3', 'gtf' ]:
+ line = source.readline()
+ out.write( line )
+ else:
+ reader = GFFReaderWrapper( source, fix_strand=True )
+ feature = reader.next()
+ for interval in feature.intervals:
+ out.write( '\t'.join( interval.fields ) + '\n' )
+
+ source.close()
out.close()
def get_iterator( self, chrom, start, end, **kwargs ):
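A short sketch of reading back an interval index built from a pileup dataset, mirroring the provider code above; both file names are placeholders:

from bx.interval_index_file import Indexes

index = Indexes( "input.pileup.interval_index" )    # placeholder: converted index dataset
source = open( "input.pileup" )                     # placeholder: original pileup dataset

# index.find() yields ( start, end, offset ) tuples; seeking to offset recovers the
# original pileup line, which is what the non-GFF branch above writes out directly.
for start, end, offset in index.find( "chr1", 0, 1000 ):
    source.seek( offset )
    print source.readline().rstrip( "\n" )
source.close()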
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Make a data_sources attribute for data types and remove get_track_type because it is no longer needed.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/46133ca43f32/
Changeset: 46133ca43f32
User: jgoecks
Date: 2013-04-17 00:11:09
Summary: Make a data_sources attribute for data types and remove get_track_type because it is no longer needed.
Affected #: 8 files
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -95,6 +95,7 @@
"""Class describing a BAM binary file"""
file_ext = "bam"
track_type = "ReadTrack"
+ data_sources = { "data": "bai", "index": [ "bigwig", "summary_tree" ] }
MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, file_ext="bai", readonly=True, no_value=None, visible=False, optional=True )
@@ -246,9 +247,7 @@
return dataset.peek
except:
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
- def get_track_type( self ):
- return self.track_type, { "data": "bai", "index": [ "bigwig", "summary_tree" ] }
-
+
Binary.register_sniffable_binary_format("bam", "bam", Bam)
class H5( Binary ):
@@ -327,6 +326,7 @@
http://bioinformatics.oxfordjournals.org/cgi/content/abstract/btq351v1
"""
track_type = "LineTrack"
+ data_sources = { "data_standalone": "bigwig" }
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
@@ -352,20 +352,18 @@
return dataset.peek
except:
return "Binary UCSC %s file (%s)" % ( self._name, data.nice_size( dataset.get_size() ) )
- def get_track_type( self ):
- return self.track_type, {"data_standalone": "bigwig"}
-
+
Binary.register_sniffable_binary_format("bigwig", "bigwig", BigWig)
class BigBed(BigWig):
"""BigBed support from UCSC."""
+ data_sources = { "data_standalone": "bigbed" }
+
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
- def get_track_type( self ):
- return self.track_type, {"data_standalone": "bigbed"}
Binary.register_sniffable_binary_format("bigbed", "bigbed", BigBed)
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -100,6 +100,9 @@
# Trackster track type.
track_type = None
+ # Data sources.
+ data_sources = {}
+
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -47,6 +47,7 @@
file_ext = "interval"
line_class = "region"
track_type = "FeatureTrack"
+ data_sources = { "data": "tabix", "index": "summary_tree" }
"""Add metadata elements"""
MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
@@ -329,18 +330,13 @@
def get_track_resolution( self, dataset, start, end):
return None
-
- def get_track_type( self ):
- return self.track_type, {"data": "tabix", "index": "summary_tree"}
class BedGraph( Interval ):
"""Tab delimited chrom/start/end/datavalue dataset"""
file_ext = "bedgraph"
track_type = "LineTrack"
-
- def get_track_type( self ):
- return self.track_type, { "data": "bigwig", "index": "bigwig" }
+ data_sources = { "data": "bigwig", "index": "bigwig" }
def as_ucsc_display_file( self, dataset, **kwd ):
"""
@@ -358,6 +354,7 @@
class Bed( Interval ):
"""Tab delimited data in BED format"""
file_ext = "bed"
+ data_sources = {"data": "tabix", "index": "summary_tree", "feature_search": "fli"}
"""Add metadata elements"""
MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
@@ -512,9 +509,6 @@
else: return False
return True
except: return False
-
- def get_track_type( self ):
- return self.track_type, {"data": "tabix", "index": "summary_tree", "feature_search": "fli"}
class BedStrict( Bed ):
"""Tab delimited data in strict BED format - no non-standard columns allowed"""
@@ -574,6 +568,7 @@
"""Tab delimited data in Gff format"""
file_ext = "gff"
column_names = [ 'Seqname', 'Source', 'Feature', 'Start', 'End', 'Score', 'Strand', 'Frame', 'Group' ]
+ data_sources = { "data": "interval_index", "index": "summary_tree", "feature_search": "fli" }
"""Add metadata elements"""
MetadataElement( name="columns", default=9, desc="Number of columns", readonly=True, visible=False )
@@ -785,10 +780,6 @@
return True
except:
return False
-
- def get_track_type( self ):
- return self.track_type, {"data": "interval_index", "index": "summary_tree", "feature_search": "fli"}
-
class Gff3( Gff ):
"""Tab delimited data in Gff3 format"""
@@ -969,6 +960,7 @@
"""Tab delimited data in wiggle format"""
file_ext = "wig"
track_type = "LineTrack"
+ data_sources = { "data": "bigwig", "index": "bigwig" }
MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
@@ -1149,9 +1141,6 @@
resolution = min( resolution, 100000 )
resolution = max( resolution, 1 )
return resolution
-
- def get_track_type( self ):
- return self.track_type, { "data": "bigwig", "index": "bigwig" }
class CustomTrack ( Tabular ):
"""UCSC CustomTrack"""
@@ -1295,6 +1284,7 @@
file_ext = "encodepeak"
column_names = [ 'Chrom', 'Start', 'End', 'Name', 'Score', 'Strand', 'SignalValue', 'pValue', 'qValue', 'Peak' ]
+ data_sources = { "data": "tabix", "index": "summary_tree" }
"""Add metadata elements"""
MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
@@ -1306,9 +1296,6 @@
def sniff( self, filename ):
return False
- def get_track_type( self ):
- return self.track_type, {"data": "tabix", "index": "summary_tree"}
-
class ChromatinInteractions( Interval ):
'''
Chromatin interactions obtained from 3C/5C/Hi-C experiments.
@@ -1316,6 +1303,7 @@
file_ext = "chrint"
track_type = "DiagonalHeatmapTrack"
+ data_sources = { "data": "tabix", "index": "summary_tree" }
column_names = [ 'Chrom1', 'Start1', 'End1', 'Chrom2', 'Start2', 'End2', 'Value' ]
@@ -1332,11 +1320,6 @@
def sniff( self, filename ):
return False
-
- def get_track_type( self ):
- return self.track_type, {"data": "tabix", "index": "summary_tree"}
-
-
if __name__ == '__main__':
import doctest, sys
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -359,6 +359,7 @@
class Sam( Tabular ):
file_ext = 'sam'
track_type = "ReadTrack"
+ data_sources = { "data": "bam", "index": "summary_tree" }
def __init__(self, **kwd):
"""Initialize taxonomy datatype"""
@@ -469,13 +470,11 @@
raise Exception('Result %s from %s' % (result, cmd))
merge = staticmethod(merge)
- def get_track_type( self ):
- return self.track_type, {"data": "bam", "index": "summary_tree"}
-
class Pileup( Tabular ):
"""Tab delimited data in pileup (6- or 10-column) format"""
file_ext = "pileup"
line_class = "genomic coordinate"
+ data_sources = { "data": "interval_index" }
"""Add metadata elements"""
MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
@@ -527,10 +526,6 @@
return True
except:
return False
-
- def get_track_type( self ):
- return self.track_type, { "data": "interval_index" }
-
class ElandMulti( Tabular ):
file_ext = 'elandmulti'
@@ -541,6 +536,7 @@
class Vcf( Tabular ):
""" Variant Call Format for describing SNPs and other simple genome variations. """
track_type = "VariantTrack"
+ data_sources = { "data": "tabix", "index": "summary_tree" }
file_ext = 'vcf'
column_names = [ 'Chrom', 'Pos', 'ID', 'Ref', 'Alt', 'Qual', 'Filter', 'Info', 'Format', 'data' ]
@@ -557,9 +553,6 @@
"""Returns formated html of peek"""
return Tabular.make_html_table( self, dataset, column_names=self.column_names )
- def get_track_type( self ):
- return self.track_type, { "data": "tabix", "index": "summary_tree" }
-
class Eland( Tabular ):
"""Support for the export.txt.gz file used by Illumina's ELANDv2e aligner"""
file_ext = '_export.txt.gz'
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -1333,10 +1333,9 @@
with entries of type
(<datasource_type> : {<datasource_name>, <indexing_message>}).
"""
- track_type, data_sources = self.datatype.get_track_type()
data_sources_dict = {}
msg = None
- for source_type, source_list in data_sources.iteritems():
+ for source_type, source_list in self.datatype.data_sources.iteritems():
data_source = None
if source_type == "data_standalone":
# Nothing to do.
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/visualization/data_providers/registry.py
--- a/lib/galaxy/visualization/data_providers/registry.py
+++ b/lib/galaxy/visualization/data_providers/registry.py
@@ -83,12 +83,12 @@
elif original_dataset:
# No name, so look up a provider name from datatype's information.
- # Dataset must have get_track_type function to get data.
- if not hasattr( original_dataset.datatype, 'get_track_type'):
+ # Dataset must have data sources to get data.
+ if not original_dataset.datatype.data_sources:
return None
# Get data provider mapping and data provider.
- _ , data_provider_mapping = original_dataset.datatype.get_track_type()
+ data_provider_mapping = original_dataset.datatype.data_sources
if 'data_standalone' in data_provider_mapping:
data_provider = self.get_data_provider( trans,
name=data_provider_mapping[ 'data_standalone' ],
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -590,12 +590,11 @@
except KeyError:
prefs = {}
- track_type, _ = dataset.datatype.get_track_type()
track_data_provider = trans.app.data_provider_registry.get_data_provider( trans,
original_dataset=dataset,
source='data' )
return {
- "track_type": track_type,
+ "track_type": dataset.datatype.track_type,
"name": track_dict['name'],
"hda_ldda": track_dict.get("hda_ldda", "hda"),
"dataset_id": trans.security.encode_id( dataset.id ),
@@ -666,10 +665,8 @@
Returns track configuration dict for a dataset.
"""
# Get data provider.
- track_type, _ = dataset.datatype.get_track_type()
track_data_provider = trans.app.data_provider_registry.get_data_provider( trans, original_dataset=dataset )
-
if isinstance( dataset, trans.app.model.HistoryDatasetAssociation ):
hda_ldda = "hda"
elif isinstance( dataset, trans.app.model.LibraryDatasetDatasetAssociation ):
@@ -677,7 +674,7 @@
# Get track definition.
return {
- "track_type": track_type,
+ "track_type": dataset.datatype.track_type,
"name": dataset.name,
"hda_ldda": hda_ldda,
"dataset_id": trans.security.encode_id( dataset.id ),
diff -r da3b931de04c93ddae85e4999b6911948fdf0b34 -r 46133ca43f322fe8b042ac0c0b393fe72579cecc lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -225,7 +225,7 @@
data_provider = data_provider_registry.get_data_provider( trans, original_dataset=input_dataset, source='data' )
if data_provider and not data_provider.converted_dataset:
# Can convert but no converted dataset yet, so return message about why.
- _, data_sources = input_dataset.datatype.get_track_type()
+ data_sources = input_dataset.datatype.data_sources
msg = input_dataset.convert_dataset( trans, data_sources[ 'data' ] )
if msg is not None:
messages_list.append( msg )
@@ -311,7 +311,7 @@
input_dataset = jida.dataset
if input_dataset is None: #optional dataset and dataset wasn't selected
tool_params[ jida.name ] = None
- elif run_on_regions and hasattr( input_dataset.datatype, 'get_track_type' ):
+ elif run_on_regions and 'data' in input_dataset.datatype.data_sources:
# Dataset is indexed and hence a subset can be extracted and used
# as input.
@@ -324,8 +324,7 @@
subset_dataset = subset_dataset_association.subset
else:
# Need to create subset.
- track_type, data_sources = input_dataset.datatype.get_track_type()
- data_source = data_sources[ 'data' ]
+ data_source = input_dataset.datatype.data_sources[ 'data' ]
converted_dataset = input_dataset.get_converted_dataset( trans, data_source )
deps = input_dataset.get_converted_dataset_deps( trans, data_source )
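A minimal sketch of the new convention for a hypothetical datatype: declare data_sources as a class attribute instead of overriding get_track_type():

from galaxy.datatypes.tabular import Tabular

class MyRegions( Tabular ):
    """Hypothetical tabular datatype wired into Trackster via the new class attributes."""
    file_ext = "myregions"
    track_type = "FeatureTrack"
    # Consumers now read this directly, e.g. dataset.datatype.data_sources[ 'data' ].
    data_sources = { "data": "tabix", "index": "summary_tree" }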
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Add pileup to interval_index converter.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/da3b931de04c/
Changeset: da3b931de04c
User: jgoecks
Date: 2013-04-16 23:45:52
Summary: Add pileup to interval_index converter.
Affected #: 4 files
diff -r e0354c9d5497fa9d169df6a7aa44c8132718f92c -r da3b931de04c93ddae85e4999b6911948fdf0b34 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -159,7 +159,9 @@
<converter file="encodepeak_to_summary_tree_converter.xml" target_datatype="summary_tree"/></datatype><datatype extension="pdf" type="galaxy.datatypes.images:Pdf" mimetype="application/pdf"/>
- <datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true" />
+ <datatype extension="pileup" type="galaxy.datatypes.tabular:Pileup" display_in_upload="true">
+ <converter file="pileup_to_interval_index_converter.xml" target_datatype="interval_index"/>
+ </datatype><datatype extension="png" type="galaxy.datatypes.images:Png" mimetype="image/png"/><datatype extension="qual" type="galaxy.datatypes.qualityscore:QualityScore" /><datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
diff -r e0354c9d5497fa9d169df6a7aa44c8132718f92c -r da3b931de04c93ddae85e4999b6911948fdf0b34 lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+"""
+Convert from pileup file to interval index file.
+
+usage: %prog <options> in_file out_file
+"""
+
+from __future__ import division
+
+import sys, fileinput, optparse
+from galaxy import eggs
+import pkg_resources; pkg_resources.require( "bx-python" )
+from galaxy.visualization.tracks.summary import *
+from galaxy.datatypes.util.gff_util import convert_gff_coords_to_bed
+from bx.interval_index_file import Indexes
+
+def main():
+
+ # Read options, args.
+ parser = optparse.OptionParser()
+ (options, args) = parser.parse_args()
+ input_fname, output_fname = args
+
+ # Do conversion.
+ index = Indexes()
+ offset = 0
+ for line in open( input_fname, "r" ):
+ chrom, start = line.split()[ 0:2 ]
+ # Pileup format is 1-based.
+ start = int( start ) - 1
+ index.add( chrom, start, start + 1, offset )
+ offset += len( line )
+
+ index.write( open(output_fname, "w") )
+
+if __name__ == "__main__":
+ main()
+
\ No newline at end of file
diff -r e0354c9d5497fa9d169df6a7aa44c8132718f92c -r da3b931de04c93ddae85e4999b6911948fdf0b34 lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml
--- /dev/null
+++ b/lib/galaxy/datatypes/converters/pileup_to_interval_index_converter.xml
@@ -0,0 +1,15 @@
+<tool id="CONVERTER_pileup_to_interval_index_0" name="Convert Pileup to Interval Index" version="1.0.0" hidden="true">
+<!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <command interpreter="python">pileup_to_interval_index_converter.py $input $output
+ </command>
+ <inputs>
+ <page>
+ <param format="pileup" name="input" type="data" label="Choose Pileup file"/>
+ </page>
+ </inputs>
+ <outputs>
+ <data format="interval_index" name="output"/>
+ </outputs>
+ <help>
+ </help>
+</tool>
diff -r e0354c9d5497fa9d169df6a7aa44c8132718f92c -r da3b931de04c93ddae85e4999b6911948fdf0b34 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -527,6 +527,10 @@
return True
except:
return False
+
+ def get_track_type( self ):
+ return self.track_type, { "data": "interval_index" }
+
class ElandMulti( Tabular ):
file_ext = 'elandmulti'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0a3c911778ae/
Changeset: 0a3c911778ae
User: jmchilton
Date: 2013-04-16 04:48:52
Summary: Consolidate logic for determining user's FTP directory.
Affected #: 3 files
diff -r 788cd3d065413b2611d82375a5d0d562775ea529 -r 0a3c911778aebded1d40c5e12b05959a761a0276 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -475,7 +475,7 @@
if trans is None or trans.user is None:
user_ftp_dir = None
else:
- user_ftp_dir = os.path.join( trans.app.config.ftp_upload_dir, trans.user.email )
+ user_ftp_dir = trans.user_ftp_dir
return form_builder.FTPFileField( self.name, user_ftp_dir, trans.app.config.ftp_upload_site, value = value )
def from_html( self, value, trans=None, other_values={} ):
try:
diff -r 788cd3d065413b2611d82375a5d0d562775ea529 -r 0a3c911778aebded1d40c5e12b05959a761a0276 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -270,7 +270,7 @@
if file_bunch.path and ftp_files is not None:
warnings.append( "All FTP uploaded file selections were ignored." )
elif ftp_files is not None and trans.user is not None: # look for files uploaded via FTP
- user_ftp_dir = os.path.join( trans.app.config.ftp_upload_dir, trans.user.email )
+ user_ftp_dir = trans.user_ftp_dir
for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
for filename in filenames:
for ftp_filename in ftp_files:
@@ -318,7 +318,7 @@
ftp_files = []
# TODO: warning to the user (could happen if session has become invalid)
else:
- user_ftp_dir = os.path.join( trans.app.config.ftp_upload_dir, trans.user.email )
+ user_ftp_dir = trans.user_ftp_dir
for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
for filename in filenames:
path = relpath( os.path.join( dirpath, filename ), user_ftp_dir )
diff -r 788cd3d065413b2611d82375a5d0d562775ea529 -r 0a3c911778aebded1d40c5e12b05959a761a0276 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -958,6 +958,10 @@
def ncbi_builds( self ):
return util.dlnames['ncbi']
+ @property
+ def user_ftp_dir( self ):
+ return os.path.join( self.app.config.ftp_upload_dir, self.user.email )
+
def db_dataset_for( self, dbkey ):
"""
Returns the db_file dataset associated/needed by `dataset`, or `None`.
https://bitbucket.org/galaxy/galaxy-central/commits/0d2a9d0dbd2d/
Changeset: 0d2a9d0dbd2d
User: jmchilton
Date: 2013-04-16 05:00:14
Summary: Implement new property ftp_uplod_dir_identifier which defaults to 'email' but can be changed to 'username' to allow FTP directories based on usernames instead of e-mail addresses. I setup ProFTP + mod_ldap and it does not have the same flexibility the SQL driver has so this change is required at the Galaxy level.
Affected #: 2 files
diff -r 0a3c911778aebded1d40c5e12b05959a761a0276 -r 0d2a9d0dbd2db8a31f570b13acaf0caa9b245b09 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -166,6 +166,7 @@
self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
+ self.ftp_upload_dir_identifier = kwargs.get( 'ftp_uplod_dir_identifier', 'email' ) # attribute on user - email, username, id, etc...
self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
diff -r 0a3c911778aebded1d40c5e12b05959a761a0276 -r 0d2a9d0dbd2db8a31f570b13acaf0caa9b245b09 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -960,7 +960,8 @@
@property
def user_ftp_dir( self ):
- return os.path.join( self.app.config.ftp_upload_dir, self.user.email )
+ identifier = self.app.config.ftp_upload_dir_identifier
+ return os.path.join( self.app.config.ftp_upload_dir, getattr(self.user, identifier) )
def db_dataset_for( self, dbkey ):
"""
https://bitbucket.org/galaxy/galaxy-central/commits/8b74c635ca3b/
Changeset: 8b74c635ca3b
User: jmchilton
Date: 2013-04-16 14:44:54
Summary: Fix typo in previous FTP upload commit.
Affected #: 1 file
diff -r 0d2a9d0dbd2db8a31f570b13acaf0caa9b245b09 -r 8b74c635ca3bb266eb0ad2b08aa17836e500add2 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -166,7 +166,7 @@
self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
- self.ftp_upload_dir_identifier = kwargs.get( 'ftp_uplod_dir_identifier', 'email' ) # attribute on user - email, username, id, etc...
+ self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' ) # attribute on user - email, username, id, etc...
self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
https://bitbucket.org/galaxy/galaxy-central/commits/e0354c9d5497/
Changeset: e0354c9d5497
User: natefoo
Date: 2013-04-16 22:17:33
Summary: Merged in jmchilton/galaxy-central-multi-input-tool-fixes-2 (pull request #157)
Increased flexibility for FTP directory configuration.
Affected #: 4 files
diff -r bbb229927c688213e67ba62998a64526ca948a19 -r e0354c9d5497fa9d169df6a7aa44c8132718f92c lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -166,6 +166,7 @@
self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
+ self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' ) # attribute on user - email, username, id, etc...
self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
diff -r bbb229927c688213e67ba62998a64526ca948a19 -r e0354c9d5497fa9d169df6a7aa44c8132718f92c lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -475,7 +475,7 @@
if trans is None or trans.user is None:
user_ftp_dir = None
else:
- user_ftp_dir = os.path.join( trans.app.config.ftp_upload_dir, trans.user.email )
+ user_ftp_dir = trans.user_ftp_dir
return form_builder.FTPFileField( self.name, user_ftp_dir, trans.app.config.ftp_upload_site, value = value )
def from_html( self, value, trans=None, other_values={} ):
try:
diff -r bbb229927c688213e67ba62998a64526ca948a19 -r e0354c9d5497fa9d169df6a7aa44c8132718f92c lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -270,7 +270,7 @@
if file_bunch.path and ftp_files is not None:
warnings.append( "All FTP uploaded file selections were ignored." )
elif ftp_files is not None and trans.user is not None: # look for files uploaded via FTP
- user_ftp_dir = os.path.join( trans.app.config.ftp_upload_dir, trans.user.email )
+ user_ftp_dir = trans.user_ftp_dir
for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
for filename in filenames:
for ftp_filename in ftp_files:
@@ -318,7 +318,7 @@
ftp_files = []
# TODO: warning to the user (could happen if session has become invalid)
else:
- user_ftp_dir = os.path.join( trans.app.config.ftp_upload_dir, trans.user.email )
+ user_ftp_dir = trans.user_ftp_dir
for ( dirpath, dirnames, filenames ) in os.walk( user_ftp_dir ):
for filename in filenames:
path = relpath( os.path.join( dirpath, filename ), user_ftp_dir )
diff -r bbb229927c688213e67ba62998a64526ca948a19 -r e0354c9d5497fa9d169df6a7aa44c8132718f92c lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -960,6 +960,11 @@
def ncbi_builds( self ):
return util.dlnames['ncbi']
+ @property
+ def user_ftp_dir( self ):
+ identifier = self.app.config.ftp_upload_dir_identifier
+ return os.path.join( self.app.config.ftp_upload_dir, getattr(self.user, identifier) )
+
def db_dataset_for( self, dbkey ):
"""
Returns the db_file dataset associated/needed by `dataset`, or `None`.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fixes to make support for the recently introduced prior_installation_required attribute for ordering installation of tool shed repositories backward compatible with the 12/20/12 Galaxy release.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bbb229927c68/
Changeset: bbb229927c68
User: greg
Date: 2013-04-16 21:55:07
Summary: Fixes to make support for the recently introduced prior_installation_required attribute for ordering installation of tool shed repositories backward compatible with the 12/20/12 Galaxy release.
Affected #: 5 files
diff -r cf7a0098c612499d03aafb2f62084ebbcf99e55f -r bbb229927c688213e67ba62998a64526ca948a19 lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -1031,14 +1031,18 @@
changeset_revision = items[ 3 ]
if len( items ) == 5:
prior_installation_required = asbool( str( items[ 4 ] ) )
+ return toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required
else:
- # Metadata should have been reset on the repository that contains the definition for this repository_dependency. In the meantime we'll
- # default the prior_installation_required to False.
- prior_installation_required = False
- return toolshed_base_url, repository_name, repository_owner, changeset_revision, prior_installation_required
+ # For backward compatibility to the 12/20/12 Galaxy release we have to return the following, and callers must handle exceptions.
+ return toolshed_base_url, repository_name, repository_owner, changeset_revision
def handle_repository_dependencies_container_entry( trans, repository_dependencies_folder, rd_key, rd_value, folder_id, repository_dependency_id, folder_keys ):
- toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = get_components_from_key( rd_key )
+ try:
+ toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = get_components_from_key( rd_key )
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required to False.
+ toolshed, repository_name, repository_owner, changeset_revision = get_components_from_key( rd_key )
+ prior_installation_required = False
folder = get_folder( repository_dependencies_folder, rd_key )
label = generate_repository_dependencies_folder_label_from_key( repository_name,
repository_owner,
@@ -1114,7 +1118,12 @@
return False
def key_is_current_repositorys_key( repository_name, repository_owner, changeset_revision, prior_installation_required, key ):
- toolshed_base_url, key_name, key_owner, key_changeset_revision, key_prior_installation_required = get_components_from_key( key )
+ try:
+ toolshed_base_url, key_name, key_owner, key_changeset_revision, key_prior_installation_required = get_components_from_key( key )
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release, default key_prior_installation_required to False.
+ toolshed_base_url, key_name, key_owner, key_changeset_revision = get_components_from_key( key )
+ key_prior_installation_required = False
return repository_name == key_name and \
repository_owner == key_owner and \
changeset_revision == key_changeset_revision and \
diff -r cf7a0098c612499d03aafb2f62084ebbcf99e55f -r bbb229927c688213e67ba62998a64526ca948a19 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -478,8 +478,12 @@
old_container_repository_dependencies_folder.id = folder_id
folder_id += 1
# Generate the label by retrieving the repository name.
- toolshed, name, owner, changeset_revision, prior_installation_required = \
- container_util.get_components_from_key( old_container_repository_dependencies_folder.key )
+ try:
+ toolshed, name, owner, changeset_revision, prior_installation_required = \
+ container_util.get_components_from_key( old_container_repository_dependencies_folder.key )
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ toolshed, name, owner, changeset_revision = container_util.get_components_from_key( old_container_repository_dependencies_folder.key )
old_container_repository_dependencies_folder.label = str( name )
repository_dependencies_folder.folders.append( old_container_repository_dependencies_folder )
# Merge tool_dependencies.
diff -r cf7a0098c612499d03aafb2f62084ebbcf99e55f -r bbb229927c688213e67ba62998a64526ca948a19 lib/tool_shed/tool_shed_registry.py
--- a/lib/tool_shed/tool_shed_registry.py
+++ b/lib/tool_shed/tool_shed_registry.py
@@ -42,9 +42,8 @@
Following more what galaxy.demo_sequencer.controllers.common does might be more appropriate at some stage...
"""
- log.debug( 'Looking for url %s' % url )
for shed_name, shed_url in self.tool_sheds.items():
if shed_url.find( url ) >= 0:
- log.debug( 'Found %s -> %s' % ( shed_name, shed_url ) )
return self.tool_sheds_auth[ shed_name ]
+ log.debug( "Invalid url '%s' received by tool shed registry's password_manager_for_url method." % str( url ) )
return None
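The registry change above drops the per-lookup debug lines and logs only when no registered tool shed matches the requested URL. A minimal sketch of the same substring-match lookup, using hypothetical data rather than the real registry class:

import logging

log = logging.getLogger(__name__)

def password_manager_for_url(tool_sheds, tool_sheds_auth, url):
    # Return the auth object of the first registered shed whose URL contains
    # the requested url; log only when nothing matches.
    for shed_name, shed_url in tool_sheds.items():
        if shed_url.find(url) >= 0:
            return tool_sheds_auth[shed_name]
    log.debug("Invalid url '%s' received by password_manager_for_url." % str(url))
    return None

sheds = {'main': 'http://toolshed.g2.bx.psu.edu/'}
auth = {'main': 'placeholder auth handler'}
print(password_manager_for_url(sheds, auth, 'toolshed.g2.bx.psu.edu'))  # placeholder auth handler
print(password_manager_for_url(sheds, auth, 'example.org'))             # None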
diff -r cf7a0098c612499d03aafb2f62084ebbcf99e55f -r bbb229927c688213e67ba62998a64526ca948a19 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -263,8 +263,13 @@
for key, val in repository_dependencies.items():
if key in [ 'root_key', 'description' ]:
continue
- toolshed, name, owner, changeset_revision, prior_installation_required = container_util.get_components_from_key( key )
- components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
+ try:
+ toolshed, name, owner, changeset_revision, prior_installation_required = container_util.get_components_from_key( key )
+ components_list = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release, default prior_installation_required to False in the caller.
+ toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+ components_list = [ toolshed, name, owner, changeset_revision ]
if components_list not in required_repository_tups:
required_repository_tups.append( components_list )
for components_list in val:
@@ -279,7 +284,7 @@
encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
- url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
+ url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
if text:
required_repo_info_dict = json.from_json_string( text )
diff -r cf7a0098c612499d03aafb2f62084ebbcf99e55f -r bbb229927c688213e67ba62998a64526ca948a19 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -33,8 +33,12 @@
if key in [ 'root_key', 'description' ]:
continue
dependent_repository = None
- dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision, dependent_prior_installation_required = \
- container_util.get_components_from_key( key )
+ try:
+ dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision, dependent_prior_installation_required = \
+ container_util.get_components_from_key( key )
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ dependent_toolshed, dependent_name, dependent_owner, dependent_changeset_revision = container_util.get_components_from_key( key )
for tsr in tool_shed_repositories:
# Get the the tool_shed_repository defined by name, owner and changeset_revision. This is the repository that will be
# dependent upon each of the tool shed repositories contained in val.
@@ -350,13 +354,22 @@
# We have the updated changset revision.
updated_key_rd_dicts.append( new_key_rd_dict )
else:
- toolshed, repository_name, repository_owner, repository_changeset_revision, prior_installation_required = \
- container_util.get_components_from_key( key )
+ try:
+ toolshed, repository_name, repository_owner, repository_changeset_revision, prior_installation_required = \
+ container_util.get_components_from_key( key )
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ toolshed, repository_name, repository_owner, repository_changeset_revision = container_util.get_components_from_key( key )
message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." % \
( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) )
log.debug( message )
else:
- toolshed, repository_name, repository_owner, repository_changeset_revision, prior_installation_required = container_util.get_components_from_key( key )
+ try:
+ toolshed, repository_name, repository_owner, repository_changeset_revision, prior_installation_required = \
+ container_util.get_components_from_key( key )
+ except ValueError:
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ toolshed, repository_name, repository_owner, repository_changeset_revision = container_util.get_components_from_key( key )
message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." % \
( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) )
log.debug( message )
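All of these call sites follow one pattern: get_components_from_key may now return either a five-component tuple (including prior_installation_required) or, for metadata generated before the 12/20/12 release, a four-component one; unpacking the short tuple into five names raises ValueError, which the caller catches to default the flag to False. A minimal self-contained sketch of the pattern, with a hypothetical key format and separator standing in for the real encoding:

SEP = '__'  # hypothetical separator; the real keys use the tool shed's own encoding

def get_components(key):
    """Stand-in for get_components_from_key: 4- or 5-tuple depending on the key."""
    items = key.split(SEP)
    if len(items) == 5:
        toolshed, name, owner, changeset, prior = items
        return toolshed, name, owner, changeset, prior == 'True'
    return tuple(items)  # old-style key without prior_installation_required

def unpack(key):
    try:
        toolshed, name, owner, changeset, prior_installation_required = get_components(key)
    except ValueError:
        # Old-style key from a pre-12/20/12 repository: default the flag to False.
        toolshed, name, owner, changeset = get_components(key)
        prior_installation_required = False
    return toolshed, name, owner, changeset, prior_installation_required

print(unpack('shed__repo__owner__abc123'))        # (..., False)
print(unpack('shed__repo__owner__abc123__True'))  # (..., True)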
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Add a track_type attribute to data types and use it instead of burying the track type in the get_track_type function.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cf7a0098c612/
Changeset: cf7a0098c612
User: jgoecks
Date: 2013-04-16 21:32:46
Summary: Add a track_type attribute to data types and use it instead of burying the track type in the get_track_type function.
Affected #: 5 files
diff -r 270ef5007ae876bfbaa3110d6666432751152323 -r cf7a0098c612499d03aafb2f62084ebbcf99e55f lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -94,6 +94,8 @@
class Bam( Binary ):
"""Class describing a BAM binary file"""
file_ext = "bam"
+ track_type = "ReadTrack"
+
MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, file_ext="bai", readonly=True, no_value=None, visible=False, optional=True )
def _get_samtools_version( self ):
@@ -245,7 +247,7 @@
except:
return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
def get_track_type( self ):
- return "ReadTrack", { "data": "bai", "index": [ "bigwig", "summary_tree" ] }
+ return self.track_type, { "data": "bai", "index": [ "bigwig", "summary_tree" ] }
Binary.register_sniffable_binary_format("bam", "bam", Bam)
@@ -324,6 +326,8 @@
The supplemental info in the paper has the binary details:
http://bioinformatics.oxfordjournals.org/cgi/content/abstract/btq351v1
"""
+ track_type = "LineTrack"
+
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
self._magic = 0x888FFC26
@@ -349,18 +353,19 @@
except:
return "Binary UCSC %s file (%s)" % ( self._name, data.nice_size( dataset.get_size() ) )
def get_track_type( self ):
- return "LineTrack", {"data_standalone": "bigwig"}
+ return self.track_type, {"data_standalone": "bigwig"}
Binary.register_sniffable_binary_format("bigwig", "bigwig", BigWig)
class BigBed(BigWig):
"""BigBed support from UCSC."""
+
def __init__( self, **kwd ):
Binary.__init__( self, **kwd )
self._magic = 0x8789F2EB
self._name = "BigBed"
def get_track_type( self ):
- return "LineTrack", {"data_standalone": "bigbed"}
+ return self.track_type, {"data_standalone": "bigbed"}
Binary.register_sniffable_binary_format("bigbed", "bigbed", BigBed)
diff -r 270ef5007ae876bfbaa3110d6666432751152323 -r cf7a0098c612499d03aafb2f62084ebbcf99e55f lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -97,6 +97,9 @@
#A per datatype setting (inherited): max file size (in bytes) for setting optional metadata
_max_optional_metadata_filesize = None
+ # Trackster track type.
+ track_type = None
+
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
@@ -561,7 +564,7 @@
Returns a list of visualizations for datatype.
"""
- if hasattr( self, 'get_track_type' ):
+ if self.track_type:
return [ 'trackster', 'circster' ]
return []
diff -r 270ef5007ae876bfbaa3110d6666432751152323 -r cf7a0098c612499d03aafb2f62084ebbcf99e55f lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -46,6 +46,7 @@
"""Tab delimited data containing interval information"""
file_ext = "interval"
line_class = "region"
+ track_type = "FeatureTrack"
"""Add metadata elements"""
MetadataElement( name="chromCol", default=1, desc="Chrom column", param=metadata.ColumnParameter )
@@ -330,15 +331,16 @@
return None
def get_track_type( self ):
- return "FeatureTrack", {"data": "tabix", "index": "summary_tree"}
+ return self.track_type, {"data": "tabix", "index": "summary_tree"}
class BedGraph( Interval ):
"""Tab delimited chrom/start/end/datavalue dataset"""
file_ext = "bedgraph"
+ track_type = "LineTrack"
def get_track_type( self ):
- return "LineTrack", { "data": "bigwig", "index": "bigwig" }
+ return self.track_type, { "data": "bigwig", "index": "bigwig" }
def as_ucsc_display_file( self, dataset, **kwd ):
"""
@@ -512,7 +514,7 @@
except: return False
def get_track_type( self ):
- return "FeatureTrack", {"data": "tabix", "index": "summary_tree", "feature_search": "fli"}
+ return self.track_type, {"data": "tabix", "index": "summary_tree", "feature_search": "fli"}
class BedStrict( Bed ):
"""Tab delimited data in strict BED format - no non-standard columns allowed"""
@@ -785,7 +787,7 @@
return False
def get_track_type( self ):
- return "FeatureTrack", {"data": "interval_index", "index": "summary_tree", "feature_search": "fli"}
+ return self.track_type, {"data": "interval_index", "index": "summary_tree", "feature_search": "fli"}
class Gff3( Gff ):
@@ -966,6 +968,7 @@
class Wiggle( Tabular, _RemoteCallMixin ):
"""Tab delimited data in wiggle format"""
file_ext = "wig"
+ track_type = "LineTrack"
MetadataElement( name="columns", default=3, desc="Number of columns", readonly=True, visible=False )
@@ -1148,7 +1151,7 @@
return resolution
def get_track_type( self ):
- return "LineTrack", { "data": "bigwig", "index": "bigwig" }
+ return self.track_type, { "data": "bigwig", "index": "bigwig" }
class CustomTrack ( Tabular ):
"""UCSC CustomTrack"""
@@ -1304,7 +1307,7 @@
return False
def get_track_type( self ):
- return "FeatureTrack", {"data": "tabix", "index": "summary_tree"}
+ return self.track_type, {"data": "tabix", "index": "summary_tree"}
class ChromatinInteractions( Interval ):
'''
@@ -1312,6 +1315,7 @@
'''
file_ext = "chrint"
+ track_type = "DiagonalHeatmapTrack"
column_names = [ 'Chrom1', 'Start1', 'End1', 'Chrom2', 'Start2', 'End2', 'Value' ]
@@ -1330,7 +1334,7 @@
return False
def get_track_type( self ):
- return "DiagonalHeatmapTrack", {"data": "tabix", "index": "summary_tree"}
+ return self.track_type, {"data": "tabix", "index": "summary_tree"}
diff -r 270ef5007ae876bfbaa3110d6666432751152323 -r cf7a0098c612499d03aafb2f62084ebbcf99e55f lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -163,7 +163,7 @@
# Use default mime type as per datatype spec
mimetype = self.datatypes_by_extension[ extension ].get_mime()
self.mimetypes_by_extension[ extension ] = mimetype
- if hasattr( datatype_class, "get_track_type" ):
+ if datatype_class.track_type:
self.available_tracks.append( extension )
if display_in_upload:
self.upload_file_formats.append( extension )
diff -r 270ef5007ae876bfbaa3110d6666432751152323 -r cf7a0098c612499d03aafb2f62084ebbcf99e55f lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py
+++ b/lib/galaxy/datatypes/tabular.py
@@ -358,6 +358,8 @@
class Sam( Tabular ):
file_ext = 'sam'
+ track_type = "ReadTrack"
+
def __init__(self, **kwd):
"""Initialize taxonomy datatype"""
Tabular.__init__( self, **kwd )
@@ -468,7 +470,7 @@
merge = staticmethod(merge)
def get_track_type( self ):
- return "ReadTrack", {"data": "bam", "index": "summary_tree"}
+ return self.track_type, {"data": "bam", "index": "summary_tree"}
class Pileup( Tabular ):
"""Tab delimited data in pileup (6- or 10-column) format"""
@@ -525,8 +527,7 @@
return True
except:
return False
-
-
+
class ElandMulti( Tabular ):
file_ext = 'elandmulti'
@@ -535,6 +536,7 @@
class Vcf( Tabular ):
""" Variant Call Format for describing SNPs and other simple genome variations. """
+ track_type = "VariantTrack"
file_ext = 'vcf'
column_names = [ 'Chrom', 'Pos', 'ID', 'Ref', 'Alt', 'Qual', 'Filter', 'Info', 'Format', 'data' ]
@@ -552,7 +554,7 @@
return Tabular.make_html_table( self, dataset, column_names=self.column_names )
def get_track_type( self ):
- return "VariantTrack", { "data": "tabix", "index": "summary_tree" }
+ return self.track_type, { "data": "tabix", "index": "summary_tree" }
class Eland( Tabular ):
"""Support for the export.txt.gz file used by Illumina's ELANDv2e aligner"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: inithello: Added info_only option to install and test script.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/270ef5007ae8/
Changeset: 270ef5007ae8
User: inithello
Date: 2013-04-16 21:28:23
Summary: Added info_only option to install and test script.
Affected #: 1 file
diff -r 95c90c1d90294032a0db5f3cb9312a967ac00e49 -r 270ef5007ae876bfbaa3110d6666432751152323 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -265,9 +265,8 @@
tool_id = parts[ -2 ]
return tool_id, tool_version
-def get_tool_test_errors_from_api( tool_shed_url, metadata_dict ):
- params = dict()
- api_path = metadata_dict[ 'url' ].split( '/' )
+def get_tool_test_errors_from_api( tool_shed_url, metadata_revision_id ):
+ api_path = [ 'api', 'repository_revisions', metadata_revision_id ]
api_url = get_api_url( base=tool_shed_url, parts=api_path )
repository_metadata = json_from_url( api_url )
if repository_metadata[ 'tool_test_errors' ] is None:
@@ -292,7 +291,10 @@
params[ 'tools_functionally_correct' ] = 'false'
params[ 'do_not_test' ] = 'false'
params[ 'tool_test_errors' ] = test_results_dict
- return update( tool_shed_api_key, '%s' % ( url_join( galaxy_tool_shed_url, 'api', 'repository_revisions', metadata_id ) ), params, return_formatted=False )
+ if '-info_only' in sys.argv:
+ return {}
+ else:
+ return update( tool_shed_api_key, '%s' % ( url_join( galaxy_tool_shed_url, 'api', 'repository_revisions', metadata_id ) ), params, return_formatted=False )
def run_tests( test_config ):
loader = nose.loader.TestLoader( config=test_config )
@@ -332,8 +334,6 @@
galaxy_test_file_dir = os.environ.get( 'GALAXY_INSTALL_TEST_FILE_DIR', default_galaxy_test_file_dir )
if not os.path.isabs( galaxy_test_file_dir ):
galaxy_test_file_dir = os.path.abspath( galaxy_test_file_dir )
- # Set up the tool dependency path for the Galaxy instance.
- tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
use_distributed_object_store = os.environ.get( 'GALAXY_INSTALL_TEST_USE_DISTRIBUTED_OBJECT_STORE', False )
if not os.path.isdir( galaxy_test_tmp_dir ):
os.mkdir( galaxy_test_tmp_dir )
@@ -363,9 +363,12 @@
new_repos_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_tempfiles = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
galaxy_shed_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir, prefix='shed_tools' )
- galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- galaxy_tool_dependency_dir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
- os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' ] = galaxy_tool_dependency_dir
+ galaxy_migrated_tool_path = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ # Set up the tool dependency path for the Galaxy instance.
+ tool_dependency_dir = os.environ.get( 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR', None )
+ if tool_dependency_dir is None:
+ tool_dependency_dir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir )
+ os.environ[ 'GALAXY_INSTALL_TEST_TOOL_DEPENDENCY_DIR' ] = tool_dependency_dir
if 'GALAXY_INSTALL_TEST_DBURI' in os.environ:
database_connection = os.environ[ 'GALAXY_INSTALL_TEST_DBURI' ]
else:
@@ -422,7 +425,7 @@
tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
tool_data_path = tool_data_path,
tool_data_table_config_path = galaxy_tool_data_table_conf_file,
- tool_dependency_dir = galaxy_tool_dependency_dir,
+ tool_dependency_dir = tool_dependency_dir,
tool_path = tool_path,
tool_parse_help = False,
tool_sheds_config_file = galaxy_tool_sheds_conf_file,
@@ -625,7 +628,7 @@
# },
# ]
# }
- repository_status = get_tool_test_errors_from_api( galaxy_tool_shed_url, repository_info_dict )
+ repository_status = get_tool_test_errors_from_api( galaxy_tool_shed_url, metadata_revision_id )
if 'test_environment' not in repository_status:
repository_status[ 'test_environment' ] = {}
test_environment = get_test_environment( repository_status[ 'test_environment' ] )
@@ -775,6 +778,8 @@
print "####################################################################################"
print "# %s - repository installation and testing script completed." % now
print "# Repository revisions tested: %d" % repositories_tested
+ if '-info_only' in sys.argv:
+ print "# -info_only set, not updating the tool shed."
if repositories_tested > 0:
if repositories_passed:
print '# ----------------------------------------------------------------------------------'
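The new option is a plain dry-run style guard: when -info_only appears on the command line, the script still runs the tests and prints its report but skips the write-back to the tool shed API. A minimal sketch of that guard, with a hypothetical function name standing in for the script's update call:

import sys

def report_results_to_tool_shed(results, info_only):
    # Hypothetical stand-in for the script's update call: with -info_only set,
    # report what would be sent but perform no update against the tool shed.
    if info_only:
        print("# -info_only set, not updating the tool shed.")
        return {}
    return {'updated': True, 'payload': results}

info_only = '-info_only' in sys.argv
print(report_results_to_tool_shed({'tools_functionally_correct': False}, info_only))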
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Rewrite RawVcfDataProvider for completeness and efficiency.
by commits-noreply@bitbucket.org 16 Apr '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/95c90c1d9029/
Changeset: 95c90c1d9029
User: jgoecks
Date: 2013-04-16 20:36:27
Summary: Rewrite RawVcfDataProvider for completeness and efficiency.
Affected #: 1 file
diff -r 6e91867769eab9e97353cd0e6363393c8077aadc -r 95c90c1d90294032a0db5f3cb9312a967ac00e49 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -759,29 +759,41 @@
"""
def get_iterator( self, chrom, start, end, **kwargs ):
- # Read first line in order to match chrom naming format.
- line = source.readline()
- dataset_chrom = line.split()[0]
- if not _chrom_naming_matches( chrom, dataset_chrom ):
- chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
- # Undo read.
- source.seek( 0 )
+ source = open( self.original_dataset.file_name )
+
+ # Skip comments.
+ pos = 0
+ line = None
+ for line in source:
+ if not line.startswith("#"):
+ break
+ else:
+ pos = source.tell()
+
+ # Match chrom naming format.
+ if line:
+ dataset_chrom = line.split()[0]
+ if not _chrom_naming_matches( chrom, dataset_chrom ):
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+
+ def line_in_region( vcf_line, chrom, start, end ):
+ """ Returns true if line is in region. """
+ variant_chrom, variant_start = vcf_line.split()[ 0:2 ]
+ # VCF format is 1-based.
+ variant_start = int( variant_start ) - 1
+ return variant_chrom == chrom and variant_start >= start and variant_start <= end
def line_filter_iter():
- for line in open( self.original_dataset.file_name ):
- if line.startswith("#"):
- continue
- variant = line.split()
- variant_chrom, variant_start, id, ref, alts = variant[ 0:5 ]
- variant_start = int( variant_start )
- longest_alt = -1
- for alt in alts:
- if len( alt ) > longest_alt:
- longest_alt = len( alt )
- variant_end = variant_start + abs( len( ref ) - longest_alt )
- if variant_chrom != chrom or variant_start > end or variant_end < start:
- continue
+ """ Yields lines in source that are in region chrom:start-end """
+ # Yield data line read above.
+ if line_in_region( line, chrom, start, end ):
yield line
+
+ # Search for and yield other data lines.
+ for data_line in source:
+ if line_in_region( data_line, chrom, start, end ):
+ print chrom, start, end, ">>>", data_line,
+ yield data_line
return line_filter_iter()
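The heart of the rewrite is the line_in_region helper: VCF positions are 1-based, so the start column is shifted down by one before being compared against the 0-based request interval, and only the chromosome and start position are inspected. A minimal standalone sketch of that filter over a few literal lines:

def line_in_region(vcf_line, chrom, start, end):
    """Return True if a VCF data line falls inside chrom:start-end (0-based)."""
    variant_chrom, variant_start = vcf_line.split()[0:2]
    variant_start = int(variant_start) - 1  # VCF positions are 1-based
    return variant_chrom == chrom and start <= variant_start <= end

lines = [
    "chr1\t5\trs1\tA\tT",
    "chr1\t50\trs2\tG\tC",
    "chr2\t5\trs3\tC\tG",
]
print([l.split()[2] for l in lines if line_in_region(l, "chr1", 0, 10)])  # ['rs1']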
Repository URL: https://bitbucket.org/galaxy/galaxy-central/