1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c842439a245c/
Changeset: c842439a245c
User: dan
Date: 2013-05-13 21:56:34
Summary: Fix for manage_data_manager.mako.
Affected #: 1 file
diff -r b8af8fa7ab2e61700875249c62308302a105bb38 -r c842439a245cf97137a819aaae9907240f47e6d9 templates/webapps/galaxy/data_manager/manage_data_manager.mako
--- a/templates/webapps/galaxy/data_manager/manage_data_manager.mako
+++ b/templates/webapps/galaxy/data_manager/manage_data_manager.mako
@@ -15,7 +15,7 @@
<p>Access managed data by job</p>
%if jobs:
-<form name="jobs" action="${h.url_for()}" method="POST">
+<div><table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%"><tr class="header"><td>Job ID</td>
@@ -42,7 +42,7 @@
%endfor
</table><p/>
-</form>
+</div>
%else:
<div class="infomessage">There are no jobs for this data manager.</div>
%endif
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b8af8fa7ab2e/
Changeset: b8af8fa7ab2e
User: greg
Date: 2013-05-13 21:44:58
Summary: More fixes for installing many repositories from the tool shed.
Affected #: 4 files
diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -313,11 +313,6 @@
"""Return an svg image representation of a workflow dictionary created when the workflow was exported."""
return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=repository_id )
- @web.expose
- def get_contents_of_file( self, trans, encoded_file_path ):
- file_path = encoding_util.tool_shed_decode( encoded_file_path )
- return suc.get_file_contents( file_path )
-
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -388,34 +383,6 @@
tool_version = tool_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
- @web.json
- def handle_large_repo_info_dict( self, trans, **kwd ):
- """
- In some cases the required encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes
- (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET),
- eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for
- the URI path plus query. The referer handles requests longer than this by persisting the repo_info_dict to a temporary file which we can read.
- """
- required_repo_info_dict = {}
- encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None )
- # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to get it.
- tool_shed_url = kwd.get( 'tool_shed_url', None )
- if tool_shed_url and encoded_tmp_file_name:
- url = suc.url_join( tool_shed_url,
- '/repository/get_contents_of_file?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
- response = urllib2.urlopen( url )
- text = response.read()
- required_repo_info_dict = json.from_json_string( text )
- else:
- log.debug( "Invalid tool_shed_url '%s' or encoded_tmp_file_name '%s'." % ( str( tool_shed_url ), str( encoded_tmp_file_name ) ) )
- tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name )
- if os.path.exists( tmp_file_name ):
- try:
- os.unkink( tmp_file_name )
- except:
- pass
- return common_install_util.process_repo_info_dict( trans, required_repo_info_dict )
-
@web.expose
@web.require_admin
def import_workflow( self, trans, workflow_name, repository_id, **kwd ):
diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -4,7 +4,6 @@
import re
import string
import tempfile
-import urllib2
from time import gmtime
from time import strftime
from datetime import date
@@ -1285,11 +1284,6 @@
return encoding_util.tool_shed_encode( update_dict )
@web.expose
- def get_contents_of_file( self, trans, encoded_file_path ):
- file_path = encoding_util.tool_shed_decode( encoded_file_path )
- return suc.get_file_contents( file_path )
-
- @web.expose
def get_ctx_rev( self, trans, **kwd ):
"""Given a repository and changeset_revision, return the correct ctx.rev() value."""
repository_name = kwd[ 'name' ]
@@ -1478,27 +1472,27 @@
repo_info_dicts=repo_info_dicts )
@web.json
- def get_required_repo_info_dict( self, trans, encoded_str, **kwd ):
+ def get_required_repo_info_dict( self, trans, encoded_str=None ):
"""
Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
repositories defined by the received encoded_str.
"""
- encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
- encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
- decoded_required_repository_tups = []
- for encoded_required_repository_tup in encoded_required_repository_tups:
- decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) )
- encoded_repository_ids = []
- changeset_revisions = []
- for required_repository_tup in decoded_required_repository_tups:
- tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( required_repository_tup )
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
- changeset_revisions.append( changeset_revision )
- if encoded_repository_ids and changeset_revisions:
- repo_info_dict = json.from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
- else:
- repo_info_dict = {}
+ repo_info_dict = {}
+ if encoded_str:
+ encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
+ encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
+ decoded_required_repository_tups = []
+ for encoded_required_repository_tup in encoded_required_repository_tups:
+ decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) )
+ encoded_repository_ids = []
+ changeset_revisions = []
+ for required_repository_tup in decoded_required_repository_tups:
+ tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( required_repository_tup )
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
+ changeset_revisions.append( changeset_revision )
+ if encoded_repository_ids and changeset_revisions:
+ repo_info_dict = json.from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
return repo_info_dict
@web.expose
@@ -1660,41 +1654,6 @@
return tool_guid_lineage
@web.expose
- def handle_large_repo_info_dict( self, trans, **kwd ):
- """
- In some cases the required encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes
- (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET),
- eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for
- the URI path plus query. The referer handles requests longer than this by persisting the encoded_str to a temporary file which we can read.
- """
- repo_info_dict = {}
- encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None )
- # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to get it.
- galaxy_url = suc.handle_galaxy_url( trans, **kwd )
- if galaxy_url and encoded_tmp_file_name:
- url = suc.url_join( galaxy_url,
- '/admin_toolshed/get_contents_of_file?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
- response = urllib2.urlopen( url )
- encoded_required_repository_str = response.read()
- repo_info_dict = self.get_required_repo_info_dict( trans, encoded_str )
- repo_info_dict[ 'encoded_tmp_file_name' ] = encoded_tmp_file_name
- else:
- log.debug( "Invalid galaxy_url '%s' or encoded_tmp_file_name '%s'." % ( str( galaxy_url ), str( encoded_tmp_file_name ) ) )
- # Persist the large repo_info_dict to a temporary file.
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_file_name = fh.name
- fh.close()
- fh = open( tmp_file_name, 'wb' )
- fh.write( encoded_required_repository_str )
- fh.close()
- encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
- tool_shed_url = web.url_for( '/', qualified=True )
- # Redirect to the tool shed to enable it to read the persisted encoded_required_repository_str.
- url = suc.url_join( galaxy_url,
- '/admin_toolshed/handle_large_repo_info_dict?encoded_tmp_file_name=%s&tool_shed_url=%s' % ( str( encoded_tmp_file_name ), str( tool_shed_url ) ) )
- return trans.response.send_redirect( url )
-
- @web.expose
def help( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -1,6 +1,7 @@
import logging
import os
-import tempfile
+import urllib
+import urllib2
from galaxy import eggs
from galaxy import util
from galaxy import web
@@ -17,9 +18,6 @@
import pkg_resources
-pkg_resources.require( "simplejson" )
-import simplejson
-
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree
from elementtree import ElementInclude
@@ -256,6 +254,7 @@
repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
this method is required to retrieve all repository dependencies.
"""
+ all_repo_info_dicts = []
if repo_info_dicts:
# We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
required_repository_tups = []
@@ -288,47 +287,25 @@
encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
- url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
- text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
- if text:
+ url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
+ request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
+ response = urllib2.urlopen( request ).read()
+ if response:
try:
- required_repo_info_dict = json.from_json_string( text )
- except simplejson.decoder.JSONDecodeError, e:
- if len( url ) >= 8177:
- message = '\n\nThe selected tool shed repositories cannot be installed until the tool shed at %s and the Galaxy ' % str( tool_shed_url )
- message += 'instance at %s are both updated to at least the June 3, 2013 Galaxy release. These upgrades ' % str( trans.request.base )
- message += 'are necessary because the number of repositories you are attempting to install generates an HTTP request that is longer than '
- message += '8177 bytes which cannot be handled by tool shed or Galaxy instances older than the June 3, 2013 release.\n\n'
- log.exception( message )
- else:
- log.exception()
- return []
+ required_repo_info_dict = json.from_json_string( response )
except Exception, e:
- log.exception()
- return []
- return process_repo_info_dict( trans, required_repo_info_dict )
- return []
-
-def handle_large_repo_info_dict( trans, tool_shed_url, encoded_required_repository_str ):
- """
- Handle the cases where the received encoded_required_repository_str is long. With apache, the default limit for the length of the request line is 8190 bytes
- (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET), eight bytes for the version
- information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for the URI path plus query.
- """
- # Persist the encoded string to a temporary file.
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_file_name = fh.name
- fh.close()
- fh = open( tmp_file_name, 'wb' )
- fh.write( encoded_required_repository_str )
- fh.close()
- encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
- galaxy_url = web.url_for( '/', qualified=True )
- # Redirect to the tool shed to enable it to read the persisted encoded_required_repository_str.
- url = suc.url_join( tool_shed_url,
- '/repository/handle_large_repo_info_dict?encoded_tmp_file_name=%s&galaxy_url=%s' % \
- ( encoded_tmp_file_name, galaxy_url ) )
- return trans.response.send_redirect( url )
+ log.exception( e )
+ return all_repo_info_dicts
+ required_repo_info_dicts = []
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ if required_repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ return all_repo_info_dicts
def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
@@ -387,24 +364,3 @@
app.model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
return installed_tool_dependencies
-
-def process_repo_info_dict( trans, required_repo_info_dict ):
- all_repo_info_dicts = []
- required_repo_info_dicts = []
- encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
- for encoded_dict_str in encoded_dict_strings:
- decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
- required_repo_info_dicts.append( decoded_dict )
- if required_repo_info_dicts:
- for required_repo_info_dict in required_repo_info_dicts:
- if required_repo_info_dict not in all_repo_info_dicts:
- all_repo_info_dicts.append( required_repo_info_dict )
- # Remove the temporary file that stored the long encoded_required_repository_str if possible.
- encoded_tmp_file_name = required_repo_info_dict.get( 'encoded_tmp_file_name', None )
- if encoded_tmp_file_name:
- tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name )
- try:
- os.unlink( tmp_file_name )
- except:
- pass
- return all_repo_info_dicts
diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -466,14 +466,6 @@
return manifest_ctx, ctx_file
return None, None
-def get_file_contents( file_path ):
- if os.path.exists( file_path ):
- fh = open( file_path )
- contents = fh.read()
- fh.close()
- return contents
- return ''
-
def get_file_context_from_ctx( ctx, filename ):
"""Return the mercurial file context for a specified file."""
# We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d92704335a27/
Changeset: d92704335a27
User: Dave Bouvier
Date: 2013-05-13 19:12:40
Summary: Updated documentation on missing tool dependencies.
Affected #: 1 file
diff -r ef2b705c7de344c2c052a36ff3592e78ae5115af -r d92704335a2774e1bde2be72f9f78ce63c0447fd test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -720,8 +720,9 @@
test_toolbox.toolbox = app.toolbox
repositories_failed.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
elif missing_tool_dependencies:
- # If a tool dependency fails to install correctly, this should be considered a missing test component,
- # as the tools depend on the tool dependency being present in order to return coherent results.
+ # If a tool dependency fails to install correctly, this should be considered an installation error,
+ # and functional tests should be skipped, since the tool dependency needs to be correctly installed
+ # for the test to be considered reliable.
log.error( 'One or more tool dependencies of this repository are marked as missing.' )
log.error( 'Updating repository and skipping functional tests.' )
# In keeping with the standard display layout, add the error message to the dict for each tool individually.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because the commit notification service is enabled for
the recipient of this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ef2b705c7de3/
Changeset: ef2b705c7de3
User: Dave Bouvier
Date: 2013-05-13 19:08:39
Summary: Skip functional tests and log error message if a repository's tool dependencies fail to install correctly. Clean up uninstall method.
Affected #: 1 file
diff -r 604693b3ebe0283804225eac10adab2f858ec380 -r ef2b705c7de344c2c052a36ff3592e78ae5115af test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -39,6 +39,7 @@
eggs.require( "pysqlite" )
import install_and_test_tool_shed_repositories.functional.test_install_repositories as test_install_repositories
+import install_and_test_tool_shed_repositories.base.test_db_util as test_db_util
import functional.test_toolbox as test_toolbox
import atexit, logging, os, os.path, sys, tempfile, simplejson
@@ -185,7 +186,7 @@
sa_session = app.model.context.current
repositories_to_uninstall = sa_session.query( app.model.ToolShedRepository ).all()
for repository in repositories_to_uninstall:
- if repository.status == app.model.ToolShedRepository.states.UNINSTALLED:
+ if repository.status == app.model.ToolShedRepository.installation_status.UNINSTALLED:
continue
if repository.status not in [ app.model.ToolShedRepository.installation_status.UNINSTALLED,
app.model.ToolShedRepository.installation_status.ERROR,
@@ -196,7 +197,7 @@
name = str( repository.name )
owner = str( repository.owner )
changeset_revision = str( repository.installed_changeset_revision )
- log.debug( 'Changeset revision %s of repository %s queued for uninstallation.' % ( changeset_revision, name ) )
+ log.debug( 'Changeset revision %s of repository %s queued for uninstallation.', changeset_revision, name )
repository_dict = dict( name=name, owner=owner, changeset_revision=changeset_revision )
# Generate a test method to uninstall this repository through the embedded Galaxy application's web interface.
test_install_repositories.generate_uninstall_method( repository_dict )
@@ -326,7 +327,7 @@
global test_toolbox
for key in test_toolbox.__dict__:
if key.startswith( 'TestForTool_' ):
- log.info( 'Tool test found in test_toolbox, deleting: %s' % key )
+ log.info( 'Tool test found in test_toolbox, deleting: %s', key )
# We can't delete this test just yet, we're still iterating over __dict__.
tests_to_delete.append( key )
tool_id = key.replace( 'TestForTool_', '' )
@@ -493,7 +494,7 @@
for i in range( 0, 9 ):
try:
galaxy_test_port = str( random.randint( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
- log.debug( "Attempting to serve app on randomly chosen port: %s" % galaxy_test_port )
+ log.debug( "Attempting to serve app on randomly chosen port: %s", galaxy_test_port )
server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False )
break
except socket.error, e:
@@ -522,10 +523,10 @@
raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
log.info( "Embedded galaxy web server started" )
if galaxy_test_proxy_port:
- log.info( "The embedded Galaxy application is running on %s:%s" % ( galaxy_test_host, galaxy_test_proxy_port ) )
+ log.info( "The embedded Galaxy application is running on %s:%s", galaxy_test_host, galaxy_test_proxy_port )
else:
- log.info( "The embedded Galaxy application is running on %s:%s" % ( galaxy_test_host, galaxy_test_port ) )
- log.info( "Repositories will be installed from the tool shed at %s" % galaxy_tool_shed_url )
+ log.info( "The embedded Galaxy application is running on %s:%s", galaxy_test_host, galaxy_test_port )
+ log.info( "Repositories will be installed from the tool shed at %s", galaxy_tool_shed_url )
success = False
# If a tool_data_table_conf.test.xml file was found, add the entries from it into the app's tool data tables.
if additional_tool_data_tables:
@@ -541,9 +542,9 @@
try:
detailed_repository_list = []
# Get a list of repositories to test from the tool shed specified in the GALAXY_INSTALL_TEST_TOOL_SHED_URL environment variable.
- log.info( "Retrieving repositories to install from the URL:\n%s\n" % str( galaxy_tool_shed_url ) )
+ log.info( "Retrieving repositories to install from the URL:\n%s\n", str( galaxy_tool_shed_url ) )
repositories_to_install = get_repositories_to_install( galaxy_tool_shed_url, source='url' )
- log.info( "Retrieved %d repositories from the API." % len( repositories_to_install ) )
+ log.info( "Retrieved %d repositories from the API.", len( repositories_to_install ) )
for repository_to_install_dict in repositories_to_install:
# We need to get some details from the tool shed API, such as repository name and owner, to pass on to the
# module that will generate the install methods.
@@ -552,22 +553,23 @@
# and functionally correct tools that someone has previously installed. Deleted repositories have never been installed,
# and therefore do not need to be checked. If they are undeleted, this script will then test them the next time it runs.
if repository_info_dict[ 'deleted' ]:
- log.info( "Skipping revision %s of repository id %s (%s/%s) since the repository is deleted..." % \
- ( repository_to_install_dict[ 'changeset_revision' ],
- repository_to_install_dict[ 'repository_id' ],
- repository_info_dict[ 'owner' ],
- repository_info_dict[ 'name' ] ) )
+ log.info( "Skipping revision %s of repository id %s (%s/%s) since the repository is deleted...",
+ repository_to_install_dict[ 'changeset_revision' ],
+ repository_to_install_dict[ 'repository_id' ],
+ repository_info_dict[ 'owner' ],
+ repository_info_dict[ 'name' ] )
continue
# Now merge the dict returned from /api/repository_revisions with the detailed dict we just retrieved.
detailed_repository_list.append( dict( repository_info_dict.items() + repository_to_install_dict.items() ) )
repositories_tested = len( detailed_repository_list )
- log.info( 'After removing deleted repositories from the list, %d remain to be tested.' % repositories_tested )
+ log.info( 'After removing deleted repositories from the list, %d remain to be tested.', repositories_tested )
if '-list_repositories' in sys.argv:
log.info( "The API returned the following repositories, not counting deleted:" )
for repository_info_dict in detailed_repository_list:
- log.info( "%s owned by %s changeset revision %s" % ( repository_info_dict.get( 'name', None ),
- repository_info_dict.get( 'owner', None ),
- repository_info_dict.get( 'changeset_revision', None ) ) )
+ log.info( "%s owned by %s changeset revision %s",
+ repository_info_dict.get( 'name', None ),
+ repository_info_dict.get( 'owner', None ),
+ repository_info_dict.get( 'changeset_revision', None ) )
# This loop will iterate through the list of repositories generated by the above code, having already filtered out any
# that were marked as deleted. For each repository, it will generate a test method that will use Twill to install that
# repository into the embedded Galaxy application that was started up, selecting to install repository and tool
@@ -608,8 +610,8 @@
# Get the name and owner out of the repository info dict.
name = str( repository_info_dict[ 'name' ] )
owner = str( repository_info_dict[ 'owner' ] )
- log.info( "Installing and testing revision %s of repository id %s (%s/%s)..." % \
- ( str( changeset_revision ), str( repository_id ), owner, name ) )
+ log.info( "Installing and testing revision %s of repository id %s (%s/%s)...",
+ str( changeset_revision ), str( repository_id ), owner, name )
# Explicitly clear tests from twill's test environment.
remove_generated_tests( app )
# Use the repository information dict to generate an install method that will install the repository into the embedded
@@ -626,7 +628,7 @@
# If the installation succeeds, configure and run functional tests for this repository. This is equivalent to
# sh run_functional_tests.sh -installed
if success:
- log.debug( 'Installation of %s succeeded, running all defined functional tests.' % name )
+ log.debug( 'Installation of %s succeeded, running all defined functional tests.', name )
# Generate the shed_tools_dict that specifies the location of test data contained within this repository. If the repository
# does not have a test-data directory, this will return has_test_data = False, and we will set the do_not_test flag to True,
# and the tools_functionally_correct flag to False, as well as updating tool_test_results.
@@ -663,6 +665,15 @@
# "traceback": "The captured traceback."
# },
# ]
+ # "installation_errors":
+ # [
+ # {
+ # "name": "The name of the repository.",
+ # "owner": "The owner of the repository.",
+ # "changeset_revision": "The changeset revision of the repository.",
+ # "error_message": "The message stored in tool_dependency.error_message."
+ # },
+ # ]
# "missing_test_components":
# [
# {
@@ -682,14 +693,42 @@
repository_status[ 'test_environment' ] = test_environment
repository_status[ 'passed_tests' ] = []
repository_status[ 'failed_tests' ] = []
+ repository_status[ 'installation_errors' ] = []
+ repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ missing_tool_dependencies = repository.includes_tool_dependencies and repository.missing_tool_dependencies
if 'missing_test_components' not in repository_status:
repository_status[ 'missing_test_components' ] = []
if not has_test_data:
+ # If the repository does not have a test-data directory, any functional tests in the tool configuration will
+ # fail. Mark the repository as failed and skip installation.
log.error( 'Test data is missing for this repository. Updating repository and skipping functional tests.' )
# Record the lack of test data.
- failed_tests = dict( tool_id=None, tool_version=None, tool_guid=None,
- missing_components="Repository %s is missing a test-data directory." % name )
- repository_status[ 'missing_test_components' ].append( failed_tests )
+ for tool in repository.metadata[ 'tools' ]:
+ tool_id = tool[ 'id' ]
+ tool_version = tool[ 'version' ]
+ tool_guid = tool[ 'guid' ]
+ # In keeping with the standard display layout, add the error message to the dict for each tool individually.
+ failed_tests = dict( tool_id=tool_id, tool_version=tool_version, tool_guid=tool_guid,
+ missing_components="Repository %s is missing a test-data directory." % name )
+ repository_status[ 'missing_test_components' ].append( failed_tests )
+ # Record the status of this repository in the tool shed.
+ register_test_result( galaxy_tool_shed_url, metadata_revision_id, repository_status, passed_tests=False )
+ # Run the cleanup method. This removes tool functional test methods from the test_toolbox module and uninstalls the
+ # repository using Twill.
+ execute_uninstall_method( app )
+ # Set the test_toolbox.toolbox module-level variable to the new app.toolbox.
+ test_toolbox.toolbox = app.toolbox
+ repositories_failed.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
+ elif missing_tool_dependencies:
+ # If a tool dependency fails to install correctly, this should be considered a missing test component,
+ # as the tools depend on the tool dependency being present in order to return coherent results.
+ log.error( 'One or more tool dependencies of this repository are marked as missing.' )
+ log.error( 'Updating repository and skipping functional tests.' )
+ # In keeping with the standard display layout, add the error message to the dict for each tool individually.
+ for dependency in repository.missing_tool_dependencies:
+ test_result = dict( name=name, owner=owner, changeset_revision=changeset_revision,
+ error_message=dependency.error_message )
+ repository_status[ 'installation_errors' ].append( test_result )
# Record the status of this repository in the tool shed.
register_test_result( galaxy_tool_shed_url, metadata_revision_id, repository_status, passed_tests=False )
# Run the cleanup method. This removes tool functional test methods from the test_toolbox module and uninstalls the
@@ -702,7 +741,7 @@
# If the repository does have a test-data directory, we write the generated shed_tools_dict to a file, so the functional
# test framework can find it.
file( galaxy_shed_tools_dict, 'w' ).write( to_json_string( shed_tools_dict ) )
- log.info( 'Saved generated shed_tools_dict to %s\nContents: %s' % ( galaxy_shed_tools_dict, str( shed_tools_dict ) ) )
+ log.info( 'Saved generated shed_tools_dict to %s\nContents: %s', galaxy_shed_tools_dict, str( shed_tools_dict ) )
# Set the GALAXY_TOOL_SHED_TEST_FILE environment variable to the path of the shed_tools_dict file, so that test.base.twilltestcase.setUp
# will find and parse it properly.
os.environ[ 'GALAXY_TOOL_SHED_TEST_FILE' ] = galaxy_shed_tools_dict
@@ -727,6 +766,7 @@
break
repository_status[ 'passed_tests' ] = []
for test_id in passed_tests:
+ # Normalize the tool ID and version display.
tool_id, tool_version = get_tool_info_from_test_id( test_id )
test_result = dict( test_id=test_id, tool_id=tool_id, tool_version=tool_version )
repository_status[ 'passed_tests' ].append( test_result )
@@ -737,7 +777,7 @@
# updates the time_last_tested field to today's date.
repositories_passed.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
register_test_result( galaxy_tool_shed_url, metadata_revision_id, repository_status, passed_tests=True )
- log.debug( 'Revision %s of repository %s installed and passed functional tests.' % ( changeset_revision, name ) )
+ log.debug( 'Revision %s of repository %s installed and passed functional tests.', changeset_revision, name )
else:
# If the functional tests fail, log the output and update the failed changeset revision's metadata record in the tool shed via the API.
for failure in result.failures + result.errors:
@@ -782,25 +822,26 @@
# field to today's date.
repositories_failed.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
register_test_result( galaxy_tool_shed_url, metadata_revision_id, repository_status, passed_tests=False )
- log.debug( 'Revision %s of repository %s installed successfully, but did not pass functional tests.' % \
- ( changeset_revision, name ) )
+ log.debug( 'Revision %s of repository %s installed successfully, but did not pass functional tests.',
+ changeset_revision, name )
# Run the uninstall method. This removes tool functional test methods from the test_toolbox module and uninstalls the
# repository using Twill.
- log.debug( 'Uninstalling changeset revision %s of repository %s' % \
- ( repository_info_dict[ 'changeset_revision' ], repository_info_dict[ 'name' ] ) )
+ log.debug( 'Uninstalling changeset revision %s of repository %s',
+ repository_info_dict[ 'changeset_revision' ],
+ repository_info_dict[ 'name' ] )
success = execute_uninstall_method( app )
if not success:
- log.error( 'Repository %s failed to uninstall.' )
+ log.error( 'Repository %s failed to uninstall.', repository_info_dict[ 'name' ] )
# Set the test_toolbox.toolbox module-level variable to the new app.toolbox.
test_toolbox.toolbox = app.toolbox
else:
# Even if the repository failed to install, execute the uninstall method, in case a dependency did succeed.
- log.debug( 'Uninstalling repository %s' % repository_info_dict[ 'name' ] )
+ log.debug( 'Uninstalling repository %s', repository_info_dict[ 'name' ] )
success = execute_uninstall_method( app )
if not success:
- log.error( 'Repository %s failed to uninstall.' )
+ log.error( 'Repository %s failed to uninstall.', repository_info_dict[ 'name' ] )
repositories_failed_install.append( dict( name=name, owner=owner, changeset_revision=changeset_revision ) )
- log.debug( 'Repository %s failed to install correctly.' % repository_info_dict[ 'name' ] )
+ log.debug( 'Repository %s failed to install correctly.', repository_info_dict[ 'name' ] )
except:
log.exception( "Failure running tests" )
@@ -822,7 +863,7 @@
try:
for dir in [ galaxy_test_tmp_dir ]:
if os.path.exists( dir ):
- log.info( "Cleaning up temporary files in %s" % dir )
+ log.info( "Cleaning up temporary files in %s", dir )
shutil.rmtree( dir )
except:
pass
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/604693b3ebe0/
Changeset: 604693b3ebe0
User: greg
Date: 2013-05-13 17:10:50
Summary: Fixes for handling large numbers of tool shed repositories during installation into Galaxy.
Affected #: 5 files
diff -r 65a81aead95e147f709ee3969d49766f35d6a2e2 -r 604693b3ebe0283804225eac10adab2f858ec380 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -313,6 +313,11 @@
"""Return an svg image representation of a workflow dictionary created when the workflow was exported."""
return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=repository_id )
+ @web.expose
+ def get_contents_of_file( self, trans, encoded_file_path ):
+ file_path = encoding_util.tool_shed_decode( encoded_file_path )
+ return suc.get_file_contents( file_path )
+
@web.json
@web.require_admin
def get_file_contents( self, trans, file_path ):
@@ -383,6 +388,34 @@
tool_version = tool_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
+ @web.json
+ def handle_large_repo_info_dict( self, trans, **kwd ):
+ """
+ In some cases the required encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes
+ (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET),
+ eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for
+ the URI path plus query. The referer handles requests longer than this by persisting the repo_info_dict to a temporary file which we can read.
+ """
+ required_repo_info_dict = {}
+ encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None )
+ # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to get it.
+ tool_shed_url = kwd.get( 'tool_shed_url', None )
+ if tool_shed_url and encoded_tmp_file_name:
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_contents_of_file?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ required_repo_info_dict = json.from_json_string( text )
+ else:
+ log.debug( "Invalid tool_shed_url '%s' or encoded_tmp_file_name '%s'." % ( str( tool_shed_url ), str( encoded_tmp_file_name ) ) )
+ tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name )
+ if os.path.exists( tmp_file_name ):
+ try:
+ os.unkink( tmp_file_name )
+ except:
+ pass
+ return common_install_util.process_repo_info_dict( trans, required_repo_info_dict )
+
@web.expose
@web.require_admin
def import_workflow( self, trans, workflow_name, repository_id, **kwd ):
@@ -1406,15 +1439,6 @@
message=message,
status=status )
- @web.expose
- def stream_file_contents( self, trans, encoded_tmp_file_name ):
- tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name )
- if os.path.exists( tmp_file_name ):
- return open( tmp_file_name, 'r' )
- else:
- log.debug( "The required temporary file '%s' cannot be located." % str( tmp_file_name ) )
- return ''
-
@web.json
def tool_dependency_status_updates( self, trans, ids=None, status_list=None ):
# Avoid caching
diff -r 65a81aead95e147f709ee3969d49766f35d6a2e2 -r 604693b3ebe0283804225eac10adab2f858ec380 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1285,6 +1285,11 @@
return encoding_util.tool_shed_encode( update_dict )
@web.expose
+ def get_contents_of_file( self, trans, encoded_file_path ):
+ file_path = encoding_util.tool_shed_decode( encoded_file_path )
+ return suc.get_file_contents( file_path )
+
+ @web.expose
def get_ctx_rev( self, trans, **kwd ):
"""Given a repository and changeset_revision, return the correct ctx.rev() value."""
repository_name = kwd[ 'name' ]
@@ -1473,32 +1478,12 @@
repo_info_dicts=repo_info_dicts )
@web.json
- def get_required_repo_info_dict( self, trans, encoded_str=None, **kwd ):
+ def get_required_repo_info_dict( self, trans, encoded_str, **kwd ):
"""
Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
repositories defined by the received encoded_str.
"""
- # In some cases the received encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes
- # (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET),
- # eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for
- # the URI path plus query. The referer handles requests longer than this by persisting the encoded_str to a temporary file which we
- # can read.
- encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None )
- if encoded_str:
- encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
- else:
- # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to
- # stream the string to us.
- galaxy_url = suc.handle_galaxy_url( trans, **kwd )
- if galaxy_url and encoded_tmp_file_name:
- url = suc.url_join( galaxy_url,
- 'admin_toolshed/stream_file_contents?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
- response = urllib2.urlopen( url )
- encoded_required_repository_str = response.read()
- else:
- log.debug( "Invalid galaxy_url '%s' or encoded_tmp_file_name '%s'." % ( str( galaxy_url ), str( encoded_tmp_file_name ) ) )
- repo_info_dict = {}
- return repo_info_dict
+ encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
decoded_required_repository_tups = []
for encoded_required_repository_tup in encoded_required_repository_tups:
@@ -1675,6 +1660,41 @@
return tool_guid_lineage
@web.expose
+ def handle_large_repo_info_dict( self, trans, **kwd ):
+ """
+ In some cases the required encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes
+ (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET),
+ eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for
+ the URI path plus query. The referer handles requests longer than this by persisting the encoded_str to a temporary file which we can read.
+ """
+ repo_info_dict = {}
+ encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None )
+ # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to get it.
+ galaxy_url = suc.handle_galaxy_url( trans, **kwd )
+ if galaxy_url and encoded_tmp_file_name:
+ url = suc.url_join( galaxy_url,
+ '/admin_toolshed/get_contents_of_file?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
+ response = urllib2.urlopen( url )
+ encoded_required_repository_str = response.read()
+ repo_info_dict = self.get_required_repo_info_dict( trans, encoded_str )
+ repo_info_dict[ 'encoded_tmp_file_name' ] = encoded_tmp_file_name
+ else:
+ log.debug( "Invalid galaxy_url '%s' or encoded_tmp_file_name '%s'." % ( str( galaxy_url ), str( encoded_tmp_file_name ) ) )
+ # Persist the large repo_info_dict to a temporary file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_file_name = fh.name
+ fh.close()
+ fh = open( tmp_file_name, 'wb' )
+ fh.write( encoded_required_repository_str )
+ fh.close()
+ encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
+ tool_shed_url = web.url_for( '/', qualified=True )
+ # Redirect to the tool shed to enable it to read the persisted encoded_required_repository_str.
+ url = suc.url_join( galaxy_url,
+ '/admin_toolshed/handle_large_repo_info_dict?encoded_tmp_file_name=%s&tool_shed_url=%s' % ( str( encoded_tmp_file_name ), str( tool_shed_url ) ) )
+ return trans.response.send_redirect( url )
+
+ @web.expose
def help( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
diff -r 65a81aead95e147f709ee3969d49766f35d6a2e2 -r 604693b3ebe0283804225eac10adab2f858ec380 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -17,6 +17,9 @@
import pkg_resources
+pkg_resources.require( "simplejson" )
+import simplejson
+
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree
from elementtree import ElementInclude
@@ -253,7 +256,6 @@
repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through
this method is required to retrieve all repository dependencies.
"""
- all_repo_info_dicts = []
if repo_info_dicts:
# We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
required_repository_tups = []
@@ -287,59 +289,47 @@
encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
- # In some cases the above URL will be long. With apache, the default limit for the length of the request line is 8190 bytes
- # (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method
- # (i.e. GET), eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end
- # up with 8177 bytes for the URI path plus query.
- if len( url ) >= 8177:
- # Persist the encoded string to a temporary file.
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_file_name = fh.name
- fh.close()
- fh = open( tmp_file_name, 'wb' )
- fh.write( encoded_required_repository_str )
- fh.close()
- encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
- galaxy_url = web.url_for( '/', qualified=True )
- # Send a request to the tool shed to enable it to read the temporary file.
- url = suc.url_join( tool_shed_url,
- '/repository/get_required_repo_info_dict?encoded_tmp_file_name=%s&galaxy_url=%s' % \
- ( encoded_tmp_file_name, galaxy_url ) )
- else:
- encoded_tmp_file_name = None
- tmp_file_name = None
- try:
- text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
- except Exception, e:
- if encoded_tmp_file_name:
- message = 'The selected tool shed repositories cannot be installed until the tool shed at '
- message += '%s and the Galaxy instance at %s ' % ( str( tool_shed_url ), str( trans.request.base ) )
- message += 'are both updated to at least the June 3, 2013 Galaxy release. These upgrades '
- message += 'are necessary because the number of repositories you are attempting to install '
- message += 'generates an HTTP request that is longer than 8177 bytes which cannot be handled '
- message += 'by tool shed or Galaxy instances older than this release.'
- log.debug( message )
- else:
+ text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ if text:
+ try:
+ required_repo_info_dict = json.from_json_string( text )
+ except simplejson.decoder.JSONDecodeError, e:
+ if len( url ) >= 8177:
+ message = '\n\nThe selected tool shed repositories cannot be installed until the tool shed at %s and the Galaxy ' % str( tool_shed_url )
+ message += 'instance at %s are both updated to at least the June 3, 2013 Galaxy release. These upgrades ' % str( trans.request.base )
+ message += 'are necessary because the number of repositories you are attempting to install generates an HTTP request that is longer than '
+ message += '8177 bytes which cannot be handled by tool shed or Galaxy instances older than the June 3, 2013 release.\n\n'
+ log.exception( message )
+ else:
+ log.exception()
+ return []
+ except Exception, e:
log.exception()
- text = None
- if tmp_file_name:
- try:
- os.unlink( tmp_file_name )
- except:
- pass
- if text:
- required_repo_info_dict = json.from_json_string( text )
- required_repo_info_dicts = []
- encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
- for encoded_dict_str in encoded_dict_strings:
- decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
- required_repo_info_dicts.append( decoded_dict )
- if required_repo_info_dicts:
- for required_repo_info_dict in required_repo_info_dicts:
- if required_repo_info_dict not in all_repo_info_dicts:
- all_repo_info_dicts.append( required_repo_info_dict )
- return all_repo_info_dicts
+ return []
+ return process_repo_info_dict( trans, required_repo_info_dict )
+ return []
+def handle_large_repo_info_dict( trans, tool_shed_url, encoded_required_repository_str ):
+ """
+ Handle the cases where the received encoded_required_repository_str is long. With apache, the default limit for the length of the request line is 8190 bytes
+ (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET), eight bytes for the version
+ information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for the URI path plus query.
+ """
+ # Persist the encoded string to a temporary file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_file_name = fh.name
+ fh.close()
+ fh = open( tmp_file_name, 'wb' )
+ fh.write( encoded_required_repository_str )
+ fh.close()
+ encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
+ galaxy_url = web.url_for( '/', qualified=True )
+ # Redirect to the tool shed to enable it to read the persisted encoded_required_repository_str.
+ url = suc.url_join( tool_shed_url,
+ '/repository/handle_large_repo_info_dict?encoded_tmp_file_name=%s&galaxy_url=%s' % \
+ ( encoded_tmp_file_name, galaxy_url ) )
+ return trans.response.send_redirect( url )
+
def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
@@ -397,3 +387,24 @@
app.model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
return installed_tool_dependencies
+
+def process_repo_info_dict( trans, required_repo_info_dict ):
+ all_repo_info_dicts = []
+ required_repo_info_dicts = []
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ if required_repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ # Remove the temporary file that stored the long encoded_required_repository_str if possible.
+ encoded_tmp_file_name = required_repo_info_dict.get( 'encoded_tmp_file_name', None )
+ if encoded_tmp_file_name:
+ tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name )
+ try:
+ os.unlink( tmp_file_name )
+ except:
+ pass
+ return all_repo_info_dicts
diff -r 65a81aead95e147f709ee3969d49766f35d6a2e2 -r 604693b3ebe0283804225eac10adab2f858ec380 lib/tool_shed/util/common_util.py
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -27,7 +27,7 @@
url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
try:
- text = tool_shed_get(app, tool_shed_url, url)
+ text = tool_shed_get( app, tool_shed_url, url )
tool_shed_accessible = True
except Exception, e:
# Tool shed may be unavailable - we have to set tool_shed_accessible since we're looping.
diff -r 65a81aead95e147f709ee3969d49766f35d6a2e2 -r 604693b3ebe0283804225eac10adab2f858ec380 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -466,6 +466,14 @@
return manifest_ctx, ctx_file
return None, None
+def get_file_contents( file_path ):
+ if os.path.exists( file_path ):
+ fh = open( file_path )
+ contents = fh.read()
+ fh.close()
+ return contents
+ return ''
+
def get_file_context_from_ctx( ctx, filename ):
"""Return the mercurial file context for a specified file."""
# We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bc5cd95d042b/
Changeset: bc5cd95d042b
User: greg
Date: 2013-05-12 17:05:49
Summary: Include the galaxy_url param in the new request to the tool shed.
Affected #: 1 file
diff -r dc7d35578676935777d1f315693a4c15498919e8 -r bc5cd95d042bc062c688bee6b3e9cbf74d853fb1 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -299,8 +299,11 @@
fh.write( encoded_required_repository_str )
fh.close()
encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
+ galaxy_url = web.url_for( '/', qualified=True )
# Send a request to the tool shed to enable it to read the temporary file.
- url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_required_repo_info_dict?encoded_tmp_file_name=%s&galaxy_url=%s' % \
+ ( encoded_tmp_file_name, galaxy_url ) )
else:
encoded_tmp_file_name = None
tmp_file_name = None
@@ -311,7 +314,7 @@
message = 'The selected tool shed repositories cannot be installed until the tool shed at '
message += '%s and the Galaxy instance at %s ' % ( str( tool_shed_url ), str( trans.request.base ) )
message += 'are both updated to at least the June 3, 2013 Galaxy release. These upgrades '
- message += 'are necessary because the number of repositories you are attemping to install '
+ message += 'are necessary because the number of repositories you are attempting to install '
message += 'generates an HTTP request that is longer than 8177 bytes which cannot be handled '
message += 'by tool shed or Galaxy instances older than this release.'
log.debug( message )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dc7d35578676/
Changeset: dc7d35578676
User: greg
Date: 2013-05-12 16:47:33
Summary: Persist the encoded string resulting from the information needed to install tool shed repositories if the string generates an HTTP request longer than 8177 bytes and enhance the necessary tool shed and Galaxy methods to handle this scenario.
Affected #: 2 files
diff -r d09a3531b3d35a3921fdaa52beed7481f0391ca8 -r dc7d35578676935777d1f315693a4c15498919e8 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -4,9 +4,11 @@
import re
import string
import tempfile
+import urllib2
from time import gmtime
from time import strftime
-from datetime import date, datetime
+from datetime import date
+from datetime import datetime
from galaxy import util
from galaxy import web
from galaxy.util.odict import odict
@@ -1471,12 +1473,29 @@
repo_info_dicts=repo_info_dicts )
@web.json
- def get_required_repo_info_dict( self, trans, encoded_str ):
+ def get_required_repo_info_dict( self, trans, encoded_str=None, **kwd ):
"""
Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of
repositories defined by the received encoded_str.
"""
- encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
+ # In some cases the received encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes
+ # (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET),
+ # eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for
+ # the URI path plus query. The referer handles requests longer than this by persisting the encoded_str to a temporary file which we
+ # can read.
+ encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None )
+ if encoded_str:
+ encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
+ else:
+ # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to
+ # stream the string to us.
+ galaxy_url = suc.handle_galaxy_url( trans, **kwd )
+ if galaxy_url and encoded_tmp_file_name:
+ url = suc.url_join( galaxy_url, '/common_install_util/stream_file_contents?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
+ response = urllib2.urlopen( url )
+ encoded_required_repository_str = response.read()
+ else:
+ raise Exception( "Required galaxy_url or encoded_tmp_file_name request parameters missing." )
encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
decoded_required_repository_tups = []
for encoded_required_repository_tup in encoded_required_repository_tups:
diff -r d09a3531b3d35a3921fdaa52beed7481f0391ca8 -r dc7d35578676935777d1f315693a4c15498919e8 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -1,5 +1,6 @@
import logging
import os
+import tempfile
from galaxy import eggs
from galaxy import util
from galaxy.util import json
@@ -285,7 +286,43 @@
encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
- text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ # In some cases the above URL will be long. With apache, the default limit for the length of the request line is 8190 bytes
+ # (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method
+ # (i.e. GET), eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end
+ # up with 8177 bytes for the URI path plus query.
+ if len( url ) >= 8177:
+ # Persist the encoded string to a temporary file.
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_file_name = fh.name
+ fh.close()
+ fh = open( tmp_file_name, 'wb' )
+ fh.write( encoded_required_repository_str )
+ fh.close()
+ encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) )
+ # Send a request to the tool shed to enable it to read the temporary file.
+ url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_tmp_file_name=%s' % encoded_tmp_file_name )
+ else:
+ encoded_tmp_file_name = None
+ tmp_file_name = None
+ try:
+ text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ except Exception, e:
+ if encoded_tmp_file_name:
+ message = 'The selected tool shed repositories cannot be installed until the tool shed at '
+ message += '%s and the Galaxy instance at %s ' % ( str( tool_shed_url ), str( trans.request.base ) )
+ message += 'are both updated to at least the June 3, 2013 Galaxy release. These upgrades '
+ message += 'are necessary because the number of repositories you are attemping to install '
+ message += 'generates an HTTP request that is longer than 8177 bytes which cannot be handled '
+ message += 'by tool shed or Galaxy instances older than this release.'
+ log.debug( message )
+ else:
+ log.exception()
+ text = None
+ if tmp_file_name:
+ try:
+ os.unlink( tmp_file_name )
+ except:
+ pass
if text:
required_repo_info_dict = json.from_json_string( text )
required_repo_info_dicts = []
@@ -356,3 +393,7 @@
app.model.ToolDependency.installation_status.ERROR ]:
installed_tool_dependencies.append( tool_dependency )
return installed_tool_dependencies
+
+def stream_file_contents( trans, encoded_tmp_file_name ):
+ tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name )
+ return open( tmp_file_name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d09a3531b3d3/
Changeset: d09a3531b3d3
User: greg
Date: 2013-05-11 19:16:09
Summary: Fix for updating metadata for a tool shed repository that defines complex repository dependencies.
Affected #: 3 files
diff -r 7dc222a7163a5049cc407ad1a049dd091ac27b03 -r d09a3531b3d35a3921fdaa52beed7481f0391ca8 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -487,9 +487,9 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
- url = suc.url_join( tool_shed_url,
- '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' %
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' %
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
if text:
tool_version_dicts = json.from_json_string( text )
diff -r 7dc222a7163a5049cc407ad1a049dd091ac27b03 -r d09a3531b3d35a3921fdaa52beed7481f0391ca8 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1857,23 +1857,14 @@
else:
tool_shed, name, owner, changeset_revision, prior_installation_required, error_message = repository_dependency_tup
prior_installation_required = util.asbool( str( prior_installation_required ) )
- rd_key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=tool_shed,
- repository_name=name,
- repository_owner=owner,
- changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required )
if repository_dependencies_dict:
- if rd_key in repository_dependencies_dict:
- repository_dependencies = repository_dependencies_dict[ rd_key ]
- for repository_dependency_tup in repository_dependency_tups:
- if repository_dependency_tup not in repository_dependencies:
- repository_dependencies.append( repository_dependency_tup )
- repository_dependencies_dict[ rd_key ] = repository_dependencies
- else:
- repository_dependencies_dict[ rd_key ] = repository_dependency_tups
+ repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ for repository_dependency_tup in repository_dependency_tups:
+ if repository_dependency_tup not in repository_dependencies:
+ repository_dependencies.append( repository_dependency_tup )
+ repository_dependencies_dict[ 'repository_dependencies' ] = repository_dependencies
else:
- repository_dependencies_dict = dict( root_key=rd_key,
- description=description,
+ repository_dependencies_dict = dict( description=description,
repository_dependencies=repository_dependency_tups )
if repository_dependencies_dict:
if is_valid:
diff -r 7dc222a7163a5049cc407ad1a049dd091ac27b03 -r d09a3531b3d35a3921fdaa52beed7481f0391ca8 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -198,7 +198,7 @@
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
"""
- Update a tool shed repository record i the Galaxy database with the new information received. If a record defined by the received tool shed, repository name
+ Update a tool shed repository record in the Galaxy database with the new information received. If a record defined by the received tool shed, repository name
and owner does not exists, create a new record with the received information.
"""
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
@@ -1052,6 +1052,10 @@
prior_installation_required = util.asbool( str( prior_installation_required ) )
return tool_shed, name, owner, changeset_revision, prior_installation_required
+def pretty_print( dict=None ):
+ if dict:
+ return json.to_json_string( dict, sort_keys=True, indent=4 * ' ' )
+
def remove_dir( dir ):
"""Attempt to remove a directory from disk."""
if os.path.exists( dir ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7dc222a7163a/
Changeset: 7dc222a7163a
User: jgoecks
Date: 2013-05-10 23:16:20
Summary: Trackster: fix bug in variant track drawing.
Affected #: 1 file
diff -r b4733e42a2c98c42401bca5e0643fc4d4dcf2db6 -r 7dc222a7163a5049cc407ad1a049dd091ac27b03 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -4266,7 +4266,7 @@
draw_tile: function(result, ctx, mode, resolution, region, w_scale) {
// Data could be coverage data or variant data.
if (result.dataset_type === 'bigwig') {
- return this._draw_line_track_tile(result, ctx, "Histogram", region, resolution, w_scale);
+ return this._draw_line_track_tile(result, ctx, "Histogram", resolution, region, w_scale);
}
else { // result.dataset_type === 'variant'
var view = this.view,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.