commit/galaxy-central: greg: More fixes for installing many repositories from the tool shed.
1 new commit in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/b8af8fa7ab2e/ Changeset: b8af8fa7ab2e User: greg Date: 2013-05-13 21:44:58 Summary: More fixes for installing many repositories from the tool shed. Affected #: 4 files diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -313,11 +313,6 @@ """Return an svg image representation of a workflow dictionary created when the workflow was exported.""" return workflow_util.generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=repository_id ) - @web.expose - def get_contents_of_file( self, trans, encoded_file_path ): - file_path = encoding_util.tool_shed_decode( encoded_file_path ) - return suc.get_file_contents( file_path ) - @web.json @web.require_admin def get_file_contents( self, trans, file_path ): @@ -388,34 +383,6 @@ tool_version = tool_util.get_tool_version( app, guid ) return tool_version.get_version_ids( app, reverse=True ) - @web.json - def handle_large_repo_info_dict( self, trans, **kwd ): - """ - In some cases the required encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes - (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET), - eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for - the URI path plus query. The referer handles requests longer than this by persisting the repo_info_dict to a temporary file which we can read. 
- """ - required_repo_info_dict = {} - encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None ) - # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to get it. - tool_shed_url = kwd.get( 'tool_shed_url', None ) - if tool_shed_url and encoded_tmp_file_name: - url = suc.url_join( tool_shed_url, - '/repository/get_contents_of_file?encoded_tmp_file_name=%s' % encoded_tmp_file_name ) - response = urllib2.urlopen( url ) - text = response.read() - required_repo_info_dict = json.from_json_string( text ) - else: - log.debug( "Invalid tool_shed_url '%s' or encoded_tmp_file_name '%s'." % ( str( tool_shed_url ), str( encoded_tmp_file_name ) ) ) - tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name ) - if os.path.exists( tmp_file_name ): - try: - os.unkink( tmp_file_name ) - except: - pass - return common_install_util.process_repo_info_dict( trans, required_repo_info_dict ) - @web.expose @web.require_admin def import_workflow( self, trans, workflow_name, repository_id, **kwd ): diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/galaxy/webapps/tool_shed/controllers/repository.py --- a/lib/galaxy/webapps/tool_shed/controllers/repository.py +++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py @@ -4,7 +4,6 @@ import re import string import tempfile -import urllib2 from time import gmtime from time import strftime from datetime import date @@ -1285,11 +1284,6 @@ return encoding_util.tool_shed_encode( update_dict ) @web.expose - def get_contents_of_file( self, trans, encoded_file_path ): - file_path = encoding_util.tool_shed_decode( encoded_file_path ) - return suc.get_file_contents( file_path ) - - @web.expose def get_ctx_rev( self, trans, **kwd ): """Given a repository and changeset_revision, return the correct ctx.rev() value.""" repository_name = kwd[ 'name' ] @@ -1478,27 +1472,27 @@ repo_info_dicts=repo_info_dicts ) 
@web.json - def get_required_repo_info_dict( self, trans, encoded_str, **kwd ): + def get_required_repo_info_dict( self, trans, encoded_str=None ): """ Retrieve and return a dictionary that includes a list of dictionaries that each contain all of the information needed to install the list of repositories defined by the received encoded_str. """ - encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str ) - encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 ) - decoded_required_repository_tups = [] - for encoded_required_repository_tup in encoded_required_repository_tups: - decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) ) - encoded_repository_ids = [] - changeset_revisions = [] - for required_repository_tup in decoded_required_repository_tups: - tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( required_repository_tup ) - repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) - encoded_repository_ids.append( trans.security.encode_id( repository.id ) ) - changeset_revisions.append( changeset_revision ) - if encoded_repository_ids and changeset_revisions: - repo_info_dict = json.from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) ) - else: - repo_info_dict = {} + repo_info_dict = {} + if encoded_str: + encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str ) + encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 ) + decoded_required_repository_tups = [] + for encoded_required_repository_tup in encoded_required_repository_tups: + decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) ) + encoded_repository_ids = [] + changeset_revisions = [] + for required_repository_tup in 
decoded_required_repository_tups: + tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( required_repository_tup ) + repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + encoded_repository_ids.append( trans.security.encode_id( repository.id ) ) + changeset_revisions.append( changeset_revision ) + if encoded_repository_ids and changeset_revisions: + repo_info_dict = json.from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) ) return repo_info_dict @web.expose @@ -1660,41 +1654,6 @@ return tool_guid_lineage @web.expose - def handle_large_repo_info_dict( self, trans, **kwd ): - """ - In some cases the required encoded_str will be long. With apache, the default limit for the length of the request line is 8190 bytes - (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET), - eight bytes for the version information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for - the URI path plus query. The referer handles requests longer than this by persisting the encoded_str to a temporary file which we can read. - """ - repo_info_dict = {} - encoded_tmp_file_name = kwd.get( 'encoded_tmp_file_name', None ) - # The request would have been longer than 8190 bytes if it included the encoded_str, so we'll send a request to the Galaxy instance to get it. 
- galaxy_url = suc.handle_galaxy_url( trans, **kwd ) - if galaxy_url and encoded_tmp_file_name: - url = suc.url_join( galaxy_url, - '/admin_toolshed/get_contents_of_file?encoded_tmp_file_name=%s' % encoded_tmp_file_name ) - response = urllib2.urlopen( url ) - encoded_required_repository_str = response.read() - repo_info_dict = self.get_required_repo_info_dict( trans, encoded_str ) - repo_info_dict[ 'encoded_tmp_file_name' ] = encoded_tmp_file_name - else: - log.debug( "Invalid galaxy_url '%s' or encoded_tmp_file_name '%s'." % ( str( galaxy_url ), str( encoded_tmp_file_name ) ) ) - # Persist the large repo_info_dict to a temporary file. - fh = tempfile.NamedTemporaryFile( 'wb' ) - tmp_file_name = fh.name - fh.close() - fh = open( tmp_file_name, 'wb' ) - fh.write( encoded_required_repository_str ) - fh.close() - encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) ) - tool_shed_url = web.url_for( '/', qualified=True ) - # Redirect to the tool shed to enable it to read the persisted encoded_required_repository_str. 
- url = suc.url_join( galaxy_url, - '/admin_toolshed/handle_large_repo_info_dict?encoded_tmp_file_name=%s&tool_shed_url=%s' % ( str( encoded_tmp_file_name ), str( tool_shed_url ) ) ) - return trans.response.send_redirect( url ) - - @web.expose def help( self, trans, **kwd ): params = util.Params( kwd ) message = util.restore_text( params.get( 'message', '' ) ) diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/tool_shed/util/common_install_util.py --- a/lib/tool_shed/util/common_install_util.py +++ b/lib/tool_shed/util/common_install_util.py @@ -1,6 +1,7 @@ import logging import os -import tempfile +import urllib +import urllib2 from galaxy import eggs from galaxy import util from galaxy import web @@ -17,9 +18,6 @@ import pkg_resources -pkg_resources.require( "simplejson" ) -import simplejson - pkg_resources.require( 'elementtree' ) from elementtree import ElementTree from elementtree import ElementInclude @@ -256,6 +254,7 @@ repository_dependencies entries in each of the received repo_info_dicts includes all required repositories, so only one pass through this method is required to retrieve all repository dependencies. """ + all_repo_info_dicts = [] if repo_info_dicts: # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids. 
required_repository_tups = [] @@ -288,47 +287,25 @@ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) ) encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups ) encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str ) - url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str ) - text = common_util.tool_shed_get( trans.app, tool_shed_url, url ) - if text: + url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' ) + request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) ) + response = urllib2.urlopen( request ).read() + if response: try: - required_repo_info_dict = json.from_json_string( text ) - except simplejson.decoder.JSONDecodeError, e: - if len( url ) >= 8177: - message = '\n\nThe selected tool shed repositories cannot be installed until the tool shed at %s and the Galaxy ' % str( tool_shed_url ) - message += 'instance at %s are both updated to at least the June 3, 2013 Galaxy release. These upgrades ' % str( trans.request.base ) - message += 'are necessary because the number of repositories you are attempting to install generates an HTTP request that is longer than ' - message += '8177 bytes which cannot be handled by tool shed or Galaxy instances older than the June 3, 2013 release.\n\n' - log.exception( message ) - else: - log.exception() - return [] + required_repo_info_dict = json.from_json_string( response ) except Exception, e: - log.exception() - return [] - return process_repo_info_dict( trans, required_repo_info_dict ) - return [] - -def handle_large_repo_info_dict( trans, tool_shed_url, encoded_required_repository_str ): - """ - Handle the cases where the received encoded_required_repository_str is long. 
With apache, the default limit for the length of the request line is 8190 bytes - (http://httpd.apache.org/docs/2.2/mod/core.html#limitrequestline). And if we subtract three bytes for the request method (i.e. GET), eight bytes for the version - information (i.e. HTTP/1.0/HTTP/1.1) and two bytes for the separating space, we end up with 8177 bytes for the URI path plus query. - """ - # Persist the encoded string to a temporary file. - fh = tempfile.NamedTemporaryFile( 'wb' ) - tmp_file_name = fh.name - fh.close() - fh = open( tmp_file_name, 'wb' ) - fh.write( encoded_required_repository_str ) - fh.close() - encoded_tmp_file_name = encoding_util.tool_shed_encode( os.path.abspath( tmp_file_name ) ) - galaxy_url = web.url_for( '/', qualified=True ) - # Redirect to the tool shed to enable it to read the persisted encoded_required_repository_str. - url = suc.url_join( tool_shed_url, - '/repository/handle_large_repo_info_dict?encoded_tmp_file_name=%s&galaxy_url=%s' % \ - ( encoded_tmp_file_name, galaxy_url ) ) - return trans.response.send_redirect( url ) + log.exception( e ) + return all_repo_info_dicts + required_repo_info_dicts = [] + encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] + for encoded_dict_str in encoded_dict_strings: + decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) + required_repo_info_dicts.append( decoded_dict ) + if required_repo_info_dicts: + for required_repo_info_dict in required_repo_info_dicts: + if required_repo_info_dict not in all_repo_info_dicts: + all_repo_info_dicts.append( required_repo_info_dict ) + return all_repo_info_dicts def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ): """ @@ -387,24 +364,3 @@ app.model.ToolDependency.installation_status.ERROR ]: installed_tool_dependencies.append( tool_dependency ) return installed_tool_dependencies - -def process_repo_info_dict( trans, required_repo_info_dict ): - all_repo_info_dicts = [] - 
required_repo_info_dicts = [] - encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ] - for encoded_dict_str in encoded_dict_strings: - decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str ) - required_repo_info_dicts.append( decoded_dict ) - if required_repo_info_dicts: - for required_repo_info_dict in required_repo_info_dicts: - if required_repo_info_dict not in all_repo_info_dicts: - all_repo_info_dicts.append( required_repo_info_dict ) - # Remove the temporary file that stored the long encoded_required_repository_str if possible. - encoded_tmp_file_name = required_repo_info_dict.get( 'encoded_tmp_file_name', None ) - if encoded_tmp_file_name: - tmp_file_name = encoding_util.tool_shed_decode( encoded_tmp_file_name ) - try: - os.unlink( tmp_file_name ) - except: - pass - return all_repo_info_dicts diff -r d92704335a2774e1bde2be72f9f78ce63c0447fd -r b8af8fa7ab2e61700875249c62308302a105bb38 lib/tool_shed/util/shed_util_common.py --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -466,14 +466,6 @@ return manifest_ctx, ctx_file return None, None -def get_file_contents( file_path ): - if os.path.exists( file_path ): - fh = open( file_path ) - contents = fh.read() - fh.close() - return contents - return '' - def get_file_context_from_ctx( ctx, filename ): """Return the mercurial file context for a specified file.""" # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.
participants (1)
- commits-noreply@bitbucket.org