commit/galaxy-central: 3 new changesets
3 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/995ca7707640/ Changeset: 995ca7707640 User: dannon Date: 2014-01-10 13:29:05 Summary: Remove dependency on simplejson; convert everything over to using the json module in the python standard library. Affected #: 44 files diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 eggs.ini --- a/eggs.ini +++ b/eggs.ini @@ -26,7 +26,6 @@ pysam = 0.4.2 pysqlite = 2.5.6 python_lzo = 1.08_2.03_static -simplejson = 2.1.1 threadframe = 0.2 guppy = 0.1.8 SQLAlchemy = 0.7.9 diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/datatypes/metadata.py --- a/lib/galaxy/datatypes/metadata.py +++ b/lib/galaxy/datatypes/metadata.py @@ -2,15 +2,13 @@ Galaxy Metadata """ -from galaxy import eggs -eggs.require("simplejson") import copy import cPickle +import json import logging import os import shutil -import simplejson import sys import tempfile import weakref @@ -130,7 +128,7 @@ def from_JSON_dict( self, filename ): dataset = self.parent log.debug( 'loading metadata from file for: %s %s' % ( dataset.__class__.__name__, dataset.id ) ) - JSONified_dict = simplejson.load( open( filename ) ) + JSONified_dict = json.load( open( filename ) ) for name, spec in self.spec.items(): if name in JSONified_dict: dataset._metadata[ name ] = spec.param.from_external_value( JSONified_dict[ name ], dataset ) @@ -146,7 +144,7 @@ for name, spec in self.spec.items(): if name in dataset_meta_dict: meta_dict[ name ] = spec.param.to_external_value( dataset_meta_dict[ name ] ) - simplejson.dump( meta_dict, open( filename, 'wb+' ) ) + json.dump( meta_dict, open( filename, 'wb+' ) ) def __getstate__( self ): return None #cannot pickle a weakref item (self._parent), when data._metadata_collection is None, it will be recreated on demand @@ -456,7 +454,7 @@ class DictParameter( MetadataParameter ): def to_string( self, value ): - return simplejson.dumps( value ) + return json.dumps( value ) class PythonObjectParameter( MetadataParameter ): @@ -594,7 +592,7 @@ @classmethod def cleanup_from_JSON_dict_filename( cls, filename ): try: - for key, value in simplejson.load( open( filename ) ).items(): + for key, value in json.load( open( filename ) ).items(): if cls.is_JSONified_value( value ): value = cls.from_JSON( value ) if isinstance( value, cls ) and os.path.exists( value.file_name ): @@ -686,10 +684,10 @@ #file to store a 'return code' indicating the results of the set_meta() call #results code is like (True/False - if setting metadata was successful/failed , exception or string of reason of success/failure ) metadata_files.filename_results_code = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_results_%s_" % key ).name ) - simplejson.dump( ( False, 'External set_meta() not called' ), open( metadata_files.filename_results_code, 'wb+' ) ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible) + json.dump( ( False, 'External set_meta() not called' ), open( metadata_files.filename_results_code, 'wb+' ) ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible) #file to store kwds passed to set_meta() metadata_files.filename_kwds = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_kwds_%s_" % key ).name ) - simplejson.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), ensure_ascii=True ) + json.dump( kwds, open( metadata_files.filename_kwds, 'wb+' ), 
ensure_ascii=True ) #existing metadata file parameters need to be overridden with cluster-writable file locations metadata_files.filename_override_metadata = abspath( tempfile.NamedTemporaryFile( dir = tmp_dir, prefix = "metadata_override_%s_" % key ).name ) open( metadata_files.filename_override_metadata, 'wb+' ) # create the file on disk, so it cannot be reused by tempfile (unlikely, but possible) @@ -699,7 +697,7 @@ metadata_temp = MetadataTempFile() shutil.copy( dataset.metadata.get( meta_key, None ).file_name, metadata_temp.file_name ) override_metadata.append( ( meta_key, metadata_temp.to_JSON() ) ) - simplejson.dump( override_metadata, open( metadata_files.filename_override_metadata, 'wb+' ) ) + json.dump( override_metadata, open( metadata_files.filename_override_metadata, 'wb+' ) ) #add to session and flush sa_session.add( metadata_files ) sa_session.flush() @@ -711,7 +709,7 @@ metadata_files = self.get_output_filenames_by_dataset( dataset, sa_session ) if not metadata_files: return False # this file doesn't exist - rval, rstring = simplejson.load( open( metadata_files.filename_results_code ) ) + rval, rstring = json.load( open( metadata_files.filename_results_code ) ) if not rval: log.debug( 'setting metadata externally failed for %s %s: %s' % ( dataset.__class__.__name__, dataset.id, rstring ) ) return rval diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/datatypes/sequence.py --- a/lib/galaxy/datatypes/sequence.py +++ b/lib/galaxy/datatypes/sequence.py @@ -2,12 +2,14 @@ Sequence classes """ -import data +from . import data import gzip +import json import logging import os import re import string + from cgi import escape from galaxy import eggs, util @@ -16,8 +18,6 @@ from galaxy.datatypes.sniff import get_test_fname, get_headers from galaxy.datatypes.metadata import MetadataElement -eggs.require("simplejson") -import simplejson try: eggs.require( "bx-python" ) @@ -44,8 +44,8 @@ def set_peek( self, dataset, is_multi_byte=False ): if not dataset.dataset.purged: try: - parsed_data = simplejson.load(open(dataset.file_name)) - # dataset.peek = simplejson.dumps(data, sort_keys=True, indent=4) + parsed_data = json.load(open(dataset.file_name)) + # dataset.peek = json.dumps(data, sort_keys=True, indent=4) dataset.peek = data.get_file_peek( dataset.file_name, is_multi_byte=is_multi_byte ) dataset.blurb = '%d sections' % len(parsed_data['sections']) except Exception, e: @@ -60,7 +60,7 @@ def sniff( self, filename ): if os.path.getsize(filename) < 50000: try: - data = simplejson.load(open(filename)) + data = json.load(open(filename)) sections = data['sections'] for section in sections: if 'start' not in section or 'end' not in section or 'sequences' not in section: @@ -155,7 +155,7 @@ do_slow_split = classmethod(do_slow_split) def do_fast_split( cls, input_datasets, toc_file_datasets, subdir_generator_function, split_params): - data = simplejson.load(open(toc_file_datasets[0].file_name)) + data = json.load(open(toc_file_datasets[0].file_name)) sections = data['sections'] total_sequences = long(0) for section in sections: @@ -191,7 +191,7 @@ toc = toc_file_datasets[ds_no] split_data['args']['toc_file'] = toc.file_name f = open(os.path.join(dir, 'split_info_%s.json' % base_name), 'w') - simplejson.dump(split_data, f) + json.dump(split_data, f) f.close() start_sequence += sequences_per_file[part_no] return directories @@ -557,7 +557,7 @@ sequence_count = long(args['num_sequences']) if 'toc_file' in args: - toc_file = 
simplejson.load(open(args['toc_file'], 'r')) + toc_file = json.load(open(args['toc_file'], 'r')) commands = Sequence.get_split_commands_with_toc(input_name, output_name, toc_file, start_sequence, sequence_count) else: commands = Sequence.get_split_commands_sequential(is_gzip(input_name), input_name, output_name, start_sequence, sequence_count) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/jobs/runners/drmaa.py --- a/lib/galaxy/jobs/runners/drmaa.py +++ b/lib/galaxy/jobs/runners/drmaa.py @@ -2,13 +2,13 @@ Job control via the DRMAA API. """ +import json +import logging import os +import string +import subprocess import sys import time -import string -import logging -import subprocess -import simplejson as json from galaxy import eggs from galaxy import model diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/jobs/runners/lwr_client/action_mapper.py --- a/lib/galaxy/jobs/runners/lwr_client/action_mapper.py +++ b/lib/galaxy/jobs/runners/lwr_client/action_mapper.py @@ -1,4 +1,4 @@ -from simplejson import load +from json import load from os.path import abspath from os.path import dirname from os.path import join diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/jobs/runners/lwr_client/client.py --- a/lib/galaxy/jobs/runners/lwr_client/client.py +++ b/lib/galaxy/jobs/runners/lwr_client/client.py @@ -1,7 +1,6 @@ import os import shutil -import simplejson -from simplejson import dumps +from json import dumps, loads from time import sleep from .destination import submit_params @@ -16,7 +15,7 @@ def __call__(self, func): def replacement(*args, **kwargs): response = func(*args, **kwargs) - return simplejson.loads(response) + return loads(response) return replacement diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -6,7 +6,6 @@ """ from galaxy import eggs -eggs.require("simplejson") eggs.require("pexpect") import codecs @@ -15,7 +14,7 @@ import operator import os import pexpect -import simplejson +import json import socket import time from string import Template @@ -2250,7 +2249,7 @@ template_data[template.name] = tmp_dict return template_data def templates_json( self, use_name=False ): - return simplejson.dumps( self.templates_dict( use_name=use_name ) ) + return json.dumps( self.templates_dict( use_name=use_name ) ) def get_display_name( self ): """ diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/model/custom_types.py --- a/lib/galaxy/model/custom_types.py +++ b/lib/galaxy/model/custom_types.py @@ -1,8 +1,6 @@ from sqlalchemy.types import * -import pkg_resources -pkg_resources.require("simplejson") -import simplejson +import json import pickle import copy import uuid @@ -19,8 +17,8 @@ log = logging.getLogger( __name__ ) # Default JSON encoder and decoder -json_encoder = simplejson.JSONEncoder( sort_keys=True ) -json_decoder = simplejson.JSONDecoder( ) +json_encoder = json.JSONEncoder( sort_keys=True ) +json_decoder = json.JSONDecoder( ) def _sniffnfix_pg9_hex(value): """ diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/__init__.py --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -4,6 +4,7 @@ import binascii import glob +import json import logging import os 
import pipes @@ -19,14 +20,12 @@ from math import isinf from galaxy import eggs -eggs.require( "simplejson" ) eggs.require( "MarkupSafe" ) #MarkupSafe must load before mako eggs.require( "Mako" ) eggs.require( "elementtree" ) eggs.require( "Paste" ) eggs.require( "SQLAlchemy >= 0.4" ) -import simplejson from cgi import FieldStorage from elementtree import ElementTree from mako.template import Template @@ -869,7 +868,7 @@ value = params_to_strings( tool.inputs, self.inputs, app ) value["__page__"] = self.page value["__rerun_remap_job_id__"] = self.rerun_remap_job_id - value = simplejson.dumps( value ) + value = json.dumps( value ) # Make it secure if secure: a = hmac_new( app.config.tool_secret, value ) @@ -888,7 +887,7 @@ test = hmac_new( app.config.tool_secret, value ) assert a == test # Restore from string - values = json_fix( simplejson.loads( value ) ) + values = json_fix( json.loads( value ) ) self.page = values.pop( "__page__" ) if '__rerun_remap_job_id__' in values: self.rerun_remap_job_id = values.pop( "__rerun_remap_job_id__" ) @@ -2921,7 +2920,7 @@ try: json_file = open( os.path.join( job_working_directory, jobs.TOOL_PROVIDED_JOB_METADATA_FILE ), 'r' ) for line in json_file: - line = simplejson.loads( line ) + line = json.loads( line ) if line.get( 'type' ) == 'new_primary_dataset': new_primary_datasets[ os.path.split( line.get( 'filename' ) )[-1] ] = line except Exception: @@ -3085,7 +3084,7 @@ if json_filename is None: json_filename = file_name out = open( json_filename, 'w' ) - out.write( simplejson.dumps( json_params ) ) + out.write( json.dumps( json_params ) ) out.close() class DataSourceTool( OutputParameterJSONTool ): @@ -3145,7 +3144,7 @@ if json_filename is None: json_filename = file_name out = open( json_filename, 'w' ) - out.write( simplejson.dumps( json_params ) ) + out.write( json.dumps( json_params ) ) out.close() class AsyncDataSourceTool( DataSourceTool ): diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/data_manager/manager.py --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -1,9 +1,6 @@ -import pkg_resources - -pkg_resources.require( "simplejson" ) - -import os, errno -import simplejson +import errno +import json +import os from galaxy import util from galaxy.util.odict import odict @@ -226,7 +223,7 @@ #TODO: fix this merging below for output_name, output_dataset in out_data.iteritems(): try: - output_dict = simplejson.loads( open( output_dataset.file_name ).read() ) + output_dict = json.loads( open( output_dataset.file_name ).read() ) except Exception, e: log.warning( 'Error reading DataManagerTool json for "%s": %s' % ( output_name, e ) ) continue diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/genome_index/__init__.py --- a/lib/galaxy/tools/genome_index/__init__.py +++ b/lib/galaxy/tools/genome_index/__init__.py @@ -1,6 +1,11 @@ from __future__ import with_statement -import os, shutil, logging, tempfile, tarfile +import json +import logging +import os +import shutil +import tarfile +import tempfile from galaxy import model, util from galaxy.web.framework.helpers import to_unicode @@ -9,9 +14,6 @@ from galaxy.web.base.controller import UsesHistoryMixin from galaxy.tools.data import ToolDataTableManager -import pkg_resources -pkg_resources.require("simplejson") -import simplejson log = logging.getLogger(__name__) @@ -74,7 +76,7 @@ fp = open( gitd.dataset.get_file_name(), 'r' ) deferred = 
sa_session.query( model.DeferredJob ).filter_by( id=gitd.deferred_job_id ).first() try: - logloc = simplejson.load( fp ) + logloc = json.load( fp ) except ValueError: deferred.state = app.model.DeferredJob.states.ERROR sa_session.add( deferred ) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/genome_index/index_genome.py --- a/lib/galaxy/tools/genome_index/index_genome.py +++ b/lib/galaxy/tools/genome_index/index_genome.py @@ -7,11 +7,17 @@ """ from __future__ import with_statement -import optparse, sys, os, tempfile, time, subprocess, shlex, tarfile, shutil +import json +import optparse +import os +import shlex +import shutil +import subprocess +import sys +import tarfile +import tempfile +import time -import pkg_resources -pkg_resources.require("simplejson") -import simplejson class ManagedIndexer(): def __init__( self, output_file, infile, workingdir, rsync_url, tooldata ): @@ -76,7 +82,7 @@ return result def _flush_files( self ): - simplejson.dump( self.locations, self.outfile ) + json.dump( self.locations, self.outfile ) self.outfile.close() self.logfile.close() @@ -318,4 +324,4 @@ returncode = idxobj.run_indexer( indexer ) if not returncode: exit(1) - exit(0) \ No newline at end of file + exit(0) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/tools/imp_exp/__init__.py --- a/lib/galaxy/tools/imp_exp/__init__.py +++ b/lib/galaxy/tools/imp_exp/__init__.py @@ -1,4 +1,4 @@ -import os, shutil, logging, tempfile, simplejson +import os, shutil, logging, tempfile, json from galaxy import model from galaxy.tools.parameters.basic import UnvalidatedValue from galaxy.web.framework.helpers import to_unicode @@ -226,13 +226,13 @@ self.sa_session.add( imported_job ) self.sa_session.flush() - class HistoryDatasetAssociationIDEncoder( simplejson.JSONEncoder ): + class HistoryDatasetAssociationIDEncoder( json.JSONEncoder ): """ Custom JSONEncoder for a HistoryDatasetAssociation that encodes an HDA as its ID. """ def default( self, obj ): """ Encode an HDA, default encoding for everything else. """ if isinstance( obj, model.HistoryDatasetAssociation ): return obj.id - return simplejson.JSONEncoder.default( self, obj ) + return json.JSONEncoder.default( self, obj ) # Set parameters. May be useful to look at metadata.py for creating parameters. # TODO: there may be a better way to set parameters, e.g.: @@ -311,7 +311,7 @@ del metadata[ name ] return metadata - class HistoryDatasetAssociationEncoder( simplejson.JSONEncoder ): + class HistoryDatasetAssociationEncoder( json.JSONEncoder ): """ Custom JSONEncoder for a HistoryDatasetAssociation. """ def default( self, obj ): """ Encode an HDA, default encoding for everything else. """ @@ -337,7 +337,7 @@ } if isinstance( obj, UnvalidatedValue ): return obj.__str__() - return simplejson.JSONEncoder.default( self, obj ) + return json.JSONEncoder.default( self, obj ) # # Create attributes/metadata files for export. diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -2,7 +2,26 @@ Utility functions used systemwide. 
""" -import binascii, errno, grp, logging, os, pickle, random, re, shutil, smtplib, stat, string, sys, tempfile, threading + +from __future__ import absolute_import + +import binascii +import errno +import grp +import json +import logging +import os +import pickle +import random +import re +import shutil +import smtplib +import stat +import string +import sys +import tempfile +import threading + from email.MIMEText import MIMEText from os.path import relpath @@ -21,12 +40,9 @@ eggs.require( "wchartype" ) import wchartype -from inflection import Inflector, English +from .inflection import Inflector, English inflector = Inflector(English) -eggs.require( "simplejson" ) -import simplejson - log = logging.getLogger(__name__) _lock = threading.RLock() @@ -292,8 +308,8 @@ def pretty_print_json(json_data, is_json_string=False): if is_json_string: - json_data = simplejson.loads(json_data) - return simplejson.dumps(json_data, sort_keys=True, indent=4 * ' ') + json_data = json.loads(json_data) + return json.dumps(json_data, sort_keys=True, indent=4 * ' ') # characters that are valid valid_chars = set(string.letters + string.digits + " -=_.()/+*^,:?!") diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/util/json.py --- a/lib/galaxy/util/json.py +++ b/lib/galaxy/util/json.py @@ -1,16 +1,15 @@ +from __future__ import absolute_import __all__ = [ "to_json_string", "from_json_string", "json_fix", "validate_jsonrpc_request", "validate_jsonrpc_response", "jsonrpc_request", "jsonrpc_response" ] -import random, string, logging +import json +import logging +import random import socket +import string -import pkg_resources -pkg_resources.require( "simplejson" ) - -import simplejson - -to_json_string = simplejson.dumps -from_json_string = simplejson.loads +to_json_string = json.dumps +from_json_string = json.loads log = logging.getLogger( __name__ ) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/visualization/data_providers/phyloviz/baseparser.py --- a/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py +++ b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py @@ -1,6 +1,4 @@ -import pkg_resources -pkg_resources.require("simplejson") -import simplejson +import json class Node(object): """Node class of PhyloTree, which represents a CLAUDE in a phylogenetic tree""" @@ -118,7 +116,7 @@ def toJson(self, jsonDict): """Convenience method to get a json string from a python json dict""" - return simplejson.dumps(jsonDict) + return json.dumps(jsonDict) def _writeJsonToFile(self, filepath, json): """Writes the file out to the system""" diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/web/framework/__init__.py --- a/lib/galaxy/web/framework/__init__.py +++ b/lib/galaxy/web/framework/__init__.py @@ -2,6 +2,7 @@ Galaxy web application framework """ +import hashlib import inspect import os import pkg_resources @@ -9,27 +10,24 @@ import socket import string import time -import hashlib + +from functools import wraps from Cookie import CookieError - pkg_resources.require( "Cheetah" ) from Cheetah.Template import Template + +#TODO: Relative imports to be removed import base -from functools import wraps +import helpers + from galaxy import util from galaxy.exceptions import MessageException -from galaxy.util.json import to_json_string, from_json_string +from galaxy.util import asbool +from galaxy.util import safe_str_cmp from 
galaxy.util.backports.importlib import import_module +from galaxy.util.json import from_json_string, to_json_string from galaxy.util.sanitize_html import sanitize_html -from galaxy.util import safe_str_cmp - -pkg_resources.require( "simplejson" ) -import simplejson - -import helpers - -from galaxy.util import asbool import paste.httpexceptions @@ -76,7 +74,7 @@ @wraps(func) def decorator( self, trans, *args, **kwargs ): trans.response.set_content_type( "text/javascript" ) - return simplejson.dumps( func( self, trans, *args, **kwargs ) ) + return to_json_string( func( self, trans, *args, **kwargs ) ) if not hasattr(func, '_orig'): decorator._orig = func decorator.exposed = True @@ -86,7 +84,7 @@ @wraps(func) def decorator( self, trans, *args, **kwargs ): trans.response.set_content_type( "text/javascript" ) - return simplejson.dumps( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True ) + return to_json_string( func( self, trans, *args, **kwargs ), indent=4, sort_keys=True ) if not hasattr(func, '_orig'): decorator._orig = func decorator.exposed = True @@ -158,7 +156,7 @@ for k, v in payload.iteritems(): if isinstance(v, (str, unicode)): try: - payload[k] = simplejson.loads(v) + payload[k] = from_json_string(v) except: # may not actually be json, just continue pass @@ -167,7 +165,7 @@ # Assume application/json content type and parse request body manually, since wsgi won't do it. However, the order of this check # should ideally be in reverse, with the if clause being a check for application/json and the else clause assuming a standard encoding # such as multipart/form-data. Leaving it as is for backward compatibility, just in case. - payload = util.recursively_stringify_dictionary_keys( simplejson.loads( trans.request.body ) ) + payload = util.recursively_stringify_dictionary_keys( from_json_string( trans.request.body ) ) return payload try: kwargs['payload'] = extract_payload_from_request(trans, func, kwargs) @@ -198,9 +196,9 @@ try: rval = func( self, trans, *args, **kwargs) if to_json and trans.debug: - rval = simplejson.dumps( rval, indent=4, sort_keys=True ) + rval = to_json_string( rval, indent=4, sort_keys=True ) elif to_json: - rval = simplejson.dumps( rval ) + rval = to_json_string( rval ) return rval except paste.httpexceptions.HTTPException: raise # handled diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/demo_sequencer/framework/__init__.py --- a/lib/galaxy/webapps/demo_sequencer/framework/__init__.py +++ b/lib/galaxy/webapps/demo_sequencer/framework/__init__.py @@ -2,9 +2,15 @@ Demo sequencer web application framework """ +import json +import os import pkg_resources +import random +import socket +import string +import sys +import time -import os, sys, time, socket, random, string pkg_resources.require( "Cheetah" ) from Cheetah.Template import Template @@ -19,9 +25,6 @@ from galaxy.util import asbool -pkg_resources.require( "simplejson" ) -import simplejson - pkg_resources.require( "Mako" ) import mako.template import mako.lookup diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/galaxy/controllers/data_admin.py --- a/lib/galaxy/webapps/galaxy/controllers/data_admin.py +++ b/lib/galaxy/webapps/galaxy/controllers/data_admin.py @@ -1,14 +1,13 @@ -import sys, ftplib +import ftplib +import json +import sys from galaxy import model, util from galaxy.jobs import transfer_manager +from galaxy.model.orm import * from galaxy.web.base.controller import * 
-from galaxy.web.framework.helpers import time_ago, iff, grids -from galaxy.model.orm import * +from galaxy.web.framework.helpers import grids, iff, time_ago from library_common import get_comptypes, lucene_search, whoosh_search -import pkg_resources -pkg_resources.require("simplejson") -import simplejson # Older py compatibility try: @@ -161,7 +160,7 @@ gname = deferred.params[ 'intname' ] indexers = ', '.join( deferred.params[ 'indexes' ] ) jobs = self._get_jobs( deferred, trans ) - jsonjobs = simplejson.dumps( jobs ) + jsonjobs = json.dumps( jobs ) return trans.fill_template( '/admin/data_admin/download_status.mako', name=gname, indexers=indexers, mainjob=jobid, jobs=jobs, jsonjobs=jsonjobs ) @web.expose @@ -173,7 +172,7 @@ jobid = params.get( 'jobid', '' ) job = sa_session.query( model.DeferredJob ).filter_by( id=jobid ).first() jobs = self._get_jobs( job, trans ) - return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=simplejson.dumps( jobs ) ) + return trans.fill_template( '/admin/data_admin/ajax_status.mako', json=json.dumps( jobs ) ) def _get_job( self, jobid, jobtype, trans ): sa = trans.app.model.context.current @@ -297,4 +296,4 @@ params = dict( status='ok', dbkey=dbkey, datatype='fasta', url=url, user=trans.user.id, liftover=newlift, longname=longname, indexers=indexers ) - return params \ No newline at end of file + return params diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/galaxy/controllers/root.py --- a/lib/galaxy/webapps/galaxy/controllers/root.py +++ b/lib/galaxy/webapps/galaxy/controllers/root.py @@ -508,7 +508,7 @@ Attempts to parse values passed as boolean, float, then int. Defaults to string. Non-recursive (will not parse lists). """ - #TODO: use simplejson or json + #TODO: use json rval = {} for k in kwd: rval[ k ] = kwd[k] diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/galaxy/webapps/galaxy/controllers/workflow.py --- a/lib/galaxy/webapps/galaxy/controllers/workflow.py +++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py @@ -1,13 +1,12 @@ import pkg_resources -pkg_resources.require( "simplejson" ) pkg_resources.require( "SVGFig" ) import base64 import httplib +import json import math import os import sgmllib -import simplejson import svgfig import urllib2 @@ -20,17 +19,17 @@ from galaxy import web from galaxy.datatypes.data import Data from galaxy.jobs.actions.post import ActionBox +from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings from galaxy.model.mapping import desc from galaxy.tools.parameters import RuntimeValue, visit_input_values from galaxy.tools.parameters.basic import DataToolParameter, DrillDownSelectToolParameter, SelectToolParameter, UnvalidatedValue from galaxy.tools.parameters.grouping import Conditional, Repeat +from galaxy.util.json import to_json_string from galaxy.util.odict import odict -from galaxy.util.json import to_json_string from galaxy.util.sanitize_html import sanitize_html from galaxy.util.topsort import CycleError, topsort, topsort_levels from galaxy.web import error, url_for from galaxy.web.base.controller import BaseUIController, SharableMixin, UsesStoredWorkflowMixin -from galaxy.model.item_attrs import UsesAnnotations, UsesItemRatings from galaxy.web.framework import form from galaxy.web.framework.helpers import grids, time_ago from galaxy.web.framework.helpers import to_unicode @@ -812,7 +811,7 @@ # Put parameters in workflow mode trans.workflow_building_mode = True # Convert 
incoming workflow data from json - data = simplejson.loads( workflow_data ) + data = json.loads( workflow_data ) # Create new workflow from incoming data workflow = model.Workflow() # Just keep the last name (user can rename later) @@ -920,7 +919,7 @@ # # Create workflow content JSON. - workflow_content = simplejson.dumps( workflow_dict, indent=4, sort_keys=True ) + workflow_content = json.dumps( workflow_dict, indent=4, sort_keys=True ) # Create myExperiment request. request_raw = trans.fill_template( "workflow/myexp_export.mako", \ @@ -1073,7 +1072,7 @@ if workflow_data: # Convert incoming workflow data from json try: - data = simplejson.loads( workflow_data ) + data = json.loads( workflow_data ) except Exception, e: data = None message = "The data content does not appear to be a Galaxy workflow.<br/>Exception: %s" % str( e ) @@ -1294,7 +1293,7 @@ # It is possible for a workflow to have 0 steps if len( workflow.steps ) == 0: error( "Workflow cannot be run because it does not have any steps" ) - #workflow = Workflow.from_simple( simplejson.loads( stored.encoded_value ), trans.app ) + #workflow = Workflow.from_simple( json.loads( stored.encoded_value ), trans.app ) if workflow.has_cycles: error( "Workflow cannot be run because it contains cycles" ) if workflow.has_errors: diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -1,4 +1,8 @@ -import os, sys, urllib, urllib2 +import json +import os +import sys +import urllib +import urllib2 new_path = [ os.path.join( os.path.dirname( __file__ ), '..', '..', '..', '..', 'lib' ) ] new_path.extend( sys.path[ 1: ] ) @@ -7,9 +11,6 @@ from galaxy import eggs import pkg_resources -pkg_resources.require( "simplejson" ) -import simplejson - pkg_resources.require( "pycrypto" ) from Crypto.Cipher import Blowfish from Crypto.Util.randpool import RandomPool @@ -29,9 +30,9 @@ # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. try: url = make_url( api_key, url ) - req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) + req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data )) req.get_method = lambda: 'DELETE' - r = simplejson.loads( urllib2.urlopen( req ).read() ) + r = json.loads( urllib2.urlopen( req ).read() ) except urllib2.HTTPError, e: if return_formatted: print e @@ -86,8 +87,8 @@ # Do the actual GET. url = make_url( url, api_key=api_key ) try: - return simplejson.loads( urllib2.urlopen( url ).read() ) - except simplejson.decoder.JSONDecodeError, e: + return json.loads( urllib2.urlopen( url ).read() ) + except ValueError, e: print "URL did not return JSON data" sys.exit(1) @@ -106,15 +107,15 @@ def post( url, data, api_key=None ): # Do the actual POST. url = make_url( url, api_key=api_key ) - req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) ) - return simplejson.loads( urllib2.urlopen( req ).read() ) + req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ) ) + return json.loads( urllib2.urlopen( req ).read() ) def put( url, data, api_key=None ): # Do the actual PUT. 
url = make_url( url, api_key=api_key ) - req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) + req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data )) req.get_method = lambda: 'PUT' - return simplejson.loads( urllib2.urlopen( req ).read() ) + return json.loads( urllib2.urlopen( req ).read() ) def submit( url, data, api_key=None, return_formatted=True ): # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/tool_shed/scripts/api/tool_shed_repository_revision_update.py --- a/lib/tool_shed/scripts/api/tool_shed_repository_revision_update.py +++ b/lib/tool_shed/scripts/api/tool_shed_repository_revision_update.py @@ -5,17 +5,16 @@ usage: tool_shed_repository_revision_update.py key url key1=value1 key2=value2 ... """ -import os, sys +import json +import os +import sys + sys.path.insert( 0, os.path.dirname( __file__ ) ) from common import update -import pkg_resources -pkg_resources.require( "simplejson" ) -import simplejson - -to_json_string = simplejson.dumps -from_json_string = simplejson.loads +to_json_string = json.dumps +from_json_string = json.loads data = {} for key, value in [ kwarg.split( '=', 1 ) for kwarg in sys.argv[ 3: ] ]: diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 lib/tool_shed/util/encoding_util.py --- a/lib/tool_shed/util/encoding_util.py +++ b/lib/tool_shed/util/encoding_util.py @@ -1,14 +1,10 @@ import binascii +import json import logging from galaxy import eggs from galaxy.util.hash_util import hmac_new from galaxy.util.json import json_fix -import pkg_resources - -pkg_resources.require( "simplejson" ) -import simplejson - log = logging.getLogger( __name__ ) encoding_sep = '__esep__' @@ -23,7 +19,7 @@ # Restore from string values = None try: - values = simplejson.loads( value ) + values = json.loads( value ) except Exception, e: #log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) ) pass @@ -39,7 +35,7 @@ def tool_shed_encode( val ): if isinstance( val, dict ): - value = simplejson.dumps( val ) + value = json.dumps( val ) else: value = val a = hmac_new( 'ToolShedAndGalaxyMustHaveThisSameKey', value ) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/api/common.py --- a/scripts/api/common.py +++ b/scripts/api/common.py @@ -1,3 +1,4 @@ +import json import logging import os import sys @@ -10,9 +11,6 @@ from galaxy import eggs import pkg_resources -pkg_resources.require( "simplejson" ) -import simplejson - pkg_resources.require( "pycrypto" ) from Crypto.Cipher import Blowfish from Crypto.Util.randpool import RandomPool @@ -35,30 +33,30 @@ # Do the actual GET. url = make_url( api_key, url ) try: - return simplejson.loads( urllib2.urlopen( url ).read() ) - except simplejson.decoder.JSONDecodeError, e: + return json.loads( urllib2.urlopen( url ).read() ) + except json.decoder.JSONDecodeError, e: print "URL did not return JSON data" sys.exit(1) def post( api_key, url, data ): # Do the actual POST. 
url = make_url( api_key, url ) - req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data ) ) - return simplejson.loads( urllib2.urlopen( req ).read() ) + req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ) ) + return json.loads( urllib2.urlopen( req ).read() ) def put( api_key, url, data ): # Do the actual PUT url = make_url( api_key, url ) - req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) + req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data )) req.get_method = lambda: 'PUT' - return simplejson.loads( urllib2.urlopen( req ).read() ) + return json.loads( urllib2.urlopen( req ).read() ) def __del( api_key, url, data ): # Do the actual DELETE url = make_url( api_key, url ) - req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = simplejson.dumps( data )) + req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data )) req.get_method = lambda: 'DELETE' - return simplejson.loads( urllib2.urlopen( req ).read() ) + return json.loads( urllib2.urlopen( req ).read() ) def display( api_key, url, return_formatted=True ): diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/api/workflow_import_from_file_rpark.py --- a/scripts/api/workflow_import_from_file_rpark.py +++ b/scripts/api/workflow_import_from_file_rpark.py @@ -11,11 +11,11 @@ from common import submit ### Rpark edit ### -import simplejson +import json def openWorkflow(in_file): with open(in_file) as f: - temp_data = simplejson.load(f) + temp_data = json.load(f) return temp_data; diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/drmaa_external_killer.py --- a/scripts/drmaa_external_killer.py +++ b/scripts/drmaa_external_killer.py @@ -4,10 +4,11 @@ Terminates a DRMAA job if given a job id and (appropriate) user id. 
""" +import errno +import json import os +import pwd import sys -import errno -import pwd #import drmaa new_path = [ os.path.join( os.getcwd(), "lib" ) ] new_path.extend( sys.path[1:] ) # remove scripts/ from the path @@ -15,8 +16,6 @@ from galaxy import eggs import pkg_resources -pkg_resources.require("simplejson") -import simplejson as json pkg_resources.require("drmaa") import drmaa diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/drmaa_external_runner.py --- a/scripts/drmaa_external_runner.py +++ b/scripts/drmaa_external_runner.py @@ -10,8 +10,8 @@ import sys import errno import pwd +import json -#import simplejson as json #import drmaa new_path = [ os.path.join( os.getcwd(), "lib" ) ] new_path.extend( sys.path[1:] ) # remove scripts/ from the path @@ -19,8 +19,6 @@ from galaxy import eggs import pkg_resources -pkg_resources.require("simplejson") -import simplejson as json pkg_resources.require("drmaa") import drmaa diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/external_chown_script.py --- a/scripts/external_chown_script.py +++ b/scripts/external_chown_script.py @@ -1,17 +1,17 @@ #!/usr/bin/env python +import errno +import json import os +import pwd import sys -import errno -import pwd #import drmaa + new_path = [ os.path.join( os.getcwd(), "lib" ) ] new_path.extend( sys.path[1:] ) # remove scripts/ from the path sys.path = new_path from galaxy import eggs import pkg_resources -pkg_resources.require("simplejson") -import simplejson as json pkg_resources.require("drmaa") import drmaa diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/extract_dataset_part.py --- a/scripts/extract_dataset_part.py +++ b/scripts/extract_dataset_part.py @@ -6,9 +6,10 @@ on a gid in a scatter-gather mode. This does part of the scatter. 
""" +import json +import logging import os import sys -import logging logging.basicConfig() log = logging.getLogger( __name__ ) @@ -16,11 +17,6 @@ new_path.extend( sys.path[1:] ) # remove scripts/ from the path sys.path = new_path -from galaxy import eggs -import pkg_resources -pkg_resources.require("simplejson") -import simplejson - # This junk is here to prevent loading errors import galaxy.model.mapping #need to load this before we unpickle, in order to setup properties assigned by the mappers galaxy.model.Job() #this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here @@ -33,7 +29,7 @@ if not os.path.isfile(file_path): #Nothing to do - some splitters don't write a JSON file sys.exit(0) - data = simplejson.load(open(file_path, 'r')) + data = json.load(open(file_path, 'r')) try: class_name_parts = data['class_name'].split('.') module_name = '.'.join(class_name_parts[:-1]) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/galaxy_messaging/server/data_transfer.py --- a/scripts/galaxy_messaging/server/data_transfer.py +++ b/scripts/galaxy_messaging/server/data_transfer.py @@ -13,12 +13,21 @@ """ import ConfigParser -import sys, os, time, traceback +import cookielib +import datetime +import logging import optparse -import urllib,urllib2, cookielib, shutil -import logging, time, datetime +import os +import shutil +import sys +import time +import time +import traceback +import urllib +import urllib2 import xml.dom.minidom + from xml_helper import get_value, get_value_index log = logging.getLogger( "datatx_" + str( os.getpid() ) ) @@ -39,14 +48,11 @@ sys.path = new_path from galaxy import eggs -from galaxy.util.json import from_json_string, to_json_string from galaxy.model import SampleDataset from galaxy.web.api.samples import SamplesAPIController import pkg_resources pkg_resources.require( "pexpect" ) import pexpect -pkg_resources.require( "simplejson" ) -import simplejson log.debug(str(dir(api))) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 scripts/set_metadata.py --- a/scripts/set_metadata.py +++ b/scripts/set_metadata.py @@ -10,9 +10,11 @@ logging.basicConfig() log = logging.getLogger( __name__ ) +import cPickle +import json import os import sys -import cPickle + # ensure supported version from check_python import check_python try: @@ -26,8 +28,6 @@ from galaxy import eggs import pkg_resources -pkg_resources.require("simplejson") -import simplejson import galaxy.model.mapping # need to load this before we unpickle, in order to setup properties assigned by the mappers galaxy.model.Job() # this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here from galaxy.util import stringify_dictionary_keys @@ -107,17 +107,17 @@ dataset.extension = ext_override[ dataset.dataset.id ] # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles if override_metadata: - override_metadata = simplejson.load( open( override_metadata ) ) + override_metadata = json.load( open( override_metadata ) ) for metadata_name, metadata_file_override in override_metadata: if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override ): metadata_file_override = 
galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override ) setattr( dataset.metadata, metadata_name, metadata_file_override ) - kwds = stringify_dictionary_keys( simplejson.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode + kwds = stringify_dictionary_keys( json.load( open( filename_kwds ) ) ) # load kwds; need to ensure our keywords are not unicode dataset.datatype.set_meta( dataset, **kwds ) dataset.metadata.to_JSON_dict( filename_out ) # write out results of set_meta - simplejson.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) # setting metadata has succeeded + json.dump( ( True, 'Metadata has been set successfully' ), open( filename_results_code, 'wb+' ) ) # setting metadata has succeeded except Exception, e: - simplejson.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow + json.dump( ( False, str( e ) ), open( filename_results_code, 'wb+' ) ) # setting metadata has failed somehow clear_mappers() # Shut down any additional threads that might have been created via the ObjectStore object_store.shutdown() diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 test/base/interactor.py --- a/test/base/interactor.py +++ b/test/base/interactor.py @@ -5,7 +5,7 @@ import galaxy.model from galaxy.model.orm import and_, desc from base.test_db_util import sa_session -from simplejson import dumps, loads +from json import dumps, loads from logging import getLogger log = getLogger( __name__ ) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 test/functional/test_workflow.py --- a/test/functional/test_workflow.py +++ b/test/functional/test_workflow.py @@ -5,7 +5,7 @@ from galaxy.util import parse_xml from galaxy.tools.test import parse_param_elem, require_file, test_data_iter, parse_output_elems -from simplejson import load, dumps +from json import load, dumps from logging import getLogger log = getLogger( __name__ ) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 test/tool_shed/base/twilltestcase.py --- a/test/tool_shed/base/twilltestcase.py +++ b/test/tool_shed/base/twilltestcase.py @@ -3,7 +3,6 @@ import os import re import test_db_util -import simplejson import shutil import logging import time diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/filters/join.py --- a/tools/filters/join.py +++ b/tools/filters/join.py @@ -8,20 +8,15 @@ """ -import optparse, os, sys, tempfile, struct +import json +import optparse +import os import psyco_full - -try: - simple_json_exception = None - from galaxy import eggs - from galaxy.util.bunch import Bunch - from galaxy.util import stringify_dictionary_keys - import pkg_resources - pkg_resources.require("simplejson") - import simplejson -except Exception, e: - simplejson_exception = e - simplejson = None +import struct +import sys +import tempfile +from galaxy.util.bunch import Bunch +from galaxy.util import stringify_dictionary_keys class OffsetList: @@ -337,11 +332,9 @@ fill_options = None if options.fill_options_file is not None: try: - if simplejson is None: - raise simplejson_exception - fill_options = Bunch( **stringify_dictionary_keys( simplejson.load( open( options.fill_options_file ) ) ) ) #simplejson.load( open( options.fill_options_file ) ) + fill_options = Bunch( **stringify_dictionary_keys( json.load( open( 
options.fill_options_file ) ) ) ) #json.load( open( options.fill_options_file ) ) except Exception, e: - print "Warning: Ignoring fill options due to simplejson error (%s)." % e + print "Warning: Ignoring fill options due to json error (%s)." % e if fill_options is None: fill_options = Bunch() if 'fill_unjoined_only' not in fill_options: diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/filters/joiner.xml --- a/tools/filters/joiner.xml +++ b/tools/filters/joiner.xml @@ -51,7 +51,7 @@ </inputs><configfiles><configfile name="fill_options_file"><% -import simplejson +import json %> #set $__fill_options = {} #if $fill_empty_columns['fill_empty_columns_switch'] == 'fill_empty': @@ -72,7 +72,7 @@ #end for #end if #end if -${simplejson.dumps( __fill_options )} +${json.dumps( __fill_options )} </configfile></configfiles><outputs> diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/genomespace/genomespace_exporter.py --- a/tools/genomespace/genomespace_exporter.py +++ b/tools/genomespace/genomespace_exporter.py @@ -1,12 +1,16 @@ #Dan Blankenberg -import optparse, os, urllib2, urllib, cookielib, hashlib, base64, cgi, binascii, logging - -from galaxy import eggs -import pkg_resources - -pkg_resources.require( "simplejson" ) -import simplejson +import base64 +import binascii +import cgi +import cookielib +import hashlib +import json +import logging +import optparse +import os +import urllib +import urllib2 log = logging.getLogger( "tools.genomespace.genomespace_exporter" )#( __name__ ) @@ -58,7 +62,7 @@ dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } ) dir_request.get_method = lambda: 'GET' try: - dir_dict = simplejson.loads( url_opener.open( dir_request ).read() ) + dir_dict = json.loads( url_opener.open( dir_request ).read() ) except urllib2.HTTPError, e: #print "e", e, url #punting, assuming lack of permissions at this low of a level... 
continue @@ -81,16 +85,16 @@ if dir_slice in ( '', '/', None ): continue url = '/'.join( ( directory_dict['url'], urllib.quote( dir_slice.replace( '/', '_' ), safe='' ) ) ) - new_dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' }, data = simplejson.dumps( payload ) ) + new_dir_request = urllib2.Request( url, headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' }, data = json.dumps( payload ) ) new_dir_request.get_method = lambda: 'PUT' - directory_dict = simplejson.loads( url_opener.open( new_dir_request ).read() ) + directory_dict = json.loads( url_opener.open( new_dir_request ).read() ) return directory_dict def get_genome_space_launch_apps( atm_url, url_opener, file_url, file_type ): gs_request = urllib2.Request( "%s/%s/webtool/descriptor" % ( atm_url, GENOMESPACE_API_VERSION_STRING ) ) gs_request.get_method = lambda: 'GET' opened_gs_request = url_opener.open( gs_request ) - webtool_descriptors = simplejson.loads( opened_gs_request.read() ) + webtool_descriptors = json.loads( opened_gs_request.read() ) webtools = [] for webtool in webtool_descriptors: webtool_name = webtool.get( 'name' ) @@ -125,7 +129,7 @@ except urllib2.HTTPError, e: log.debug( 'GenomeSpace export tool failed reading a directory "%s": %s' % ( url, e ) ) return #bad url, go to next - cur_directory = simplejson.loads( cur_directory ) + cur_directory = json.loads( cur_directory ) directory = cur_directory.get( 'directory', {} ) contents = cur_directory.get( 'contents', [] ) if directory.get( 'isDirectory', False ): diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/genomespace/genomespace_file_browser.py --- a/tools/genomespace/genomespace_file_browser.py +++ b/tools/genomespace/genomespace_file_browser.py @@ -1,12 +1,13 @@ #Dan Blankenberg -import optparse, os, urllib, urllib2, urlparse, cookielib +import cookielib +import json +import optparse +import os +import urllib +import urllib2 +import urlparse -from galaxy import eggs -import pkg_resources - -pkg_resources.require( "simplejson" ) -import simplejson GENOMESPACE_API_VERSION_STRING = "v1.0" GENOMESPACE_SERVER_URL_PROPERTIES = "https://dm.genomespace.org/config/%s/serverurl.properties" % ( GENOMESPACE_API_VERSION_STRING ) @@ -87,12 +88,12 @@ gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) ) gs_request.get_method = lambda: 'GET' opened_gs_request = url_opener.open( gs_request ) - genomespace_formats = simplejson.loads( opened_gs_request.read() ) + genomespace_formats = json.loads( opened_gs_request.read() ) for format in genomespace_formats: GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name'] def download_from_genomespace_file_browser( json_parameter_file, genomespace_site ): - json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() ) + json_params = json.loads( open( json_parameter_file, 'r' ).read() ) datasource_params = json_params.get( 'param_dict' ) username = datasource_params.get( "gs-username", None ) token = datasource_params.get( "gs-token", None ) @@ -150,14 +151,14 @@ filename = "-%s" % filename used_filenames.append( filename ) output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_%s_visible_%s' % ( hda_id, filename, galaxy_ext ) ) - metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'new_primary_dataset', + metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 
'new_primary_dataset', base_dataset_id = dataset_id, ext = galaxy_ext, filename = output_filename, name = "GenomeSpace import on %s" % ( original_filename ) ) ) ) else: if dataset_id is not None: - metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset', + metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'dataset', dataset_id = dataset_id, ext = galaxy_ext, name = "GenomeSpace import on %s" % ( filename ) ) ) ) diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/genomespace/genomespace_importer.py --- a/tools/genomespace/genomespace_importer.py +++ b/tools/genomespace/genomespace_importer.py @@ -1,12 +1,14 @@ #Dan Blankenberg -import optparse, os, urllib2, urllib, cookielib, urlparse, tempfile, shutil - -from galaxy import eggs -import pkg_resources - -pkg_resources.require( "simplejson" ) -import simplejson +import cookielib +import json +import optparse +import os +import shutil +import tempfile +import urllib +import urllib2 +import urlparse import galaxy.model # need to import model before sniff to resolve a circular import dependency from galaxy.datatypes import sniff @@ -91,12 +93,12 @@ gs_request = urllib2.Request( "%s/%s/dataformat/list" % ( dm_site, GENOMESPACE_API_VERSION_STRING ) ) gs_request.get_method = lambda: 'GET' opened_gs_request = url_opener.open( gs_request ) - genomespace_formats = simplejson.loads( opened_gs_request.read() ) + genomespace_formats = json.loads( opened_gs_request.read() ) for format in genomespace_formats: GENOMESPACE_FORMAT_IDENTIFIER_TO_GENOMESPACE_EXT[ format['url'] ] = format['name'] def download_from_genomespace_importer( username, token, json_parameter_file, genomespace_site ): - json_params = simplejson.loads( open( json_parameter_file, 'r' ).read() ) + json_params = json.loads( open( json_parameter_file, 'r' ).read() ) datasource_params = json_params.get( 'param_dict' ) assert None not in [ username, token ], "Missing GenomeSpace username or token." 
output_filename = datasource_params.get( "output_file1", None ) @@ -152,7 +154,7 @@ metadata_request = urllib2.Request( "%s/%s/filemetadata/%s" % ( genomespace_site_dict['dmServer'], GENOMESPACE_API_VERSION_STRING, download_file_path ) ) metadata_request.get_method = lambda: 'GET' metadata_url = url_opener.open( metadata_request ) - file_metadata_dict = simplejson.loads( metadata_url.read() ) + file_metadata_dict = json.loads( metadata_url.read() ) metadata_url.close() file_type = file_metadata_dict.get( 'dataFormat', None ) if file_type and file_type.get( 'url' ): @@ -176,7 +178,7 @@ #save json info for single primary dataset if dataset_id is not None: - metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'dataset', + metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'dataset', dataset_id = dataset_id, ext = file_type, name = "GenomeSpace importer on %s" % ( filename ) ) ) ) @@ -189,7 +191,7 @@ used_filenames.append( filename ) target_output_filename = os.path.join( datasource_params['__new_file_path__'], 'primary_%i_%s_visible_%s' % ( hda_id, filename, file_type ) ) shutil.move( output_filename, target_output_filename ) - metadata_parameter_file.write( "%s\n" % simplejson.dumps( dict( type = 'new_primary_dataset', + metadata_parameter_file.write( "%s\n" % json.dumps( dict( type = 'new_primary_dataset', base_dataset_id = base_dataset_id, ext = file_type, filename = target_output_filename, diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/new_operations/column_join.py --- a/tools/new_operations/column_join.py +++ b/tools/new_operations/column_join.py @@ -13,19 +13,16 @@ other_inputs: the other input files to join """ -import optparse, os, re, struct, sys, tempfile +import json +import optparse +import os +import re +import struct +import sys +import tempfile -try: - simple_json_exception = None - from galaxy import eggs - from galaxy.util.bunch import Bunch - from galaxy.util import stringify_dictionary_keys - import pkg_resources - pkg_resources.require("simplejson") - import simplejson -except Exception, e: - simplejson_exception = e - simplejson = None +from galaxy.util.bunch import Bunch +from galaxy.util import stringify_dictionary_keys def stop_err( msg ): sys.stderr.write( msg ) @@ -162,11 +159,9 @@ fill_options = None if options.fill_options_file != 'None' and options.fill_options_file is not None: try: - if simplejson is None: - raise simplejson_exception - fill_options = Bunch( **stringify_dictionary_keys( simplejson.load( open( options.fill_options_file ) ) ) ) + fill_options = Bunch( **stringify_dictionary_keys( json.load( open( options.fill_options_file ) ) ) ) except Exception, e: - print 'Warning: Ignoring fill options due to simplejson error (%s).' % e + print 'Warning: Ignoring fill options due to json error (%s).' 
% e if fill_options is None: fill_options = Bunch() if 'file1_columns' not in fill_options: diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/new_operations/column_join.xml --- a/tools/new_operations/column_join.xml +++ b/tools/new_operations/column_join.xml @@ -49,7 +49,7 @@ </inputs><configfiles><configfile name="fill_options_file"><% -import simplejson +import json %> #set $__fill_options = {} #if $fill_empty_columns['fill_empty_columns_switch'] == 'fill_empty': @@ -65,7 +65,7 @@ #end for #end if #end if -${simplejson.dumps( __fill_options )} +${json.dumps( __fill_options )} </configfile></configfiles><outputs> diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/peak_calling/macs_wrapper.py --- a/tools/peak_calling/macs_wrapper.py +++ b/tools/peak_calling/macs_wrapper.py @@ -1,8 +1,12 @@ -import sys, subprocess, tempfile, shutil, glob, os, os.path, gzip -from galaxy import eggs -import pkg_resources -pkg_resources.require( "simplejson" ) -import simplejson +import glob +import gzip +import json +import os +import os.path +import shutil +import subprocess +import sys +import tempfile CHUNK_SIZE = 1024 @@ -42,7 +46,7 @@ out.close() def main(): - options = simplejson.load( open( sys.argv[1] ) ) + options = json.load( open( sys.argv[1] ) ) output_bed = sys.argv[2] output_extra_html = sys.argv[3] output_extra_path = sys.argv[4] diff -r 0c5e20558f6bb8d690dd9c9d25c172f08e837b1b -r 995ca770764013bcf0c61092de8436c9278357f7 tools/peak_calling/macs_wrapper.xml --- a/tools/peak_calling/macs_wrapper.xml +++ b/tools/peak_calling/macs_wrapper.xml @@ -93,7 +93,7 @@ </outputs><configfiles><configfile name="options_file"><% -import simplejson +import json %> #set $__options = { 'experiment_name':str( $experiment_name ), 'gsize':int( float( str( $gsize ) ) ), 'tsize':str( $tsize ), 'bw':str( $bw ), 'pvalue':str( $pvalue ), 'mfold':str( $mfold ), 'nolambda':str( $nolambda ), 'lambdaset': str( $lambdaset ), 'futurefdr':str( $futurefdr ) } #if str( $xls_to_interval ) == 'create': @@ -135,7 +135,7 @@ #if $diag_type['diag_type_selector'] == 'diag': #set $__options['diag'] = { 'fe-min':str( $diag_type['fe-min'] ), 'fe-max':str( $diag_type['fe-max'] ), 'fe-step':str( $diag_type['fe-step'] ) } #end if -${ simplejson.dumps( __options ) } +${ json.dumps( __options ) } </configfile></configfiles><tests> https://bitbucket.org/galaxy/galaxy-central/commits/2483cc9e597d/ Changeset: 2483cc9e597d User: dannon Date: 2014-01-10 13:40:02 Summary: Merge. Affected #: 172 files diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 job_conf.xml.sample_advanced --- a/job_conf.xml.sample_advanced +++ b/job_conf.xml.sample_advanced @@ -6,7 +6,19 @@ --><plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/><plugin id="pbs" type="runner" load="galaxy.jobs.runners.pbs:PBSJobRunner" workers="2"/> - <plugin id="drmaa" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner"/> + <plugin id="drmaa" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner"> + <!-- Different DRMs handle successfully completed jobs differently, + these options can be changed to handle such differences and + are explained in detail on the Galaxy wiki. 
Defaults are shown --> + <param id="invalidjobexception_state">ok</param> + <param id="invalidjobexception_retries">0</param> + <param id="internalexception_state">ok</param> + <param id="internalexception_retries">0</param> + </plugin> + <plugin id="sge" type="runner" load="galaxy.jobs.runners.drmaa:DRMAAJobRunner"> + <!-- Override the $DRMAA_LIBRARY_PATH environment variable --> + <param id="drmaa_library_path">/sge/lib/libdrmaa.so</param> + </plugin><plugin id="lwr" type="runner" load="galaxy.jobs.runners.lwr:LwrJobRunner"><!-- More information on LWR can be found at https://lwr.readthedocs.org --><!-- Uncomment following line to use libcurl to perform HTTP calls (defaults to urllib) --> @@ -14,6 +26,7 @@ </plugin><plugin id="cli" type="runner" load="galaxy.jobs.runners.cli:ShellJobRunner" /><plugin id="condor" type="runner" load="galaxy.jobs.runners.condor:CondorJobRunner" /> + <plugin id="slurm" type="runner" load="galaxy.jobs.runners.slurm:SlurmJobRunner" /></plugins><handlers default="handlers"><!-- Additional job handlers - the id should match the name of a @@ -21,6 +34,15 @@ --><handler id="handler0" tags="handlers"/><handler id="handler1" tags="handlers"/> + <!-- Handlers will load all plugins defined in the <plugins> collection + above by default, but can be limited to a subset using <plugin> + tags. This is useful for heterogenous environments where the DRMAA + plugin would need to be loaded more than once with different + configs. + --> + <handler id="sge_handler"> + <plugin id="sge"/> + </handler><handler id="special_handler0" tags="special_handlers"/><handler id="special_handler1" tags="special_handlers"/><handler id="trackster_handler"/> diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/__init__.py --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -86,6 +86,7 @@ self.app = app self.runner_plugins = [] self.handlers = {} + self.handler_runner_plugins = {} self.default_handler_id = None self.destinations = {} self.destination_tags = {} @@ -138,6 +139,10 @@ else: log.debug("Read definition for handler '%s'" % id) self.handlers[id] = (id,) + for plugin in handler.findall('plugin'): + if id not in self.handler_runner_plugins: + self.handler_runner_plugins[id] = [] + self.handler_runner_plugins[id].append( plugin.get('id') ) if handler.get('tags', None) is not None: for tag in [ x.strip() for x in handler.get('tags').split(',') ]: if tag in self.handlers: @@ -420,13 +425,19 @@ """ return self.destinations.get(id_or_tag, None) - def get_job_runner_plugins(self): + def get_job_runner_plugins(self, handler_id): """Load all configured job runner plugins :returns: list of job runner plugins """ rval = {} - for runner in self.runner_plugins: + if handler_id in self.handler_runner_plugins: + plugins_to_load = [ rp for rp in self.runner_plugins if rp['id'] in self.handler_runner_plugins[handler_id] ] + log.info( "Handler '%s' will load specified runner plugins: %s", handler_id, ', '.join( [ rp['id'] for rp in plugins_to_load ] ) ) + else: + plugins_to_load = self.runner_plugins + log.info( "Handler '%s' will load all configured runner plugins", handler_id ) + for runner in plugins_to_load: class_names = [] module = None id = runner['id'] @@ -477,7 +488,7 @@ try: rval[id] = runner_class( self.app, runner[ 'workers' ], **runner.get( 'kwds', {} ) ) except TypeError: - log.warning( "Job runner '%s:%s' has not been converted to a new-style runner" % ( module_name, class_name ) ) + log.exception( "Job runner 
'%s:%s' has not been converted to a new-style runner or encountered TypeError on load" % ( module_name, class_name ) ) rval[id] = runner_class( self.app ) log.debug( "Loaded job runner '%s:%s' as '%s'" % ( module_name, class_name, id ) ) return rval diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/handler.py --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -565,7 +565,7 @@ class DefaultJobDispatcher( object ): def __init__( self, app ): self.app = app - self.job_runners = self.app.job_config.get_job_runner_plugins() + self.job_runners = self.app.job_config.get_job_runner_plugins( self.app.config.server_name ) # Once plugins are loaded, all job destinations that were created from # URLs can have their URL params converted to the destination's param # dict by the plugin. diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/runners/__init__.py --- a/lib/galaxy/jobs/runners/__init__.py +++ b/lib/galaxy/jobs/runners/__init__.py @@ -22,13 +22,39 @@ STOP_SIGNAL = object() + +class RunnerParams( object ): + + def __init__( self, specs = None, params = None ): + self.specs = specs or dict() + self.params = params or dict() + for name, value in self.params.items(): + assert name in self.specs, 'Invalid job runner parameter for this plugin: %s' % name + if 'map' in self.specs[ name ]: + try: + self.params[ name ] = self.specs[ name ][ 'map' ]( value ) + except Exception, e: + raise Exception( 'Job runner parameter "%s" value "%s" could not be converted to the correct type: %s' % ( name, value, e ) ) + if 'valid' in self.specs[ name ]: + assert self.specs[ name ][ 'valid' ]( value ), 'Job runner parameter %s failed validation' % name + + def __getattr__( self, name ): + return self.params.get( name, self.specs[ name ][ 'default' ] ) + + class BaseJobRunner( object ): - def __init__( self, app, nworkers ): + def __init__( self, app, nworkers, **kwargs ): """Start the job runner """ self.app = app self.sa_session = app.model.context self.nworkers = nworkers + runner_param_specs = dict( recheck_missing_job_retries = dict( map = int, valid = lambda x: x >= 0, default = 0 ) ) + if 'runner_param_specs' in kwargs: + runner_param_specs.update( kwargs.pop( 'runner_param_specs' ) ) + if kwargs: + log.debug( 'Loading %s with params: %s', self.runner_name, kwargs ) + self.runner_params = RunnerParams( specs = runner_param_specs, params = kwargs ) def _init_worker_threads(self): """Start ``nworkers`` worker threads. @@ -115,7 +141,7 @@ job_wrapper.cleanup() return False elif job_state != model.Job.states.QUEUED: - log.info( "(%d) Job is in state %s, skipping execution" % ( job_id, job_state ) ) + log.info( "(%s) Job is in state %s, skipping execution" % ( job_id, job_state ) ) # cleanup may not be safe in all states return False @@ -226,6 +252,10 @@ options.update(**kwds) return job_script(**options) + def _complete_terminal_job( self, ajs, **kwargs ): + if ajs.job_wrapper.get_state() != model.Job.states.DELETED: + self.work_queue.put( ( self.finish_job, ajs ) ) + class AsynchronousJobState( object ): """ @@ -287,8 +317,8 @@ to the correct methods (queue, finish, cleanup) at appropriate times.. 
""" - def __init__( self, app, nworkers ): - super( AsynchronousJobRunner, self ).__init__( app, nworkers ) + def __init__( self, app, nworkers, **kwargs ): + super( AsynchronousJobRunner, self ).__init__( app, nworkers, **kwargs ) # 'watched' and 'queue' are both used to keep track of jobs to watch. # 'queue' is used to add new watched jobs, and can be called from # any thread (usually by the 'queue_job' method). 'watched' must only diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/runners/drmaa.py --- a/lib/galaxy/jobs/runners/drmaa.py +++ b/lib/galaxy/jobs/runners/drmaa.py @@ -16,27 +16,12 @@ from galaxy.jobs.runners import AsynchronousJobState, AsynchronousJobRunner eggs.require( "drmaa" ) -# We foolishly named this file the same as the name exported by the drmaa -# library... 'import drmaa' imports itself. -drmaa = __import__( "drmaa" ) log = logging.getLogger( __name__ ) __all__ = [ 'DRMAAJobRunner' ] -drmaa_state = { - drmaa.JobState.UNDETERMINED: 'process status cannot be determined', - drmaa.JobState.QUEUED_ACTIVE: 'job is queued and active', - drmaa.JobState.SYSTEM_ON_HOLD: 'job is queued and in system hold', - drmaa.JobState.USER_ON_HOLD: 'job is queued and in user hold', - drmaa.JobState.USER_SYSTEM_ON_HOLD: 'job is queued and in user and system hold', - drmaa.JobState.RUNNING: 'job is running', - drmaa.JobState.SYSTEM_SUSPENDED: 'job is system suspended', - drmaa.JobState.USER_SUSPENDED: 'job is user suspended', - drmaa.JobState.DONE: 'job finished normally', - drmaa.JobState.FAILED: 'job finished, but failed', -} - +drmaa = None DRMAA_jobTemplate_attributes = [ 'args', 'remoteCommand', 'outputPath', 'errorPath', 'nativeSpecification', 'jobName', 'email', 'project' ] @@ -48,8 +33,50 @@ """ runner_name = "DRMAARunner" - def __init__( self, app, nworkers ): + def __init__( self, app, nworkers, **kwargs ): """Start the job runner""" + + global drmaa + + runner_param_specs = dict( + drmaa_library_path = dict( map = str, default = os.environ.get( 'DRMAA_LIBRARY_PATH', None ) ), + invalidjobexception_state = dict( map = str, valid = lambda x: x in ( model.Job.states.OK, model.Job.states.ERROR ), default = model.Job.states.OK ), + invalidjobexception_retries = dict( map = int, valid = lambda x: int >= 0, default = 0 ), + internalexception_state = dict( map = str, valid = lambda x: x in ( model.Job.states.OK, model.Job.states.ERROR ), default = model.Job.states.OK ), + internalexception_retries = dict( map = int, valid = lambda x: int >= 0, default = 0 ) ) + + if 'runner_param_specs' not in kwargs: + kwargs[ 'runner_param_specs' ] = dict() + kwargs[ 'runner_param_specs' ].update( runner_param_specs ) + + super( DRMAAJobRunner, self ).__init__( app, nworkers, **kwargs ) + + # This allows multiple drmaa runners (although only one per handler) in the same job config file + if 'drmaa_library_path' in kwargs: + log.info( 'Overriding DRMAA_LIBRARY_PATH due to runner plugin parameter: %s', self.runner_params.drmaa_library_path ) + os.environ['DRMAA_LIBRARY_PATH'] = self.runner_params.drmaa_library_path + + # We foolishly named this file the same as the name exported by the drmaa + # library... 'import drmaa' imports itself. 
+ drmaa = __import__( "drmaa" ) + + # Subclasses may need access to state constants + self.drmaa_job_states = drmaa.JobState + + # Descriptive state strings pulled from the drmaa lib itself + self.drmaa_job_state_strings = { + drmaa.JobState.UNDETERMINED: 'process status cannot be determined', + drmaa.JobState.QUEUED_ACTIVE: 'job is queued and active', + drmaa.JobState.SYSTEM_ON_HOLD: 'job is queued and in system hold', + drmaa.JobState.USER_ON_HOLD: 'job is queued and in user hold', + drmaa.JobState.USER_SYSTEM_ON_HOLD: 'job is queued and in user and system hold', + drmaa.JobState.RUNNING: 'job is running', + drmaa.JobState.SYSTEM_SUSPENDED: 'job is system suspended', + drmaa.JobState.USER_SUSPENDED: 'job is user suspended', + drmaa.JobState.DONE: 'job finished normally', + drmaa.JobState.FAILED: 'job finished, but failed', + } + self.ds = drmaa.Session() self.ds.initialize() @@ -58,7 +85,6 @@ self.external_killJob_script = app.config.drmaa_external_killjob_script self.userid = None - super( DRMAAJobRunner, self ).__init__( app, nworkers ) self._init_monitor_thread() self._init_worker_threads() @@ -137,8 +163,10 @@ job_wrapper.cleanup() return - log.debug( "(%s) submitting file %s" % ( galaxy_id_tag, ajs.job_file ) ) - log.debug( "(%s) command is: %s" % ( galaxy_id_tag, command_line ) ) + log.debug( "(%s) submitting file %s", galaxy_id_tag, ajs.job_file ) + log.debug( "(%s) command is: %s", galaxy_id_tag, command_line ) + if native_spec: + log.debug( "(%s) native specification is: %s", galaxy_id_tag, native_spec ) # runJob will raise if there's a submit problem if self.external_runJob_script is None: @@ -175,6 +203,20 @@ # Add to our 'queue' of jobs to monitor self.monitor_queue.put( ajs ) + def _complete_terminal_job( self, ajs, drmaa_state, **kwargs ): + """ + Handle a job upon its termination in the DRM. This method is meant to + be overridden by subclasses to improve post-mortem and reporting of + failures. + """ + if drmaa_state == drmaa.JobState.FAILED: + if ajs.job_wrapper.get_state() != model.Job.states.DELETED: + ajs.stop_job = False + ajs.fail_message = "The cluster DRM system terminated this job" + self.work_queue.put( ( self.fail_job, ajs ) ) + elif drmaa_state == drmaa.JobState.DONE: + super( DRMAAJobRunner, self )._complete_terminal_job( ajs ) + def check_watched_items( self ): """ Called by the monitor thread to look at each watched job and deal @@ -188,16 +230,27 @@ try: assert external_job_id not in ( None, 'None' ), '(%s/%s) Invalid job id' % ( galaxy_id_tag, external_job_id ) state = self.ds.jobStatus( external_job_id ) - # InternalException was reported to be necessary on some DRMs, but - # this could cause failures to be detected as completion! Please - # report if you experience problems with this. 
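For context on the runner changes above: plugin <param> values arrive from job_conf.xml as strings and are now run through a small spec table in which each entry may give a coercion callable ('map'), a validator ('valid') and a 'default' returned when the param was never configured. A rough standalone sketch of that pattern; the class name and the spec values are illustrative, not the actual RunnerParams code.

    class ParamsSketch(object):
        # toy version of spec-driven runner params: coerce, validate, default
        def __init__(self, specs, params):
            self.specs = specs
            self.params = {}
            for name, value in params.items():
                if name not in specs:
                    raise ValueError('Invalid job runner parameter: %s' % name)
                spec = specs[name]
                if 'map' in spec:
                    value = spec['map'](value)
                if 'valid' in spec and not spec['valid'](value):
                    raise ValueError('Parameter %s failed validation' % name)
                self.params[name] = value

        def __getattr__(self, name):
            # fall back to the spec default when the param was never set
            return self.params.get(name, self.specs[name]['default'])

    specs = dict(
        invalidjobexception_retries=dict(map=int, valid=lambda x: x >= 0, default=0),
        drmaa_library_path=dict(map=str, default=None),
    )
    p = ParamsSketch(specs, {'invalidjobexception_retries': '2'})
    print(p.invalidjobexception_retries)   # 2, coerced from the XML string
    print(p.drmaa_library_path)            # None, the spec default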
- except ( drmaa.InvalidJobException, drmaa.InternalException ), e: - # we should only get here if an orphaned job was put into the queue at app startup - log.info( "(%s/%s) job left DRM queue with following message: %s" % ( galaxy_id_tag, external_job_id, e ) ) - self.work_queue.put( ( self.finish_job, ajs ) ) + except ( drmaa.InternalException, drmaa.InvalidJobException ), e: + ecn = e.__class__.__name__ + retry_param = ecn.lower() + '_retries' + state_param = ecn.lower() + '_state' + retries = getattr( ajs, retry_param, 0 ) + if self.runner_params[ retry_param ] > 0: + if retries < self.runner_params[ retry_param ]: + # will retry check on next iteration + setattr( ajs, retry_param, retries + 1 ) + continue + if self.runner_params[ state_param ] == model.Job.states.OK: + log.info( "(%s/%s) job left DRM queue with following message: %s", galaxy_id_tag, external_job_id, e ) + self.work_queue.put( ( self.finish_job, ajs ) ) + elif self.runner_params[ state_param ] == model.Job.states.ERROR: + log.info( "(%s/%s) job check resulted in %s after %s tries: %s", galaxy_id_tag, external_job_id, ecn, retries, e ) + self.work_queue.put( ( self.fail_job, ajs ) ) + else: + raise Exception( "%s is set to an invalid value (%s), this should not be possible. See galaxy.jobs.drmaa.__init__()", state_param, self.runner_params[ state_param ] ) continue except drmaa.DrmCommunicationException, e: - log.warning( "(%s/%s) unable to communicate with DRM: %s" % ( galaxy_id_tag, external_job_id, e )) + log.warning( "(%s/%s) unable to communicate with DRM: %s", galaxy_id_tag, external_job_id, e ) new_watched.append( ajs ) continue except Exception, e: @@ -208,19 +261,12 @@ self.work_queue.put( ( self.fail_job, ajs ) ) continue if state != old_state: - log.debug( "(%s/%s) state change: %s" % ( galaxy_id_tag, external_job_id, drmaa_state[state] ) ) + log.debug( "(%s/%s) state change: %s" % ( galaxy_id_tag, external_job_id, self.drmaa_job_state_strings[state] ) ) if state == drmaa.JobState.RUNNING and not ajs.running: ajs.running = True ajs.job_wrapper.change_state( model.Job.states.RUNNING ) - if state == drmaa.JobState.FAILED: - if ajs.job_wrapper.get_state() != model.Job.states.DELETED: - ajs.stop_job = False - ajs.fail_message = "The cluster DRM system terminated this job" - self.work_queue.put( ( self.fail_job, ajs ) ) - continue - if state == drmaa.JobState.DONE: - if ajs.job_wrapper.get_state() != model.Job.states.DELETED: - self.work_queue.put( ( self.finish_job, ajs ) ) + if state in ( drmaa.JobState.FAILED, drmaa.JobState.DONE ): + self._complete_terminal_job( ajs, drmaa_state = state ) continue ajs.old_state = state new_watched.append( ajs ) diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/jobs/runners/slurm.py --- /dev/null +++ b/lib/galaxy/jobs/runners/slurm.py @@ -0,0 +1,57 @@ +""" +SLURM job control via the DRMAA API. 
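The reworked check_watched_items above drives its behaviour from those params: the DRMAA exception's class name selects the matching '<name>_retries' and '<name>_state' settings, the watched-job object carries the per-job retry count, and the job is ultimately finished or failed. A toy sketch of that classification; the exception class, param values and WatchedJob object below are stand-ins.

    class InvalidJobException(Exception):
        pass

    runner_params = {'invalidjobexception_retries': 2,
                     'invalidjobexception_state': 'ok'}

    class WatchedJob(object):
        pass

    def classify(ajs, exception):
        ecn = exception.__class__.__name__
        retry_param = ecn.lower() + '_retries'
        state_param = ecn.lower() + '_state'
        retries = getattr(ajs, retry_param, 0)
        if retries < runner_params[retry_param]:
            # check again on the next monitor pass
            setattr(ajs, retry_param, retries + 1)
            return 'retry'
        return 'finish' if runner_params[state_param] == 'ok' else 'fail'

    ajs = WatchedJob()
    for _ in range(3):
        print(classify(ajs, InvalidJobException('job left the DRM queue')))
    # retry, retry, finish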
+""" + +import time +import logging +import subprocess + +from galaxy import model +from galaxy.jobs.runners.drmaa import DRMAAJobRunner + +log = logging.getLogger( __name__ ) + +__all__ = [ 'SlurmJobRunner' ] + + +class SlurmJobRunner( DRMAAJobRunner ): + runner_name = "SlurmRunner" + + def _complete_terminal_job( self, ajs, drmaa_state, **kwargs ): + def __get_jobinfo(): + scontrol_out = subprocess.check_output( ( 'scontrol', '-o', 'show', 'job', ajs.job_id ) ) + return dict( [ out_param.split( '=', 1 ) for out_param in scontrol_out.split() ] ) + if drmaa_state == self.drmaa_job_states.FAILED: + try: + job_info = __get_jobinfo() + sleep = 1 + while job_info['JobState'] == 'COMPLETING': + log.debug( '(%s/%s) Waiting %s seconds for failed job to exit COMPLETING state for post-mortem', ajs.job_wrapper.get_id_tag(), ajs.job_id, sleep ) + time.sleep( sleep ) + sleep *= 2 + if sleep > 64: + ajs.fail_message = "This job failed and the system timed out while trying to determine the cause of the failure." + break + job_info = __get_jobinfo() + if job_info['JobState'] == 'TIMEOUT': + ajs.fail_message = "This job was terminated because it ran longer than the maximum allowed job run time." + elif job_info['JobState'] == 'NODE_FAIL': + log.warning( '(%s/%s) Job failed due to node failure, attempting resubmission', ajs.job_wrapper.get_id_tag(), ajs.job_id ) + ajs.job_wrapper.change_state( model.Job.states.QUEUED, info = 'Job was resubmitted due to node failure' ) + try: + self.queue_job( ajs.job_wrapper ) + return + except: + ajs.fail_message = "This job failed due to a cluster node failure, and an attempt to resubmit the job failed." + elif job_info['JobState'] == 'CANCELLED': + ajs.fail_message = "This job failed because it was cancelled by an administrator." + else: + ajs.fail_message = "This job failed for reasons that could not be determined." + ajs.fail_message += '\nPlease click the bug icon to report this problem if you need help.' + ajs.stop_job = False + self.work_queue.put( ( self.fail_job, ajs ) ) + except Exception, e: + log.exception( '(%s/%s) Unable to inspect failed slurm job using scontrol, job will be unconditionally failed: %s', ajs.job_wrapper.get_id_tag(), ajs.job_id, e ) + super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state ) + elif drmaa_state == self.drmaa_job_states.DONE: + super( SlurmJobRunner, self )._complete_terminal_job( ajs, drmaa_state = drmaa_state ) diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/model/__init__.py --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -18,6 +18,7 @@ import socket import time from string import Template +from itertools import ifilter import galaxy.datatypes import galaxy.datatypes.registry @@ -42,6 +43,11 @@ # Default Value Required for unit tests datatypes_registry.load_datatypes() +# When constructing filters with in for a fixed set of ids, maximum +# number of items to place in the IN statement. Different databases +# are going to have different limits so it is likely best to not let +# this be unlimited - filter in Python if over this limit. +MAX_IN_FILTER_LENGTH = 100 class NoConverterException(Exception): def __init__(self, value): @@ -892,6 +898,33 @@ rval = galaxy.datatypes.data.nice_size( rval ) return rval + def contents_iter( self, **kwds ): + """ + Fetch filtered list of contents of history. 
+ """ + python_filter = None + db_session = object_session( self ) + assert db_session != None + query = db_session.query( HistoryDatasetAssociation ).filter( HistoryDatasetAssociation.table.c.history_id == self.id ) + query = query.order_by( HistoryDatasetAssociation.table.c.hid.asc() ) + deleted = galaxy.util.string_as_bool_or_none( kwds.get( 'deleted', None ) ) + if deleted is not None: + query = query.filter( HistoryDatasetAssociation.deleted == bool( kwds['deleted'] ) ) + visible = galaxy.util.string_as_bool_or_none( kwds.get( 'visible', None ) ) + if visible is not None: + query = query.filter( HistoryDatasetAssociation.visible == bool( kwds['visible'] ) ) + if 'ids' in kwds: + ids = kwds['ids'] + max_in_filter_length = kwds.get('max_in_filter_length', MAX_IN_FILTER_LENGTH) + if len(ids) < max_in_filter_length: + query = query.filter( HistoryDatasetAssociation.id.in_(ids) ) + else: + python_filter = lambda hda: hda.id in ids + if python_filter: + return ifilter(python_filter, query) + else: + return query + def copy_tags_from(self,target_user,source_history): for src_shta in source_history.tags: new_shta = src_shta.copy() diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/galaxy/api/history_contents.py --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -51,47 +51,28 @@ else: history = self.get_history( trans, history_id, check_ownership=True, check_accessible=True ) - # if ids, return _FULL_ data (as show) for each id passed + contents_kwds = {} if ids: - ids = ids.split( ',' ) - for index, hda in enumerate( history.datasets ): - encoded_hda_id = trans.security.encode_id( hda.id ) - if encoded_hda_id in ids: - #TODO: share code with show - rval.append( self._detailed_hda_dict( trans, hda ) ) - - # if no ids passed, return a _SUMMARY_ of _all_ datasets in the history + ids = map( lambda id: trans.security.decode_id( id ), ids.split( ',' ) ) + contents_kwds[ 'ids' ] = ids + # If explicit ids given, always used detailed result. 
+ details = 'all' else: + contents_kwds[ 'deleted' ] = kwd.get( 'deleted', None ) + contents_kwds[ 'visible' ] = kwd.get( 'visible', None ) # details param allows a mixed set of summary and detailed hdas #TODO: this is getting convoluted due to backwards compat details = kwd.get( 'details', None ) or [] if details and details != 'all': details = util.listify( details ) - # by default return all datasets - even if deleted or hidden (defaulting the next switches to None) - # if specified return those datasets that match the setting - # backwards compat - return_deleted = util.string_as_bool_or_none( kwd.get( 'deleted', None ) ) - return_visible = util.string_as_bool_or_none( kwd.get( 'visible', None ) ) - - for hda in history.datasets: - # if either return_ setting has been requested (!= None), skip hdas that don't match the request - if return_deleted is not None: - if( ( return_deleted and not hda.deleted ) - or ( not return_deleted and hda.deleted ) ): - continue - if return_visible is not None: - if( ( return_visible and not hda.visible ) - or ( not return_visible and hda.visible ) ): - continue - - encoded_hda_id = trans.security.encode_id( hda.id ) - if( ( encoded_hda_id in details ) - or ( details == 'all' ) ): - rval.append( self._detailed_hda_dict( trans, hda ) ) - else: - rval.append( self._summary_hda_dict( trans, history_id, hda ) ) - + for hda in history.contents_iter( **contents_kwds ): + encoded_hda_id = trans.security.encode_id( hda.id ) + detailed = details == 'all' or ( encoded_hda_id in details ) + if detailed: + rval.append( self._detailed_hda_dict( trans, hda ) ) + else: + rval.append( self._summary_hda_dict( trans, history_id, hda ) ) except Exception, e: # for errors that are not specific to one hda (history lookup or summary list) rval = "Error in history API at listing contents: " + str( e ) diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -19,11 +19,6 @@ log = logging.getLogger( __name__ ) -def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ): - value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ), - 'error_message' : tool_shed_repository.error_message or '' } - return value_mapper - def get_message_for_no_shed_tool_config(): # This Galaxy instance is not configured with a shed-related tool panel configuration file. message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> ' @@ -48,8 +43,8 @@ :param id: the encoded id of the ToolShedRepository object """ # Example URL: http://localhost:8763/api/tool_shed_repositories/f2db41e1fa331b3e/exported_w... - # Since exported workflows are dictionaries with very few attributes that differentiate them from each other, we'll build the - # list based on the following dictionary of those few attributes. + # Since exported workflows are dictionaries with very few attributes that differentiate them from each + # other, we'll build the list based on the following dictionary of those few attributes. 
exported_workflows = [] repository = suc.get_tool_shed_repository_by_id( trans, id ) metadata = repository.metadata @@ -58,17 +53,23 @@ else: exported_workflow_tups = [] for index, exported_workflow_tup in enumerate( exported_workflow_tups ): - # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of relative_path is the location - # on disk (relative to the root of the installed repository) where the exported_workflow_dict file (.ga file) is located. + # The exported_workflow_tup looks like ( relative_path, exported_workflow_dict ), where the value of + # relative_path is the location on disk (relative to the root of the installed repository) where the + # exported_workflow_dict file (.ga file) is located. exported_workflow_dict = exported_workflow_tup[ 1 ] annotation = exported_workflow_dict.get( 'annotation', '' ) format_version = exported_workflow_dict.get( 'format-version', '' ) workflow_name = exported_workflow_dict.get( 'name', '' ) - # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list. + # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's + # location (i.e., index) in the list. display_dict = dict( index=index, annotation=annotation, format_version=format_version, workflow_name=workflow_name ) exported_workflows.append( display_dict ) return exported_workflows + def __get_value_mapper( self, trans ): + value_mapper = { 'id' : trans.security.encode_id } + return value_mapper + @web.expose_api def import_workflow( self, trans, payload, **kwd ): """ @@ -96,13 +97,11 @@ # Since we don't have an in-memory object with an id, we'll identify the exported workflow via it's location (i.e., index) in the list. 
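The tool shed API controllers now hand to_dict() a value_mapper of callables (for example trans.security.encode_id) instead of pre-computed strings; Dictifiable.to_dict() then applies each callable to the matching column value while serializing. A sketch of just that mapping step, with a toy encoder standing in for encode_id.

    def apply_value_mapper(row_dict, value_mapper):
        # apply each mapped callable to its column value, leaving other keys untouched
        rval = dict(row_dict)
        for key, mapper in value_mapper.items():
            if key in rval and rval[key] is not None:
                rval[key] = mapper(rval[key])
        return rval

    toy_encode_id = lambda id_: 'encoded-%s' % id_
    row = {'id': 42, 'name': 'add_column', 'error_message': None}
    print(apply_value_mapper(row, {'id': toy_encode_id}))
    # {'id': 'encoded-42', 'name': 'add_column', 'error_message': None}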
exported_workflow = exported_workflows[ int( index ) ] workflow_name = exported_workflow[ 'workflow_name' ] - workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name ) if status == 'error': - log.error( message, exc_info=True ) - trans.response.status = 500 - return message - else: - return workflow.to_dict( view='element' ) + log.debug( error_message ) + return {} + return workflow.to_dict( view='element' ) @web.expose_api def import_workflows( self, trans, **kwd ): @@ -125,11 +124,9 @@ imported_workflow_dicts = [] for exported_workflow_dict in exported_workflows: workflow_name = exported_workflow_dict[ 'workflow_name' ] - workflow, status, message = workflow_util.import_workflow( trans, repository, workflow_name ) + workflow, status, error_message = workflow_util.import_workflow( trans, repository, workflow_name ) if status == 'error': - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( error_message ) else: imported_workflow_dicts.append( workflow.to_dict( view='element' ) ) return imported_workflow_dicts @@ -142,22 +139,15 @@ """ # Example URL: http://localhost:8763/api/tool_shed_repositories tool_shed_repository_dicts = [] - try: - query = trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \ - .order_by( trans.app.install_model.ToolShedRepository.table.c.name ) \ - .all() - for tool_shed_repository in query: - tool_shed_repository_dict = tool_shed_repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) - tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', - action='show', - id=trans.security.encode_id( tool_shed_repository.id ) ) - tool_shed_repository_dicts.append( tool_shed_repository_dict ) - return tool_shed_repository_dicts - except Exception, e: - message = "Error in the tool_shed_repositories API in index: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + for tool_shed_repository in trans.install_model.context.query( trans.app.install_model.ToolShedRepository ) \ + .order_by( trans.app.install_model.ToolShedRepository.table.c.name ): + tool_shed_repository_dict = \ + tool_shed_repository.to_dict( value_mapper=self.__get_value_mapper( trans ) ) + tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', + action='show', + id=trans.security.encode_id( tool_shed_repository.id ) ) + tool_shed_repository_dicts.append( tool_shed_repository_dict ) + return tool_shed_repository_dicts @web.expose_api def install_repository_revision( self, trans, payload, **kwd ): @@ -208,8 +198,7 @@ # Make sure this Galaxy instance is configured with a shed-related tool panel configuration file. if not suc.have_shed_tool_conf_for_install( trans ): message = get_message_for_no_shed_tool_config() - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) # Make sure the current user's API key proves he is an admin user in this Galaxy instance. 
if not trans.user_is_admin(): @@ -225,18 +214,20 @@ except Exception, e: message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \ ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) if raw_text: + # If successful, the response from get_repository_revision_install_info will be 3 + # dictionaries, a dictionary defining the Repository, a dictionary defining the + # Repository revision (RepositoryMetadata), and a dictionary including the additional + # information required to install the repository. items = json.from_json_string( raw_text ) repository_revision_dict = items[ 1 ] repo_info_dict = items[ 2 ] else: message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \ ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) repo_info_dicts = [ repo_info_dict ] # Make sure the tool shed returned everything we need for installing the repository. @@ -345,7 +336,7 @@ tool_path, install_tool_dependencies, reinstalling=False ) - tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) + tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) ) tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', action='show', id=trans.security.encode_id( tool_shed_repository.id ) ) @@ -394,8 +385,7 @@ if not suc.have_shed_tool_conf_for_install( trans ): # This Galaxy instance is not configured with a shed-related tool panel configuration file. message = get_message_for_no_shed_tool_config() - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) if not trans.user_is_admin(): raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' ) @@ -410,8 +400,7 @@ len( changeset_revisions ) != num_specified_repositories: message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered ' message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.' - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( message ) return dict( status='error', error=message ) # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information # about each of the repositories being installed. @@ -482,7 +471,7 @@ repair_dict = repository_util.repair_tool_shed_repository( trans, repository, encoding_util.tool_shed_encode( repo_info_dict ) ) - repository_dict = repository.to_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, repository ) ) + repository_dict = repository.to_dict( value_mapper=self.__get_value_mapper( trans ) ) repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', action='show', id=trans.security.encode_id( repository.id ) ) @@ -502,39 +491,39 @@ :param key: the API key of the Galaxy admin user. 
""" - try: - start_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results = dict( start_time=start_time, - successful_count=0, - unsuccessful_count=0, - repository_status=[] ) - # Make sure the current user's API key proves he is an admin user in this Galaxy instance. - if not trans.user_is_admin(): - raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' ) - query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False ) - # Now reset metadata on all remaining repositories. - for repository in query: - repository_id = trans.security.encode_id( repository.id ) - try: - invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id ) - if invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False ) - results[ 'unsuccessful_count' ] += 1 - else: - message = "Successfully reset metadata on repository %s owned by %s" % ( str( repository.name ), str( repository.owner ) ) - results[ 'successful_count' ] += 1 - except Exception, e: - message = "Error resetting metadata on repository %s owned by %s: %s" % ( str( repository.name ), str( repository.owner ), str( e ) ) + start_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results = dict( start_time=start_time, + successful_count=0, + unsuccessful_count=0, + repository_status=[] ) + # Make sure the current user's API key proves he is an admin user in this Galaxy instance. + if not trans.user_is_admin(): + raise HTTPForbidden( detail='You are not authorized to reset metadata on repositories installed into this Galaxy instance.' ) + query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=False, order=False ) + # Now reset metadata on all remaining repositories. 
+ for repository in query: + repository_id = trans.security.encode_id( repository.id ) + try: + invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_installed_repository( trans, repository_id ) + if invalid_file_tups: + message = tool_util.generate_message_for_invalid_tools( trans, + invalid_file_tups, + repository, + None, + as_html=False ) results[ 'unsuccessful_count' ] += 1 - results[ 'repository_status' ].append( message ) - stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results[ 'stop_time' ] = stop_time - return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) - except Exception, e: - message = "Error in the Galaxy tool_shed_repositories API in reset_metadata_on_installed_repositories: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + else: + message = "Successfully reset metadata on repository %s owned by %s" % \ + ( str( repository.name ), str( repository.owner ) ) + results[ 'successful_count' ] += 1 + except Exception, e: + message = "Error resetting metadata on repository %s owned by %s: %s" % \ + ( str( repository.name ), str( repository.owner ), str( e ) ) + results[ 'unsuccessful_count' ] += 1 + results[ 'repository_status' ].append( message ) + stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results[ 'stop_time' ] = stop_time + return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) @web.expose_api def show( self, trans, id, **kwd ): @@ -545,15 +534,12 @@ :param id: the encoded id of the ToolShedRepository object """ # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e - try: - tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id ) - tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) ) - tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', - action='show', - id=trans.security.encode_id( tool_shed_repository.id ) ) - return tool_shed_repository_dict - except Exception, e: - message = "Error in tool_shed_repositories API in index: " + str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id ) + if tool_shed_repository is None: + log.debug( "Unable to locate tool_shed_repository record for id %s." % ( str( id ) ) ) + return {} + tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=self.__get_value_mapper( trans ) ) + tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories', + action='show', + id=trans.security.encode_id( tool_shed_repository.id ) ) + return tool_shed_repository_dict diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/tool_shed/api/repositories.py --- a/lib/galaxy/webapps/tool_shed/api/repositories.py +++ b/lib/galaxy/webapps/tool_shed/api/repositories.py @@ -36,22 +36,27 @@ :param name: the name of the Repository :param owner: the owner of the Repository - Returns the ordered list of changeset revision hash strings that are associated with installable revisions. As in the changelog, the - list is ordered oldest to newest. + Returns the ordered list of changeset revision hash strings that are associated with installable revisions. + As in the changelog, the list is ordered oldest to newest. 
""" # Example URL: http://localhost:9009/api/repositories/get_installable_revisions?name=add_column&owner=test - try: + if name and owner: # Get the repository information. repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository is None: + error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " + error_message += "cannot locate repository %s owned by %s." % ( str( name ), str( owner ) ) + log.debug( error_message ) + return [] repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True ) return ordered_installable_revisions - except Exception, e: - message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + else: + error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " + error_message += "invalid name %s or owner %s received." % ( str( name ), str( owner ) ) + log.debug( error_message ) + return [] @web.expose_api_anonymous def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ): @@ -106,49 +111,65 @@ ] } """ - repository_value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } - # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info?name=add_column&owner=test&changeset_revision=3a08cc21466f - try: + # Example URL: + # http://<xyz>/api/repositories/get_repository_revision_install_info?name=<n>&owner=<o>&changeset_revision=<cr> + if name and owner and changeset_revision: # Get the repository information. repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository is None: + log.debug( 'Cannot locate repository %s owned by %s' % ( str( name ), str( owner ) ) ) + return {}, {}, {} encoded_repository_id = trans.security.encode_id( repository.id ) - repository_dict = repository.to_dict( view='element', value_mapper=repository_value_mapper ) + repository_dict = repository.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) repository_dict[ 'url' ] = web.url_for( controller='repositories', action='show', id=encoded_repository_id ) # Get the repository_metadata information. - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + encoded_repository_id, + changeset_revision ) if not repository_metadata: - # The changeset_revision column in the repository_metadata table has been updated with a new value value, so find the - # changeset_revision to which we need to update. + # The changeset_revision column in the repository_metadata table has been updated with a new + # value value, so find the changeset_revision to which we need to update. 
repo_dir = repository.repo_path( trans.app ) repo = hg.repository( suc.get_configured_ui(), repo_dir ) new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision ) - repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision ) + repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, + encoded_repository_id, + new_changeset_revision ) changeset_revision = new_changeset_revision if repository_metadata: encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id ) repository_metadata_dict = repository_metadata.to_dict( view='collection', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) + value_mapper=self.__get_value_mapper( trans ) ) repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', action='show', id=encoded_repository_metadata_id ) # Get the repo_info_dict for installing the repository. - repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, \ - has_repository_dependencies, has_repository_dependencies_only_if_compiling_contained_td = \ + repo_info_dict, \ + includes_tools, \ + includes_tool_dependencies, \ + includes_tools_for_display_in_tool_panel, \ + has_repository_dependencies, \ + has_repository_dependencies_only_if_compiling_contained_td = \ repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision ) return repository_dict, repository_metadata_dict, repo_info_dict else: - message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision ) - log.error( message, exc_info=True ) - trans.response.status = 500 + log.debug( "Unable to locate repository_metadata record for repository id %s and changeset_revision %s" % \ + ( str( repository.id ), str( changeset_revision ) ) ) return repository_dict, {}, {} - except Exception, e: - message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + else: + debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: " + debug_msg += "Invalid name %s or owner %s or changeset_revision %s received." % \ + ( str( name ), str( owner ), str( changeset_revision ) ) + log.debug( debug_msg ) + return {}, {}, {} + + def __get_value_mapper( self, trans ): + value_mapper = { 'id' : trans.security.encode_id, + 'repository_id' : trans.security.encode_id } + return value_mapper @web.expose_api def import_capsule( self, trans, payload, **kwd ): @@ -177,29 +198,27 @@ uploaded_file=None, capsule_file_name=None ) if os.path.getsize( os.path.abspath( capsule_file_name ) ) == 0: - message = 'Your capsule file is empty.' - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( 'Your capsule file %s is empty.' % str( capsule_file_name ) ) + return {} try: # Open for reading with transparent compression. 
tar_archive = tarfile.open( capsule_file_path, 'r:*' ) except tarfile.ReadError, e: - message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( 'Error opening capsule file %s: %s' % ( str( capsule_file_name ), str( e ) ) ) + return {} capsule_dict[ 'tar_archive' ] = tar_archive capsule_dict[ 'capsule_file_name' ] = capsule_file_name capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict ) capsule_dict = import_util.validate_capsule( trans, **capsule_dict ) status = capsule_dict.get( 'status', 'error' ) if status == 'error': - message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + log.debug( 'The capsule contents are invalid and cannpt be imported:<br/>%s' % \ + str( capsule_dict.get( 'error_message', '' ) ) ) + return {} encoded_file_path = capsule_dict.get( 'encoded_file_path', None ) + if encoded_file_path is None: + log.debug( 'The capsule_dict %s is missing the required encoded_file_path entry.' % str( capsule_dict ) ) + return {} file_path = encoding_util.tool_shed_decode( encoded_file_path ) export_info_file_path = os.path.join( file_path, 'export_info.xml' ) export_info_dict = import_util.get_export_info_dict( export_info_file_path ) @@ -216,12 +235,14 @@ # Add the capsule_file_name and encoded_file_path to the repository_status_info_dict. repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path - import_results_tups = repository_maintenance_util.create_repository_and_import_archive( trans, - repository_status_info_dict, - import_results_tups ) + import_results_tups = \ + repository_maintenance_util.create_repository_and_import_archive( trans, + repository_status_info_dict, + import_results_tups ) import_util.check_status_and_reset_downloadable( trans, import_results_tups ) suc.remove_dir( file_path ) - # NOTE: the order of installation is defined in import_results_tups, but order will be lost when transferred to return_dict. + # NOTE: the order of installation is defined in import_results_tups, but order will be lost + # when transferred to return_dict. return_dict = {} for import_results_tup in import_results_tups: ok, name_owner, message = import_results_tup @@ -237,28 +258,19 @@ GET /api/repositories Displays a collection (list) of repositories. 
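import_capsule above rejects empty uploads, opens the capsule with tarfile mode 'r:*' so the compression is detected transparently, and treats an unreadable archive as a soft failure rather than a 500. A self-contained sketch of those checks; the throwaway capsule built below exists only so the example runs end to end.

    import os
    import tarfile
    import tempfile

    def open_capsule(capsule_file_path):
        if os.path.getsize(capsule_file_path) == 0:
            return None, 'capsule file is empty'
        try:
            # 'r:*' reads plain, gzip- or bzip2-compressed tars alike
            return tarfile.open(capsule_file_path, 'r:*'), None
        except tarfile.ReadError as e:
            return None, 'error opening capsule file: %s' % e

    # build a tiny gzipped capsule so the sketch is runnable
    workdir = tempfile.mkdtemp()
    member = os.path.join(workdir, 'export_info.xml')
    with open(member, 'w') as handle:
        handle.write('<export_info/>')
    capsule = os.path.join(workdir, 'capsule.tar.gz')
    tar = tarfile.open(capsule, 'w:gz')
    tar.add(member, arcname='export_info.xml')
    tar.close()

    archive, error = open_capsule(capsule)
    print(error if archive is None else archive.getnames())   # ['export_info.xml']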
""" - value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } # Example URL: http://localhost:9009/api/repositories repository_dicts = [] - deleted = util.string_as_bool( deleted ) - try: - query = trans.sa_session.query( trans.app.model.Repository ) \ - .filter( trans.app.model.Repository.table.c.deleted == deleted ) \ - .order_by( trans.app.model.Repository.table.c.name ) \ - .all() - for repository in query: - repository_dict = repository.to_dict( view='collection', value_mapper=value_mapper ) - repository_dict[ 'url' ] = web.url_for( controller='repositories', - action='show', - id=trans.security.encode_id( repository.id ) ) - repository_dicts.append( repository_dict ) - return repository_dicts - except Exception, e: - message = "Error in the Tool Shed repositories API in index: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + deleted = util.asbool( deleted ) + for repository in trans.sa_session.query( trans.app.model.Repository ) \ + .filter( trans.app.model.Repository.table.c.deleted == deleted ) \ + .order_by( trans.app.model.Repository.table.c.name ): + repository_dict = repository.to_dict( view='collection', + value_mapper=self.__get_value_mapper( trans ) ) + repository_dict[ 'url' ] = web.url_for( controller='repositories', + action='show', + id=trans.security.encode_id( repository.id ) ) + repository_dicts.append( repository_dict ) + return repository_dicts @web.expose_api def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ): @@ -273,28 +285,22 @@ in addition to those repositories of type tool_dependency_definition. This param is ignored if the current user is not an admin user, in which case this same restriction is automatic. """ - try: - if trans.user_is_admin(): - my_writable = util.asbool( my_writable ) - else: - my_writable = True - handled_repository_ids = [] - repository_ids = [] - query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) - # Make sure repositories of type tool_dependency_definition are first in the list. - for repository in query: - if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - repository_ids.append( trans.security.encode_id( repository.id ) ) - # Now add all remaining repositories to the list. - for repository in query: - if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - repository_ids.append( trans.security.encode_id( repository.id ) ) - return repository_ids - except Exception, e: - message = "Error in the Tool Shed repositories API in repository_ids_for_setting_metadata: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + if trans.user_is_admin(): + my_writable = util.asbool( my_writable ) + else: + my_writable = True + handled_repository_ids = [] + repository_ids = [] + query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) + # Make sure repositories of type tool_dependency_definition are first in the list. + for repository in query: + if repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + repository_ids.append( trans.security.encode_id( repository.id ) ) + # Now add all remaining repositories to the list. 
+ for repository in query: + if repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + repository_ids.append( trans.security.encode_id( repository.id ) ) + return repository_ids @web.expose_api def reset_metadata_on_repositories( self, trans, payload, **kwd ): @@ -318,6 +324,7 @@ :param skip_file (optional): A local file name that contains the encoded repository ids associated with repositories to skip. This param can be used as an alternative to the above encoded_ids_to_skip. """ + def handle_repository( trans, repository, results ): log.debug( "Resetting metadata on repository %s" % str( repository.name ) ) repository_id = trans.security.encode_id( repository.id ) @@ -335,53 +342,48 @@ status = '%s : %s' % ( str( repository.name ), message ) results[ 'repository_status' ].append( status ) return results - try: - start_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results = dict( start_time=start_time, - repository_status=[], - successful_count=0, - unsuccessful_count=0 ) - handled_repository_ids = [] - encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] ) - skip_file = payload.get( 'skip_file', None ) - if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip: - # Load the list of encoded_ids_to_skip from the skip_file. - # Contents of file must be 1 encoded repository id per line. - lines = open( skip_file, 'rb' ).readlines() - for line in lines: - if line.startswith( '#' ): - # Skip comments. - continue - encoded_ids_to_skip.append( line.rstrip( '\n' ) ) - if trans.user_is_admin(): - my_writable = util.asbool( payload.get( 'my_writable', False ) ) - else: - my_writable = True - query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) - # First reset metadata on all repositories of type repository_dependency_definition. - for repository in query: - encoded_id = trans.security.encode_id( repository.id ) - if encoded_id in encoded_ids_to_skip: - log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ - ( str( repository.id ), str( encoded_ids_to_skip ) ) ) - elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - results = handle_repository( trans, repository, results ) - # Now reset metadata on all remaining repositories. 
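reset_metadata_on_repositories accepts a skip_file as an alternative to encoded_ids_to_skip: one encoded repository id per line, with '#' lines treated as comments. A small sketch of reading that file; the ids written into it are illustrative.

    import tempfile

    def load_ids_to_skip(skip_file):
        ids = []
        for line in open(skip_file):
            if line.startswith('#'):
                continue   # comment line
            line = line.rstrip('\n')
            if line:
                ids.append(line)
        return ids

    skip_file = tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False)
    skip_file.write('# repositories to leave untouched\nf2db41e1fa331b3e\ndf7a1f0c02a5b08e\n')
    skip_file.close()
    print(load_ids_to_skip(skip_file.name))   # ['f2db41e1fa331b3e', 'df7a1f0c02a5b08e']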
- for repository in query: - encoded_id = trans.security.encode_id( repository.id ) - if encoded_id in encoded_ids_to_skip: - log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ - ( str( repository.id ), str( encoded_ids_to_skip ) ) ) - elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: - results = handle_repository( trans, repository, results ) - stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results[ 'stop_time' ] = stop_time - return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) - except Exception, e: - message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + + start_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results = dict( start_time=start_time, + repository_status=[], + successful_count=0, + unsuccessful_count=0 ) + handled_repository_ids = [] + encoded_ids_to_skip = payload.get( 'encoded_ids_to_skip', [] ) + skip_file = payload.get( 'skip_file', None ) + if skip_file and os.path.exists( skip_file ) and not encoded_ids_to_skip: + # Load the list of encoded_ids_to_skip from the skip_file. + # Contents of file must be 1 encoded repository id per line. + lines = open( skip_file, 'rb' ).readlines() + for line in lines: + if line.startswith( '#' ): + # Skip comments. + continue + encoded_ids_to_skip.append( line.rstrip( '\n' ) ) + if trans.user_is_admin(): + my_writable = util.asbool( payload.get( 'my_writable', False ) ) + else: + my_writable = True + query = suc.get_query_for_setting_metadata_on_repositories( trans, my_writable=my_writable, order=False ) + # First reset metadata on all repositories of type repository_dependency_definition. + for repository in query: + encoded_id = trans.security.encode_id( repository.id ) + if encoded_id in encoded_ids_to_skip: + log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ + ( str( repository.id ), str( encoded_ids_to_skip ) ) ) + elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + results = handle_repository( trans, repository, results ) + # Now reset metadata on all remaining repositories. + for repository in query: + encoded_id = trans.security.encode_id( repository.id ) + if encoded_id in encoded_ids_to_skip: + log.debug( "Skipping repository with id %s because it is in encoded_ids_to_skip %s" % \ + ( str( repository.id ), str( encoded_ids_to_skip ) ) ) + elif repository.type != rt_util.TOOL_DEPENDENCY_DEFINITION and repository.id not in handled_repository_ids: + results = handle_repository( trans, repository, results ) + stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results[ 'stop_time' ] = stop_time + return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) @web.expose_api def reset_metadata_on_repository( self, trans, payload, **kwd ): @@ -395,6 +397,7 @@ The following parameters must be included in the payload. :param repository_id: the encoded id of the repository on which metadata is to be reset. 
""" + def handle_repository( trans, start_time, repository ): results = dict( start_time=start_time, repository_status=[] ) @@ -410,21 +413,16 @@ status = '%s : %s' % ( str( repository.name ), message ) results[ 'repository_status' ].append( status ) return results - try: - repository_id = payload.get( 'repository_id', None ) - if repository_id is not None: - repository = suc.get_repository_in_tool_shed( trans, repository_id ) - start_time = strftime( "%Y-%m-%d %H:%M:%S" ) - log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) ) - results = handle_repository( trans, start_time, repository ) - stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) - results[ 'stop_time' ] = stop_time - return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) - except Exception, e: - message = "Error in the Tool Shed repositories API in reset_metadata_on_repositories: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + + repository_id = payload.get( 'repository_id', None ) + if repository_id is not None: + repository = suc.get_repository_in_tool_shed( trans, repository_id ) + start_time = strftime( "%Y-%m-%d %H:%M:%S" ) + log.debug( "%s...resetting metadata on repository %s" % ( start_time, str( repository.name ) ) ) + results = handle_repository( trans, start_time, repository ) + stop_time = strftime( "%Y-%m-%d %H:%M:%S" ) + results[ 'stop_time' ] = stop_time + return json.to_json_string( results, sort_keys=True, indent=4 * ' ' ) @web.expose_api_anonymous def show( self, trans, id, **kwd ): @@ -434,27 +432,14 @@ :param id: the encoded id of the Repository object """ - value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135 - try: - repository = suc.get_repository_in_tool_shed( trans, id ) - repository_dict = repository.to_dict( view='element', value_mapper=value_mapper ) - repository_dict[ 'url' ] = web.url_for( controller='repositories', - action='show', - id=trans.security.encode_id( repository.id ) ) - return repository_dict - except Exception, e: - message = "Error in the Tool Shed repositories API in show: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message - - def __get_value_mapper( self, trans, repository_metadata ): - value_mapper = { 'id' : trans.security.encode_id, - 'repository_id' : trans.security.encode_id } - if repository_metadata.time_last_tested is not None: - # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires - # a function rather than a mapped value, so just pass the time_ago function here. - value_mapper[ 'time_last_tested' ] = time_ago - return value_mapper + repository = suc.get_repository_in_tool_shed( trans, id ) + if repository is None: + log.debug( "Unable to locate repository record for id %s." 
% ( str( id ) ) ) + return {} + repository_dict = repository.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) + repository_dict[ 'url' ] = web.url_for( controller='repositories', + action='show', + id=trans.security.encode_id( repository.id ) ) + return repository_dict diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/tool_shed/api/repository_revisions.py --- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py +++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py @@ -1,6 +1,5 @@ import datetime import logging -from galaxy.web.framework.helpers import time_ago from tool_shed.util import metadata_util from galaxy import web from galaxy import util @@ -48,61 +47,29 @@ if not changeset_revision: raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." ) export_repository_dependencies = payload.get( 'export_repository_dependencies', False ) - try: - # We'll currently support only gzip-compressed tar archives. - file_type = 'gz' - export_repository_dependencies = util.string_as_bool( export_repository_dependencies ) - # Get the repository information. - repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) - repository_id = trans.security.encode_id( repository.id ) - response = export_util.export_repository( trans, - tool_shed_url, - repository_id, - str( repository.name ), - changeset_revision, - file_type, - export_repository_dependencies, - api=True ) - return response - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in export: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + # We'll currently support only gzip-compressed tar archives. + file_type = 'gz' + export_repository_dependencies = util.asbool( export_repository_dependencies ) + # Get the repository information. + repository = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository is None: + error_message = 'Cannot locate repository with name %s and owner %s,' % ( str( name ), str( owner ) ) + log.debug( error_message ) + return None, error_message + repository_id = trans.security.encode_id( repository.id ) + return export_util.export_repository( trans, + tool_shed_url, + repository_id, + str( repository.name ), + changeset_revision, + file_type, + export_repository_dependencies, + api=True ) - @web.expose_api_anonymous - def repository_dependencies( self, trans, id, **kwd ): - """ - GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies - Displays information about a repository_metadata record in the Tool Shed. - - :param id: the encoded id of the `RepositoryMetadata` object - """ - # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb125... 
+ def __get_value_mapper( self, trans ): value_mapper = { 'id' : trans.security.encode_id, - 'user_id' : trans.security.encode_id } - repository_dependencies_dicts = [] - try: - repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) - metadata = repository_metadata.metadata - if metadata and 'repository_dependencies' in metadata: - rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] - for rd_tup in rd_tups: - tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ] - repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner ) - repository_dependency_dict = repository_dependency.to_dict( view='element', value_mapper=value_mapper ) - # We have to add the changeset_revision of of the repository dependency. - repository_dependency_dict[ 'changeset_revision' ] = changeset_revision - repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories', - action='show', - id=trans.security.encode_id( repository_dependency.id ) ) - repository_dependencies_dicts.append( repository_dependency_dict ) - return repository_dependencies_dicts - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in repository_dependencies: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + 'repository_id' : trans.security.encode_id } + return value_mapper @web.expose_api_anonymous def index( self, trans, **kwd ): @@ -117,59 +84,94 @@ # Filter by downloadable if received. downloadable = kwd.get( 'downloadable', None ) if downloadable is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.string_as_bool( downloadable ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.downloadable == util.asbool( downloadable ) ) # Filter by malicious if received. malicious = kwd.get( 'malicious', None ) if malicious is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.string_as_bool( malicious ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.malicious == util.asbool( malicious ) ) # Filter by tools_functionally_correct if received. tools_functionally_correct = kwd.get( 'tools_functionally_correct', None ) if tools_functionally_correct is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.string_as_bool( tools_functionally_correct ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.tools_functionally_correct == util.asbool( tools_functionally_correct ) ) # Filter by missing_test_components if received. missing_test_components = kwd.get( 'missing_test_components', None ) if missing_test_components is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.string_as_bool( missing_test_components ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.missing_test_components == util.asbool( missing_test_components ) ) # Filter by do_not_test if received. do_not_test = kwd.get( 'do_not_test', None ) if do_not_test is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.string_as_bool( do_not_test ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.do_not_test == util.asbool( do_not_test ) ) # Filter by includes_tools if received. 
includes_tools = kwd.get( 'includes_tools', None ) if includes_tools is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.string_as_bool( includes_tools ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.includes_tools == util.asbool( includes_tools ) ) # Filter by test_install_error if received. test_install_error = kwd.get( 'test_install_error', None ) if test_install_error is not None: - clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.string_as_bool( test_install_error ) ) + clause_list.append( trans.model.RepositoryMetadata.table.c.test_install_error == util.asbool( test_install_error ) ) # Filter by skip_tool_test if received. skip_tool_test = kwd.get( 'skip_tool_test', None ) if skip_tool_test is not None: - skip_tool_test = util.string_as_bool( skip_tool_test ) + skip_tool_test = util.asbool( skip_tool_test ) skipped_metadata_ids_subquery = select( [ trans.app.model.SkipToolTest.table.c.repository_metadata_id ] ) if skip_tool_test: clause_list.append( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) else: clause_list.append( not_( trans.model.RepositoryMetadata.id.in_( skipped_metadata_ids_subquery ) ) ) - # Generate and execute the query. - try: - query = trans.sa_session.query( trans.app.model.RepositoryMetadata ) \ - .filter( and_( *clause_list ) ) \ - .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ) \ - .all() - for repository_metadata in query: - repository_metadata_dict = repository_metadata.to_dict( view='collection', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) - repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', - action='show', - id=trans.security.encode_id( repository_metadata.id ) ) - repository_metadata_dicts.append( repository_metadata_dict ) - return repository_metadata_dicts - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in index: " + str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + for repository_metadata in trans.sa_session.query( trans.app.model.RepositoryMetadata ) \ + .filter( and_( *clause_list ) ) \ + .order_by( trans.app.model.RepositoryMetadata.table.c.repository_id.desc() ): + repository_metadata_dict = repository_metadata.to_dict( view='collection', + value_mapper=self.__get_value_mapper( trans ) ) + repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', + action='show', + id=trans.security.encode_id( repository_metadata.id ) ) + repository_metadata_dicts.append( repository_metadata_dict ) + return repository_metadata_dicts + + @web.expose_api_anonymous + def repository_dependencies( self, trans, id, **kwd ): + """ + GET /api/repository_revisions/{encoded repository_metadata id}/repository_dependencies + Displays information about a repository_metadata record in the Tool Shed. + + :param id: the encoded id of the `RepositoryMetadata` object + """ + # Example URL: http://localhost:9009/api/repository_revisions/repository_dependencies/bb125... + repository_dependencies_dicts = [] + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) + if repository_metadata is None: + log.debug( 'Invalid repository_metadata id received: %s' % str( id ) ) + return repository_dependencies_dicts + metadata = repository_metadata.metadata + if metadata is None: + log.debug( 'The repository_metadata record with id %s has no metadata.' 
% str ( id ) ) + return repository_dependencies_dicts + if 'repository_dependencies' in metadata: + rd_tups = metadata[ 'repository_dependencies' ][ 'repository_dependencies' ] + for rd_tup in rd_tups: + tool_shed, name, owner, changeset_revision = rd_tup[ 0:4 ] + repository_dependency = suc.get_repository_by_name_and_owner( trans.app, name, owner ) + if repository_dependency is None: + log.debug( 'Cannot locate repository dependency %s owned by %s.' % ( name, owner ) ) + continue + repository_dependency_id = trans.security.encode_id( repository_dependency.id ) + repository_dependency_repository_metadata = \ + suc.get_repository_metadata_by_changeset_revision( trans, repository_dependency_id, changeset_revision ) + if repository_dependency_repository_metadata is None: + log.debug( 'Cannot locate repository_metadata with id %s for repository dependency %s owned by %s.' % \ + ( str( repository_dependency_id ), str( name ), str( owner ) ) ) + continue + repository_dependency_repository_metadata_id = trans.security.encode_id( repository_dependency_repository_metadata.id ) + repository_dependency_dict = repository_dependency.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) + # We have to add the changeset_revision of the repository dependency. + repository_dependency_dict[ 'changeset_revision' ] = changeset_revision + repository_dependency_dict[ 'url' ] = web.url_for( controller='repositories', + action='show', + id=repository_dependency_repository_metadata_id ) + repository_dependencies_dicts.append( repository_dependency_dict ) + return repository_dependencies_dicts @web.expose_api_anonymous def show( self, trans, id, **kwd ): @@ -180,19 +182,16 @@ :param id: the encoded id of the `RepositoryMetadata` object """ # Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620 - try: - repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) - repository_metadata_dict = repository_metadata.to_dict( view='element', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) - repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', - action='show', - id=trans.security.encode_id( repository_metadata.id ) ) - return repository_metadata_dict - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id ) + if repository_metadata is None: + log.debug( 'Cannot locate repository_metadata with id %s' % str( id ) ) + return {} + repository_metadata_dict = repository_metadata.to_dict( view='element', + value_mapper=self.__get_value_mapper( trans ) ) + repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', + action='show', + id=trans.security.encode_id( repository_metadata.id ) ) + return repository_metadata_dict @web.expose_api def update( self, trans, payload, **kwd ): @@ -201,41 +200,32 @@ Updates the value of specified columns of the repository_metadata table based on the key / value pairs in payload.
""" repository_metadata_id = kwd.get( 'id', None ) - try: - repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) - flush_needed = False - for key, new_value in payload.items(): - if key == 'time_last_tested': - repository_metadata.time_last_tested = datetime.datetime.utcnow() - flush_needed = True - elif hasattr( repository_metadata, key ): - # log information when setting attributes associated with the Tool Shed's install and test framework. - if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error', - 'tools_functionally_correct' ]: - log.debug( 'Setting repository_metadata table column %s to value %s for changeset_revision %s via the Tool Shed API.' % \ - ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) ) - setattr( repository_metadata, key, new_value ) - flush_needed = True - if flush_needed: - trans.sa_session.add( repository_metadata ) - trans.sa_session.flush() - except Exception, e: - message = "Error in the Tool Shed repository_revisions API in update: %s" % str( e ) - log.error( message, exc_info=True ) - trans.response.status = 500 - return message + if repository_metadata_id is None: + raise HTTPBadRequest( detail="Missing required parameter 'id'." ) + repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id ) + if repository_metadata is None: + log.debug( 'Cannot locate repository_metadata with id %s' % str( repository_metadata_id ) ) + return {} + flush_needed = False + for key, new_value in payload.items(): + if key == 'time_last_tested': + repository_metadata.time_last_tested = datetime.datetime.utcnow() + flush_needed = True + elif hasattr( repository_metadata, key ): + # log information when setting attributes associated with the Tool Shed's install and test framework. + if key in [ 'do_not_test', 'includes_tools', 'missing_test_components', 'test_install_error', + 'tools_functionally_correct' ]: + log.debug( 'Setting repository_metadata column %s to value %s for changeset_revision %s via the Tool Shed API.' % \ + ( str( key ), str( new_value ), str( repository_metadata.changeset_revision ) ) ) + setattr( repository_metadata, key, new_value ) + flush_needed = True + if flush_needed: + trans.sa_session.add( repository_metadata ) + trans.sa_session.flush() + trans.sa_session.refresh( repository_metadata ) repository_metadata_dict = repository_metadata.to_dict( view='element', - value_mapper=self.__get_value_mapper( trans, repository_metadata ) ) + value_mapper=self.__get_value_mapper( trans ) ) repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions', action='show', id=trans.security.encode_id( repository_metadata.id ) ) return repository_metadata_dict - - def __get_value_mapper( self, trans, repository_metadata ): - value_mapper = { 'id' : trans.security.encode_id, - 'repository_id' : trans.security.encode_id } - if repository_metadata.time_last_tested is not None: - # For some reason the Dictifiable.to_dict() method in ~/galaxy/model/item_attrs.py requires - # a function rather than a mapped value, so just pass the time_ago function here. 
- value_mapper[ 'time_last_tested' ] = time_ago - return value_mapper diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/galaxy/webapps/tool_shed/controllers/hg.py --- a/lib/galaxy/webapps/tool_shed/controllers/hg.py +++ b/lib/galaxy/webapps/tool_shed/controllers/hg.py @@ -2,6 +2,7 @@ from galaxy import web from galaxy.web.base.controller import BaseUIController from tool_shed.util.shed_util_common import get_repository_by_name_and_owner +from tool_shed.util.shed_util_common import update_repository from tool_shed.util.metadata_util import set_repository_metadata from galaxy import eggs @@ -9,6 +10,8 @@ import mercurial.__version__ from mercurial.hgweb.hgwebdir_mod import hgwebdir from mercurial.hgweb.request import wsgiapplication +from mercurial import hg +from mercurial import ui log = logging.getLogger(__name__) @@ -36,6 +39,11 @@ repository = get_repository_by_name_and_owner( trans.app, name, owner ) if repository: if hg_version >= '2.2.3': + # Update the repository on disk to the tip revision, because the web upload form uses the on-disk working + # directory. If the repository is not updated on disk, pushing from the command line and then uploading + # via the web interface will result in a new head being created. + repo = hg.repository( ui.ui(), repository.repo_path( trans.app ) ) + update_repository( repo, ctx_rev=None ) # Set metadata using the repository files on disk. error_message, status = set_repository_metadata( trans, repository ) if status == 'ok' and error_message: diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/tool_shed/galaxy_install/install_manager.py --- a/lib/tool_shed/galaxy_install/install_manager.py +++ b/lib/tool_shed/galaxy_install/install_manager.py @@ -567,14 +567,17 @@ def order_repositories_for_installation( self, tool_shed_repositories, repository_dependencies_dict ): """ - Some repositories may have repository dependencies that are required to be installed before the dependent repository. This method will - inspect the list of repositories about to be installed and make sure to order them appropriately. For each repository about to be installed, - if required repositories are not contained in the list of repositories about to be installed, then they are not considered. Repository - dependency definitions that contain circular dependencies should not result in an infinite loop, but obviously prior installation will not be - handled for one or more of the repositories that require prior installation. This process is similar to the process used when installing tool - shed repositories (i.e., the order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not handle - managing tool panel sections and other components since repository dependency definitions contained in tool shed repositories with migrated - tools must never define a relationship to a repository dependency that contains a tool. + Some repositories may have repository dependencies that are required to be installed before the dependent + repository. This method will inspect the list of repositories about to be installed and make sure to order + them appropriately. For each repository about to be installed, if required repositories are not contained + in the list of repositories about to be installed, then they are not considered. 
Repository dependency + definitions that contain circular dependencies should not result in an infinite loop, but obviously prior + installation will not be handled for one or more of the repositories that require prior installation. This + process is similar to the process used when installing tool shed repositories (i.e., the + order_components_for_installation() method in ~/lib/tool_shed/galaxy_install/repository_util), but does not + handle managing tool panel sections and other components since repository dependency definitions contained + in tool shed repositories with migrated tools must never define a relationship to a repository dependency + that contains a tool. """ ordered_tool_shed_repositories = [] ordered_tsr_ids = [] diff -r 995ca770764013bcf0c61092de8436c9278357f7 -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -8,28 +8,18 @@ new_path.extend( sys.path[ 1: ] ) sys.path = new_path +import tool_shed.util.shed_util_common as suc + from galaxy import eggs import pkg_resources -pkg_resources.require( "pycrypto" ) -from Crypto.Cipher import Blowfish -from Crypto.Util.randpool import RandomPool -from Crypto.Util import number - -def encode_id( config_id_secret, obj_id ): - # Utility method to encode ID's - id_cipher = Blowfish.new( config_id_secret ) - # Convert to string - s = str( obj_id ) - # Pad to a multiple of 8 with leading "!" - s = ( "!" * ( 8 - len(s) % 8 ) ) + s - # Encrypt - return id_cipher.encrypt( s ).encode( 'hex' ) - def delete( api_key, url, data, return_formatted=True ): - # Sends an API DELETE request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. + """ + Sends an API DELETE request and acts as a generic formatter for the JSON response. The + 'data' will become the JSON payload read by the Tool Shed. + """ try: - url = make_url( api_key, url ) + url = make_url( url, api_key=api_key, args=None ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data )) req.get_method = lambda: 'DELETE' r = json.loads( urllib2.urlopen( req ).read() ) @@ -47,12 +37,13 @@ print r def display( url, api_key=None, return_formatted=True ): - # Sends an API GET request and acts as a generic formatter for the JSON response. + """Sends an API GET request and acts as a generic formatter for the JSON response.""" try: r = get( url, api_key=api_key ) except urllib2.HTTPError, e: print e - print e.read( 1024 ) # Only return the first 1K of errors. + # Only return the first 1K of errors. + print e.read( 1024 ) sys.exit( 1 ) if type( r ) == unicode: print 'error: %s' % r @@ -84,16 +75,94 @@ print 'response is unknown type: %s' % type( r ) def get( url, api_key=None ): - # Do the actual GET. 
- url = make_url( url, api_key=api_key ) + """Do the GET.""" + url = make_url( url, api_key=api_key, args=None ) try: return json.loads( urllib2.urlopen( url ).read() ) except ValueError, e: print "URL did not return JSON data" - sys.exit(1) + sys.exit( 1 ) + +def get_api_url( base, parts=[], params=None ): + """Compose and return a URL for the Tool Shed API.""" + if 'api' in parts and parts.index( 'api' ) != 0: + parts.pop( parts.index( 'api' ) ) + parts.insert( 0, 'api' ) + elif 'api' not in parts: + parts.insert( 0, 'api' ) + url = suc.url_join( base, *parts ) + if params is not None: + try: + query_string = urllib.urlencode( params ) + except Exception, e: + # The value of params must be a string. + query_string = params + url += '?%s' % query_string + return url + +def get_latest_downloadable_changeset_revision_via_api( url, name, owner ): + """ + Return the latest downloadable changeset revision for the repository defined by the received + name and owner. + """ + error_message = '' + parts = [ 'api', 'repositories', 'get_ordered_installable_revisions' ] + params = dict( name=name, owner=owner ) + api_url = get_api_url( base=url, parts=parts, params=params ) + changeset_revisions, error_message = json_from_url( api_url ) + if changeset_revisions is None or error_message: + return None, error_message + if len( changeset_revisions ) >= 1: + return changeset_revisions[ -1 ], error_message + return suc.INITIAL_CHANGELOG_HASH, error_message + +def get_repository_dict( url, repository_dict ): + """ + Send a request to the Tool Shed to get additional information about the repository defined + by the received repository_dict. Add the information to the repository_dict and return it. + """ + error_message = '' + if not isinstance( repository_dict, dict ): + error_message = 'Invalid repository_dict received: %s' % str( repository_dict ) + return None, error_message + repository_id = repository_dict.get( 'repository_id', None ) + if repository_id is None: + error_message = 'Invalid repository_dict does not contain a repository_id entry: %s' % str( repository_dict ) + return None, error_message + parts = [ 'api', 'repositories', repository_id ] + api_url = get_api_url( base=url, parts=parts ) + extended_dict, error_message = json_from_url( api_url ) + if extended_dict is None or error_message: + return None, error_message + name = extended_dict.get( 'name', None ) + owner = extended_dict.get( 'owner', None ) + if name is not None and owner is not None: + name = str( name ) + owner = str( owner ) + latest_changeset_revision, error_message = get_latest_downloadable_changeset_revision_via_api( url, name, owner ) + if latest_changeset_revision is None or error_message: + return None, error_message + extended_dict[ 'latest_revision' ] = str( latest_changeset_revision ) + return extended_dict, error_message + else: + error_message = 'Invalid extended_dict does not contain name or woner entries: %s' % str( extended_dict ) + return None, error_message + +def json_from_url( url ): + """Send a request to the Tool Shed via the Tool Shed API and handle the response.""" + error_message = '' + url_handle = urllib.urlopen( url ) + url_contents = url_handle.read() + try: + parsed_json = simplejson.loads( url_contents ) + except Exception, e: + error_message = str( url_contents ) + print 'Error parsing JSON data in json_from_url(): ', str( e ) + return None, error_message + return parsed_json, error_message def make_url( url, api_key=None, args=None ): - # Adds the API Key to the URL if it's not already there. 
+ """Adds the API Key to the URL if it's not already there.""" if args is None: args = [] argsep = '&' @@ -105,20 +174,23 @@ return url + argsep + '&'.join( [ '='.join( t ) for t in args ] ) def post( url, data, api_key=None ): - # Do the actual POST. - url = make_url( url, api_key=api_key ) + """Do the POST.""" + url = make_url( url, api_key=api_key, args=None ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data ) ) return json.loads( urllib2.urlopen( req ).read() ) def put( url, data, api_key=None ): - # Do the actual PUT. - url = make_url( url, api_key=api_key ) + """Do the PUT.""" + url = make_url( url, api_key=api_key, args=None ) req = urllib2.Request( url, headers = { 'Content-Type': 'application/json' }, data = json.dumps( data )) req.get_method = lambda: 'PUT' return json.loads( urllib2.urlopen( req ).read() ) def submit( url, data, api_key=None, return_formatted=True ): - # Sends an API POST request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. + """ + Sends an API POST request and acts as a generic formatter for the JSON response. The + 'data' will become the JSON payload read by the Tool Shed. + """ try: r = post( url, data, api_key=api_key ) except urllib2.HTTPError, e: @@ -133,7 +205,8 @@ print 'Response' print '--------' if type( r ) == list: - # Currently the only implemented responses are lists of dicts, because submission creates some number of collection elements. + # Currently the only implemented responses are lists of dicts, because submission creates + # some number of collection elements. for i in r: if type( i ) == dict: if 'url' in i: @@ -150,9 +223,12 @@ print r def update( api_key, url, data, return_formatted=True ): - # Sends an API PUT request and acts as a generic formatter for the JSON response - 'data' will become the JSON payload read by Galaxy. + """ + Sends an API PUT request and acts as a generic formatter for the JSON response. The + 'data' will become the JSON payload read by the Tool Shed. + """ try: - r = put( api_key, url, data ) + r = put( url, data, api_key=api_key ) except urllib2.HTTPError, e: if return_formatted: print e This diff is so big that we needed to truncate the remainder. https://bitbucket.org/galaxy/galaxy-central/commits/3dde0f4765fb/ Changeset: 3dde0f4765fb User: dannon Date: 2014-01-10 13:42:58 Summary: One more simplejson/json fix that slipped through the merge. Affected #: 1 file diff -r 2483cc9e597d37ab75c5bb46d467622f33b48bc7 -r 3dde0f4765fbff33789badeaa17503349559ff72 lib/tool_shed/scripts/api/common.py --- a/lib/tool_shed/scripts/api/common.py +++ b/lib/tool_shed/scripts/api/common.py @@ -154,7 +154,7 @@ url_handle = urllib.urlopen( url ) url_contents = url_handle.read() try: - parsed_json = simplejson.loads( url_contents ) + parsed_json = json.loads( url_contents ) except Exception, e: error_message = str( url_contents ) print 'Error parsing JSON data in json_from_url(): ', str( e ) Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.