8 new commits in galaxy-central: https://bitbucket.org/galaxy/galaxy-central/commits/ce088c6f1fd4/ Changeset: ce088c6f1fd4 User: dannon Date: 2014-09-08 22:41:53 Summary: First pass removing to_json_string/from_json_string. Affected #: 107 files diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py --- a/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py +++ b/lib/galaxy/datatypes/converters/bedgraph_to_array_tree_converter.py @@ -52,4 +52,4 @@ FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ) ) if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/converters/fasta_to_len.py --- a/lib/galaxy/datatypes/converters/fasta_to_len.py +++ b/lib/galaxy/datatypes/converters/fasta_to_len.py @@ -49,4 +49,4 @@ out.close() if __name__ == "__main__" : - compute_fasta_length( sys.argv[1], sys.argv[2], sys.argv[3], True ) \ No newline at end of file + compute_fasta_length( sys.argv[1], sys.argv[2], sys.argv[3], True ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py --- a/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py +++ b/lib/galaxy/datatypes/converters/fasta_to_tabular_converter.py @@ -40,4 +40,4 @@ print >> out, "%s\t%s" %( fasta_title, sequence ) out.close() -if __name__ == "__main__" : __main__() \ No newline at end of file +if __name__ == "__main__" : __main__() diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py --- a/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py +++ b/lib/galaxy/datatypes/converters/fastqsolexa_to_fasta_converter.py @@ -52,4 +52,4 @@ outfile.close() 
-if __name__ == "__main__": __main__() \ No newline at end of file +if __name__ == "__main__": __main__() diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/converters/interval_to_coverage.py --- a/lib/galaxy/datatypes/converters/interval_to_coverage.py +++ b/lib/galaxy/datatypes/converters/interval_to_coverage.py @@ -149,4 +149,4 @@ fix_strand=True ) main( interval, coverage ) temp_file.close() - coverage.close() \ No newline at end of file + coverage.close() diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py --- a/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py +++ b/lib/galaxy/datatypes/converters/wiggle_to_array_tree_converter.py @@ -26,4 +26,4 @@ FileArrayTreeDict.dict_to_file( d, open( out_fname, "w" ) ) if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/tabular.py --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -14,7 +14,7 @@ from galaxy.datatypes.checkers import is_gzip from galaxy.datatypes.metadata import MetadataElement from galaxy.datatypes.sniff import get_headers, get_test_fname -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps import dataproviders log = logging.getLogger(__name__) @@ -272,7 +272,7 @@ while cursor and ck_data[-1] != '\n': ck_data += cursor cursor = f.read(1) - return to_json_string( { 'ck_data': util.unicodify( ck_data ), 'ck_index': ck_index + 1 } ) + return dumps( { 'ck_data': util.unicodify( ck_data ), 'ck_index': ck_index + 1 } ) def display_data(self, trans, dataset, preview=False, filename=None, to_ext=None, chunk=None, **kwd): preview = util.string_as_bool( preview ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r 
ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/datatypes/util/__init__.py --- a/lib/galaxy/datatypes/util/__init__.py +++ b/lib/galaxy/datatypes/util/__init__.py @@ -1,3 +1,3 @@ """ Utilities for Galaxy datatypes. -""" \ No newline at end of file +""" diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/external_services/result_handlers/basic.py --- a/lib/galaxy/external_services/result_handlers/basic.py +++ b/lib/galaxy/external_services/result_handlers/basic.py @@ -1,4 +1,4 @@ -from galaxy.util.json import to_json_string, from_json_string +from galaxy.util.json import dumps, loads from galaxy.util.template import fill_template import logging @@ -44,7 +44,7 @@ type = 'json_display' def handle_result( self, result, param_dict, trans ): - rval = from_json_string( result.content ) + rval = loads( result.content ) return trans.fill_template( '/external_services/generic_json.mako', result = rval, param_dict = param_dict, action=self.parent ) class ExternalServiceActionJQueryGridResultHandler( ExternalServiceActionResultHandler ): @@ -53,7 +53,7 @@ type = 'jquery_grid' def handle_result( self, result, param_dict, trans ): - rval = from_json_string( result.content ) + rval = loads( result.content ) return trans.fill_template( '/external_services/generic_jquery_grid.mako', result = rval, param_dict = param_dict, action=self.parent ) result_type_to_class = {} diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/jobs/__init__.py --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -25,7 +25,7 @@ from galaxy.jobs.runners import BaseJobRunner, JobState from galaxy.util.bunch import Bunch from galaxy.util.expressions import ExpressionContext -from galaxy.util.json import from_json_string +from galaxy.util.json import loads from galaxy.util import unicodify from .output_checker import check_output @@ -723,7 +723,7 @@ self.job_runner_mapper = 
JobRunnerMapper( self, queue.dispatcher.url_to_destination, self.app.job_config ) self.params = None if job.params: - self.params = from_json_string( job.params ) + self.params = loads( job.params ) if use_persisted_destination: self.job_runner_mapper.cached_job_destination = JobDestination( from_job=job ) @@ -1390,7 +1390,7 @@ if os.path.exists( meta_file ): for line in open( meta_file, 'r' ): try: - line = from_json_string( line ) + line = loads( line ) assert 'type' in line except: log.exception( '(%s) Got JSON data from tool, but data is improperly formatted or no "type" key in data' % self.job_id ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/jobs/actions/__init__.py --- a/lib/galaxy/jobs/actions/__init__.py +++ b/lib/galaxy/jobs/actions/__init__.py @@ -1,4 +1,4 @@ """ This package contains job action classes. -""" \ No newline at end of file +""" diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/jobs/actions/post.py --- a/lib/galaxy/jobs/actions/post.py +++ b/lib/galaxy/jobs/actions/post.py @@ -5,7 +5,7 @@ import datetime import logging from galaxy.util import send_mail -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps log = logging.getLogger( __name__ ) @@ -461,7 +461,7 @@ else: # Not pja stuff. 
pass - return to_json_string(npd) + return dumps(npd) @classmethod def get_add_list(cls): diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py --- a/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py +++ b/lib/galaxy/jobs/deferred/pacific_biosciences_smrt_portal.py @@ -83,7 +83,7 @@ return self.job_states.INVALID url = 'http://' + job.params[ 'smrt_host' ] + self.api_path + '/Jobs/' + job.params[ 'smrt_job_id' ] + '/Status' r = urllib2.urlopen( url ) - status = json.from_json_string( r.read() ) + status = json.loads( r.read() ) # TODO: error handling: unexpected json or bad response, bad url, etc. if status[ 'Code' ] == 'Completed': log.debug( "SMRT Portal job '%s' is Completed. Initiating transfer." % job.params[ 'smrt_job_id' ] ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/jobs/transfer_manager.py --- a/lib/galaxy/jobs/transfer_manager.py +++ b/lib/galaxy/jobs/transfer_manager.py @@ -82,7 +82,7 @@ sock = socket.socket( socket.AF_INET, socket.SOCK_STREAM ) sock.settimeout( 5 ) sock.connect( ( 'localhost', tj.socket ) ) - sock.send( json.to_json_string( request ) ) + sock.send( json.dumps( request ) ) response = sock.recv( 8192 ) valid, response = json.validate_jsonrpc_response( response, id=request['id'] ) if not valid: diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/managers/context.py --- a/lib/galaxy/managers/context.py +++ b/lib/galaxy/managers/context.py @@ -4,7 +4,7 @@ import os -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps from galaxy.util import bunch class ProvidesAppContext( object ): @@ -19,7 +19,7 @@ Application-level logging of user actions. 
""" if self.app.config.log_actions: - action = self.app.model.UserAction(action=action, context=context, params=unicode( to_json_string( params ) ) ) + action = self.app.model.UserAction(action=action, context=context, params=unicode( dumps( params ) ) ) try: if user: action.user = user diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py --- a/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py +++ b/lib/galaxy/model/migrate/versions/0052_sample_dataset_table.py @@ -10,7 +10,7 @@ from sqlalchemy.exc import * from galaxy.model.custom_types import * -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import datetime now = datetime.datetime.utcnow @@ -62,7 +62,7 @@ for r in result: sample_id = r[0] if r[1]: - dataset_files = from_json_string(r[1]) + dataset_files = loads(r[1]) for df in dataset_files: if type(df) == type(dict()): cmd = "INSERT INTO sample_dataset VALUES (%s, %s, %s, %s, '%s', '%s', '%s', '%s', '%s')" diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py --- a/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py +++ b/lib/galaxy/model/migrate/versions/0054_visualization_dbkey.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import * from migrate import * from migrate.changeset import * -from galaxy.util.json import from_json_string +from galaxy.util.json import loads import logging log = logging.getLogger( __name__ ) @@ -41,7 +41,7 @@ viz_id = viz['viz_id'] viz_rev_id = viz['viz_rev_id'] if viz[Visualization_revision_table.c.config]: - dbkey = from_json_string(viz[Visualization_revision_table.c.config]).get('dbkey', "").replace("'", "\\'") + dbkey = loads(viz[Visualization_revision_table.c.config]).get('dbkey', "").replace("'", "\\'") migrate_engine.execute("UPDATE visualization_revision 
SET dbkey='%s' WHERE id=%s" % (dbkey, viz_rev_id)) migrate_engine.execute("UPDATE visualization SET dbkey='%s' WHERE id=%s" % (dbkey, viz_id)) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0057_request_notify.py --- a/lib/galaxy/model/migrate/versions/0057_request_notify.py +++ b/lib/galaxy/model/migrate/versions/0057_request_notify.py @@ -10,7 +10,7 @@ from sqlalchemy.exc import * from galaxy.model.custom_types import * -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import datetime now = datetime.datetime.utcnow @@ -46,13 +46,13 @@ id = int(r[0]) notify_old = r[1] notify_new = dict(email=[], sample_states=[], body='', subject='') - cmd = "update request set notification='%s' where id=%i" % (to_json_string(notify_new), id) + cmd = "update request set notification='%s' where id=%i" % (dumps(notify_new), id) migrate_engine.execute( cmd ) cmd = "SELECT id, notification FROM request" result = migrate_engine.execute( cmd ) for r in result: - rr = from_json_string(str(r[1])) + rr = loads(str(r[1])) # remove the 'notify' column for non-sqlite databases. 
if migrate_engine.name != 'sqlite': diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py --- a/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py +++ b/lib/galaxy/model/migrate/versions/0059_sample_dataset_file_path.py @@ -10,7 +10,7 @@ from sqlalchemy.exc import * from galaxy.model.custom_types import * -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import datetime now = datetime.datetime.utcnow diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py --- a/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py +++ b/lib/galaxy/model/migrate/versions/0065_add_name_to_form_fields_and_values.py @@ -9,7 +9,7 @@ from migrate import * from migrate.changeset import * from sqlalchemy.exc import * -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps from galaxy.model.custom_types import _sniffnfix_pg9_hex import datetime @@ -53,13 +53,13 @@ fields = str( row[1] ) if not fields.strip(): continue - fields_list = from_json_string( _sniffnfix_pg9_hex( fields ) ) + fields_list = loads( _sniffnfix_pg9_hex( fields ) ) if len( fields_list ): for index, field in enumerate( fields_list ): field[ 'name' ] = 'field_%i' % index field[ 'helptext' ] = field[ 'helptext' ].replace("'", "''").replace('"', "") field[ 'label' ] = field[ 'label' ].replace("'", "''") - fields_json = to_json_string( fields_list ) + fields_json = dumps( fields_list ) if migrate_engine.name == 'mysql': cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" %( fields_json, form_definition_id ) else: @@ -76,16 +76,16 @@ if not str( row[1] ).strip(): continue row1 = str(row[1]).replace('\n', '').replace('\r', '') - values_list = 
from_json_string( str( row1 ).strip() ) + values_list = loads( str( row1 ).strip() ) if not str( row[2] ).strip(): continue - fields_list = from_json_string( str( row[2] ).strip() ) + fields_list = loads( str( row[2] ).strip() ) if fields_list and type(values_list) == type(list()): values_dict = {} for field_index, field in enumerate( fields_list ): field_name = field[ 'name' ] values_dict[ field_name ] = get_value(values_list, field_index ) - cmd = "UPDATE form_values SET content='%s' WHERE id=%i" %( to_json_string( values_dict ), form_values_id ) + cmd = "UPDATE form_values SET content='%s' WHERE id=%i" %( dumps( values_dict ), form_values_id ) migrate_engine.execute( cmd ) def downgrade(migrate_engine): @@ -110,17 +110,17 @@ form_values_id = int( row[0] ) if not str( row[1] ).strip(): continue - values_dict = from_json_string( str( row[1] ) ) + values_dict = loads( str( row[1] ) ) if not str( row[2] ).strip(): continue - fields_list = from_json_string( str( row[2] ) ) + fields_list = loads( str( row[2] ) ) if fields_list: values_list = [] for field_index, field in enumerate( fields_list ): field_name = field[ 'name' ] field_value = values_dict[ field_name ] values_list.append( field_value ) - cmd = "UPDATE form_values SET content='%s' WHERE id=%i" %( to_json_string( values_list ), form_values_id ) + cmd = "UPDATE form_values SET content='%s' WHERE id=%i" %( dumps( values_list ), form_values_id ) migrate_engine.execute( cmd ) # remove name attribute from the field column of the form_definition table cmd = "SELECT f.id, f.fields FROM form_definition AS f" @@ -130,13 +130,13 @@ fields = str( row[1] ) if not fields.strip(): continue - fields_list = from_json_string( _sniffnfix_pg9_hex( fields ) ) + fields_list = loads( _sniffnfix_pg9_hex( fields ) ) if len( fields_list ): for index, field in enumerate( fields_list ): if field.has_key( 'name' ): del field[ 'name' ] if migrate_engine.name == 'mysql': - cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE 
f.id=%i" %( to_json_string( fields_list ), form_definition_id ) + cmd = "UPDATE form_definition AS f SET f.fields='%s' WHERE f.id=%i" %( dumps( fields_list ), form_definition_id ) else: - cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" %( to_json_string( fields_list ), form_definition_id ) + cmd = "UPDATE form_definition SET fields='%s' WHERE id=%i" %( dumps( fields_list ), form_definition_id ) migrate_engine.execute( cmd ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py --- a/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py +++ b/lib/galaxy/model/migrate/versions/0067_populate_sequencer_table.py @@ -13,7 +13,7 @@ from galaxy.model.custom_types import * -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import datetime now = datetime.datetime.utcnow @@ -104,7 +104,7 @@ 'layout': 'none', 'default': '' } ) form_definition_type = 'Sequencer Information Form' - form_definition_layout = to_json_string('[]') + form_definition_layout = dumps('[]') cmd = "INSERT INTO form_definition VALUES ( %s, %s, %s, '%s', '%s', %s, '%s', '%s', '%s' )" cmd = cmd % ( nextval( 'form_definition' ), localtimestamp(), @@ -112,7 +112,7 @@ form_definition_name, form_definition_desc, form_definition_current_id, - to_json_string( form_definition_fields ), + dumps( form_definition_fields ), form_definition_type, form_definition_layout ) migrate_engine.execute( cmd ) @@ -134,7 +134,7 @@ values = str( row[1] ) if not values.strip(): continue - values = from_json_string( values ) + values = loads( values ) # proceed only if sequencer_info is a valid list if values and type( values ) == type( dict() ): if sequencer_info.get( 'host', '' ) == values.get( 'field_0', '' ) \ @@ -148,7 +148,7 @@ def add_sequencer( sequencer_index, sequencer_form_definition_id, sequencer_info ): '''Adds a new sequencer to the 
sequencer table along with its form values.''' # Create a new form values record with the supplied sequencer information - values = to_json_string( { 'field_0': sequencer_info.get( 'host', '' ), + values = dumps( { 'field_0': sequencer_info.get( 'host', '' ), 'field_1': sequencer_info.get( 'username', '' ), 'field_2': sequencer_info.get( 'password', '' ), 'field_3': sequencer_info.get( 'data_dir', '' ), @@ -233,7 +233,7 @@ # skip if sequencer_info is empty if not sequencer_info.strip() or sequencer_info in ['None', 'null']: continue - sequencer_info = from_json_string( sequencer_info.strip() ) + sequencer_info = loads( sequencer_info.strip() ) # proceed only if sequencer_info is a valid dict if sequencer_info and type( sequencer_info ) == type( dict() ): # check if this sequencer has already been added to the sequencer table @@ -277,9 +277,9 @@ result = migrate_engine.execute( cmd ) for row in result: request_type_id = row[0] - seq_values = from_json_string( str( row[1] ) ) + seq_values = loads( str( row[1] ) ) # create the datatx_info json dict - datatx_info = to_json_string( dict( host = seq_values.get( 'field_0', '' ), + datatx_info = dumps( dict( host = seq_values.get( 'field_0', '' ), username = seq_values.get( 'field_1', '' ), password = seq_values.get( 'field_2', '' ), data_dir = seq_values.get( 'field_3', '' ), diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py --- a/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py +++ b/lib/galaxy/model/migrate/versions/0068_rename_sequencer_to_external_services.py @@ -15,7 +15,7 @@ from galaxy.model.custom_types import * -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import datetime now = datetime.datetime.utcnow diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 
lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py --- a/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py +++ b/lib/galaxy/model/migrate/versions/0076_fix_form_values_data_corruption.py @@ -9,7 +9,7 @@ from sqlalchemy.exc import * import binascii -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import logging log = logging.getLogger( __name__ ) @@ -42,11 +42,11 @@ # first check if loading the dict from the json succeeds # if that fails, it means that the content field is corrupted. try: - field_values_dict = from_json_string( _sniffnfix_pg9_hex( str( row['field_values'] ) ) ) + field_values_dict = loads( _sniffnfix_pg9_hex( str( row['field_values'] ) ) ) except Exception, e: corrupted_rows = corrupted_rows + 1 # content field is corrupted - fields_list = from_json_string( _sniffnfix_pg9_hex( str( row['fdfields'] ) ) ) + fields_list = loads( _sniffnfix_pg9_hex( str( row['fdfields'] ) ) ) field_values_str = _sniffnfix_pg9_hex( str( row['field_values'] ) ) try: #Encoding errors? Just to be safe. 
@@ -85,7 +85,7 @@ # add to the new values dict field_values_dict[ field['name'] ] = value # update the db - json_values = to_json_string(field_values_dict) + json_values = dumps(field_values_dict) cmd = "UPDATE form_values SET content='%s' WHERE id=%i" %( json_values, int( row['id'] ) ) migrate_engine.execute( cmd ) try: diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py --- a/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py +++ b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py @@ -12,7 +12,7 @@ # Need our custom types, but don't import anything else from model from galaxy.model.custom_types import * from galaxy.model.custom_types import _sniffnfix_pg9_hex -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps import sys, logging log = logging.getLogger( __name__ ) @@ -77,7 +77,7 @@ for row in result: if row[1]: tool_shed_repository_id = row[0] - repository_metadata = from_json_string( _sniffnfix_pg9_hex( str( row[1] ) ) ) + repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) ) # Create a new row in the tool table for each tool included in repository. We will NOT # handle tool_version_associaions because we do not have the information we need to do so. 
tools = repository_metadata.get( 'tools', [] ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py --- a/lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py +++ b/lib/galaxy/model/migrate/versions/0115_longer_user_password_field.py @@ -22,4 +22,4 @@ try: user.c.password.alter(type=String(40)) except: - log.exception( "Altering password column failed" ) \ No newline at end of file + log.exception( "Altering password column failed" ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/migrate/versions/0117_add_user_activation.py --- a/lib/galaxy/model/migrate/versions/0117_add_user_activation.py +++ b/lib/galaxy/model/migrate/versions/0117_add_user_activation.py @@ -54,4 +54,4 @@ user_active.drop() user_activation_token.drop() except Exception, e: - log.debug( "Dropping 'active' and 'activation_token' columns from galaxy_user table failed: %s" % ( str( e ) ) ) \ No newline at end of file + log.debug( "Dropping 'active' and 'activation_token' columns from galaxy_user table failed: %s" % ( str( e ) ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/orm/logging_connection_proxy.py --- a/lib/galaxy/model/orm/logging_connection_proxy.py +++ b/lib/galaxy/model/orm/logging_connection_proxy.py @@ -45,4 +45,4 @@ self.trace_logger.log( "sqlalchemy_query", message="Query executed", statement=statement, parameters=parameters, executemany=executemany, duration=duration ) - return rval \ No newline at end of file + return rval diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/search.py --- a/lib/galaxy/model/search.py +++ b/lib/galaxy/model/search.py @@ -39,7 +39,7 @@ Page, PageRevision) from galaxy.model.tool_shed_install import ToolVersion -from galaxy.util.json import 
to_json_string +from galaxy.util.json import dumps from sqlalchemy import and_ from sqlalchemy.orm import aliased @@ -429,7 +429,7 @@ and_( Job.id == alias.job_id, alias.name == param_name, - alias.value == to_json_string(right) + alias.value == dumps(right) ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0001_add_tool_shed_repository_table.py @@ -1,1 +1,51 @@ -../../../migrate/versions/0082_add_tool_shed_repository_table.py \ No newline at end of file +""" +Migration script to add the tool_shed_repository table. +""" +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +# New table to store information about cloned tool shed repositories. 
+ToolShedRepository_table = Table( "tool_shed_repository", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed", TrimmedString( 255 ), index=True ), + Column( "name", TrimmedString( 255 ), index=True ), + Column( "description" , TEXT ), + Column( "owner", TrimmedString( 255 ), index=True ), + Column( "changeset_revision", TrimmedString( 255 ), index=True ), + Column( "deleted", Boolean, index=True, default=False ) ) + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + try: + ToolShedRepository_table.create() + except Exception, e: + log.debug( "Creating tool_shed_repository table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + try: + ToolShedRepository_table.drop() + except Exception, e: + log.debug( "Dropping tool_shed_repository table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0002_add_tool_shed_repository_table_columns.py @@ -1,1 +1,79 @@ -../../../migrate/versions/0086_add_tool_shed_repository_table_columns.py \ No newline at end of file +""" +Migration script to add the metadata, update_available and includes_datatypes columns to the tool_shed_repository table. 
+""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def get_default_false(migrate_engine): + if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite': + return "0" + elif migrate_engine.name in ['postgresql', 'postgres']: + return "false" + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + c = Column( "metadata", JSONType(), nullable=True ) + try: + c.create( ToolShedRepository_table ) + assert c is ToolShedRepository_table.c.metadata + except Exception, e: + print "Adding metadata column to the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Adding metadata column to the tool_shed_repository table failed: %s" % str( e ) ) + c = Column( "includes_datatypes", Boolean, index=True, default=False ) + try: + c.create( ToolShedRepository_table, index_name="ix_tool_shed_repository_includes_datatypes") + assert c is ToolShedRepository_table.c.includes_datatypes + migrate_engine.execute( "UPDATE tool_shed_repository SET includes_datatypes=%s" % get_default_false(migrate_engine)) + except Exception, e: + print "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Adding includes_datatypes column to the tool_shed_repository table failed: %s" % str( e ) ) + c = Column( "update_available", Boolean, default=False ) + try: + 
c.create( ToolShedRepository_table ) + assert c is ToolShedRepository_table.c.update_available + migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % get_default_false(migrate_engine)) + except Exception, e: + print "Adding update_available column to the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Adding update_available column to the tool_shed_repository table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + try: + ToolShedRepository_table.c.metadata.drop() + except Exception, e: + print "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Dropping column metadata from the tool_shed_repository table failed: %s" % str( e ) ) + try: + ToolShedRepository_table.c.includes_datatypes.drop() + except Exception, e: + print "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Dropping column includes_datatypes from the tool_shed_repository table failed: %s" % str( e ) ) + try: + ToolShedRepository_table.c.update_available.drop() + except Exception, e: + print "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0003_tool_id_guid_map_table.py @@ -1,1 +1,52 @@ -../../../migrate/versions/0087_tool_id_guid_map_table.py \ No newline at end of file +""" +Migration script to create the tool_id_guid_map table. 
+""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_id", String( 255 ) ), + Column( "tool_version", TEXT ), + Column( "tool_shed", TrimmedString( 255 ) ), + Column( "repository_owner", TrimmedString( 255 ) ), + Column( "repository_name", TrimmedString( 255 ) ), + Column( "guid", TEXT, index=True, unique=True ) ) + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + try: + ToolIdGuidMap_table.create() + except Exception, e: + log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + try: + ToolIdGuidMap_table.drop() + except Exception, e: + log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0004_add_installed_changeset_revison_column.py @@ -1,1 +1,64 @@ 
-../../../migrate/versions/0088_add_installed_changeset_revison_column.py \ No newline at end of file +""" +Migration script to add the installed_changeset_revision column to the tool_shed_repository table. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + col = Column( "installed_changeset_revision", TrimmedString( 255 ) ) + try: + col.create( ToolShedRepository_table ) + assert col is ToolShedRepository_table.c.installed_changeset_revision + except Exception, e: + print "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Adding installed_changeset_revision column to the tool_shed_repository table failed: %s" % str( e ) ) + # Update each row by setting the value of installed_changeset_revision to be the value of changeset_revision. + # This will be problematic if the value of changeset_revision was updated to something other than the value + # that it was when the repository was installed (because the install path determined in real time will attempt to + # find the repository using the updated changeset_revision instead of the required installed_changeset_revision), + # but at the time this script was written, this scenario is extremely unlikely. 
+ cmd = "SELECT id AS id, " \ + + "installed_changeset_revision AS installed_changeset_revision, " \ + + "changeset_revision AS changeset_revision " \ + + "FROM tool_shed_repository;" + tool_shed_repositories = migrate_engine.execute( cmd ).fetchall() + update_count = 0 + for row in tool_shed_repositories: + cmd = "UPDATE tool_shed_repository " \ + + "SET installed_changeset_revision = '%s' " % row.changeset_revision \ + + "WHERE changeset_revision = '%s';" % row.changeset_revision + migrate_engine.execute( cmd ) + update_count += 1 + print "Updated the installed_changeset_revision column for ", update_count, " rows in the tool_shed_repository table. " +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + try: + ToolShedRepository_table.c.installed_changeset_revision.drop() + except Exception, e: + print "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) + log.debug( "Dropping column installed_changeset_revision from the tool_shed_repository table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0005_add_tool_shed_repository_table_columns.py @@ -1,1 +1,63 @@ -../../../migrate/versions/0090_add_tool_shed_repository_table_columns.py \ No newline at end of file +""" +Migration script to add the uninstalled and dist_to_shed columns to the tool_shed_repository table. 
+""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def default_false(migrate_engine): + if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite': + return "0" + elif migrate_engine.name in ['postgresql', 'postgres']: + return "false" + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + c = Column( "uninstalled", Boolean, default=False ) + try: + c.create( ToolShedRepository_table ) + assert c is ToolShedRepository_table.c.uninstalled + migrate_engine.execute( "UPDATE tool_shed_repository SET uninstalled=%s" % default_false(migrate_engine) ) + except Exception, e: + print "Adding uninstalled column to the tool_shed_repository table failed: %s" % str( e ) + c = Column( "dist_to_shed", Boolean, default=False ) + try: + c.create( ToolShedRepository_table ) + assert c is ToolShedRepository_table.c.dist_to_shed + migrate_engine.execute( "UPDATE tool_shed_repository SET dist_to_shed=%s" % default_false(migrate_engine) ) + except Exception, e: + print "Adding dist_to_shed column to the tool_shed_repository table failed: %s" % str( e ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + try: + ToolShedRepository_table.c.uninstalled.drop() + except Exception, e: + 
print "Dropping column uninstalled from the tool_shed_repository table failed: %s" % str( e ) + try: + ToolShedRepository_table.c.dist_to_shed.drop() + except Exception, e: + print "Dropping column dist_to_shed from the tool_shed_repository table failed: %s" % str( e ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0006_add_tool_version_tables.py @@ -1,1 +1,122 @@ -../../../migrate/versions/0091_add_tool_version_tables.py \ No newline at end of file +""" +Migration script to create the tool_version and tool_version_association tables and drop the tool_id_guid_map table. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * +from galaxy.model.custom_types import _sniffnfix_pg9_hex +from galaxy.util.json import loads, dumps + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() +#migrate_engine = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) ) + +def nextval( table, col='id' ): + if migrate_engine.name == 'postgres': + return "nextval('%s_%s_seq')" % ( table, col ) + elif migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite': + return "null" + else: + raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name ) + +def 
localtimestamp(): + if migrate_engine.name == 'postgres' or migrate_engine.name == 'mysql': + return "LOCALTIMESTAMP" + elif migrate_engine.name == 'sqlite': + return "current_date || ' ' || current_time" + else: + raise Exception( 'Unable to convert data for unknown database type: %s' % db ) + +ToolVersion_table = Table( "tool_version", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_id", String( 255 ) ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) ) + +ToolVersionAssociation_table = Table( "tool_version_association", metadata, + Column( "id", Integer, primary_key=True ), + Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ), + Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) ) + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + + ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True ) + + metadata.reflect() + # Create the tables. + try: + ToolVersion_table.create() + except Exception, e: + log.debug( "Creating tool_version table failed: %s" % str( e ) ) + try: + ToolVersionAssociation_table.create() + except Exception, e: + log.debug( "Creating tool_version_association table failed: %s" % str( e ) ) + # Populate the tool table with tools included in installed tool shed repositories. + cmd = "SELECT id, metadata FROM tool_shed_repository" + result = migrate_engine.execute( cmd ) + count = 0 + for row in result: + if row[1]: + tool_shed_repository_id = row[0] + repository_metadata = loads( _sniffnfix_pg9_hex( str( row[1] ) ) ) + # Create a new row in the tool table for each tool included in repository. We will NOT + # handle tool_version_associations because we do not have the information we need to do so. 
+ tools = repository_metadata.get( 'tools', [] ) + for tool_dict in tools: + cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \ + ( nextval( 'tool_version' ), localtimestamp(), localtimestamp(), tool_dict[ 'guid' ], tool_shed_repository_id ) + migrate_engine.execute( cmd ) + count += 1 + print "Added %d rows to the new tool_version table." % count + # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary. + try: + ToolIdGuidMap_table.drop() + except Exception, e: + log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + + ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_id", String( 255 ) ), + Column( "tool_version", TEXT ), + Column( "tool_shed", TrimmedString( 255 ) ), + Column( "repository_owner", TrimmedString( 255 ) ), + Column( "repository_name", TrimmedString( 255 ) ), + Column( "guid", TEXT, index=True, unique=True ) ) + + metadata.reflect() + try: + ToolVersionAssociation_table.drop() + except Exception, e: + log.debug( "Dropping tool_version_association table failed: %s" % str( e ) ) + try: + ToolVersion_table.drop() + except Exception, e: + log.debug( "Dropping tool_version table failed: %s" % str( e ) ) + try: + ToolIdGuidMap_table.create() + except Exception, e: + log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0007_add_migrate_tools_table.py @@ -1,1 +1,50 @@ 
-../../../migrate/versions/0092_add_migrate_tools_table.py \ No newline at end of file +""" +Migration script to create the migrate_tools table. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +MigrateTools_table = Table( "migrate_tools", metadata, + Column( "repository_id", TrimmedString( 255 ) ), + Column( "repository_path", TEXT ), + Column( "version", Integer ) ) + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + + metadata.reflect() + # Create the table. 
+ try: + MigrateTools_table.create() + cmd = "INSERT INTO migrate_tools VALUES ('GalaxyTools', 'lib/galaxy/tool_shed/migrate', %d)" % 1 + migrate_engine.execute( cmd ) + except Exception, e: + log.debug( "Creating migrate_tools table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + try: + MigrateTools_table.drop() + except Exception, e: + log.debug( "Dropping migrate_tools table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0008_add_ctx_rev_column.py @@ -1,1 +1,44 @@ -../../../migrate/versions/0097_add_ctx_rev_column.py \ No newline at end of file +""" +Migration script to add the ctx_rev column to the tool_shed_repository table. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + col = Column( "ctx_rev", TrimmedString( 10 ) ) + try: + col.create( ToolShedRepository_table ) + assert col is ToolShedRepository_table.c.ctx_rev + except Exception, e: + print "Adding ctx_rev column to the 
tool_shed_repository table failed: %s" % str( e ) +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + try: + ToolShedRepository_table.c.ctx_rev.drop() + except Exception, e: + print "Dropping column ctx_rev from the tool_shed_repository table failed: %s" % str( e ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0009_add_tool_dependency_table.py @@ -1,1 +1,51 @@ -../../../migrate/versions/0099_add_tool_dependency_table.py \ No newline at end of file +""" +Migration script to add the tool_dependency table. +""" +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel( logging.DEBUG ) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +# New table to store information about cloned tool shed repositories. 
+ToolDependency_table = Table( "tool_dependency", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ), + Column( "installed_changeset_revision", TrimmedString( 255 ) ), + Column( "name", TrimmedString( 255 ) ), + Column( "version", TrimmedString( 40 ) ), + Column( "type", TrimmedString( 40 ) ), + Column( "uninstalled", Boolean, default=False ) ) + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + try: + ToolDependency_table.create() + except Exception, e: + log.debug( "Creating tool_dependency table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + try: + ToolDependency_table.drop() + except Exception, e: + log.debug( "Dropping tool_dependency table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0010_alter_tool_dependency_table_version_column.py @@ -1,1 +1,54 @@ -../../../migrate/versions/0100_alter_tool_dependency_table_version_column.py \ No newline at end of file +""" +Migration script to alter the type of the tool_dependency.version column from TrimmedString(40) to Text. 
+""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolDependency_table = Table( "tool_dependency", metadata, autoload=True ) + # Change the tool_dependency table's version column from TrimmedString to Text. + if migrate_engine.name in ['postgresql', 'postgres']: + cmd = "ALTER TABLE tool_dependency ALTER COLUMN version TYPE Text;" + elif migrate_engine.name == 'mysql': + cmd = "ALTER TABLE tool_dependency MODIFY COLUMN version Text;" + else: + # We don't have to do anything for sqlite tables. From the sqlite documentation at http://sqlite.org/datatype3.html: + # 1.0 Storage Classes and Datatypes + # Each value stored in an SQLite database (or manipulated by the database engine) has one of the following storage classes: + # NULL. The value is a NULL value. + # INTEGER. The value is a signed integer, stored in 1, 2, 3, 4, 6, or 8 bytes depending on the magnitude of the value. + # REAL. The value is a floating point value, stored as an 8-byte IEEE floating point number. + # TEXT. The value is a text string, stored using the database encoding (UTF-8, UTF-16BE or UTF-16LE). + # BLOB. The value is a blob of data, stored exactly as it was input. 
+ cmd = None + if cmd: + try: + migrate_engine.execute( cmd ) + except Exception, e: + log.debug( "Altering tool_dependency.version column from TrimmedString(40) to Text failed: %s" % str( e ) ) +def downgrade(migrate_engine): + metadata.bind = migrate_engine + # Not necessary to change column type Text to TrimmedString(40). + pass diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0011_drop_installed_changeset_revision_column.py @@ -1,1 +1,41 @@ -../../../migrate/versions/0101_drop_installed_changeset_revision_column.py \ No newline at end of file +""" +Migration script to drop the installed_changeset_revision column from the tool_dependency table. +""" +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel( logging.DEBUG ) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + try: + ToolDependency_table = Table( "tool_dependency", metadata, autoload=True ) + except NoSuchTableError: + ToolDependency_table = None + log.debug( "Failed loading table tool_dependency" ) + if ToolDependency_table is not None: + try: + col = ToolDependency_table.c.installed_changeset_revision + col.drop() + except Exception, e: + log.debug( "Dropping column 
'installed_changeset_revision' from tool_dependency table failed: %s" % ( str( e ) ) ) +def downgrade(migrate_engine): + metadata.bind = migrate_engine + pass diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0012_add_tool_dependency_status_columns.py @@ -1,1 +1,71 @@ -../../../migrate/versions/0102_add_tool_dependency_status_columns.py \ No newline at end of file +""" +Migration script to add status and error_message columns to the tool_dependency table and drop the uninstalled column from the tool_dependency table. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolDependency_table = Table( "tool_dependency", metadata, autoload=True ) + if migrate_engine.name == 'sqlite': + col = Column( "status", TrimmedString( 255 )) + else: + col = Column( "status", TrimmedString( 255 ), nullable=False) + try: + col.create( ToolDependency_table ) + assert col is ToolDependency_table.c.status + except Exception, e: + print "Adding status column to the tool_dependency table failed: %s" % str( e ) + col = Column( "error_message", TEXT ) + 
try: + col.create( ToolDependency_table ) + assert col is ToolDependency_table.c.error_message + except Exception, e: + print "Adding error_message column to the tool_dependency table failed: %s" % str( e ) + + if migrate_engine.name != 'sqlite': + #This breaks in sqlite due to failure to drop check constraint. + # TODO move to alembic. + try: + ToolDependency_table.c.uninstalled.drop() + except Exception, e: + print "Dropping uninstalled column from the tool_dependency table failed: %s" % str( e ) +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + ToolDependency_table = Table( "tool_dependency", metadata, autoload=True ) + try: + ToolDependency_table.c.status.drop() + except Exception, e: + print "Dropping column status from the tool_dependency table failed: %s" % str( e ) + try: + ToolDependency_table.c.error_message.drop() + except Exception, e: + print "Dropping column error_message from the tool_dependency table failed: %s" % str( e ) + col = Column( "uninstalled", Boolean, default=False ) + try: + col.create( ToolDependency_table ) + assert col is ToolDependency_table.c.uninstalled + except Exception, e: + print "Adding uninstalled column to the tool_dependency table failed: %s" % str( e ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0013_add_tool_shed_repository_status_columns.py @@ -1,1 +1,69 @@ -../../../migrate/versions/0103_add_tool_shed_repository_status_columns.py \ No newline at end of file +"""Migration script to add status and error_message columns to the tool_shed_repository table.""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import 
datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +metadata = MetaData() + +def upgrade(migrate_engine): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + # Add the status column to the tool_shed_repository table. + col = Column( "status", TrimmedString( 255 ) ) + try: + col.create( ToolShedRepository_table ) + assert col is ToolShedRepository_table.c.status + except Exception, e: + print "Adding status column to the tool_shed_repository table failed: %s" % str( e ) + # Add the error_message column to the tool_shed_repository table. + col = Column( "error_message", TEXT ) + try: + col.create( ToolShedRepository_table ) + assert col is ToolShedRepository_table.c.error_message + except Exception, e: + print "Adding error_message column to the tool_shed_repository table failed: %s" % str( e ) + # Update the status column value for tool_shed_repositories to the default value 'Installed'. + cmd = "UPDATE tool_shed_repository SET status = 'Installed';" + try: + migrate_engine.execute( cmd ) + except Exception, e: + print "Exception executing sql command: " + print cmd + print str( e ) + # Update the status column for tool_shed_repositories that have been uninstalled. + cmd = "UPDATE tool_shed_repository SET status = 'Uninstalled' WHERE uninstalled;" + try: + migrate_engine.execute( cmd ) + except Exception, e: + print "Exception executing sql command: " + print cmd + print str( e ) + # Update the status column for tool_shed_repositories that have been deactivated. 
+ cmd = "UPDATE tool_shed_repository SET status = 'Deactivated' where deleted and not uninstalled;" + try: + migrate_engine.execute( cmd ) + except Exception, e: + print "Exception executing sql command: " + print cmd + print str( e ) +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + try: + ToolShedRepository_table.c.status.drop() + except Exception, e: + print "Dropping column status from the tool_shed_repository table failed: %s" % str( e ) + try: + ToolShedRepository_table.c.error_message.drop() + except Exception, e: + print "Dropping column error_message from the tool_shed_repository table failed: %s" % str( e ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0014_add_repository_dependency_tables.py @@ -1,1 +1,60 @@ -../../../migrate/versions/0109_add_repository_dependency_tables.py \ No newline at end of file +""" +Migration script to add the repository_dependency and repository_repository_dependency_association tables. 
+""" +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel( logging.DEBUG ) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +RepositoryDependency_table = Table( "repository_dependency", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=False ) ) + +RepositoryRepositoryDependencyAssociation_table = Table( "repository_repository_dependency_association", metadata, + Column( "id", Integer, primary_key=True ), + Column( "create_time", DateTime, default=now ), + Column( "update_time", DateTime, default=now, onupdate=now ), + Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True ), + Column( "repository_dependency_id", Integer, ForeignKey( "repository_dependency.id" ), index=True ) ) + +def upgrade(migrate_engine): + print __doc__ + metadata.bind = migrate_engine + metadata.reflect() + try: + RepositoryDependency_table.create() + except Exception, e: + log.debug( "Creating repository_dependency table failed: %s" % str( e ) ) + try: + RepositoryRepositoryDependencyAssociation_table.create() + except Exception, e: + log.debug( "Creating repository_repository_dependency_association table failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + metadata.bind = migrate_engine + metadata.reflect() + try: + RepositoryRepositoryDependencyAssociation_table.drop() + except 
Exception, e: + log.debug( "Dropping repository_repository_dependency_association table failed: %s" % str( e ) ) + try: + RepositoryDependency_table.drop() + except Exception, e: + log.debug( "Dropping repository_dependency table failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0015_update_migrate_tools_table.py @@ -1,1 +1,38 @@ -../../../migrate/versions/0113_update_migrate_tools_table.py \ No newline at end of file +""" +Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +def upgrade(migrate_engine): + print __doc__ + # Create the table. 
+ try: + cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';" + migrate_engine.execute( cmd ) + except Exception, e: + log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + try: + cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';" + migrate_engine.execute( cmd ) + except Exception, e: + log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0016_update_migrate_tools_table_again.py @@ -1,1 +1,40 @@ -../../../migrate/versions/0114_update_migrate_tools_table_again.py \ No newline at end of file +""" +Migration script to update the migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate. +""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * + +import datetime +now = datetime.datetime.utcnow +# Need our custom types, but don't import anything else from model +from galaxy.model.custom_types import * + +import sys, logging +log = logging.getLogger( __name__ ) +log.setLevel(logging.DEBUG) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + + +def upgrade(migrate_engine): + print __doc__ + # Create the table. 
+ try: + cmd = "UPDATE migrate_tools set repository_path='lib/tool_shed/galaxy_install/migrate';" + migrate_engine.execute( cmd ) + except Exception, e: + log.debug( "Updating migrate_tools.repository_path column to point to the new location lib/tool_shed/galaxy_install/migrate failed: %s" % str( e ) ) + +def downgrade(migrate_engine): + try: + cmd = "UPDATE migrate_tools set repository_path='lib/galaxy/tool_shed/migrate';" + migrate_engine.execute( cmd ) + except Exception, e: + log.debug( "Updating migrate_tools.repository_path column to point to the old location lib/galaxy/tool_shed/migrate failed: %s" % str( e ) ) + diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py --- a/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py +++ b/lib/galaxy/model/tool_shed_install/migrate/versions/0017_drop_update_available_col_add_tool_shed_status_col.py @@ -1,1 +1,77 @@ -../../../migrate/versions/0116_drop_update_available_col_add_tool_shed_status_col.py \ No newline at end of file +""" +Migration script to drop the update_available Boolean column and replace it with the tool_shed_status JSONType column in the tool_shed_repository table. 
+""" + +from sqlalchemy import * +from sqlalchemy.orm import * +from migrate import * +from migrate.changeset import * +import sys, logging +from galaxy.model.custom_types import * +from sqlalchemy.exc import * +import datetime +now = datetime.datetime.utcnow + +log = logging.getLogger( __name__ ) +log.setLevel( logging.DEBUG ) +handler = logging.StreamHandler( sys.stdout ) +format = "%(name)s %(levelname)s %(asctime)s %(message)s" +formatter = logging.Formatter( format ) +handler.setFormatter( formatter ) +log.addHandler( handler ) + +metadata = MetaData() + +def default_false( migrate_engine ): + if migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite': + return "0" + elif migrate_engine.name in [ 'postgresql', 'postgres' ]: + return "false" + +def upgrade( migrate_engine ): + metadata.bind = migrate_engine + print __doc__ + metadata.reflect() + try: + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + except NoSuchTableError: + ToolShedRepository_table = None + log.debug( "Failed loading table tool_shed_repository" ) + if ToolShedRepository_table is not None: + # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database. 
+ if migrate_engine.name != 'sqlite': + try: + col = ToolShedRepository_table.c.update_available + col.drop() + except Exception, e: + print "Dropping column update_available from the tool_shed_repository table failed: %s" % str( e ) + c = Column( "tool_shed_status", JSONType, nullable=True ) + try: + c.create( ToolShedRepository_table ) + assert c is ToolShedRepository_table.c.tool_shed_status + except Exception, e: + print "Adding tool_shed_status column to the tool_shed_repository table failed: %s" % str( e ) + +def downgrade( migrate_engine ): + metadata.bind = migrate_engine + metadata.reflect() + try: + ToolShedRepository_table = Table( "tool_shed_repository", metadata, autoload=True ) + except NoSuchTableError: + ToolShedRepository_table = None + log.debug( "Failed loading table tool_shed_repository" ) + if ToolShedRepository_table is not None: + # For some unknown reason it is no longer possible to drop a column in a migration script if using the sqlite database. + if migrate_engine.name != 'sqlite': + try: + col = ToolShedRepository_table.c.tool_shed_status + col.drop() + except Exception, e: + print "Dropping column tool_shed_status from the tool_shed_repository table failed: %s" % str( e ) + c = Column( "update_available", Boolean, default=False ) + try: + c.create( ToolShedRepository_table ) + assert c is ToolShedRepository_table.c.update_available + migrate_engine.execute( "UPDATE tool_shed_repository SET update_available=%s" % default_false( migrate_engine ) ) + except Exception, e: + print "Adding column update_available to the tool_shed_repository table failed: %s" % str( e ) diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/openid/__init__.py --- a/lib/galaxy/openid/__init__.py +++ b/lib/galaxy/openid/__init__.py @@ -1,3 +1,3 @@ """ OpenID functionality -""" \ No newline at end of file +""" diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 
lib/galaxy/tools/actions/__init__.py --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -4,7 +4,7 @@ from galaxy.tools.parameters import DataToolParameter from galaxy.tools.parameters import DataCollectionToolParameter from galaxy.tools.parameters.wrapped import WrappedParameters -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps from galaxy.util.none_like import NoneDataset from galaxy.util.odict import odict from galaxy.util.template import fill_template @@ -324,7 +324,7 @@ job.add_output_dataset( name, dataset ) job.object_store_id = object_store_populator.object_store_id if job_params: - job.params = to_json_string( job_params ) + job.params = dumps( job_params ) job.set_handler(tool.get_job_handler(job_params)) trans.sa_session.add( job ) # Now that we have a job id, we can remap any outputs if this is a rerun and the user chose to continue dependent jobs diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/tools/actions/metadata.py --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -1,7 +1,7 @@ from __init__ import ToolAction from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper from galaxy.util.odict import odict -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps import logging log = logging.getLogger( __name__ ) @@ -50,7 +50,7 @@ if user: job.user_id = user.id if job_params: - job.params = to_json_string( job_params ) + job.params = dumps( job_params ) start_job_state = job.state #should be job.states.NEW try: # For backward compatibility, some tools may not have versions yet. 
diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/tools/actions/upload_common.py --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -7,7 +7,7 @@ from galaxy import datatypes, util from galaxy.util.odict import odict from galaxy.datatypes import sniff -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps from galaxy.model.orm import eagerload_all from galaxy.exceptions import ObjectInvalid @@ -342,7 +342,7 @@ # user cannot remove it unless the parent directory is writable. if link_data_only == 'copy_files' and trans.app.config.external_chown_script: _chown( uploaded_dataset.path ) - json_file.write( to_json_string( json ) + '\n' ) + json_file.write( dumps( json ) + '\n' ) json_file.close() if trans.app.config.external_chown_script: _chown( json_file_path ) @@ -375,7 +375,7 @@ for name, value in tool.params_to_strings( params, trans.app ).iteritems(): job.add_parameter( name, value ) - job.add_parameter( 'paramfile', to_json_string( json_file_path ) ) + job.add_parameter( 'paramfile', dumps( json_file_path ) ) object_store_id = None for i, dataset in enumerate( data_list ): if folder: diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/tools/imp_exp/__init__.py --- a/lib/galaxy/tools/imp_exp/__init__.py +++ b/lib/galaxy/tools/imp_exp/__init__.py @@ -8,7 +8,7 @@ from galaxy.tools.parameters.basic import UnvalidatedValue from galaxy.web.framework.helpers import to_unicode from galaxy.model.item_attrs import UsesAnnotations -from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps from galaxy.web.base.controller import UsesHistoryMixin log = logging.getLogger(__name__) @@ -108,7 +108,7 @@ # history_attr_file_name = os.path.join( archive_dir, 'history_attrs.txt') history_attr_str = read_file_contents( history_attr_file_name ) - history_attrs = 
from_json_string( history_attr_str ) + history_attrs = loads( history_attr_str ) # Create history. new_history = model.History( name='imported from archive: %s' % history_attrs['name'].encode( 'utf-8' ), \ @@ -134,12 +134,12 @@ # datasets_attrs_file_name = os.path.join( archive_dir, 'datasets_attrs.txt') datasets_attr_str = read_file_contents( datasets_attrs_file_name ) - datasets_attrs = from_json_string( datasets_attr_str ) - + datasets_attrs = loads( datasets_attr_str ) + if os.path.exists( datasets_attrs_file_name + ".provenance" ): provenance_attr_str = read_file_contents( datasets_attrs_file_name + ".provenance" ) - provenance_attrs = from_json_string( provenance_attr_str ) - datasets_attrs += provenance_attrs + provenance_attrs = loads( provenance_attr_str ) + datasets_attrs += provenance_attrs # Get counts of how often each dataset file is used; a file can # be linked to multiple dataset objects (HDAs). @@ -230,7 +230,7 @@ return self.sa_session.query( model.HistoryDatasetAssociation ) \ .filter_by( history=new_history, hid=obj_dct['hid'] ).first() return obj_dct - jobs_attrs = from_json_string( jobs_attr_str, object_hook=as_hda ) + jobs_attrs = loads( jobs_attr_str, object_hook=as_hda ) # Create each job. for job_attrs in jobs_attrs: @@ -279,7 +279,7 @@ .filter_by( history=new_history, hid=value.hid ).first() value = input_hda.id #print "added parameter %s-->%s to job %i" % ( name, value, imported_job.id ) - imported_job.add_parameter( name, to_json_string( value, cls=HistoryDatasetAssociationIDEncoder ) ) + imported_job.add_parameter( name, dumps( value, cls=HistoryDatasetAssociationIDEncoder ) ) # TODO: Connect jobs to input datasets. @@ -292,14 +292,14 @@ imported_job.add_output_dataset( output_hda.name, output_hda ) # Connect jobs to input datasets. 
- if 'input_mapping' in job_attrs: + if 'input_mapping' in job_attrs: for input_name, input_hid in job_attrs[ 'input_mapping' ].items(): #print "%s job has input dataset %i" % (imported_job.id, input_hid) input_hda = self.sa_session.query( model.HistoryDatasetAssociation ) \ .filter_by( history=new_history, hid=input_hid ).first() if input_hda: imported_job.add_input_dataset( input_name, input_hda ) - + self.sa_session.flush() @@ -409,7 +409,7 @@ } history_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name history_attrs_out = open( history_attrs_filename, 'w' ) - history_attrs_out.write( to_json_string( history_attrs ) ) + history_attrs_out.write( dumps( history_attrs ) ) history_attrs_out.close() jeha.history_attrs_filename = history_attrs_filename @@ -427,12 +427,12 @@ included_datasets.append( dataset ) datasets_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name datasets_attrs_out = open( datasets_attrs_filename, 'w' ) - datasets_attrs_out.write( to_json_string( datasets_attrs, cls=HistoryDatasetAssociationEncoder ) ) + datasets_attrs_out.write( dumps( datasets_attrs, cls=HistoryDatasetAssociationEncoder ) ) datasets_attrs_out.close() jeha.datasets_attrs_filename = datasets_attrs_filename - + provenance_attrs_out = open( datasets_attrs_filename + ".provenance", 'w' ) - provenance_attrs_out.write( to_json_string( provenance_attrs, cls=HistoryDatasetAssociationEncoder ) ) + provenance_attrs_out.write( dumps( provenance_attrs, cls=HistoryDatasetAssociationEncoder ) ) provenance_attrs_out.close() # @@ -477,7 +477,7 @@ job_attrs[ 'exit_code' ] = job.exit_code job_attrs[ 'create_time' ] = job.create_time.isoformat() job_attrs[ 'update_time' ] = job.update_time.isoformat() - + # Get the job's parameters try: @@ -509,7 +509,7 @@ jobs_attrs_filename = tempfile.NamedTemporaryFile( dir=temp_output_dir ).name jobs_attrs_out = open( jobs_attrs_filename, 'w' ) - jobs_attrs_out.write( to_json_string( jobs_attrs, 
cls=HistoryDatasetAssociationEncoder ) ) + jobs_attrs_out.write( dumps( jobs_attrs, cls=HistoryDatasetAssociationEncoder ) ) jobs_attrs_out.close() jeha.jobs_attrs_filename = jobs_attrs_filename diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/tools/imp_exp/export_history.py --- a/lib/galaxy/tools/imp_exp/export_history.py +++ b/lib/galaxy/tools/imp_exp/export_history.py @@ -39,7 +39,7 @@ except OverflowError: pass datasets_attr_in.close() - datasets_attrs = from_json_string( datasets_attr_str ) + datasets_attrs = loads( datasets_attr_str ) # Add datasets to archive and update dataset attributes. # TODO: security check to ensure that files added are in Galaxy dataset directory? @@ -54,7 +54,7 @@ # Rewrite dataset attributes file. datasets_attrs_out = open( datasets_attrs_file, 'w' ) - datasets_attrs_out.write( to_json_string( datasets_attrs ) ) + datasets_attrs_out.write( dumps( datasets_attrs ) ) datasets_attrs_out.close() # Finish archive. @@ -80,4 +80,4 @@ # Create archive. 
status = create_archive( history_attrs, dataset_attrs, job_attrs, out_file, gzip ) - print status \ No newline at end of file + print status diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/tools/parameters/__init__.py --- a/lib/galaxy/tools/parameters/__init__.py +++ b/lib/galaxy/tools/parameters/__init__.py @@ -85,7 +85,7 @@ for key, value in param_values.iteritems(): if key in params: value = params[ key ].value_to_basic( value, app ) - rval[ key ] = str( to_json_string( value ) ) + rval[ key ] = str( dumps( value ) ) return rval @@ -98,7 +98,7 @@ """ rval = dict() for key, value in param_values.iteritems(): - value = json_fix( from_json_string( value ) ) + value = json_fix( loads( value ) ) if key in params: value = params[key].value_from_basic( value, app, ignore_errors ) rval[ key ] = value diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/tools/util/__init__.py --- a/lib/galaxy/tools/util/__init__.py +++ b/lib/galaxy/tools/util/__init__.py @@ -3,4 +3,4 @@ FIXME: These are used by tool scripts, not the framework, and should not live in this package. 
-""" \ No newline at end of file +""" diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/util/__init__.py --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -338,8 +338,8 @@ def pretty_print_json(json_data, is_json_string=False): if is_json_string: - json_data = json.from_json_string(json_data) - return json.to_json_string(json_data, sort_keys=True, indent=4) + json_data = json.loads(json_data) + return json.dumps(json_data, sort_keys=True, indent=4) # characters that are valid valid_chars = set(string.letters + string.digits + " -=_.()/+*^,:?!") diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/util/backports/__init__.py --- a/lib/galaxy/util/backports/__init__.py +++ b/lib/galaxy/util/backports/__init__.py @@ -1,3 +1,3 @@ """ Modules for providing backward compatibility with future versions of Python -""" \ No newline at end of file +""" diff -r 9d15e899516411c9b491f66f001a55136b1173b9 -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 lib/galaxy/util/backports/importlib/__init__.py --- a/lib/galaxy/util/backports/importlib/__init__.py +++ b/lib/galaxy/util/backports/importlib/__init__.py @@ -39,4 +39,4 @@ ## Note: this was copied from ## http://svn.python.org/projects/python/trunk/Lib/importlib/__init__.py - ## on 24 September 2012 \ No newline at end of file + ## on 24 September 2012 This diff is so big that we needed to truncate the remainder. https://bitbucket.org/galaxy/galaxy-central/commits/983a1cc80be6/ Changeset: 983a1cc80be6 User: dannon Date: 2014-09-08 23:03:22 Summary: Web helpers (${h.*}) in mako used to_json_string, now dumps. 
Affected #: 45 files diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/admin/external_service/reload_external_service_types.mako --- a/templates/admin/external_service/reload_external_service_types.mako +++ b/templates/admin/external_service/reload_external_service_types.mako @@ -19,4 +19,4 @@ <input type="submit" name="reload_external_service_type_button" value="Reload"/></div></div> -</form> \ No newline at end of file +</form> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/admin/requests/grid.mako --- a/templates/admin/requests/grid.mako +++ b/templates/admin/requests/grid.mako @@ -1,1 +1,1 @@ -<%inherit file="/grid_base.mako"/> \ No newline at end of file +<%inherit file="/grid_base.mako"/> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/admin/requests/view_sample_dataset.mako --- a/templates/admin/requests/view_sample_dataset.mako +++ b/templates/admin/requests/view_sample_dataset.mako @@ -76,4 +76,4 @@ <div style="clear: both"></div> %endif </div> -</div> \ No newline at end of file +</div> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/galaxy_client_app.mako --- a/templates/galaxy_client_app.mako +++ b/templates/galaxy_client_app.mako @@ -15,7 +15,7 @@ //TODO: global... %for key in kwargs: ( window.bootstrapped = window.bootstrapped || {} )[ '${key}' ] = ( - ${ h.to_json_string( kwargs[ key ], indent=( 2 if trans.debug else 0 ) )} ); + ${ h.dumps( kwargs[ key ], indent=( 2 if trans.debug else 0 ) )} ); %endfor define( 'bootstrapped-data', function(){ return window.bootstrapped; @@ -61,7 +61,7 @@ <%def name="get_config_json()"> ## Conv. fn to write as JSON -${ h.to_json_string( get_config_dict() )} +${ h.dumps( get_config_dict() )} </%def> @@ -106,5 +106,5 @@ <%def name="get_user_json()"> ## Conv. 
fn to write as JSON -${ h.to_json_string( get_user_dict() )} +${ h.dumps( get_user_dict() )} </%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/grid_base.mako --- a/templates/grid_base.mako +++ b/templates/grid_base.mako @@ -73,7 +73,7 @@ // load grid viewer require(['mvc/grid/grid-view'], function(GridView) { $(function() { - gridView = new GridView( ${ h.to_json_string( self.get_grid_config( embedded=embedded, insert=insert ) ) } ); + gridView = new GridView( ${ h.dumps( self.get_grid_config( embedded=embedded, insert=insert ) ) } ); }); }); </script> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/grid_base_async.mako --- a/templates/grid_base_async.mako +++ b/templates/grid_base_async.mako @@ -1,4 +1,4 @@ <%namespace name="grid_base" file="./grid_base.mako" import="*" /> ${init()} -${h.to_json_string( grid_base.get_grid_config() )} +${h.dumps( grid_base.get_grid_config() )} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/rss.mako --- a/templates/rss.mako +++ b/templates/rss.mako @@ -20,4 +20,4 @@ </item> %endfor </channel> -</rss> \ No newline at end of file +</rss> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/tagging_common.mako --- a/templates/tagging_common.mako +++ b/templates/tagging_common.mako @@ -198,7 +198,7 @@ %> var options = { - tags : ${h.to_json_string(tag_names_and_values)}, + tags : ${h.dumps(tag_names_and_values)}, editable : ${iff( editable, 'true', 'false' )}, get_toggle_link_text_fn: ${get_toggle_link_text_fn}, tag_click_fn: ${tag_click_fn}, diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/user/dbkeys.mako --- a/templates/user/dbkeys.mako +++ b/templates/user/dbkeys.mako @@ -246,4 +246,4 @@ (Len Entry option).</p></div></div> -</%def> \ No newline at end of file 
+</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/user/new_address.mako --- a/templates/user/new_address.mako +++ b/templates/user/new_address.mako @@ -94,4 +94,4 @@ </div></form></div> -</div> \ No newline at end of file +</div> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/demo_sequencer/empty.mako --- a/templates/webapps/demo_sequencer/empty.mako +++ b/templates/webapps/demo_sequencer/empty.mako @@ -1,4 +1,4 @@ <html><body></body> -</html> \ No newline at end of file +</html> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/dataset/display.mako --- a/templates/webapps/galaxy/dataset/display.mako +++ b/templates/webapps/galaxy/dataset/display.mako @@ -26,7 +26,7 @@ data.createTabularDatasetChunkedView({ // TODO: encode id. dataset_config: - _.extend( ${h.to_json_string( item.to_dict() )}, + _.extend( ${h.dumps( item.to_dict() )}, { chunk_url: "${h.url_for( controller='/dataset', action='display', dataset_id=trans.security.encode_id( item.id ))}", diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/dataset/grid.mako --- a/templates/webapps/galaxy/dataset/grid.mako +++ b/templates/webapps/galaxy/dataset/grid.mako @@ -1,1 +1,1 @@ -<%inherit file="/grid_base.mako"/> \ No newline at end of file +<%inherit file="/grid_base.mako"/> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/dataset/item_content.mako --- a/templates/webapps/galaxy/dataset/item_content.mako +++ b/templates/webapps/galaxy/dataset/item_content.mako @@ -1,3 +1,3 @@ <%namespace file="/dataset/display.mako" import="*" /> -${render_item( item, item_data )} \ No newline at end of file +${render_item( item, item_data )} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 
983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/dataset/tabular_chunked.mako --- a/templates/webapps/galaxy/dataset/tabular_chunked.mako +++ b/templates/webapps/galaxy/dataset/tabular_chunked.mako @@ -17,7 +17,7 @@ require(['mvc/data'], function(data) { data.createTabularDatasetChunkedView({ - dataset_config: _.extend( ${h.to_json_string( trans.security.encode_dict_ids( dataset.to_dict() ) )}, + dataset_config: _.extend( ${h.dumps( trans.security.encode_dict_ids( dataset.to_dict() ) )}, { first_data_chunk: ${chunk} } diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/galaxy.masthead.mako --- a/templates/webapps/galaxy/galaxy.masthead.mako +++ b/templates/webapps/galaxy/galaxy.masthead.mako @@ -87,7 +87,7 @@ ], function( mod_masthead, mod_menu, mod_modal, mod_frame, GalaxyUpload, user, quotameter ){ if( !Galaxy.currUser ){ // this doesn't need to wait for the page being readied - Galaxy.currUser = new user.User(${ h.to_json_string( get_user_json(), indent=2 ) }); + Galaxy.currUser = new user.User(${ h.dumps( get_user_json(), indent=2 ) }); } $(function() { @@ -97,7 +97,7 @@ } // get configuration - var masthead_config = ${ h.to_json_string( masthead_config ) }; + var masthead_config = ${ h.dumps( masthead_config ) }; // load global galaxy objects Galaxy.masthead = new mod_masthead.GalaxyMasthead(masthead_config); diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/galaxy.panels.mako --- a/templates/webapps/galaxy/galaxy.panels.mako +++ b/templates/webapps/galaxy/galaxy.panels.mako @@ -100,7 +100,7 @@ "libs/backbone/backbone": { exports: "Backbone" }, } }); - var galaxy_config = ${ h.to_json_string( self.galaxy_config ) }; + var galaxy_config = ${ h.dumps( self.galaxy_config ) }; </script></%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 
templates/webapps/galaxy/history/citations.mako --- a/templates/webapps/galaxy/history/citations.mako +++ b/templates/webapps/galaxy/history/citations.mako @@ -32,4 +32,4 @@ </script></%def><div id="citations"> -</div> \ No newline at end of file +</div> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/history/display.mako --- a/templates/webapps/galaxy/history/display.mako +++ b/templates/webapps/galaxy/history/display.mako @@ -46,8 +46,8 @@ </div><script type="text/javascript"> var debugging = JSON.parse( sessionStorage.getItem( 'debugging' ) ) || false, - historyJSON = ${h.to_json_string( history_dict )}, - hdaJSON = ${h.to_json_string( hda_dicts )}; + historyJSON = ${h.dumps( history_dict )}, + hdaJSON = ${h.dumps( hda_dicts )}; //window.historyJSON = historyJSON; //window.hdaJSON = hdaJSON; diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/history/display_structured.mako --- a/templates/webapps/galaxy/history/display_structured.mako +++ b/templates/webapps/galaxy/history/display_structured.mako @@ -144,7 +144,7 @@ <script type="text/javascript"> define( 'display-structured', function(){ require([ 'mvc/history/hda-li-edit', 'mvc/history/hda-model' ], function( hdaEdit, hdaModel ){ - var hdaJSON = ${ h.to_json_string( hda_dicts, indent=( 2 if trans.debug else 0 ) ) }; + var hdaJSON = ${ h.dumps( hda_dicts, indent=( 2 if trans.debug else 0 ) ) }; window.hdas = hdaJSON.map( function( hda ){ return new hdaEdit.HDAListItemEdit({ diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/history/embed.mako --- a/templates/webapps/galaxy/history/embed.mako +++ b/templates/webapps/galaxy/history/embed.mako @@ -67,8 +67,8 @@ panel = new panelMod.AnnotatedHistoryPanel({ el : $embeddedHistory.find( ".history-panel" ), model : new historyModel.History( - ${h.to_json_string( 
history_dict )}, - ${h.to_json_string( hda_dicts )}, + ${h.dumps( history_dict )}, + ${h.dumps( hda_dicts )}, { logger: ( debugging )?( console ):( null ) } ) }).render(); diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/history/grid.mako --- a/templates/webapps/galaxy/history/grid.mako +++ b/templates/webapps/galaxy/history/grid.mako @@ -1,1 +1,1 @@ -<%inherit file="../grid_base.mako"/> \ No newline at end of file +<%inherit file="../grid_base.mako"/> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/history/item_content.mako --- a/templates/webapps/galaxy/history/item_content.mako +++ b/templates/webapps/galaxy/history/item_content.mako @@ -1,3 +1,3 @@ <%namespace file="/history/display.mako" import="*" /> -${render_item( item, item_data )} \ No newline at end of file +${render_item( item, item_data )} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/history/view.mako --- a/templates/webapps/galaxy/history/view.mako +++ b/templates/webapps/galaxy/history/view.mako @@ -83,8 +83,8 @@ show_hidden = context.get( 'show_hidden', None ) user_is_owner_json = 'true' if user_is_owner else 'false' - show_deleted_json = h.to_json_string( show_deleted ) - show_hidden_json = h.to_json_string( show_hidden ) + show_deleted_json = h.dumps( show_deleted ) + show_hidden_json = h.dumps( show_hidden ) %><div id="header" class="clear"> @@ -164,8 +164,8 @@ } var userIsOwner = ${'true' if user_is_owner else 'false'}, - historyJSON = ${h.to_json_string( history )}, - hdaJSON = ${h.to_json_string( hdas )}; + historyJSON = ${h.dumps( history )}, + hdaJSON = ${h.dumps( hdas )}; panelToUse = ( userIsOwner )? 
//TODO: change class names ({ location: 'mvc/history/history-panel-edit', className: 'HistoryPanel' }): diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/library/index.mako --- a/templates/webapps/galaxy/library/index.mako +++ b/templates/webapps/galaxy/library/index.mako @@ -12,4 +12,4 @@ <iframe name="galaxy_main" id="galaxy_main" frameborder="0" style="position: absolute; width: 100%; height: 100%;" src="${h.url_for( controller="library", action="browse_libraries", default_action=default_action )}"></iframe> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/page/create.mako --- a/templates/webapps/galaxy/page/create.mako +++ b/templates/webapps/galaxy/page/create.mako @@ -11,4 +11,4 @@ }); }) </script> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/page/select_items_grid.mako --- a/templates/webapps/galaxy/page/select_items_grid.mako +++ b/templates/webapps/galaxy/page/select_items_grid.mako @@ -1,3 +1,3 @@ ## Template generates a grid that enables user to select items. 
<%namespace file="../grid_base.mako" import="*" /> -${load(True)} \ No newline at end of file +${load(True)} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/requests/common/sample_dataset_transfer_status.mako --- a/templates/webapps/galaxy/requests/common/sample_dataset_transfer_status.mako +++ b/templates/webapps/galaxy/requests/common/sample_dataset_transfer_status.mako @@ -2,4 +2,4 @@ ${sample_dataset.status} </%def> -${render_sample_dataset_transfer_status( sample_dataset )} \ No newline at end of file +${render_sample_dataset_transfer_status( sample_dataset )} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/requests/grid.mako --- a/templates/webapps/galaxy/requests/grid.mako +++ b/templates/webapps/galaxy/requests/grid.mako @@ -1,1 +1,1 @@ -<%inherit file="/grid_base.mako"/> \ No newline at end of file +<%inherit file="/grid_base.mako"/> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/root/redirect.mako --- a/templates/webapps/galaxy/root/redirect.mako +++ b/templates/webapps/galaxy/root/redirect.mako @@ -2,4 +2,4 @@ <script type="text/javascript"> top.location.href = '${redirect_url}'; -</script> \ No newline at end of file +</script> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/root/tool_menu.mako --- a/templates/webapps/galaxy/root/tool_menu.mako +++ b/templates/webapps/galaxy/root/tool_menu.mako @@ -25,12 +25,12 @@ hidden: false }), tools = new tools_mod.ToolCollection( - ${ h.to_json_string( trans.app.toolbox.to_dict( trans, in_panel=False ) ) } + ${ h.dumps( trans.app.toolbox.to_dict( trans, in_panel=False ) ) } ), tool_panel = new tools_mod.ToolPanel({ tool_search: tool_search, tools: tools, - layout: ${h.to_json_string( trans.app.toolbox.to_dict( trans ) )} + layout: 
${h.dumps( trans.app.toolbox.to_dict( trans ) )} }), tool_panel_view = new tools_mod.ToolPanelView({ model: tool_panel }); diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/tool_form.api.mako --- a/templates/webapps/galaxy/tool_form.api.mako +++ b/templates/webapps/galaxy/tool_form.api.mako @@ -36,7 +36,7 @@ <script> require(['mvc/tools/tools-form'], function(ToolsForm){ $(function(){ - var form = new ToolsForm.View(${ h.to_json_string(self.form_config) }); + var form = new ToolsForm.View(${ h.dumps(self.form_config) }); }); }); </script> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/tracks/add_tracks.mako --- a/templates/webapps/galaxy/tracks/add_tracks.mako +++ b/templates/webapps/galaxy/tracks/add_tracks.mako @@ -1,3 +1,3 @@ ## Template generates a grid that enables user to add tracks <%namespace file="../grid_base.mako" import="*" /> -${load(True)} \ No newline at end of file +${load(True)} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/tracks/history_datasets_select_grid.mako --- a/templates/webapps/galaxy/tracks/history_datasets_select_grid.mako +++ b/templates/webapps/galaxy/tracks/history_datasets_select_grid.mako @@ -2,4 +2,4 @@ <%def name="title()"><h2>History '${grid.get_current_item( trans, **kwargs ).name}'</h2> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/tracks/library_datasets_select_grid.mako --- a/templates/webapps/galaxy/tracks/library_datasets_select_grid.mako +++ b/templates/webapps/galaxy/tracks/library_datasets_select_grid.mako @@ -10,4 +10,4 @@ ${render_content(simple=True)} <script type="text/javascript"> make_popup_menus(); -</script> \ No newline at end of file +</script> diff -r 
ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/visualization/create.mako --- a/templates/webapps/galaxy/visualization/create.mako +++ b/templates/webapps/galaxy/visualization/create.mako @@ -11,4 +11,4 @@ }); }) </script> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/visualization/display.mako --- a/templates/webapps/galaxy/visualization/display.mako +++ b/templates/webapps/galaxy/visualization/display.mako @@ -89,9 +89,9 @@ vis_id: "${config.get('vis_id')}", dbkey: "${config.get('dbkey')}" }, - ${ h.to_json_string( config.get( 'viewport', dict() ) ) }, - ${ h.to_json_string( config['tracks'] ) }, - ${ h.to_json_string( config.get('bookmarks') ) } + ${ h.dumps( config.get( 'viewport', dict() ) ) }, + ${ h.dumps( config['tracks'] ) }, + ${ h.dumps( config.get('bookmarks') ) } ); // Set up keyboard navigation. @@ -105,4 +105,4 @@ }); </script> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/visualization/item_content.mako --- a/templates/webapps/galaxy/visualization/item_content.mako +++ b/templates/webapps/galaxy/visualization/item_content.mako @@ -1,3 +1,3 @@ <%namespace file="/visualization/display.mako" import="*" /> -${render_item( item, item_data )} \ No newline at end of file +${render_item( item, item_data )} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/visualization/phyloviz.mako --- a/templates/webapps/galaxy/visualization/phyloviz.mako +++ b/templates/webapps/galaxy/visualization/phyloviz.mako @@ -168,8 +168,8 @@ }; $(function firstVizLoad(){ // calls when viz is loaded for the first time - var config = ${ h.to_json_string( config )}; - var data = ${h.to_json_string(data['data'])}; + var 
config = ${ h.dumps( config )}; + var data = ${h.dumps(data['data'])}; initPhyloViz(data, config); }); }); diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/visualization/sweepster.mako --- a/templates/webapps/galaxy/visualization/sweepster.mako +++ b/templates/webapps/galaxy/visualization/sweepster.mako @@ -125,7 +125,7 @@ $(function() { // -- Viz set up. -- var viz = new sweepster.SweepsterVisualization( - ${ h.to_json_string( config )} + ${ h.dumps( config )} ); var viz_view = new sweepster.SweepsterVisualizationView({ model: viz }); viz_view.render(); @@ -145,4 +145,4 @@ <div class="unified-panel-header-inner"></div></div> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/workflow/configure_menu.mako --- a/templates/webapps/galaxy/workflow/configure_menu.mako +++ b/templates/webapps/galaxy/workflow/configure_menu.mako @@ -95,4 +95,4 @@ </form></div></div> -</%def> \ No newline at end of file +</%def> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/workflow/item_content.mako --- a/templates/webapps/galaxy/workflow/item_content.mako +++ b/templates/webapps/galaxy/workflow/item_content.mako @@ -1,3 +1,3 @@ <%namespace file="/workflow/display.mako" import="*" /> -${render_item( item, item_data )} \ No newline at end of file +${render_item( item, item_data )} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/galaxy/workflow/myexp_export_content.mako --- a/templates/webapps/galaxy/workflow/myexp_export_content.mako +++ b/templates/webapps/galaxy/workflow/myexp_export_content.mako @@ -45,4 +45,4 @@ </connection> %endfor %endfor -</connections> \ No newline at end of file +</connections> diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 
983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/tool_shed/category/valid_grid.mako --- a/templates/webapps/tool_shed/category/valid_grid.mako +++ b/templates/webapps/tool_shed/category/valid_grid.mako @@ -10,4 +10,4 @@ %></%def> -${grid_base.load(False, capture(self.insert))} \ No newline at end of file +${grid_base.load(False, capture(self.insert))} diff -r ce088c6f1fd47ba4628cf6cbf0c84dd7ac010086 -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 templates/webapps/tool_shed/common/grid_common.mako --- a/templates/webapps/tool_shed/common/grid_common.mako +++ b/templates/webapps/tool_shed/common/grid_common.mako @@ -24,7 +24,7 @@ %if value != "All": <% if isinstance( temp_column, TextColumn ): - value = h.to_json_string( value ) + value = h.dumps( value ) %><input type="hidden" id="${temp_column.key}" name="f-${temp_column.key}" value='${value}'/> %endif @@ -51,7 +51,7 @@ <% new_filter = list( column_filter ) del new_filter[ i ] - new_column_filter = GridColumnFilter( "", { column.key : h.to_json_string( new_filter ) } ) + new_column_filter = GridColumnFilter( "", { column.key : h.dumps( new_filter ) } ) %><a href="${url(new_column_filter.get_url_args())}"><span class="delete-search-icon" /></a></span> https://bitbucket.org/galaxy/galaxy-central/commits/25396cf39fef/ Changeset: 25396cf39fef User: dannon Date: 2014-09-08 23:11:18 Summary: Charts to_json_string/dumps swap. 
Affected #: 3 files diff -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 config/plugins/visualizations/charts/templates/charts.mako --- a/config/plugins/visualizations/charts/templates/charts.mako +++ b/config/plugins/visualizations/charts/templates/charts.mako @@ -86,8 +86,8 @@ require(['plugin/app'], function(App) { // load options var options = { - id : ${h.to_json_string( visualization_id )} || undefined, - config : ${h.to_json_string( config )} + id : ${h.dumps( visualization_id )} || undefined, + config : ${h.dumps( config )} } // create application diff -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 config/plugins/visualizations/common/templates/config_utils.mako --- a/config/plugins/visualizations/common/templates/config_utils.mako +++ b/config/plugins/visualizations/common/templates/config_utils.mako @@ -36,7 +36,7 @@ 'action' : 'saved', 'type' : visualization_name, 'title' : title, - 'config' : h.to_json_string( config ) + 'config' : h.dumps( config ) } # save to existing visualization if visualization_id: diff -r 983a1cc80be61c3ccdd3c10d06737381aacb9b05 -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 config/plugins/visualizations/scatterplot/templates/scatterplot.mako --- a/config/plugins/visualizations/scatterplot/templates/scatterplot.mako +++ b/config/plugins/visualizations/scatterplot/templates/scatterplot.mako @@ -50,11 +50,11 @@ <script type="text/javascript"> $(function(){ var model = new ScatterplotModel({ - id : ${h.to_json_string( visualization_id )} || undefined, + id : ${h.dumps( visualization_id )} || undefined, title : "${title}", - config : ${h.to_json_string( config, indent=2 )} + config : ${h.dumps( config, indent=2 )} }); - hdaJson = ${h.to_json_string( trans.security.encode_dict_ids( hda.to_dict() ), indent=2 )}, + hdaJson = ${h.dumps( trans.security.encode_dict_ids( hda.to_dict() ), indent=2 )}, display = new ScatterplotDisplay({ el : $( 
'.scatterplot-display' ).attr( 'id', 'scatterplot-display-' + '${visualization_id}' ), model : model, @@ -78,11 +78,11 @@ <script type="text/javascript"> $(function(){ var model = new ScatterplotModel({ - id : ${h.to_json_string( visualization_id )} || undefined, + id : ${h.dumps( visualization_id )} || undefined, title : "${title or default_title}", - config : ${h.to_json_string( config, indent=2 )} + config : ${h.dumps( config, indent=2 )} }), - hdaJson = ${h.to_json_string( trans.security.encode_dict_ids( hda.to_dict() ), indent=2 )}, + hdaJson = ${h.dumps( trans.security.encode_dict_ids( hda.to_dict() ), indent=2 )}, editor = new ScatterplotConfigEditor({ el : $( '.scatterplot-editor' ).attr( 'id', 'scatterplot-editor-hda-' + hdaJson.id ), model : model, https://bitbucket.org/galaxy/galaxy-central/commits/23f0f1e9477f/ Changeset: 23f0f1e9477f User: dannon Date: 2014-09-09 16:04:28 Summary: Switch to_json_string/from_json_string in galaxy/scripts. Affected #: 11 files diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/cleanup_datasets/populate_uuid.py --- a/scripts/cleanup_datasets/populate_uuid.py +++ b/scripts/cleanup_datasets/populate_uuid.py @@ -42,4 +42,4 @@ if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py --- a/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py +++ b/scripts/cleanup_datasets/remove_renamed_datasets_from_disk.py @@ -28,4 +28,4 @@ print >> out, "# Removed " + str( removed_files ) + " files" if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/cleanup_datasets/rename_purged_datasets.py --- a/scripts/cleanup_datasets/rename_purged_datasets.py +++ 
b/scripts/cleanup_datasets/rename_purged_datasets.py @@ -29,4 +29,4 @@ print >> out, "# Renamed " + str( renamed_files ) + " files" if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/cleanup_datasets/update_dataset_size.py --- a/scripts/cleanup_datasets/update_dataset_size.py +++ b/scripts/cleanup_datasets/update_dataset_size.py @@ -31,4 +31,4 @@ sys.exit(0) if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/cleanup_datasets/update_metadata.py --- a/scripts/cleanup_datasets/update_metadata.py +++ b/scripts/cleanup_datasets/update_metadata.py @@ -73,4 +73,4 @@ sys.exit(0) if __name__ == "__main__": - main() \ No newline at end of file + main() diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/functional_tests.py --- a/scripts/functional_tests.py +++ b/scripts/functional_tests.py @@ -48,7 +48,7 @@ from galaxy import tools from galaxy.util import bunch from galaxy import util -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps from functional import database_contexts from base.api_util import get_master_api_key @@ -520,7 +520,7 @@ has_test_data, shed_tools_dict = parse_tool_panel_config( shed_tool_config, shed_tools_dict ) # Persist the shed_tools_dict to the galaxy_tool_shed_test_file. 
shed_tools_file = open( galaxy_tool_shed_test_file, 'w' ) - shed_tools_file.write( to_json_string( shed_tools_dict ) ) + shed_tools_file.write( dumps( shed_tools_dict ) ) shed_tools_file.close() if not os.path.isabs( galaxy_tool_shed_test_file ): galaxy_tool_shed_test_file = os.path.join( os.getcwd(), galaxy_tool_shed_test_file ) diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/galaxy_messaging/client/scanner_interface.py --- a/scripts/galaxy_messaging/client/scanner_interface.py +++ b/scripts/galaxy_messaging/client/scanner_interface.py @@ -73,4 +73,4 @@ return - \ No newline at end of file + diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/galaxy_messaging/server/setup_rabbitmq.py --- a/scripts/galaxy_messaging/server/setup_rabbitmq.py +++ b/scripts/galaxy_messaging/server/setup_rabbitmq.py @@ -35,4 +35,4 @@ sys.exit(1) if __name__ == '__main__': - main( sys.argv[1] ) \ No newline at end of file + main( sys.argv[1] ) diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/galaxy_messaging/server/xml_helper.py --- a/scripts/galaxy_messaging/server/xml_helper.py +++ b/scripts/galaxy_messaging/server/xml_helper.py @@ -25,4 +25,4 @@ for node in nodelist: if node.nodeType == node.TEXT_NODE: rc = rc + node.data - return rc \ No newline at end of file + return rc diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/metagenomics/convert_title.py --- a/scripts/metagenomics/convert_title.py +++ b/scripts/metagenomics/convert_title.py @@ -36,4 +36,4 @@ print "\n".join(seq) print >> sys.stderr, "Unable to find gi number for %d sequences, the title is replaced as giunknown" %(invalid_lines) - \ No newline at end of file + diff -r 25396cf39fef9f77fbb802ed8ad5b8c2affb6cd6 -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 scripts/transfer.py --- a/scripts/transfer.py +++ 
b/scripts/transfer.py @@ -122,7 +122,7 @@ response = {} valid, request, response = json.validate_jsonrpc_request( request, ( 'get_state', ), () ) if valid: - self.request.send( json.to_json_string( json.jsonrpc_response( request=request, result=self.server.state_result.result ) ) ) + self.request.send( json.dumps( json.jsonrpc_response( request=request, result=self.server.state_result.result ) ) ) else: error_msg = 'Unable to serve request: %s' % response['error']['message'] if 'data' in response['error']: https://bitbucket.org/galaxy/galaxy-central/commits/a3450635b1d6/ Changeset: a3450635b1d6 User: dannon Date: 2014-09-09 16:06:35 Summary: Switch to_json_string/from_json_string in galaxy/tools. Affected #: 3 files diff -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 tools/data_source/data_source.py --- a/tools/data_source/data_source.py +++ b/tools/data_source/data_source.py @@ -3,7 +3,7 @@ # Data source application parameters are temporarily stored in the dataset file. import socket, urllib, sys, os from galaxy import eggs #eggs needs to be imported so that galaxy.util can find docutils egg... 
-from galaxy.util.json import from_json_string, to_json_string +from galaxy.util.json import loads, dumps from galaxy.util import get_charset_from_http_headers import galaxy.model # need to import model before sniff to resolve a circular import dependency from galaxy.datatypes import sniff @@ -23,7 +23,7 @@ def load_input_parameters( filename, erase_file = True ): datasource_params = {} try: - json_params = from_json_string( open( filename, 'r' ).read() ) + json_params = loads( open( filename, 'r' ).read() ) datasource_params = json_params.get( 'param_dict' ) except: json_params = None @@ -44,7 +44,7 @@ max_file_size = int( sys.argv[2] ) except: max_file_size = 0 - + job_params, params = load_input_parameters( filename ) if job_params is None: #using an older tabular file enhanced_handling = False @@ -57,13 +57,13 @@ else: enhanced_handling = True json_file = open( job_params[ 'job_config' ][ 'TOOL_PROVIDED_JOB_METADATA_FILE' ], 'w' ) #specially named file for output junk to pass onto set metadata - + datatypes_registry = Registry() datatypes_registry.load_datatypes( root_dir = job_params[ 'job_config' ][ 'GALAXY_ROOT_DIR' ], config = job_params[ 'job_config' ][ 'GALAXY_DATATYPES_CONF_FILE' ] ) - + URL = params.get( 'URL', None ) #using exactly URL indicates that only one dataset is being downloaded URL_method = params.get( 'URL_method', None ) - + # The Python support for fetching resources from the web is layered. urllib uses the httplib # library, which in turn uses the socket library. As of Python 2.3 you can specify how long # a socket should wait for a response before timing out. By default the socket module has no @@ -71,14 +71,14 @@ # levels. However, you can set the default timeout ( in seconds ) globally for all sockets by # doing the following. 
socket.setdefaulttimeout( 600 ) - + for data_dict in job_params[ 'output_data' ]: cur_filename = data_dict.get( 'file_name', filename ) cur_URL = params.get( '%s|%s|URL' % ( GALAXY_PARAM_PREFIX, data_dict[ 'out_data_name' ] ), URL ) if not cur_URL: open( cur_filename, 'w' ).write( "" ) stop_err( 'The remote data source application has not sent back a URL parameter in the request.' ) - + # The following calls to urllib.urlopen() will use the above default timeout try: if not URL_method or URL_method == 'get': @@ -96,7 +96,7 @@ cur_filename, is_multi_byte = sniff.stream_to_open_named_file( page, os.open( cur_filename, os.O_WRONLY | os.O_CREAT ), cur_filename, source_encoding=get_charset_from_http_headers( page.headers ) ) except Exception, e: stop_err( 'Unable to fetch %s:\n%s' % ( cur_URL, e ) ) - + #here import checks that upload tool performs if enhanced_handling: try: @@ -106,7 +106,7 @@ info = dict( type = 'dataset', dataset_id = data_dict[ 'dataset_id' ], ext = ext) - - json_file.write( "%s\n" % to_json_string( info ) ) - + + json_file.write( "%s\n" % dumps( info ) ) + if __name__ == "__main__": __main__() diff -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 tools/data_source/upload.py --- a/tools/data_source/upload.py +++ b/tools/data_source/upload.py @@ -36,7 +36,7 @@ sys.stderr.write( msg ) sys.exit( ret ) def file_err( msg, dataset, json_file ): - json_file.write( to_json_string( dict( type = 'dataset', + json_file.write( dumps( dict( type = 'dataset', ext = 'data', dataset_id = dataset.dataset_id, stderr = msg ) ) + "\n" ) @@ -319,7 +319,7 @@ line_count = line_count ) if dataset.get('uuid', None) is not None: info['uuid'] = dataset.get('uuid') - json_file.write( to_json_string( info ) + "\n" ) + json_file.write( dumps( info ) + "\n" ) if link_data_only == 'copy_files' and datatype.dataset_content_needs_grooming( output_path ): # Groom the dataset content if necessary @@ -358,7 +358,7 @@ info = dict( type = 'dataset', 
dataset_id = dataset.dataset_id, stdout = 'uploaded %s file' % dataset.file_type ) - json_file.write( to_json_string( info ) + "\n" ) + json_file.write( dumps( info ) + "\n" ) def output_adjacent_tmpdir( output_path ): @@ -382,7 +382,7 @@ registry.load_datatypes( root_dir=sys.argv[1], config=sys.argv[2] ) for line in open( sys.argv[3], 'r' ): - dataset = from_json_string( line ) + dataset = loads( line ) dataset = util.bunch.Bunch( **safe_dict( dataset ) ) try: output_path = output_paths[int( dataset.dataset_id )][0] diff -r 23f0f1e9477f6c14abbe9f08a4b9d12f5c122631 -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 tools/filters/gff/gff_filter_by_attribute.py --- a/tools/filters/gff/gff_filter_by_attribute.py +++ b/tools/filters/gff/gff_filter_by_attribute.py @@ -1,13 +1,13 @@ #!/usr/bin/env python # This tool takes a gff file as input and creates filters on attributes based on certain properties. # The tool will skip over invalid lines within the file, informing the user about the number of lines skipped. -# TODO: much of this code is copied from the Filter1 tool (filtering.py in tools/stats/). The commonalities should be +# TODO: much of this code is copied from the Filter1 tool (filtering.py in tools/stats/). The commonalities should be # abstracted and leveraged in each filtering tool. from __future__ import division import sys from galaxy import eggs -from galaxy.util.json import to_json_string, from_json_string +from galaxy.util.json import dumps, loads # Older py compatibility try: @@ -44,7 +44,7 @@ except: if operand in secured: stop_err( "Illegal value '%s' in %s '%s'" % ( operand, description, text ) ) - + # # Process inputs. # @@ -52,13 +52,13 @@ in_fname = sys.argv[1] out_fname = sys.argv[2] cond_text = sys.argv[3] -attribute_types = from_json_string( sys.argv[4] ) +attribute_types = loads( sys.argv[4] ) # Convert types from str to type objects. 
for name, a_type in attribute_types.items(): check_for_executable(a_type) attribute_types[ name ] = eval( a_type ) - + # Unescape if input has been escaped mapped_str = { '__lt__': '<', @@ -72,23 +72,23 @@ } for key, value in mapped_str.items(): cond_text = cond_text.replace( key, value ) - + # Attempt to determine if the condition includes executable stuff and, if so, exit. check_for_executable( cond_text, 'condition') -# Prepare the column variable names and wrappers for column data types. Only +# Prepare the column variable names and wrappers for column data types. Only # prepare columns up to largest column in condition. attrs, type_casts = [], [] for name, attr_type in attribute_types.items(): attrs.append( name ) type_cast = "get_value('%(name)s', attribute_types['%(name)s'], attribute_values)" % ( {'name': name} ) type_casts.append( type_cast ) - + attr_str = ', '.join( attrs ) # 'c1, c2, c3, c4' type_cast_str = ', '.join( type_casts ) # 'str(c1), int(c2), int(c3), str(c4)' wrap = "%s = %s" % ( attr_str, type_cast_str ) - -# Stats + +# Stats skipped_lines = 0 first_invalid_line = 0 invalid_line = None @@ -102,7 +102,7 @@ return (a_type)(values_dict[ name ]) else: return None - + # Read and filter input file, skipping invalid lines code = ''' for i, line in enumerate( file( in_fname ) ): https://bitbucket.org/galaxy/galaxy-central/commits/c2a961719f38/ Changeset: c2a961719f38 User: dannon Date: 2014-09-09 16:12:40 Summary: Final to_json_string/from_json_string elimination in tests. 
Affected #: 20 files diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/base/twilltestcase.py --- a/test/base/twilltestcase.py +++ b/test/base/twilltestcase.py @@ -16,7 +16,7 @@ from base.asserts import verify_assertions from galaxy.util import asbool -from galaxy.util.json import from_json_string +from galaxy.util.json import loads from galaxy.web import security from galaxy.web.framework.helpers import iff from urlparse import urlparse @@ -56,7 +56,7 @@ f = open( self.tool_shed_test_file, 'r' ) text = f.read() f.close() - self.shed_tools_dict = from_json_string( text ) + self.shed_tools_dict = loads( text ) else: self.shed_tools_dict = {} self.keepOutdir = os.environ.get( 'GALAXY_TEST_SAVE', '' ) @@ -1293,12 +1293,12 @@ def get_running_datasets( self ): self.visit_url( '/api/histories' ) - history_id = from_json_string( self.last_page() )[0][ 'id' ] + history_id = loads( self.last_page() )[0][ 'id' ] self.visit_url( '/api/histories/%s/contents' % history_id ) - jsondata = from_json_string( self.last_page() ) + jsondata = loads( self.last_page() ) for history_item in jsondata: self.visit_url( history_item[ 'url' ] ) - item_json = from_json_string( self.last_page() ) + item_json = loads( self.last_page() ) if item_json[ 'state' ] in [ 'queued', 'running', 'paused' ]: return True return False @@ -1394,7 +1394,7 @@ def json_from_url( self, url, params={} ): self.visit_url( url, params ) - return from_json_string( self.last_page() ) + return loads( self.last_page() ) def last_page( self ): return tc.browser.get_html() @@ -2510,4 +2510,4 @@ self.visit_url( page_url ) data = self.last_page() return data - return fetcher \ No newline at end of file + return fetcher diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/casperjs/server_env.py --- a/test/casperjs/server_env.py +++ b/test/casperjs/server_env.py @@ -100,7 +100,7 @@ f = open( self.tool_shed_test_file, 'r' ) text = 
f.read() f.close() - shed_tools_dict = from_json_string( text ) + shed_tools_dict = loads( text ) except Exception, exc: log.error( 'Error reading tool shed test file "%s": %s', self.tool_shed_test_file, exc, exc_info=True ) diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/functional/test_ucsc_table_browser.py --- a/test/functional/test_ucsc_table_browser.py +++ b/test/functional/test_ucsc_table_browser.py @@ -32,4 +32,4 @@ ) self.run_ucsc_main( track_params, output_params ) self.wait() - self.verify_dataset_correctness( 'GRCm38mm10_chr5_34761740-34912521.fa' ) \ No newline at end of file + self.verify_dataset_correctness( 'GRCm38mm10_chr5_34761740-34912521.fa' ) diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/install_and_test_tool_shed_repositories/__init__.py --- a/test/install_and_test_tool_shed_repositories/__init__.py +++ b/test/install_and_test_tool_shed_repositories/__init__.py @@ -1,1 +1,1 @@ -"""Install and test tool shed repositories.""" \ No newline at end of file +"""Install and test tool shed repositories.""" diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/install_and_test_tool_shed_repositories/functional/__init__.py --- a/test/install_and_test_tool_shed_repositories/functional/__init__.py +++ b/test/install_and_test_tool_shed_repositories/functional/__init__.py @@ -1,1 +1,1 @@ -'''Tests''' \ No newline at end of file +'''Tests''' diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/install_and_test_tool_shed_repositories/repositories_with_tools/__init__.py --- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/__init__.py +++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/__init__.py @@ -1,1 +1,1 @@ -"""Install and test tool shed repositories that contain tools and tool test components.""" \ No newline at end of file 
+"""Install and test tool shed repositories that contain tools and tool test components.""" diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py --- a/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py +++ b/test/install_and_test_tool_shed_repositories/repositories_with_tools/functional_tests.py @@ -42,7 +42,7 @@ from base.tool_shed_util import parse_tool_panel_config from galaxy.app import UniverseApplication -from galaxy.util.json import from_json_string +from galaxy.util.json import loads from galaxy.util import asbool from galaxy.util import unicodify from galaxy.web import buildapp @@ -312,7 +312,7 @@ # Find the path to the test-data directory within the installed repository. has_test_data, shed_tools_dict = \ parse_tool_panel_config( galaxy_shed_tool_conf_file, - from_json_string( file( galaxy_shed_tools_dict_file, 'r' ).read() ) ) + loads( file( galaxy_shed_tools_dict_file, 'r' ).read() ) ) # If the repository has a test-data directory we write the generated shed_tools_dict to a temporary # file so the functional test framework can find it. 
install_and_test_base_util.populate_galaxy_shed_tools_dict_file( galaxy_shed_tools_dict_file, diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/install_and_test_tool_shed_repositories/tool_dependency_definitions/__init__.py --- a/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/__init__.py +++ b/test/install_and_test_tool_shed_repositories/tool_dependency_definitions/__init__.py @@ -1,1 +1,1 @@ -"""Install and test tool shed repositories of type tool_dependency_definition.""" \ No newline at end of file +"""Install and test tool shed repositories of type tool_dependency_definition.""" diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/__init__.py --- a/test/tool_shed/__init__.py +++ b/test/tool_shed/__init__.py @@ -1,1 +1,1 @@ -"""Tool shed functional Tests""" \ No newline at end of file +"""Tool shed functional Tests""" diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/base/twilltestcase.py --- a/test/tool_shed/base/twilltestcase.py +++ b/test/tool_shed/base/twilltestcase.py @@ -15,7 +15,7 @@ from base.tool_shed_util import repository_installation_timeout from base.twilltestcase import TwillTestCase -from galaxy.util.json import from_json_string +from galaxy.util.json import loads from galaxy.web import security from tool_shed.util.encoding_util import tool_shed_encode from tool_shed.util import shed_util_common as suc @@ -637,7 +637,7 @@ url = '/api/datatypes?upload_only=false' self.visit_galaxy_url( url ) html = self.last_page() - datatypes = from_json_string( html ) + datatypes = loads( html ) return len( datatypes ) def get_env_sh_path( self, tool_dependency_name, tool_dependency_version, repository ): @@ -714,7 +714,7 @@ # Get the current folder's contents. 
url = '/repository/open_folder?folder_path=%s' % request_param_path self.visit_url( url ) - file_list = from_json_string( self.last_page() ) + file_list = loads( self.last_page() ) returned_file_list = [] if current_path is not None: returned_file_list.append( current_path ) @@ -753,7 +753,7 @@ url = '/api/datatypes/sniffers' self.visit_galaxy_url( url ) html = self.last_page() - sniffers = from_json_string( html ) + sniffers = loads( html ) return len( sniffers ) def get_tools_from_repository_metadata( self, repository, include_invalid=False ): @@ -772,7 +772,7 @@ tool_guid = urllib.quote_plus( tool_metadata[ 0 ][ 'guid' ], safe='' ) api_url = '/%s' % '/'.join( [ 'api', 'tools', tool_guid ] ) self.visit_galaxy_url( api_url ) - tool_dict = from_json_string( self.last_page() ) + tool_dict = loads( self.last_page() ) tool_panel_section = tool_dict[ 'panel_section_name' ] return tool_panel_section diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_0170_complex_prior_installation_required.py --- a/test/tool_shed/functional/test_0170_complex_prior_installation_required.py +++ b/test/tool_shed/functional/test_0170_complex_prior_installation_required.py @@ -131,4 +131,4 @@ self.check_repository_dependency( matplotlib_repository, depends_on_repository=numpy_repository ) self.display_manage_repository_page( matplotlib_repository, strings_displayed=[ 'numpy', '1.7', 'package', changeset_revision ] ) - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_0310_hg_push_from_api.py --- a/test/tool_shed/functional/test_0310_hg_push_from_api.py +++ b/test/tool_shed/functional/test_0310_hg_push_from_api.py @@ -166,4 +166,4 @@ 'Added another line to filtering.py.' 
] strings_not_displayed = [ 'Added a line to filtering.py' ] self.check_repository_changelog( repository, strings_displayed=strings_displayed, strings_not_displayed=[] ) - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_0410_repository_component_review_access_control.py --- a/test/tool_shed/functional/test_0410_repository_component_review_access_control.py +++ b/test/tool_shed/functional/test_0410_repository_component_review_access_control.py @@ -192,4 +192,4 @@ changeset_revision = self.get_repository_tip( repository ) review = self.test_db_util.get_repository_review_by_user_id_changeset_revision( user.id, repository.id, changeset_revision ) self.browse_component_review( review, strings_displayed=strings_displayed ) - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_0420_citable_urls_for_repositories.py --- a/test/tool_shed/functional/test_0420_citable_urls_for_repositories.py +++ b/test/tool_shed/functional/test_0420_citable_urls_for_repositories.py @@ -227,4 +227,4 @@ encoded_user_id=None, encoded_repository_id=None, strings_displayed=strings_displayed ) - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_0460_upload_to_repository.py --- a/test/tool_shed/functional/test_0460_upload_to_repository.py +++ b/test/tool_shed/functional/test_0460_upload_to_repository.py @@ -482,4 +482,4 @@ self.display_repository_file_contents( repository, filename='repository_dependencies.xml', filepath='subfolder', - strings_displayed=[ changeset_revision ] ) \ No newline at end of file + strings_displayed=[ changeset_revision ] ) diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc 
test/tool_shed/functional/test_0480_tool_dependency_xml_verification.py --- a/test/tool_shed/functional/test_0480_tool_dependency_xml_verification.py +++ b/test/tool_shed/functional/test_0480_tool_dependency_xml_verification.py @@ -72,4 +72,4 @@ repository = self.test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name ) assert self.repository_is_new( repository ), 'Uploading an incorrectly defined tool_dependencies.xml resulted in a changeset being generated.' - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py --- a/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py +++ b/test/tool_shed/functional/test_1087_install_updated_repository_dependencies.py @@ -116,4 +116,4 @@ self.visit_galaxy_url( url, params ) self.check_for_strings( strings_displayed ) strings_not_displayed = [ 'column_maker_1087' ] - self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed ) \ No newline at end of file + self.display_galaxy_browse_repositories_page( strings_not_displayed=strings_not_displayed ) diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_1170_complex_prior_installation_required.py --- a/test/tool_shed/functional/test_1170_complex_prior_installation_required.py +++ b/test/tool_shed/functional/test_1170_complex_prior_installation_required.py @@ -175,4 +175,4 @@ numpy_repository = self.test_db_util.get_installed_repository_by_name_owner( numpy_repository_name, common.test_user_1_name ) assert matplotlib_repository.update_time > numpy_repository.update_time, \ 'Error: package_numpy_1_7_0170 shows a later update time than package_matplotlib_1_2_0170' - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r 
c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional/test_1300_reset_all_metadata.py --- a/test/tool_shed/functional/test_1300_reset_all_metadata.py +++ b/test/tool_shed/functional/test_1300_reset_all_metadata.py @@ -463,4 +463,4 @@ del old_metadata[ 'tool_panel_section' ] assert repository.metadata == old_metadata, 'Metadata for %s repository %s changed after reset. \nOld: %s\nNew: %s' % \ ( repository.status, repository.name, old_metadata, repository.metadata ) - \ No newline at end of file + diff -r a3450635b1d6a4fd9a796adf72f63c4f12623ec5 -r c2a961719f380f5acf89fdc68964e40c225879bc test/tool_shed/functional_tests.py --- a/test/tool_shed/functional_tests.py +++ b/test/tool_shed/functional_tests.py @@ -57,7 +57,7 @@ from galaxy.app import UniverseApplication as GalaxyUniverseApplication from galaxy.web import buildapp as galaxybuildapp from galaxy.util import asbool -from galaxy.util.json import to_json_string +from galaxy.util.json import dumps import nose.core import nose.config @@ -182,7 +182,7 @@ galaxy_test_proxy_port = None if 'TOOL_SHED_TEST_DBPATH' in os.environ: shed_db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ] - else: + else: tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) shed_db_path = os.path.join( tempdir, 'database' ) shed_tool_data_table_conf_file = os.environ.get( 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF', os.path.join( tool_shed_test_tmp_dir, 'shed_tool_data_table_conf.xml' ) ) @@ -199,7 +199,7 @@ os.environ[ 'GALAXY_TEST_TOOL_DATA_PATH' ] = tool_data_path if 'GALAXY_TEST_DBPATH' in os.environ: galaxy_db_path = os.environ[ 'GALAXY_TEST_DBPATH' ] - else: + else: tempdir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) galaxy_db_path = os.path.join( tempdir, 'database' ) shed_file_path = os.path.join( shed_db_path, 'files' ) @@ -207,9 +207,9 @@ hgweb_config_file_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) new_repos_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) galaxy_tempfiles = tempfile.mkdtemp( 
dir=tool_shed_test_tmp_dir ) - galaxy_shed_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) - galaxy_migrated_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) - galaxy_tool_dependency_dir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) + galaxy_shed_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) + galaxy_migrated_tool_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) + galaxy_tool_dependency_dir = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir ) os.environ[ 'GALAXY_TEST_TOOL_DEPENDENCY_DIR' ] = galaxy_tool_dependency_dir hgweb_config_dir = hgweb_config_file_path os.environ[ 'TEST_HG_WEB_CONFIG_DIR' ] = hgweb_config_dir @@ -276,8 +276,8 @@ # Generate the shed_tool_data_table_conf.xml file. file( shed_tool_data_table_conf_file, 'w' ).write( tool_data_table_conf_xml_template ) os.environ[ 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF' ] = shed_tool_data_table_conf_file - # ---- Build Tool Shed Application -------------------------------------------------- - toolshedapp = None + # ---- Build Tool Shed Application -------------------------------------------------- + toolshedapp = None # if not toolshed_database_connection.startswith( 'sqlite://' ): # kwargs[ 'database_engine_option_max_overflow' ] = '20' if tool_dependency_dir is not None: @@ -335,7 +335,7 @@ else: raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" ) log.info( "Embedded web server started" ) - + # ---- Optionally start up a Galaxy instance ------------------------------------------------------ if 'TOOL_SHED_TEST_OMIT_GALAXY' not in os.environ: # Generate the tool_conf.xml file. 
@@ -357,7 +357,7 @@ open( galaxy_shed_data_manager_conf_file, 'wb' ).write( shed_data_manager_conf_xml_template ) galaxy_global_conf = get_webapp_global_conf() galaxy_global_conf[ '__file__' ] = 'universe_wsgi.ini.sample' - + kwargs = dict( allow_user_creation = True, allow_user_deletion = True, admin_users = 'test@bx.psu.edu', @@ -389,15 +389,15 @@ tool_data_table_config_path = galaxy_tool_data_table_conf_file, update_integrated_tool_panel = False, use_heartbeat = False ) - - # ---- Build Galaxy Application -------------------------------------------------- + + # ---- Build Galaxy Application -------------------------------------------------- if not galaxy_database_connection.startswith( 'sqlite://' ) and not install_galaxy_database_connection.startswith( 'sqlite://' ): kwargs[ 'database_engine_option_pool_size' ] = '10' kwargs[ 'database_engine_option_max_overflow' ] = '20' galaxyapp = GalaxyUniverseApplication( **kwargs ) log.info( "Embedded Galaxy application started" ) - + # ---- Run galaxy webserver ------------------------------------------------------ galaxy_server = None galaxy_global_conf[ 'database_file' ] = galaxy_database_connection @@ -464,11 +464,11 @@ test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() ) test_config.configure( sys.argv ) # Run the tests. - result = run_tests( test_config ) + result = run_tests( test_config ) success = result.wasSuccessful() except: log.exception( "Failure running tests" ) - + log.info( "Shutting down" ) # ---- Tear down ----------------------------------------------------------- if tool_shed_server: https://bitbucket.org/galaxy/galaxy-central/commits/6f06ff96d5ca/ Changeset: 6f06ff96d5ca User: dannon Date: 2014-09-09 16:17:56 Summary: Add deprecation notice for to_json_string/from_json_string. I left these references just in case I missed something that uses them. 
Affected #: 1 file diff -r c2a961719f380f5acf89fdc68964e40c225879bc -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 lib/galaxy/util/json.py --- a/lib/galaxy/util/json.py +++ b/lib/galaxy/util/json.py @@ -7,8 +7,8 @@ import random import string -to_json_string = json.dumps -from_json_string = json.loads +to_json_string = json.dumps # deprecated +from_json_string = json.loads # deprecated dumps = json.dumps loads = json.loads @@ -25,9 +25,9 @@ else: return val + # Methods for handling JSON-RPC - def validate_jsonrpc_request( request, regular_methods, notification_methods ): try: request = loads( request ) https://bitbucket.org/galaxy/galaxy-central/commits/74b2495de918/ Changeset: 74b2495de918 User: dannon Date: 2014-09-09 22:31:31 Summary: Merge json changes (and some cleanup) back with central. Affected #: 62 files diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f run_functional_tests.sh --- /dev/null +++ b/run_functional_tests.sh @@ -0,0 +1,1 @@ +run_tests.sh \ No newline at end of file diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/base-mvc.js --- a/static/scripts/mvc/base-mvc.js +++ b/static/scripts/mvc/base-mvc.js @@ -42,6 +42,7 @@ if( this.logger ){ var log = this.logger.log; if( typeof this.logger.log === 'object' ){ +//TODO:! there has to be a way to get the lineno/file into this log = Function.prototype.bind.call( this.logger.log, this.logger ); } return log.apply( this.logger, arguments ); @@ -149,6 +150,23 @@ //============================================================================== +/** Function that allows mixing of hashs into bbone MVC while showing the mixins first + * (before the more local class overrides/hash). + * Basically, a simple reversal of param order on _.defaults() - to show mixins in top of definition. + * @example: + * var NewModel = Something.extend( mixin( MyMixinA, MyMixinB, { ... myVars : ... 
}) ); + * + * NOTE: this does not combine any hashes (like events, etc.) and you're expected to handle that + */ +function mixin( mixinHash1, /* mixinHash2, etc: ... variadic */ propsHash ){ + var args = Array.prototype.slice.call( arguments, 0 ), + lastArg = args.pop(); + args.unshift( lastArg ); + return _.defaults.apply( _, args ); +} + + +//============================================================================== /** A mixin for models that allow T/F/Matching to their attributes - useful when * searching or filtering collections of models. * @example: @@ -329,205 +347,6 @@ //============================================================================== -/** Function that allows mixing of hashs into bbone MVC while showing the mixins first - * (before the more local class overrides/hash). - * Basically, a simple reversal of param order on _.defaults() - to show mixins in top of definition. - * @example: - * var NewModel = Something.extend( mixin( MyMixinA, MyMixinB, { ... myVars : ... }) ); - * - * NOTE: this does not combine any hashes (like events, etc.) and you're expected to handle that - */ -function mixin( mixinHash1, /* mixinHash2, etc: ... variadic */ propsHash ){ - var args = Array.prototype.slice.call( arguments, 0 ), - lastArg = args.pop(); - args.unshift( lastArg ); - return _.defaults.apply( _, args ); -} - -//============================================================================== -/** Return an underscore template fn from an array of strings. 
- * @param {String[]} template the template strings to compile into the underscore template fn - * @param {String} jsonNamespace an optional namespace for the json data passed in (defaults to 'model') - * @returns {Function} the (wrapped) underscore template fn - * The function accepts: - * - * The template strings can access: - * the json/model hash using model ("<%- model.myAttr %>) using the jsonNamespace above - * _l: the localizer function - * view (if passed): ostensibly, the view using the template (handy for view instance vars) - * Because they're namespaced, undefined attributes will not throw an error. - * - * @example: - * templateBler : BASE_MVC.wrapTemplate([ - * '<div class="myclass <%- mynamespace.modelClass %>">', - * '<span><% print( _l( mynamespace.message ) ); %>:<%= view.status %></span>' - * '</div>' - * ], 'mynamespace' ) - * - * Meant to be called in a View's definition in order to compile only once. - * - */ -function wrapTemplate( template, jsonNamespace ){ - jsonNamespace = jsonNamespace || 'model'; - var templateFn = _.template( template.join( '' ) ); - return function( json, view ){ - var templateVars = { view : view || {}, _l : _l }; - templateVars[ jsonNamespace ] = json || {}; - return templateFn( templateVars ); - }; -} - -//============================================================================== -/** A view which, when first rendered, shows only summary data/attributes, but - * can be expanded to show further details (and optionally fetch those - * details from the server). - */ -var ExpandableView = Backbone.View.extend( LoggableMixin ).extend({ -//TODO: Although the reasoning behind them is different, this shares a lot with HiddenUntilActivated above: combine them - //PRECONDITION: model must have method hasDetails - //PRECONDITION: subclasses must have templates.el and templates.details - - initialize : function( attributes ){ - /** are the details of this view expanded/shown or not? 
*/ - this.expanded = attributes.expanded || false; - //this.log( '\t expanded:', this.expanded ); - this.fxSpeed = attributes.fxSpeed || this.fxSpeed; - }, - - // ........................................................................ render main - /** jq fx speed */ - fxSpeed : 'fast', - - /** Render this content, set up ui. - * @param {Number or String} speed the speed of the render - */ - render : function( speed ){ - var $newRender = this._buildNewRender(); - this._setUpBehaviors( $newRender ); - this._queueNewRender( $newRender, speed ); - return this; - }, - - /** Build a temp div containing the new children for the view's $el. - * If the view is already expanded, build the details as well. - */ - _buildNewRender : function(){ - // create a new render using a skeleton template, render title buttons, render body, and set up events, etc. - var $newRender = $( this.templates.el( this.model.toJSON(), this ) ); - if( this.expanded ){ - this.$details( $newRender ).replaceWith( this._renderDetails().show() ); - } - return $newRender; - }, - - /** Fade out the old el, swap in the new contents, then fade in. 
- * @param {Number or String} speed jq speed to use for rendering effects - * @fires rendered when rendered - */ - _queueNewRender : function( $newRender, speed ) { - speed = ( speed === undefined )?( this.fxSpeed ):( speed ); - var view = this; - - $( view ).queue( 'fx', [ - function( next ){ this.$el.fadeOut( speed, next ); }, - function( next ){ - view._swapNewRender( $newRender ); - next(); - }, - function( next ){ this.$el.fadeIn( speed, next ); }, - function( next ){ - this.trigger( 'rendered', view ); - next(); - } - ]); - }, - - /** empty out the current el, move the $newRender's children in */ - _swapNewRender : function( $newRender ){ - return this.$el.empty().attr( 'class', this.className ).append( $newRender.children() ); - }, - - /** set up js behaviors, event handlers for elements within the given container - * @param {jQuery} $container jq object that contains the elements to process (defaults to this.$el) - */ - _setUpBehaviors : function( $where ){ - $where = $where || this.$el; - // set up canned behavior on children (bootstrap, popupmenus, editable_text, etc.) - //make_popup_menus( $where ); - $where.find( '[title]' ).tooltip({ placement : 'bottom' }); - }, - - // ......................................................................... details - /** shortcut to details DOM (as jQ) */ - $details : function( $where ){ - $where = $where || this.$el; - return $where.find( '.details' ); - }, - - /** build the DOM for the details and set up behaviors on it */ - _renderDetails : function(){ - var $newDetails = $( this.templates.details( this.model.toJSON(), this ) ); - this._setUpBehaviors( $newDetails ); - return $newDetails; - }, - - // ......................................................................... 
expansion/details - /** Show or hide the details - * @param {Boolean} expand if true, expand; if false, collapse - */ - toggleExpanded : function( expand ){ - expand = ( expand === undefined )?( !this.expanded ):( expand ); - if( expand ){ - this.expand(); - } else { - this.collapse(); - } - return this; - }, - - /** Render and show the full, detailed body of this view including extra data and controls. - * note: if the model does not have detailed data, fetch that data before showing the body - * @fires expanded when a body has been expanded - */ - expand : function(){ - var view = this; - return view._fetchModelDetails() - .always(function(){ - var $newDetails = view._renderDetails(); - view.$details().replaceWith( $newDetails ); - // needs to be set after the above or the slide will not show - view.expanded = true; - $newDetails.slideDown( view.fxSpeed, function(){ - view.trigger( 'expanded', view ); - }); - }); - }, - - /** Check for model details and, if none, fetch them. - * @returns {jQuery.promise} the model.fetch.xhr if details are being fetched, an empty promise if not - */ - _fetchModelDetails : function(){ - if( !this.model.hasDetails() ){ - return this.model.fetch(); - } - return jQuery.when(); - }, - - /** Hide the body/details of an HDA. - * @fires collapsed when a body has been collapsed - */ - collapse : function(){ - var view = this; - view.expanded = false; - this.$details().slideUp( view.fxSpeed, function(){ - view.trigger( 'collapsed', view ); - }); - } - -}); - - -//============================================================================== /** Mixin for views that can be dragged and dropped * Allows for the drag behavior to be turned on/off, setting/removing jQuery event * handlers each time. @@ -711,186 +530,48 @@ //============================================================================== -/** A view that is displayed in some larger list/grid/collection. - * Inherits from Expandable, Selectable, Draggable. 
- * The DOM contains warnings, a title bar, and a series of primary action controls. - * Primary actions are meant to be easily accessible item functions (such as delete) - * that are rendered in the title bar. +/** Return an underscore template fn from an array of strings. + * @param {String[]} template the template strings to compile into the underscore template fn + * @param {String} jsonNamespace an optional namespace for the json data passed in (defaults to 'model') + * @returns {Function} the (wrapped) underscore template fn + * The function accepts: * - * Details are rendered when the user clicks the title bar or presses enter/space when - * the title bar is in focus. + * The template strings can access: + * the json/model hash using model ("<%- model.myAttr %>) using the jsonNamespace above + * _l: the localizer function + * view (if passed): ostensibly, the view using the template (handy for view instance vars) + * Because they're namespaced, undefined attributes will not throw an error. * - * Designed as a base class for history panel contents - but usable elsewhere (I hope). + * @example: + * templateBler : BASE_MVC.wrapTemplate([ + * '<div class="myclass <%- mynamespace.modelClass %>">', + * '<span><% print( _l( mynamespace.message ) ); %>:<%= view.status %></span>' + * '</div>' + * ], 'mynamespace' ) + * + * Meant to be called in a View's definition in order to compile only once. + * */ -var ListItemView = ExpandableView.extend( mixin( SelectableViewMixin, DraggableViewMixin, { - -//TODO: that's a little contradictory - tagName : 'div', - className : 'list-item', - - /** Set up the base class and all mixins */ - initialize : function( attributes ){ - ExpandableView.prototype.initialize.call( this, attributes ); - SelectableViewMixin.initialize.call( this, attributes ); - DraggableViewMixin.initialize.call( this, attributes ); - }, - - // ........................................................................ 
rendering - /** In this override, call methods to build warnings, titlebar and primary actions */ - _buildNewRender : function(){ - var $newRender = ExpandableView.prototype._buildNewRender.call( this ); - $newRender.find( '.warnings' ).replaceWith( this._renderWarnings() ); - $newRender.find( '.title-bar' ).replaceWith( this._renderTitleBar() ); - $newRender.find( '.primary-actions' ).append( this._renderPrimaryActions() ); - $newRender.find( '.subtitle' ).replaceWith( this._renderSubtitle() ); - return $newRender; - }, - - /** In this override, render the selector controls and set up dragging before the swap */ - _swapNewRender : function( $newRender ){ - ExpandableView.prototype._swapNewRender.call( this, $newRender ); - if( this.selectable ){ this.showSelector( 0 ); } - if( this.draggable ){ this.draggableOn(); } - return this.$el; - }, - - /** Render any warnings the item may need to show (e.g. "I'm deleted") */ - _renderWarnings : function(){ - var view = this, - $warnings = $( '<div class="warnings"></div>' ), - json = view.model.toJSON(); -//TODO:! unordered (map) - _.each( view.templates.warnings, function( templateFn ){ - $warnings.append( $( templateFn( json, view ) ) ); - }); - return $warnings; - }, - - /** Render the title bar (the main/exposed SUMMARY dom element) */ - _renderTitleBar : function(){ - return $( this.templates.titleBar( this.model.toJSON(), this ) ); - }, - - /** Return an array of jQ objects containing common/easily-accessible item controls */ - _renderPrimaryActions : function(){ - // override this - return []; - }, - - /** Render the title bar (the main/exposed SUMMARY dom element) */ - _renderSubtitle : function(){ - return $( this.templates.subtitle( this.model.toJSON(), this ) ); - }, - - // ......................................................................... 
events - /** event map */ - events : { - // expand the body when the title is clicked or when in focus and space or enter is pressed - 'click .title-bar' : '_clickTitleBar', - 'keydown .title-bar' : '_keyDownTitleBar', - - // dragging - don't work, originalEvent === null - //'dragstart .dataset-title-bar' : 'dragStartHandler', - //'dragend .dataset-title-bar' : 'dragEndHandler' - - 'click .selector' : 'toggleSelect' - }, - - /** expand when the title bar is clicked */ - _clickTitleBar : function( event ){ - event.stopPropagation(); - this.toggleExpanded(); - }, - - /** expand when the title bar is in focus and enter or space is pressed */ - _keyDownTitleBar : function( event ){ - // bail (with propagation) if keydown and not space or enter - var KEYCODE_SPACE = 32, KEYCODE_RETURN = 13; - if( event && ( event.type === 'keydown' ) - &&( event.keyCode === KEYCODE_SPACE || event.keyCode === KEYCODE_RETURN ) ){ - this.toggleExpanded(); - event.stopPropagation(); - return false; - } - return true; - }, - - // ......................................................................... misc - /** String representation */ - toString : function(){ - var modelString = ( this.model )?( this.model + '' ):( '(no model)' ); - return 'ListItemView(' + modelString + ')'; - } -})); - -// ............................................................................ TEMPLATES -/** underscore templates */ -ListItemView.prototype.templates = (function(){ -//TODO: move to require text! plugin - - var elTemplato = wrapTemplate([ - '<div class="list-element">', - // errors, messages, etc. - '<div class="warnings"></div>', - - // multi-select checkbox - '<div class="selector">', - '<span class="fa fa-2x fa-square-o"></span>', - '</div>', - // space for title bar buttons - gen. 
floated to the right - '<div class="primary-actions"></div>', - '<div class="title-bar"></div>', - - // expandable area for more details - '<div class="details"></div>', - '</div>' - ]); - - var warnings = {}; - - var titleBarTemplate = wrapTemplate([ - // adding a tabindex here allows focusing the title bar and the use of keydown to expand the dataset display - '<div class="title-bar clear" tabindex="0">', -//TODO: prob. belongs in dataset-list-item - '<span class="state-icon"></span>', - '<div class="title">', - '<span class="name"><%- element.name %></span>', - '</div>', - '<div class="subtitle"></div>', - '</div>' - ], 'element' ); - - var subtitleTemplate = wrapTemplate([ - // override this - '<div class="subtitle"></div>' - ]); - - var detailsTemplate = wrapTemplate([ - // override this - '<div class="details"></div>' - ]); - - return { - el : elTemplato, - warnings : warnings, - titleBar : titleBarTemplate, - subtitle : subtitleTemplate, - details : detailsTemplate +function wrapTemplate( template, jsonNamespace ){ + jsonNamespace = jsonNamespace || 'model'; + var templateFn = _.template( template.join( '' ) ); + return function( json, view ){ + var templateVars = { view : view || {}, _l : _l }; + templateVars[ jsonNamespace ] = json || {}; + return templateFn( templateVars ); }; -}()); +} //============================================================================== return { LoggableMixin : LoggableMixin, SessionStorageModel : SessionStorageModel, + mixin : mixin, SearchableModelMixin : SearchableModelMixin, HiddenUntilActivatedViewMixin : HiddenUntilActivatedViewMixin, - mixin : mixin, - wrapTemplate : wrapTemplate, - ExpandableView : ExpandableView, DraggableViewMixin : DraggableViewMixin, SelectableViewMixin : SelectableViewMixin, - ListItemView : ListItemView + wrapTemplate : wrapTemplate }; }); diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/collection/collection-li.js --- 
a/static/scripts/mvc/collection/collection-li.js +++ b/static/scripts/mvc/collection/collection-li.js @@ -1,11 +1,12 @@ define([ + "mvc/list/list-item", "mvc/dataset/dataset-li", "mvc/base-mvc", "utils/localization" -], function( DATASET_LI, BASE_MVC, _l ){ +], function( LIST_ITEM, DATASET_LI, BASE_MVC, _l ){ /* global Backbone, LoggableMixin */ //============================================================================== -var ListItemView = BASE_MVC.ListItemView; +var ListItemView = LIST_ITEM.ListItemView; /** @class Read only view for DatasetCollection. */ var DCListItemView = ListItemView.extend( @@ -97,8 +98,7 @@ //TODO: this might be expendable - compacted with HDAListItemView /** logger used to record this.log messages, commonly set to console */ - // comment this out to suppress log output - logger : console, + //logger : console, /** add the DCE class to the list item */ className : ListItemView.prototype.className + " dataset-collection-element", diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/collection/collection-model.js --- a/static/scripts/mvc/collection/collection-model.js +++ b/static/scripts/mvc/collection/collection-model.js @@ -270,6 +270,11 @@ return this.save( { deleted: false }, options ); }, + /** Is this collection deleted or purged? */ + isDeletedOrPurged : function(){ + return ( this.get( 'deleted' ) || this.get( 'purged' ) ); + }, + // ........................................................................ 
searchable /** searchable attributes for collections */ searchAttributes : [ diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/collection/collection-panel.js --- a/static/scripts/mvc/collection/collection-panel.js +++ b/static/scripts/mvc/collection/collection-panel.js @@ -1,9 +1,10 @@ define([ + "mvc/list/list-panel", "mvc/collection/collection-model", "mvc/collection/collection-li", "mvc/base-mvc", "utils/localization" -], function( DC_MODEL, DC_LI, BASE_MVC, _l ){ +], function( LIST_PANEL, DC_MODEL, DC_LI, BASE_MVC, _l ){ /* ============================================================================= TODO: @@ -11,36 +12,29 @@ // ============================================================================= /** @class non-editable, read-only View/Controller for a dataset collection. */ -var CollectionPanel = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend( +var _super = LIST_PANEL.ModelListPanel; +var CollectionPanel = _super.extend( /** @lends CollectionPanel.prototype */{ //MODEL is either a DatasetCollection (or subclass) or a DatasetCollectionElement (list of pairs) /** logger used to record this.log messages, commonly set to console */ //logger : console, - tagName : 'div', - className : 'dataset-collection-panel', - - /** (in ms) that jquery effects will use */ - fxSpeed : 'fast', + className : _super.prototype.className + ' dataset-collection-panel', /** sub view class used for datasets */ DatasetDCEViewClass : DC_LI.DatasetDCEListItemView, /** sub view class used for nested collections */ NestedDCDCEViewClass : DC_LI.NestedDCDCEListItemView, + /** key of attribute in model to assign to this.collection */ + modelCollectionKey : 'elements', // ......................................................................... 
SET UP /** Set up the view, set up storage, bind listeners to HistoryContents events * @param {Object} attributes optional settings for the panel */ initialize : function( attributes ){ - attributes = attributes || {}; - // set the logger if requested - if( attributes.logger ){ - this.logger = attributes.logger; - } - this.log( this + '.initialize:', attributes ); - + _super.prototype.initialize.call( this, attributes ); this.linkTarget = attributes.linkTarget || '_blank'; this.hasUser = attributes.hasUser; @@ -50,185 +44,49 @@ //window.collectionPanel = this; }, - /** create any event listeners for the panel - * @fires: rendered:initial on the first render - * @fires: empty-history when switching to a history with no HDAs or creating a new history - */ - _setUpListeners : function(){ - // debugging - //if( this.logger ){ - this.on( 'all', function( event ){ - this.log( this + '', arguments ); - }, this ); - //} - return this; + // ------------------------------------------------------------------------ sub-views + /** In this override, use model.getVisibleContents */ + _filterCollection : function(){ +//TODO: should *not* be model.getVisibleContents + return this.model.getVisibleContents(); }, - // ------------------------------------------------------------------------ history/hda event listening - /** listening for history and HDA events */ - _setUpModelEventHandlers : function(){ - return this; - }, - - // ------------------------------------------------------------------------ panel rendering - /** Render panel - * @fires: rendered when the panel is attached and fully visible - * @see Backbone.View#render - */ - render : function( speed, callback ){ - this.log( 'render:', speed, callback ); - // send a speed of 0 to have no fade in/out performed - speed = ( speed === undefined )?( this.fxSpeed ):( speed ); - //this.debug( this + '.render, fxSpeed:', speed ); - var panel = this, - $newRender; - - // handle the possibility of no model (can occur if fetching the 
model returns an error) - if( !this.model ){ - return this; - } - $newRender = this.renderModel(); - - // fade out existing, swap with the new, fade in, set up behaviours - $( panel ).queue( 'fx', [ - function( next ){ - if( speed && panel.$el.is( ':visible' ) ){ - panel.$el.fadeOut( speed, next ); - } else { - next(); - } - }, - function( next ){ - // swap over from temp div newRender - panel.$el.empty(); - if( $newRender ){ - panel.$el.append( $newRender.children() ); - } - next(); - }, - function( next ){ - if( speed && !panel.$el.is( ':visible' ) ){ - panel.$el.fadeIn( speed, next ); - } else { - next(); - } - }, - function( next ){ - //TODO: ideally, these would be set up before the fade in (can't because of async save text) - if( callback ){ callback.call( this ); } - panel.trigger( 'rendered', this ); - next(); - } - ]); - return this; - }, - - /** render with collection data - * @returns {jQuery} dom fragment as temporary container to be swapped out later - */ - renderModel : function( ){ - // tmp div for final swap in render -//TODO: ugh - reuse issue - refactor out - var type = this.model.get( 'collection_type' ) || this.model.object.get( 'collection_type' ), - json = _.extend( this.model.toJSON(), { - parentName : this.parentName, - type : type - }), - $newRender = $( '<div/>' ).append( this.templates.panel( json ) ); - this._setUpBehaviours( $newRender ); - this.renderContents( $newRender ); - return $newRender; - }, - - /** Set up js/widget behaviours */ - _setUpBehaviours : function( $where ){ - //TODO: these should be either sub-MVs, or handled by events - $where = $where || this.$el; - $where.find( '[title]' ).tooltip({ placement: 'bottom' }); - return this; - }, - - // ------------------------------------------------------------------------ sub-$element shortcuts - /** the scroll container for this panel - can be $el, $el.parent(), or grandparent depending on context */ - $container : function(){ - return ( this.findContainerFn )?( 
this.findContainerFn.call( this ) ):( this.$el.parent() ); - }, - /** where list content views are attached */ - $datasetsList : function( $where ){ - return ( $where || this.$el ).find( '.datasets-list' ); - }, - - // ------------------------------------------------------------------------ sub-views - /** Set up/render a view for each DCE to be shown, init with model and listeners. - * DCE views are cached to the map this.contentViews (using the model.id as key). - * @param {jQuery} $whereTo what dom element to prepend the DCE views to - * @returns the number of visible DCE views - */ - renderContents : function( $whereTo ){ - //this.debug( 'renderContents, elements:', this.model.elements ); - $whereTo = $whereTo || this.$el; - - this.warn( this + '.renderContents:, model:', this.model ); - var panel = this, - contentViews = {}, - //NOTE: no filtering here - visibleContents = this.model.getVisibleContents(); - //this.debug( 'renderContents, visibleContents:', visibleContents, $whereTo ); - - this.$datasetsList( $whereTo ).empty(); - if( visibleContents && visibleContents.length ){ - visibleContents.each( function( content ){ - var contentId = content.id, - contentView = panel._createContentView( content ); - contentViews[ contentId ] = contentView; - panel._attachContentView( contentView.render(), $whereTo ); - }); - } - this.contentViews = contentViews; - return this.contentViews; - }, - - /** */ - _createContentView : function( content ){ - //this.debug( 'content json:', JSON.stringify( content, null, ' ' ) ); - var contentView = null, - ContentClass = this._getContentClass( content ); - //this.debug( 'content:', content ); - //this.debug( 'ContentClass:', ContentClass ); - contentView = new ContentClass({ - model : content, - linkTarget : this.linkTarget, - //draggable : true, - hasUser : this.hasUser, - logger : this.logger - }); - //this.debug( 'contentView:', contentView ); - this._setUpContentListeners( contentView ); - return contentView; - }, - - /** */ - 
_getContentClass : function( content ){ - //this.debug( this + '._getContentClass:', content ); + /** override to return proper view class based on element_type */ + _getItemViewClass : function( model ){ + //this.debug( this + '._getItemViewClass:', model ); //TODO: subclasses use DCEViewClass - but are currently unused - decide - switch( content.get( 'element_type' ) ){ + switch( model.get( 'element_type' ) ){ case 'hda': return this.DatasetDCEViewClass; case 'dataset_collection': return this.NestedDCDCEViewClass; } - throw new TypeError( 'Unknown element type:', content.get( 'element_type' ) ); + throw new TypeError( 'Unknown element type:', model.get( 'element_type' ) ); }, - /** Set up listeners for content view events. In this override, handle collection expansion. */ - _setUpContentListeners : function( contentView ){ + /** override to add link target and anon */ + _getItemViewOptions : function( model ){ + var options = _super.prototype._getItemViewOptions.call( this, model ); + return _.extend( options, { + linkTarget : this.linkTarget, + hasUser : this.hasUser + }); + }, + + /** when a sub-view is clicked in the collection panel that is itself a collection, + * hide this panel's elements and show the sub-collection in its own panel. + */ + _setUpItemViewListeners : function( view ){ var panel = this; - if( contentView.model.get( 'element_type' ) === 'dataset_collection' ){ - contentView.on( 'expanded', function( collectionView ){ + _super.prototype._setUpItemViewListeners.call( panel, view ); + //TODO:?? 
doesn't seem to belong here + if( view.model.get( 'element_type' ) === 'dataset_collection' ){ + view.on( 'expanded', function( collectionView ){ panel.info( 'expanded', collectionView ); panel._addCollectionPanel( collectionView ); }); } + return panel; }, /** When a sub-collection is clicked, hide the current panel and render the sub-collection in its own panel */ @@ -246,7 +104,7 @@ }); currPanel.panelStack.push( panel ); - currPanel.$( '.controls' ).add( '.datasets-list' ).hide(); + currPanel.$( '.controls' ).add( '.list-items' ).hide(); currPanel.$el.append( panel.$el ); panel.on( 'close', function(){ currPanel.render(); @@ -266,14 +124,6 @@ } }, - /** attach an contentView to the panel */ - _attachContentView : function( contentView, $whereTo ){ - $whereTo = $whereTo || this.$el; - var $datasetsList = this.$datasetsList( $whereTo ); - $datasetsList.append( contentView.$el ); - return this; - }, - // ------------------------------------------------------------------------ panel events /** event map */ events : { @@ -293,16 +143,16 @@ } }); -//----------------------------------------------------------------------------- TEMPLATES -/** underscore templates */ -CollectionPanel.templates = CollectionPanel.prototype.templates = (function(){ -// use closure to run underscore template fn only once at module load - var _panelTemplate = _.template([ + +//------------------------------------------------------------------------------ TEMPLATES +CollectionPanel.prototype.templates = (function(){ + + var controlsTemplate = BASE_MVC.wrapTemplate([ '<div class="controls">', '<div class="navigation">', '<a class="back" href="javascript:void(0)">', '<span class="fa fa-icon fa-angle-left"></span>', - _l( 'Back to ' ), '<%- collection.parentName %>', + _l( 'Back to ' ), '<%- view.parentName %>', '</a>', '</div>', @@ -310,29 +160,25 @@ '<div class="name"><%- collection.name || collection.element_identifier %></div>', '<div class="subtitle">', //TODO: remove logic from template - 
'<% if( collection.type === "list" ){ %>', + '<% if( collection.collection_type === "list" ){ %>', _l( 'a list of datasets' ), - '<% } else if( collection.type === "paired" ){ %>', + '<% } else if( collection.collection_type === "paired" ){ %>', _l( 'a pair of datasets' ), - '<% } else if( collection.type === "list:paired" ){ %>', + '<% } else if( collection.collection_type === "list:paired" ){ %>', _l( 'a list of paired datasets' ), '<% } %>', '</div>', '</div>', - '</div>', - // where the datasets/hdas are added - '<div class="datasets-list"></div>' - ].join( '' )); + '</div>' + ], 'collection' ); - // we override here in order to pass the localizer (_L) into the template scope - since we use it as a fn within - return { - panel : function( json ){ - return _panelTemplate({ _l: _l, collection: json }); - } - }; + return _.extend( _.clone( _super.prototype.templates ), { + controls : controlsTemplate + }); }()); + // ============================================================================= /** @class non-editable, read-only View/Controller for a dataset collection. */ var ListCollectionPanel = CollectionPanel.extend( diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/dataset/dataset-li.js --- a/static/scripts/mvc/dataset/dataset-li.js +++ b/static/scripts/mvc/dataset/dataset-li.js @@ -1,8 +1,9 @@ define([ + "mvc/list/list-item", "mvc/dataset/states", "mvc/base-mvc", "utils/localization" -], function( STATES, BASE_MVC, _l ){ +], function( LIST_ITEM, STATES, BASE_MVC, _l ){ /* global Backbone */ /*============================================================================== TODO: @@ -11,7 +12,7 @@ simplify button rendering ==============================================================================*/ -var _super = BASE_MVC.ListItemView; +var _super = LIST_ITEM.ListItemView; /** @class Read only list view for either LDDAs, HDAs, or HDADCEs. * Roughly, any DatasetInstance (and not a raw Dataset). 
*/ diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/dataset/dataset-list.js --- a/static/scripts/mvc/dataset/dataset-list.js +++ b/static/scripts/mvc/dataset/dataset-list.js @@ -1,5 +1,5 @@ define([ - "mvc/dataset/list-panel", + "mvc/list/list-panel", "mvc/dataset/dataset-li", "mvc/base-mvc", "utils/localization" diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/dataset/dataset-model.js --- a/static/scripts/mvc/dataset/dataset-model.js +++ b/static/scripts/mvc/dataset/dataset-model.js @@ -94,6 +94,9 @@ this.trigger( 'state:ready', currModel, newState, this.previous( 'state' ) ); } }); + this.on( 'change:urls', function(){ + console.warn( 'change:urls', arguments ); + }); // the download url (currenlty) relies on having a correct file extension this.on( 'change:id change:file_ext', function( currModel ){ this._generateUrls(); @@ -104,6 +107,8 @@ /** override to add urls */ toJSON : function(){ var json = Backbone.Model.prototype.toJSON.call( this ); + //console.warn( 'returning json?' ); + //return json; return _.extend( json, { urls : this.urls }); diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/dataset/list-panel.js --- a/static/scripts/mvc/dataset/list-panel.js +++ /dev/null @@ -1,687 +0,0 @@ -define([ - "mvc/base-mvc", - "utils/localization" -], function( BASE_MVC, _l ){ -/* ============================================================================= -TODO: - -============================================================================= */ -/** @class List that contains ListItemViews. 
- */ -var ListPanel = Backbone.View.extend( BASE_MVC.LoggableMixin ).extend( -/** @lends ReadOnlyHistoryPanel.prototype */{ - - /** logger used to record this.log messages, commonly set to console */ - //logger : console, - - /** class to use for constructing the sub-views */ - viewClass : BASE_MVC.ListItemView, - - tagName : 'div', - className : 'list-panel', - - /** (in ms) that jquery effects will use */ - fxSpeed : 'fast', - - /** string to display when the model has no hdas */ - emptyMsg : _l( 'This list is empty' ), - /** string to no hdas match the search terms */ - noneFoundMsg : _l( 'No matching items found' ), - - // ......................................................................... SET UP - /** Set up the view, set up storage, bind listeners to HistoryContents events - * @param {Object} attributes optional settings for the list - */ - initialize : function( attributes, options ){ - attributes = attributes || {}; - // set the logger if requested - if( attributes.logger ){ - this.logger = attributes.logger; - } - this.log( this + '.initialize:', attributes ); - - // ---- instance vars - /** how quickly should jquery fx run? */ - this.fxSpeed = _.has( attributes, 'fxSpeed' )?( attributes.fxSpeed ):( this.fxSpeed ); - - /** filters for displaying subviews */ - this.filters = []; - /** current search terms */ - this.searchFor = attributes.searchFor || ''; - - /** loading indicator */ - this.indicator = new LoadingIndicator( this.$el ); - - /** currently showing selectors on items? */ - this.selecting = ( attributes.selecting !== undefined )? 
attributes.selecting : true; - //this.selecting = false; - - /** cached selected item.model.ids to persist btwn renders */ - this.selected = attributes.selected || []; - /** the last selected item.model.id */ - this.lastSelected = null; - - /** list item view class (when passed models) */ - this.viewClass = attributes.viewClass || this.viewClass; - - /** list item views */ - this.views = []; - /** list item models */ - this.collection = attributes.collection || ( new Backbone.Collection([]) ); - - /** filter fns run over collection items to see if they should show in the list */ - this.filters = attributes.filters || []; - -//TODO: remove - this.title = attributes.title || ''; - this.subtitle = attributes.subtitle || ''; - - this._setUpListeners(); - }, - - /** create any event listeners for the list - */ - _setUpListeners : function(){ - this.on( 'error', function( model, xhr, options, msg, details ){ - //this.errorHandler( model, xhr, options, msg, details ); - console.error( model, xhr, options, msg, details ); - }, this ); - - // show hide the loading indicator - this.on( 'loading', function(){ - this._showLoadingIndicator( 'loading...', 40 ); - }, this ); - this.on( 'loading-done', function(){ - this._hideLoadingIndicator( 40 ); - }, this ); - - // throw the first render up as a diff namespace using once (for outside consumption) - this.once( 'rendered', function(){ - this.trigger( 'rendered:initial', this ); - }, this ); - - // debugging - if( this.logger ){ - this.on( 'all', function( event ){ - this.log( this + '', arguments ); - }, this ); - } - - this._setUpCollectionListeners(); - this._setUpViewListeners(); - return this; - }, - - /** free any sub-views the list has */ - freeViews : function(){ -//TODO: stopListening? remove? 
- this.views = []; - return this; - }, - - // ------------------------------------------------------------------------ item listeners - /** listening for history and HDA events */ - _setUpCollectionListeners : function(){ - - this.collection.on( 'reset', function(){ - this.renderItems(); - }, this ); - - this.collection.on( 'add', this.addItemView, this ); - this.collection.on( 'remove', this.removeItemView, this ); - - // debugging - if( this.logger ){ - this.collection.on( 'all', function( event ){ - this.info( this + '(collection)', arguments ); - }, this ); - } - return this; - }, - - /** listening for history and HDA events */ - _setUpViewListeners : function(){ - - // shift to select a range - this.on( 'view:selected', function( view, ev ){ - if( ev && ev.shiftKey && this.lastSelected ){ - var lastSelectedView = _.find( this.views, function( view ){ - return view.model.id === this.lastSelected; - }); - if( lastSelectedView ){ - this.selectRange( view, lastSelectedView ); - } - } - this.selected.push( view.model.id ); - this.lastSelected = view.model.id; - }, this ); - }, - - // ------------------------------------------------------------------------ rendering - /** Render this content, set up ui. - * @param {Number or String} speed the speed of the render - */ - render : function( speed ){ - var $newRender = this._buildNewRender(); - this._setUpBehaviors( $newRender ); - this._queueNewRender( $newRender, speed ); - return this; - }, - - /** Build a temp div containing the new children for the view's $el. - */ - _buildNewRender : function(){ - // create a new render using a skeleton template, render title buttons, render body, and set up events, etc. - var json = this.model? 
this.model.toJSON() : {}, - $newRender = $( this.templates.el( json, this ) ); - this._renderTitle( $newRender ); - this._renderSubtitle( $newRender ); - this._renderSearch( $newRender ); - this.renderItems( $newRender ); - return $newRender; - }, - - /** - */ - _renderTitle : function( $where ){ - //$where = $where || this.$el; - //$where.find( '.title' ).replaceWith( ... ) - }, - - /** - */ - _renderSubtitle : function( $where ){ - //$where = $where || this.$el; - //$where.find( '.title' ).replaceWith( ... ) - }, - - /** Fade out the old el, swap in the new contents, then fade in. - * @param {Number or String} speed jq speed to use for rendering effects - * @fires rendered when rendered - */ - _queueNewRender : function( $newRender, speed ) { - speed = ( speed === undefined )?( this.fxSpeed ):( speed ); - var view = this; - - $( view ).queue( 'fx', [ - function( next ){ this.$el.fadeOut( speed, next ); }, - function( next ){ - view._swapNewRender( $newRender ); - next(); - }, - function( next ){ this.$el.fadeIn( speed, next ); }, - function( next ){ - view.trigger( 'rendered', view ); - next(); - } - ]); - }, - - /** empty out the current el, move the $newRender's children in */ - _swapNewRender : function( $newRender ){ - this.$el.empty().attr( 'class', this.className ).append( $newRender.children() ); - if( this.selecting ){ this.showSelectors( 0 ); } - return this; - }, - - /** */ - _setUpBehaviors : function( $where ){ - $where = $where || this.$el; - $where.find( '.controls [title]' ).tooltip({ placement: 'bottom' }); - return this; - }, - - // ------------------------------------------------------------------------ sub-$element shortcuts - /** the scroll container for this panel - can be $el, $el.parent(), or grandparent depending on context */ - $scrollContainer : function(){ - // override - return this.$el.parent().parent(); - }, - /** */ - $list : function( $where ){ - return ( $where || this.$el ).find( '.list-items' ); - }, - /** container where list 
messages are attached */ - $messages : function( $where ){ - return ( $where || this.$el ).find( '.message-container' ); - }, - /** the message displayed when no views can be shown (no views, none matching search) */ - $emptyMessage : function( $where ){ - return ( $where || this.$el ).find( '.empty-message' ); - }, - - // ------------------------------------------------------------------------ hda sub-views - /** - * @param {jQuery} $whereTo what dom element to prepend the HDA views to - * @returns the visible item views - */ - renderItems : function( $whereTo ){ - $whereTo = $whereTo || this.$el; - var list = this, - newViews = []; - - var $list = this.$list( $whereTo ), - item$els = this._filterCollection().map( function( itemModel ){ -//TODO: creates views each time - not neccessarily good - var view = list._createItemView( itemModel ); - newViews.push( view ); - return view.render( 0 ).$el; - }); - this.debug( item$els ); - this.debug( newViews ); - - $list.empty(); - if( item$els.length ){ - $list.append( item$els ); - this.$emptyMessage( $whereTo ).hide(); - - } else { - this._renderEmptyMessage( $whereTo ).show(); - } - - this.views = newViews; - return newViews; - }, - - /** - */ - _filterCollection : function(){ - // override this - var list = this; - return list.collection.filter( _.bind( list._filterItem, list ) ); - }, - - /** - */ - _filterItem : function( model ){ - // override this - var list = this; - return ( _.every( list.filters.map( function( fn ){ return fn.call( model ); }) ) ) - && ( !list.searchFor || model.matchesAll( list.searchFor ) ); - }, - - /** - */ - _createItemView : function( model ){ - var ViewClass = this._getItemViewClass( model ), - options = _.extend( this._getItemViewOptions( model ), { - model : model - }), - view = new ViewClass( options ); - this._setUpItemViewListeners( view ); - return view; - }, - - _getItemViewClass : function( model ){ - // override this - return this.viewClass; - }, - - _getItemViewOptions : 
function( model ){ - // override this - return { - //logger : this.logger, - fxSpeed : this.fxSpeed, - expanded : false, - selectable : this.selecting, - selected : _.contains( this.selected, model.id ), - draggable : this.dragging - }; - }, - - /** - */ - _setUpItemViewListeners : function( view ){ - var list = this; - view.on( 'all', function(){ - var args = Array.prototype.slice.call( arguments, 0 ); - args[0] = 'view:' + args[0]; - list.trigger.apply( list, args ); - }); - - // debugging - //if( this.logger ){ - // view.on( 'all', function( event ){ - // this.log( this + '(view)', arguments ); - // }, this ); - //} - return this; - }, - - /** render the empty/none-found message */ - _renderEmptyMessage : function( $whereTo ){ - //this.debug( '_renderEmptyMessage', $whereTo, this.searchFor ); - var text = this.searchFor? this.noneFoundMsg : this.emptyMsg; - return this.$emptyMessage( $whereTo ).text( text ); - }, - - /** collapse all item views */ - expandAll : function(){ - _.each( this.views, function( view ){ - view.expand(); - }); - }, - - /** collapse all item views */ - collapseAll : function(){ - _.each( this.views, function( view ){ - view.collapse(); - }); - }, - - // ------------------------------------------------------------------------ collection/views syncing - /** - */ - addItemView : function( model, collection, options ){ - this.log( this + '.addItemView:', model ); - var list = this; - if( !this._filterItem( model ) ){ return undefined; } - -//TODO: sorted? position? - var view = list._createItemView( model ); - this.views.push( view ); - - $( view ).queue( 'fx', [ - function( next ){ list.$emptyMessage().fadeOut( list.fxSpeed, next ); }, - function( next ){ -//TODO: auto render? 
- list.$list().append( view.render().$el ); - next(); - } - ]); - return view; - }, - - /** - */ - removeItemView : function( model, collection, options ){ - this.log( this + '.removeItemView:', model ); - var list = this, - view = list.viewFromModel( model ); - if( !view ){ return undefined; } - - this.views = _.without( this.views, view ); - view.remove(); - if( !this.views.length ){ - list._renderEmptyMessage().fadeIn( list.fxSpeed ); - } - return view; - }, - - /** get views based on model - */ - viewFromModel : function( model ){ - for( var i=0; i<this.views.length; i++ ){ - var view = this.views[i]; - if( view.model === model ){ - return view; - } - } - return undefined; - }, - - /** get views based on model properties - */ - viewsWhereModel : function( properties ){ - return this.views.filter( function( view ){ - //return view.model.matches( properties ); -//TODO: replace with _.matches (underscore 1.6.0) - var json = view.model.toJSON(); - //console.debug( '\t', json, properties ); - for( var key in properties ){ - if( properties.hasOwnPropery( key ) ){ - //console.debug( '\t\t', json[ key ], view.model.properties[ key ] ); - if( json[ key ] !== view.model.properties[ key ] ){ - return false; - } - } - } - return true; - }); - }, - - /** - */ - viewRange : function( viewA, viewB ){ - if( viewA === viewB ){ return ( viewA )?( [ viewA ] ):( [] ); } - - var indexA = this.views.indexOf( viewA ), - indexB = this.views.indexOf( viewB ); - - // handle not found - if( indexA === -1 || indexB === -1 ){ - if( indexA === indexB ){ return []; } - return ( indexA === -1 )?( [ viewB ] ):( [ viewA ] ); - } - // reverse if indeces are - //note: end inclusive - return ( indexA < indexB )? 
- this.views.slice( indexA, indexB + 1 ) : - this.views.slice( indexB, indexA + 1 ); - }, - - // ------------------------------------------------------------------------ searching - /** render a search input for filtering datasets shown - * (see the search section in the HDA model for implementation of the actual searching) - * return will start the search - * esc will clear the search - * clicking the clear button will clear the search - * uses searchInput in ui.js - */ - _renderSearch : function( $where ){ - $where.find( '.controls .search-input' ).searchInput({ - placeholder : 'search', - initialVal : this.searchFor, - onfirstsearch : _.bind( this._firstSearch, this ), - onsearch : _.bind( this.searchItems, this ), - onclear : _.bind( this.clearSearch, this ) - }); - return $where; - }, - - _firstSearch : function( searchFor ){ - this.log( 'onFirstSearch', searchFor ); - return this.searchItems( searchFor ); - }, - - /** filter view list to those that contain the searchFor terms */ - searchItems : function( searchFor ){ - this.searchFor = searchFor; - this.trigger( 'search:searching', searchFor, this ); - this.renderItems(); - return this; - }, - - /** clear the search filters and show all views that are normally shown */ - clearSearch : function( searchFor ){ - //this.log( 'onSearchClear', this ); - this.searchFor = ''; - this.trigger( 'search:clear', this ); - this.renderItems(); - return this; - }, - - // ------------------------------------------------------------------------ selection - /** show selectors on all visible hdas and associated controls */ - showSelectors : function( speed ){ - speed = ( speed !== undefined )?( speed ):( this.fxSpeed ); - this.selecting = true; - this.$( '.list-actions' ).slideDown( speed ); - _.each( this.views, function( view ){ - view.showSelector( speed ); - }); - this.selected = []; - this.lastSelected = null; - }, - - /** hide selectors on all visible hdas and associated controls */ - hideSelectors : function( speed ){ - 
speed = ( speed !== undefined )?( speed ):( this.fxSpeed ); - this.selecting = false; - this.$( '.list-actions' ).slideUp( speed ); - _.each( this.views, function( view ){ - view.hideSelector( speed ); - }); - this.selected = []; - this.lastSelected = null; - }, - - /** show or hide selectors on all visible hdas and associated controls */ - toggleSelectors : function(){ - if( !this.selecting ){ - this.showSelectors(); - } else { - this.hideSelectors(); - } - }, - - /** select all visible hdas */ - selectAll : function( event ){ - _.each( this.views, function( view ){ - view.select( event ); - }); - }, - - /** deselect all visible hdas */ - deselectAll : function( event ){ - this.lastSelected = null; - _.each( this.views, function( view ){ - view.deselect( event ); - }); - }, - - /** select a range of datasets between A and B */ - selectRange : function( viewA, viewB ){ - var range = this.viewRange( viewA, viewB ); - _.each( range, function( view ){ - view.select(); - }); - return range; - }, - - /** return an array of all currently selected hdas */ - getSelectedViews : function(){ - return _.filter( this.views, function( v ){ - return v.selected; - }); - }, - - /** return an collection of the models of all currenly selected hdas */ - getSelectedModels : function(){ - return new this.collection.constructor( _.map( this.getSelectedViews(), function( view ){ - return view.model; - })); - }, - - // ------------------------------------------------------------------------ loading indicator -//TODO: questionable - /** hide the $el and display a loading indicator (in the $el's parent) when loading new data */ - _showLoadingIndicator : function( msg, speed, callback ){ - speed = ( speed !== undefined )?( speed ):( this.fxSpeed ); - if( !this.indicator ){ - this.indicator = new LoadingIndicator( this.$el, this.$el.parent() ); - } - if( !this.$el.is( ':visible' ) ){ - this.indicator.show( 0, callback ); - } else { - this.$el.fadeOut( speed ); - this.indicator.show( msg, 
speed, callback ); - } - }, - - /** hide the loading indicator */ - _hideLoadingIndicator : function( speed, callback ){ - speed = ( speed !== undefined )?( speed ):( this.fxSpeed ); - if( this.indicator ){ - this.indicator.hide( speed, callback ); - } - }, - - // ------------------------------------------------------------------------ scrolling - /** get the current scroll position of the panel in its parent */ - scrollPosition : function(){ - return this.$scrollContainer().scrollTop(); - }, - - /** set the current scroll position of the panel in its parent */ - scrollTo : function( pos ){ - this.$scrollContainer().scrollTop( pos ); - return this; - }, - - /** Scrolls the panel to the top. */ - scrollToTop : function(){ - this.$scrollContainer().scrollTop( 0 ); - return this; - }, - - /** */ - scrollToItem : function( view ){ - if( !view ){ return; } - var itemTop = view.$el.offset().top; - this.$scrollContainer().scrollTop( itemTop ); - }, - - // ------------------------------------------------------------------------ panel events - /** event map */ - events : { - 'click .select-all' : 'selectAll', - 'click .deselect-all' : 'deselectAll' - }, - - // ------------------------------------------------------------------------ misc - /** Return a string rep of the history */ - toString : function(){ - return 'ListPanel(' + this.collection + ')'; - } -}); - -// ............................................................................ TEMPLATES -/** underscore templates */ -ListPanel.prototype.templates = (function(){ -//TODO: move to require text! 
plugin - - var elTemplate = BASE_MVC.wrapTemplate([ - // temp container - '<div>', - '<div class="controls">', - '<div class="title">', - '<div class="name"><%= model.name || view.title %></div>', - '</div>', - '<div class="subtitle"><%= view.subtitle %></div>', - '<div class="actions"></div>', - '<div class="messages"></div>', - - '<div class="search">', - '<div class="search-input"></div>', - '</div>', - - '<div class="list-actions">', - '<div class="btn-group">', - '<button class="select-all btn btn-default"', - 'data-mode="select">', _l( 'All' ), '</button>', - '<button class="deselect-all btn btn-default"', - 'data-mode="select">', _l( 'None' ), '</button>', - '</div>', - //'<button class="action-popup-btn btn btn-default">', - // _l( 'For all selected' ), '...', - //'</button>', - '</div>', - '</div>', - '<div class="list-items"></div>', - '<div class="empty-message infomessagesmall"></div>', - '</div>' - ]); - - return { - el : elTemplate - }; -}()); - - - -//============================================================================== - return { - ListPanel: ListPanel - }; -}); diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/history/hda-li.js --- a/static/scripts/mvc/history/hda-li.js +++ b/static/scripts/mvc/history/hda-li.js @@ -17,12 +17,12 @@ /** logger used to record this.log messages, commonly set to console */ //logger : console, + className : _super.prototype.className + " history-content", + initialize : function( attributes, options ){ _super.prototype.initialize.call( this, attributes, options ); }, - className : _super.prototype.className + " history-content", - // ......................................................................... 
misc /** String representation */ toString : function(){ diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/history/history-contents.js --- a/static/scripts/mvc/history/history-contents.js +++ b/static/scripts/mvc/history/history-contents.js @@ -53,6 +53,7 @@ */ initialize : function( models, options ){ options = options || {}; +//TODO: could probably use the contents.history_id instead this.historyId = options.historyId; //this._setUpListeners(); @@ -226,7 +227,7 @@ if( !existing ){ return model; } // merge the models _BEFORE_ calling the superclass version - var merged = existing.toJSON(); + var merged = _.clone( existing.attributes ); _.extend( merged, model ); return merged; }); @@ -303,6 +304,13 @@ return xhr; }, + /** In this override, copy the historyId to the clone */ + clone : function(){ + var clone = Backbone.Collection.prototype.clone.call( this ); + clone.historyId = this.historyId; + return clone; + }, + /** debugging */ print : function(){ var contents = this; diff -r 6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/history/history-model.js --- a/static/scripts/mvc/history/history-model.js +++ b/static/scripts/mvc/history/history-model.js @@ -232,7 +232,7 @@ /** Get data for a history then its hdas using a sequential ajax call, return a deferred to receive both */ History.getHistoryData = function getHistoryData( historyId, options ){ options = options || {}; - var hdaDetailIds = options.hdaDetailIds || []; + var detailIdsFn = options.detailIdsFn || []; var hdcaDetailIds = options.hdcaDetailIds || []; //console.debug( 'getHistoryData:', historyId, options ); @@ -250,20 +250,20 @@ // get the number of hdas accrd. to the history return historyData && historyData.empty; } - function getHdas( historyData ){ + function getContents( historyData ){ // get the hda data // if no hdas accrd. to history: return empty immed. 
if( isEmpty( historyData ) ){ return []; } // if there are hdas accrd. to history: get those as well - if( _.isFunction( hdaDetailIds ) ){ - hdaDetailIds = hdaDetailIds( historyData ); + if( _.isFunction( detailIdsFn ) ){ + detailIdsFn = detailIdsFn( historyData ); } if( _.isFunction( hdcaDetailIds ) ){ hdcaDetailIds = hdcaDetailIds( historyData ); } var data = {}; - if( hdaDetailIds.length ) { - data.dataset_details = hdaDetailIds.join( ',' ); + if( detailIdsFn.length ) { + data.dataset_details = detailIdsFn.join( ',' ); } if( hdcaDetailIds.length ) { // for symmetry, not actually used by backend of consumed @@ -274,10 +274,10 @@ } // getting these concurrently is 400% slower (sqlite, local, vanilla) - so: - // chain the api calls - getting history first then hdas + // chain the api calls - getting history first then contents var historyFn = options.historyFn || getHistory, - hdaFn = options.hdaFn || getHdas; + contentsFn = options.contentsFn || getContents; // chain ajax calls: get history first, then hdas var historyXHR = historyFn( historyId ); @@ -291,15 +291,15 @@ df.reject( xhr, 'loading the history' ); }); - var hdaXHR = historyXHR.then( hdaFn ); - hdaXHR.then( function( hdaJSON ){ - df.notify({ status: 'dataset data retrieved', historyJSON: historyJSON, hdaJSON: hdaJSON }); + var contentsXHR = historyXHR.then( contentsFn ); + contentsXHR.then( function( contentsJSON ){ + df.notify({ status: 'contents data retrieved', historyJSON: historyJSON, contentsJSON: contentsJSON }); // we've got both: resolve the outer scope deferred - df.resolve( historyJSON, hdaJSON ); + df.resolve( historyJSON, contentsJSON ); }); - hdaXHR.fail( function( xhr, status, message ){ + contentsXHR.fail( function( xhr, status, message ){ // call reject on the outer deferred to allow its fail callback to run - df.reject( xhr, 'loading the datasets', { history: historyJSON } ); + df.reject( xhr, 'loading the contents', { history: historyJSON } ); }); return df; diff -r 
6f06ff96d5cacd8cd73b9a7e50d43aff5322dc17 -r 74b2495de918c95730f1f2fd2ac1bf502018a99f static/scripts/mvc/history/history-panel-annotated.js --- a/static/scripts/mvc/history/history-panel-annotated.js +++ b/static/scripts/mvc/history/history-panel-annotated.js @@ -1,118 +1,97 @@ define([ "mvc/history/history-panel", "mvc/history/hda-li", + "mvc/history/hdca-li", + "mvc/base-mvc", "utils/localization" -], function( HPANEL, HDA_LI, _l ){ +], function( HPANEL, HDA_LI, HDCA_LI, BASE_MVC, _l ){ /* ============================================================================= TODO: ============================================================================= */ -var _super = HPANEL.ReadOnlyHistoryPanel; +var _super = HPANEL.HistoryPanel; // used in history/display.mako and history/embed.mako /** @class View/Controller for a tabular view of the history model. - * @name AnnotatedHistoryPanel * * As ReadOnlyHistoryPanel, but with: * history annotation always shown * datasets displayed in a table: * datasets in left cells, dataset annotations in the right - * - * @augments Backbone.View - * @borrows LoggableMixin#logger as #logger - * @borrows LoggableMixin#log as #log - * @constructs */ var AnnotatedHistoryPanel = _super.extend( /** @lends AnnotatedHistoryPanel.prototype */{ /** logger used to record this.log messages, commonly set to console */ - // comment this out to suppress log output //logger : console, - className : 'annotated-history-panel', - - //TODO:?? 
possibly into own annotated class - /** class to use for constructing the HDA views */ - //HDAViewClass : HDA_LI.HDAListItemView, + className : _super.prototype.className + ' annotated-history-panel', // ------------------------------------------------------------------------ panel rendering - /** render with history data - * In this override: - * replace the datasets list with a table, - * add the history annotation, - * and move the search controls - * @returns {jQuery} dom fragment as temporary container to be swapped out later + /** In this override, add the history annotation */ - renderModel : function( ){ - // why do we need this here? why isn't className being applied? - this.$el.addClass( this.className ); - var $newRender = _super.prototype.renderModel.call( this ), - // move datasets from div to table - $datasetsList = this.$datasetsList( $newRender ), - $datasetsTable = $( '<table/>' ).addClass( 'datasets-list datasets-table' ); - $datasetsTable.append( $datasetsList.children() ); - $datasetsList.replaceWith( $datasetsTable ); - //TODO: it's possible to do this with css only, right? display: table-cell, etc.? - - // add history annotation under subtitle - $newRender.find( '.history-subtitle' ).after( this.renderHistoryAnnotation() ); - - // hide search button, move search bar beneath controls (instead of above title), show, and set up - $newRender.find( '.history-search-btn' ).hide(); - $newRender.find( '.history-controls' ).after( $newRender.find( '.history-search-controls' ).show() ); - + _buildNewRender : function(){ + //TODO: shouldn't this display regardless (on all non-current panels)? 
+ var $newRender = _super.prototype._buildNewRender.call( this ); + this.renderHistoryAnnotation( $newRender ); return $newRender; }, /** render the history's annotation as its own field */ - renderHistoryAnnotation : function(){ + renderHistoryAnnotation : function( $newRender ){ var annotation = this.model.get( 'annotation' ); - if( !annotation ){ return null; } - return $([ - '<div class="history-annotation">', annotation, '</div>' - ].join( '' )); + if( !annotation ){ return; } + $newRender.find( '.controls .annotation-display' ).text( annotation ); }, - /** Set up/render a view for each HDA to be shown, init with model and listeners. - * In this override, add table header cells to indicate the dataset, annotation columns + /** In this override, convert the list-items tag to a table + * and add table header cells to indicate the dataset, annotation columns */ - renderHdas : function( $whereTo ){ + renderItems : function( $whereTo ){ $whereTo = $whereTo || this.$el; - var hdaViews = _super.prototype.renderHdas.call( this, $whereTo ); - this.$datasetsList( $whereTo ).prepend( $( '<tr/>' ).addClass( 'headers' ).append([ - $( '<th/>' ).text( _l( 'Dataset' ) ), - $( '<th/>' ).text( _l( 'Annotation' ) ) - ])); - return hdaViews; + + // convert to table + $whereTo.find( '.list-items' ) + .replaceWith( $( '<table/>' ).addClass( 'list-items' ) ); + + // render rows/contents and prepend headers + var views = _super.prototype.renderItems.call( this, $whereTo ); + this.$list( $whereTo ) + .prepend( $( '<tr/>' ).addClass( 'headers' ).append([ + $( '<th/>' ).text( _l( 'Dataset' ) ), + $( '<th/>' ).text( _l( 'Annotation' ) ) + ])); + return views; }, - // ------------------------------------------------------------------------ hda sub-views - /** attach an hdaView to the panel - * In this override, wrap the hdaView in a table row and cell, adding a 2nd cell for the hda annotation + // ------------------------------------------------------------------------ sub-views + /** In 
this override, wrap the content view in a table row + * with the content in the left td and annotation/extra-info in the right */ - attachContentView : function( hdaView, $whereTo ){ - $whereTo = $whereTo || this.$el; - // build a row around the dataset with the std hdaView in the first cell and the annotation in the next - var stateClass = _.find( hdaView.el.classList, function( c ){ return ( /^state\-/ ).test( c ); }), - annotation = hdaView.model.get( 'annotation' ) || '', - $tr = $( '<tr/>' ).addClass( 'dataset-row' ).append([ - $( '<td/>' ).addClass( 'dataset-container' ).append( hdaView.$el ) - // visually match the cell bg to the dataset at runtime (prevents the empty space) - // (getting bg via jq on hidden elem doesn't work on chrome/webkit - so use states) - //.css( 'background-color', hdaView.$el.css( 'background-color' ) ), - .addClass( stateClass? stateClass.replace( '-', '-color-' ): '' ), - $( '<td/>' ).addClass( 'additional-info' ).text( annotation ) - ]); - this.$datasetsList( $whereTo ).append( $tr ); + _attachItems : function( $whereTo ){ + this.$list( $whereTo ).append( this.views.map( function( view ){ + //TODO:?? possibly make this more flexible: instead of annotation use this._additionalInfo() + // build a row around the dataset with the std itemView in the first cell and the annotation in the next + var stateClass = _.find( view.el.classList, function( c ){ return ( /^state\-/ ).test( c ); }), + annotation = view.model.get( 'annotation' ) || '', + $tr = $( '<tr/>' ).append([ + $( '<td/>' ).addClass( 'contents-container' ).append( view.$el ) + // visually match the cell bg to the dataset at runtime (prevents the empty space) + // (getting bg via jq on hidden elem doesn't work on chrome/webkit - so use states) + //.css( 'background-color', view.$el.css( 'background-color' ) ), + .addClass( stateClass? 
stateClass.replace( '-', '-color-' ): '' ), + $( '<td/>' ).addClass( 'additional-info' ).text( annotation ) + ]); + return $tr; + })); + return this; }, // ------------------------------------------------------------------------ panel events /** event map */ events : _.extend( _.clone( _super.prototype.events ), { + // clicking on any part of the row will expand the items 'click tr' : function( ev ){ - //if( !ev.target.hasAttribute( 'href' ) ){ - $( ev.currentTarget ).find( '.dataset-title-bar' ).click(); - //} + $( ev.currentTarget ).find( '.title-bar' ).click(); }, // prevent propagation on icon btns so they won't bubble up to tr and toggleBodyVisibility 'click .icon-btn' : function( ev ){ @@ -127,6 +106,7 @@ } }); + //============================================================================== return { AnnotatedHistoryPanel : AnnotatedHistoryPanel This diff is so big that we needed to truncate the remainder. Repository URL: https://bitbucket.org/galaxy/galaxy-central/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email.