galaxy-dev
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/a7f9325bb319
changeset: 2589:a7f9325bb319
user: rc
date: Thu Aug 20 11:43:28 2009 -0400
description:
Merge with f6e0863862efe02bb416596b08010b513f9ffdf7
2 file(s) affected in this change:
templates/base_panels.mako
test/base/twilltestcase.py
diffs (1505 lines):
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 20 11:43:28 2009 -0400
@@ -6,6 +6,8 @@
from galaxy.datatypes.tabular import *
from galaxy.datatypes.interval import *
from galaxy.datatypes import metadata
+from galaxy.util.json import from_json_string
+from galaxy.util.expressions import ExpressionContext
import pkg_resources
pkg_resources.require( "PasteDeploy" )
@@ -18,6 +20,12 @@
# States for running a job. These are NOT the same as data states
JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_OK, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ok', 'ready', 'deleted', 'admin_deleted'
+
+# This file, if created in the job's working directory, will be used for
+# setting advanced metadata properties on the job and its associated outputs.
+# This interface is currently experimental, is only used by the upload tool,
+# and should eventually become API'd
+TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
class JobManager( object ):
"""
@@ -320,6 +328,7 @@
self.working_directory = \
os.path.join( self.app.config.job_working_directory, str( self.job_id ) )
self.output_paths = None
+ self.tool_provided_job_metadata = None
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job ) #wrapper holding the info required to restore and clean up from files used for setting metadata externally
def get_param_dict( self ):
@@ -422,6 +431,8 @@
dataset.blurb = 'tool error'
dataset.info = message
dataset.set_size()
+ if dataset.ext == 'auto':
+ dataset.extension = 'data'
dataset.flush()
job.state = model.Job.states.ERROR
job.command_line = self.command_line
@@ -486,16 +497,28 @@
except ( IOError, OSError ):
self.fail( "Job %s's output dataset(s) could not be read" % job.id )
return
+ job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
for dataset_assoc in job.output_datasets:
+ context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
+ if context.get( 'path', None ):
+ # The tool can set an alternate output path for the dataset.
+ try:
+ shutil.move( context['path'], dataset.file_name )
+ except ( IOError, OSError ):
+ if not context['stderr']:
+ context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = stdout + stderr
+ dataset.info = context['stdout'] + context['stderr']
dataset.set_size()
- if stderr:
+ if context['stderr']:
dataset.blurb = "error"
elif dataset.has_data():
+ # If the tool was expected to set the extension, attempt to retrieve it
+ if dataset.ext == 'auto':
+ dataset.extension = context.get( 'ext', 'data' )
#if a dataset was copied, it won't appear in our dictionary:
#either use the metadata from originating output dataset, or call set_meta on the copies
#it would be quicker to just copy the metadata from the originating output dataset,
@@ -510,18 +533,39 @@
#the metadata that was stored to disk for use via the external process,
#and the changes made by the user will be lost, without warning or notice
dataset.metadata.from_JSON_dict( self.external_output_metadata.get_output_filenames_by_dataset( dataset ).filename_out )
- if self.tool.is_multi_byte:
- dataset.set_multi_byte_peek()
- else:
- dataset.set_peek()
+ try:
+ assert context.get( 'line_count', None ) is not None
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek( line_count=context['line_count'] )
+ else:
+ dataset.set_peek( line_count=context['line_count'] )
+ except:
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek()
+ else:
+ dataset.set_peek()
+ try:
+ # set the name if provided by the tool
+ dataset.name = context['name']
+ except:
+ pass
else:
dataset.blurb = "empty"
+ if dataset.ext == 'auto':
+ dataset.extension = 'txt'
dataset.flush()
- if stderr:
+ if context['stderr']:
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
- dataset_assoc.dataset.dataset.flush()
+ # If any of the rest of the finish method below raises an
+ # exception, the fail method will run and set the datasets to
+ # ERROR. The user will never see that the datasets are in error if
+ # they were flushed as OK here, since upon doing so, the history
+ # panel stops checking for updates. So allow the
+ # mapping.context.current.flush() at the bottom of this method set
+ # the state instead.
+ #dataset_assoc.dataset.dataset.flush()
# Save stdout and stderr
if len( stdout ) > 32768:
@@ -591,7 +635,8 @@
return self.output_paths
class DatasetPath( object ):
- def __init__( self, real_path, false_path = None ):
+ def __init__( self, dataset_id, real_path, false_path = None ):
+ self.dataset_id = dataset_id
self.real_path = real_path
self.false_path = false_path
def __str__( self ):
@@ -605,10 +650,55 @@
self.output_paths = []
for name, data in [ ( da.name, da.dataset.dataset ) for da in job.output_datasets ]:
false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % data.id ) )
- self.output_paths.append( DatasetPath( data.file_name, false_path ) )
+ self.output_paths.append( DatasetPath( data.id, data.file_name, false_path ) )
else:
- self.output_paths = [ DatasetPath( da.dataset.file_name ) for da in job.output_datasets ]
+ self.output_paths = [ DatasetPath( da.dataset.dataset.id, da.dataset.file_name ) for da in job.output_datasets ]
return self.output_paths
+
+ def get_output_file_id( self, file ):
+ if self.output_paths is None:
+ self.get_output_fnames()
+ for dp in self.output_paths:
+ if self.app.config.outputs_to_working_directory and os.path.basename( dp.false_path ) == file:
+ return dp.dataset_id
+ elif os.path.basename( dp.real_path ) == file:
+ return dp.dataset_id
+ return None
+
+ def get_tool_provided_job_metadata( self ):
+ if self.tool_provided_job_metadata is not None:
+ return self.tool_provided_job_metadata
+
+ # Look for JSONified job metadata
+ self.tool_provided_job_metadata = []
+ meta_file = os.path.join( self.working_directory, TOOL_PROVIDED_JOB_METADATA_FILE )
+ if os.path.exists( meta_file ):
+ for line in open( meta_file, 'r' ):
+ try:
+ line = from_json_string( line )
+ assert 'type' in line
+ except:
+ log.exception( '(%s) Got JSON data from tool, but data is improperly formatted or no "type" key in data' % self.job_id )
+ log.debug( 'Offending data was: %s' % line )
+ continue
+ # Set the dataset id if it's a dataset entry and isn't set.
+ # This isn't insecure. We loop the job's output datasets in
+ # the finish method, so if a tool writes out metadata for a
+ # dataset id that it doesn't own, it'll just be ignored.
+ if line['type'] == 'dataset' and 'dataset_id' not in line:
+ try:
+ line['dataset_id'] = self.get_output_file_id( line['dataset'] )
+ except KeyError:
+ log.warning( '(%s) Tool provided job dataset-specific metadata without specifying a dataset' % self.job_id )
+ continue
+ self.tool_provided_job_metadata.append( line )
+ return self.tool_provided_job_metadata
+
+ def get_dataset_finish_context( self, job_context, dataset ):
+ for meta in self.get_tool_provided_job_metadata():
+ if meta['type'] == 'dataset' and meta['dataset_id'] == dataset.id:
+ return ExpressionContext( meta, job_context )
+ return job_context
def check_output_sizes( self ):
sizes = []
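
[Note: the new TOOL_PROVIDED_JOB_METADATA_FILE interface above expects one self-contained JSON object per line of galaxy.json, each carrying a 'type' key; for a 'dataset' entry, keys such as 'dataset', 'ext', 'name', 'line_count', 'stdout' and 'stderr' feed the dataset finish context. A minimal sketch of a conforming line and the consumer side, with key names taken from the diff but example values invented, and the stdlib json module standing in for galaxy.util.json:

    import json

    # Hypothetical example of what a tool might write to galaxy.json in its
    # working directory: one JSON object per line, each with a 'type' key.
    example_lines = [
        '{"type": "dataset", "dataset": "galaxy_dataset_42.dat",'
        ' "ext": "tabular", "name": "My upload", "line_count": 100}',
    ]

    def parse_tool_provided_metadata(lines):
        """Simplified get_tool_provided_job_metadata: skip any line that is
        not valid JSON or lacks a 'type' key, as the diff does."""
        metadata = []
        for line in lines:
            try:
                entry = json.loads(line)
                assert 'type' in entry
            except Exception:
                continue  # improperly formatted line, ignored
            metadata.append(entry)
        return metadata

    print(parse_tool_provided_metadata(example_lines))

Entries matched to a dataset by id are layered over the job-wide stdout/stderr context via ExpressionContext, so per-dataset values shadow the job-wide ones.]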
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Aug 20 11:43:28 2009 -0400
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil
+import logging, os, string, sys, tempfile, glob, shutil, types
import simplejson
import binascii
from UserDict import DictMixin
@@ -415,6 +415,7 @@
output.metadata_source = data_elem.get("metadata_source", "")
output.parent = data_elem.get("parent", None)
output.label = util.xml_text( data_elem, "label" )
+ output.count = int( data_elem.get("count", 1) )
output.filters = data_elem.findall( 'filter' )
self.outputs[ output.name ] = output
# Any extra generated config files for the tool
@@ -816,7 +817,11 @@
# If we've completed the last page we can execute the tool
elif state.page == self.last_page:
out_data = self.execute( trans, incoming=params )
- return 'tool_executed.mako', dict( out_data=out_data )
+ try:
+ assert type( out_data ) is types.DictType
+ return 'tool_executed.mako', dict( out_data=out_data )
+ except:
+ return 'message.mako', dict( message_type='error', message=out_data, refresh_frames=[] )
# Otherwise move on to the next page
else:
state.page += 1
@@ -824,15 +829,26 @@
self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
else:
- if filter( lambda x: isinstance( x, FieldStorage ) and x.file, state.inputs.values() ):
+ try:
+ self.find_fieldstorage( state.inputs )
+ except InterruptedUpload:
# If inputs contain a file it won't persist. Most likely this
# is an interrupted upload. We should probably find a more
# standard method of determining an incomplete POST.
return self.handle_interrupted( trans, state.inputs )
- else:
- # Just a refresh, render the form with updated state and errors.
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ except:
+ pass
+ # Just a refresh, render the form with updated state and errors.
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ def find_fieldstorage( self, x ):
+ if isinstance( x, FieldStorage ):
+ raise InterruptedUpload( None )
+ elif type( x ) is types.DictType:
+ [ self.find_fieldstorage( y ) for y in x.values() ]
+ elif type( x ) is types.ListType:
+ [ self.find_fieldstorage( y ) for y in x ]
+
def handle_interrupted( self, trans, inputs ):
"""
Upon handling inputs, if it appears that we have received an incomplete
@@ -1704,3 +1720,6 @@
return value
else:
return incoming.get( key, default )
+
+class InterruptedUpload( Exception ):
+ pass
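
[Note: find_fieldstorage above walks arbitrarily nested dicts and lists and signals a live FieldStorage by raising InterruptedUpload, so the caller can branch on the result in a single try/except. A self-contained sketch of that exception-as-found-signal pattern, with a stand-in class replacing cgi.FieldStorage and Python 3 isinstance checks replacing types.DictType/ListType:

    class FieldStorage:  # stand-in for cgi.FieldStorage
        file = True

    class InterruptedUpload(Exception):
        pass

    def find_fieldstorage(x):
        # Raise instead of returning, so a hit deep in the structure
        # unwinds the whole recursion immediately.
        if isinstance(x, FieldStorage):
            raise InterruptedUpload(None)
        elif isinstance(x, dict):
            for v in x.values():
                find_fieldstorage(v)
        elif isinstance(x, list):
            for v in x:
                find_fieldstorage(v)

    state_inputs = {'files': [{'file_data': FieldStorage()}]}
    try:
        find_fieldstorage(state_inputs)
    except InterruptedUpload:
        print('interrupted upload detected')  # handle_interrupted would run here
]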
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 20 11:43:28 2009 -0400
@@ -1,8 +1,10 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+from cgi import FieldStorage
from __init__ import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
+from galaxy.util.json import to_json_string
import sys, traceback
@@ -11,14 +13,28 @@
class UploadToolAction( ToolAction ):
# Action for uploading files
- def __init__( self ):
- self.empty = False
- self.line_count = None
- def remove_tempfile( self, filename ):
- try:
- os.unlink( filename )
- except:
- log.exception( 'failure removing temporary file: %s' % filename )
+ def persist_uploads( self, incoming ):
+ if 'files' in incoming:
+ new_files = []
+ temp_files = []
+ for upload_dataset in incoming['files']:
+ f = upload_dataset['file_data']
+ if isinstance( f, FieldStorage ):
+ # very small files can be StringIOs
+ if 'name' in dir( f.file ) and f.file.name != '<fdopen>':
+ local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
+ f.file.close()
+ else:
+ local_filename = datatypes.sniff.stream_to_file( f.file, prefix="strio_upload_file_" )[0]
+ upload_dataset['file_data'] = dict( filename = f.filename,
+ local_filename = local_filename )
+ if upload_dataset['url_paste'].strip() != '':
+ upload_dataset['url_paste'] = datatypes.sniff.stream_to_file( StringIO.StringIO( upload_dataset['url_paste'] ), prefix="strio_url_paste_" )[0]
+ else:
+ upload_dataset['url_paste'] = None
+ new_files.append( upload_dataset )
+ incoming['files'] = new_files
+ return incoming
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
dataset_upload_inputs = []
for input_name, input in tool.inputs.iteritems():
@@ -42,330 +58,100 @@
log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
else:
self.precreated_datasets.append( data )
+
data_list = []
+
+ incoming = self.persist_uploads( incoming )
+
+ json_file = tempfile.mkstemp()
+ json_file_path = json_file[1]
+ json_file = os.fdopen( json_file[0], 'w' )
for dataset_upload_input in dataset_upload_inputs:
uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming )
for uploaded_dataset in uploaded_datasets:
- precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name )
- dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset, metadata = uploaded_dataset.metadata, uploaded_dataset = uploaded_dataset )
- #dataset state is now set, we should not do anything else to this dataset
- data_list.append( dataset )
- #clean up extra temp names
- uploaded_dataset.clean_up_temp_files()
-
+ data = self.get_precreated_dataset( uploaded_dataset.name )
+ if not data:
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
+ data.name = uploaded_dataset.name
+ data.state = data.states.QUEUED
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.add_dataset( data, genome_build = uploaded_dataset.dbkey )
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ else:
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.genome_build = uploaded_dataset.dbkey
+ if uploaded_dataset.type == 'composite':
+ # we need to init metadata before the job is dispatched
+ data.init_meta()
+ for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
+ setattr( data.metadata, meta_name, meta_value )
+ data.flush()
+ json = dict( file_type = uploaded_dataset.file_type,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ metadata = uploaded_dataset.metadata,
+ primary_file = uploaded_dataset.primary_file,
+ extra_files_path = data.extra_files_path,
+ composite_file_paths = uploaded_dataset.composite_files,
+ composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+ else:
+ try:
+ is_binary = uploaded_dataset.datatype.is_binary
+ except:
+ is_binary = None
+ json = dict( file_type = uploaded_dataset.file_type,
+ name = uploaded_dataset.name,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ is_binary = is_binary,
+ space_to_tab = uploaded_dataset.space_to_tab,
+ path = uploaded_dataset.path )
+ json_file.write( to_json_string( json ) + '\n' )
+ data_list.append( data )
+ json_file.close()
+
#cleanup unclaimed precreated datasets:
for data in self.precreated_datasets:
log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
data.state = data.states.ERROR
data.info = 'No file contents were available.'
- if data_list:
- trans.app.model.flush()
+ if not data_list:
+ try:
+ os.remove( json_file_path )
+ except:
+ pass
+ return 'No data was entered in the upload form, please go back and choose data to upload.'
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
job.history_id = trans.history.id
job.tool_id = tool.id
- try:
- # For backward compatibility, some tools may not have versions yet.
- job.tool_version = tool.version
- except:
- job.tool_version = "1.0.1"
+ job.tool_version = tool.version
job.state = trans.app.model.Job.states.UPLOAD
job.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
+
+ for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ job.add_parameter( name, value )
+ job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
+ for i, dataset in enumerate( data_list ):
+ job.add_output_dataset( i, dataset )
+ trans.app.model.flush()
- #if we could make a 'real' job here, then metadata could be set before job.finish() is called
- hda = data_list[0] #only our first hda is being added as output for the job, why?
- job.state = trans.app.model.Job.states.OK
- file_size_str = datatypes.data.nice_size( hda.dataset.file_size )
- job.info = "%s, size: %s" % ( hda.info, file_size_str )
- job.add_output_dataset( hda.name, hda )
- job.flush()
- log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) )
- trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id )
- return dict( output=hda )
-
- def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None):
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( create_dataset=True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
- data.name = err_code
- data.extension = "txt"
- data.dbkey = "?"
- data.info = err_msg
- data.file_size = 0
- data.state = data.states.EMPTY
- data.flush()
- if precreated_dataset is None:
- trans.history.add_dataset( data )
- trans.app.model.flush()
- # Indicate job failure by setting state and info
- job.state = trans.app.model.Job.states.ERROR
- job.info = err_msg
- job.add_output_dataset( data.name, data )
- job.flush()
- log.info( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ) )
- trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id )
- return dict( output=data )
-
- def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None, metadata = {}, uploaded_dataset = None ):
- def dataset_no_data_error( data, message = 'there was an error uploading your file' ):
- data.info = "No data: %s." % message
- data.state = data.states.ERROR
- if data.extension is None:
- data.extension = 'data'
- return data
- data_type = None
-
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
-
- # See if we have an empty file
- if not os.path.getsize( temp_name ) > 0:
- return dataset_no_data_error( data, message = 'you attempted to upload an empty file' )
- #raise BadFileException( "you attempted to upload an empty file." )
- if is_multi_byte:
- ext = sniff.guess_ext( temp_name, is_multi_byte=True )
- else:
- if not data_type: #at this point data_type is always None (just initialized above), so this is always True...lots of cleanup needed here
- # See if we have a gzipped file, which, if it passes our restrictions,
- # we'll decompress on the fly.
- is_gzipped, is_valid = self.check_gzip( temp_name )
- if is_gzipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_gzipped and is_valid:
- # We need to uncompress the temp_name file
- CHUNK_SIZE = 2**20 # 1Mb
- fd, uncompressed = tempfile.mkstemp()
- gzipped_file = gzip.GzipFile( temp_name )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- return dataset_no_data_error( data, message = 'problem decompressing gzipped data' )
- #raise BadFileException( 'problem decompressing gzipped data.' )
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the decompressed file
- shutil.move( uncompressed, temp_name )
- file_name = file_name.rstrip( '.gz' )
- data_type = 'gzip'
- ext = ''
- if not data_type:
- # See if we have a zip archive
- is_zipped, is_valid, test_ext = self.check_zip( temp_name )
- if is_zipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_zipped and is_valid:
- # Currently, we force specific tools to handle this case. We also require the user
- # to manually set the incoming file_type
- if ( test_ext == 'ab1' or test_ext == 'scf' ) and file_type != 'binseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
- elif test_ext == 'txt' and file_type != 'txtseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
- if not ( file_type == 'binseq.zip' or file_type == 'txtseq.zip' ):
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files" )
- #raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
- data_type = 'zip'
- ext = file_type
- if not data_type:
- if self.check_binary( temp_name ):
- if uploaded_dataset and uploaded_dataset.datatype and uploaded_dataset.datatype.is_binary:
- #we need a more generalized way of checking if a binary upload is of the right format for a datatype...magic number, etc
- data_type = 'binary'
- ext = uploaded_dataset.file_type
- else:
- parts = file_name.split( "." )
- if len( parts ) > 1:
- ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- if ext == 'ab1' and file_type != 'ab1':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
- elif ext == 'scf' and file_type != 'scf':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Scf' when uploading scf files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
- data_type = 'binary'
- if not data_type:
- # We must have a text file
- if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- #if data_type != 'binary' and data_type != 'zip' and not trans.app.datatypes_registry.get_datatype_by_extension( ext ).is_binary:
- if data_type != 'binary' and data_type != 'zip':
- if space_to_tab:
- self.line_count = sniff.convert_newlines_sep2tabs( temp_name )
- else:
- self.line_count = sniff.convert_newlines( temp_name )
- if file_type == 'auto':
- ext = sniff.guess_ext( temp_name, sniff_order=trans.app.datatypes_registry.sniff_order )
- else:
- ext = file_type
- data_type = ext
- if info is None:
- info = 'uploaded %s file' %data_type
- data.extension = ext
- data.name = file_name
- data.dbkey = dbkey
- data.info = info
- data.flush()
- shutil.move( temp_name, data.file_name )
- dataset_state = data.states.OK #don't set actual state here, only set to OK when finished setting attributes of the dataset
- data.set_size()
- data.init_meta()
- #need to set metadata, has to be done after extention is set
- for meta_name, meta_value in metadata.iteritems():
- setattr( data.metadata, meta_name, meta_value )
- if self.line_count is not None:
- try:
- if is_multi_byte:
- data.set_multi_byte_peek( line_count=self.line_count )
- else:
- data.set_peek( line_count=self.line_count )
- except:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
- else:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
-
- # validate incomming data
- # Commented by greg on 3/14/07
- # for error in data.datatype.validate( data ):
- # data.add_validation_error(
- # model.ValidationError( message=str( error ), err_type=error.__class__.__name__, attributes=util.object_to_string( error.__dict__ ) ) )
- if data.missing_meta():
- data.datatype.set_meta( data )
- dbkey_to_store = dbkey
- if type( dbkey_to_store ) == type( [] ):
- dbkey_to_store = dbkey[0]
- if precreated_dataset is not None:
- trans.history.genome_build = dbkey_to_store
- else:
- trans.history.add_dataset( data, genome_build=dbkey_to_store )
- #set up composite files
- if uploaded_dataset is not None:
- composite_files = data.datatype.get_composite_files( data )
- if composite_files:
- os.mkdir( data.extra_files_path ) #make extra files path
- for name, value in composite_files.iteritems():
- if uploaded_dataset.composite_files[ value.name ] is None and not value.optional:
- data.info = "A required composite data file was not provided (%s)" % name
- dataset_state = data.states.ERROR
- break
- elif uploaded_dataset.composite_files[ value.name] is not None:
- if not value.is_binary:
- if uploaded_dataset.composite_files[ value.name ].space_to_tab:
- sniff.convert_newlines_sep2tabs( uploaded_dataset.composite_files[ value.name ].filename )
- else:
- sniff.convert_newlines( uploaded_dataset.composite_files[ value.name ].filename )
- shutil.move( uploaded_dataset.composite_files[ value.name ].filename, os.path.join( data.extra_files_path, name ) )
- if data.datatype.composite_type == 'auto_primary_file':
- #now that metadata was set above, we should create the primary file as required
- open( data.file_name, 'wb+' ).write( data.datatype.generate_primary_file( dataset = data ) )
- data.state = dataset_state #Always set dataset state LAST
- trans.app.model.flush()
- trans.log_event( "Added dataset %d to history %d" %( data.id, trans.history.id ), tool_id="upload" )
- return data
-
- def check_gzip( self, temp_name ):
- temp = open( temp_name, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != util.gzip_magic:
- return ( False, False )
- CHUNK_SIZE = 2**15 # 32Kb
- gzipped_file = gzip.GzipFile( temp_name )
- chunk = gzipped_file.read( CHUNK_SIZE )
- gzipped_file.close()
- if self.check_html( temp_name, chunk=chunk ) or self.check_binary( temp_name, chunk=chunk ):
- return( True, False )
- return ( True, True )
-
- def check_zip( self, temp_name ):
- if not zipfile.is_zipfile( temp_name ):
- return ( False, False, None )
- zip_file = zipfile.ZipFile( temp_name, "r" )
- # Make sure the archive consists of valid files. The current rules are:
- # 1. Archives can only include .ab1, .scf or .txt files
- # 2. All file extensions within an archive must be the same
- name = zip_file.namelist()[0]
- test_ext = name.split( "." )[1].strip().lower()
- if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
- return ( True, False, test_ext )
- for name in zip_file.namelist():
- ext = name.split( "." )[1].strip().lower()
- if ext != test_ext:
- return ( True, False, test_ext )
- return ( True, True, test_ext )
-
- def check_html( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open(temp_name, "U")
- else:
- temp = chunk
- regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
- regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
- regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
- regexp4 = re.compile( "<META[^>]*>", re.I )
- lineno = 0
- for line in temp:
- lineno += 1
- matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
- if matches:
- if chunk is None:
- temp.close()
- return True
- if lineno > 100:
- break
- if chunk is None:
- temp.close()
- return False
- def check_binary( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open( temp_name, "U" )
- else:
- temp = chunk
- lineno = 0
- for line in temp:
- lineno += 1
- line = line.strip()
- if line:
- if util.is_multi_byte( line ):
- return False
- for char in line:
- if ord( char ) > 128:
- if chunk is None:
- temp.close()
- return True
- if lineno > 10:
- break
- if chunk is None:
- temp.close()
- return False
+ # Queue the job for execution
+ trans.app.job_queue.put( job.id, tool )
+ trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return dict( [ ( i, v ) for i, v in enumerate( data_list ) ] )
def get_precreated_dataset( self, name ):
"""
@@ -378,7 +164,3 @@
return self.precreated_datasets.pop( names.index( name ) )
else:
return None
-
-class BadFileException( Exception ):
- pass
-
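
[Note: the reworked action above no longer sniffs file content itself; it serializes one JSON line per uploaded dataset into a temp file and hands that path to the job as the 'paramfile' parameter, deferring all type detection to the external tools/data_source/upload.py. A hypothetical reconstruction of one non-composite paramfile entry, field names per the diff and values illustrative only:

    import json, os, tempfile

    # One JSON object per line; upload.py reads this file back in the job.
    fd, json_file_path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as json_file:
        entry = dict(file_type='auto',
                     name='sequences.fastq',
                     dataset_id=42,
                     dbkey='?',
                     type='file',
                     is_binary=None,
                     space_to_tab=False,
                     path='/tmp/upload_file_data_abc123')
        json_file.write(json.dumps(entry) + '\n')
    print(open(json_file_path).read())
]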
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Aug 20 11:43:28 2009 -0400
@@ -304,21 +304,22 @@
def get_html_field( self, trans=None, value=None, other_values={} ):
return form_builder.FileField( self.name, ajax = self.ajax, value = value )
def from_html( self, value, trans=None, other_values={} ):
+ # TODO: Fix nginx upload module support
# Middleware or proxies may encode files in special ways (TODO: this
# should be pluggable)
- if type( value ) == dict:
- upload_location = self.tool.app.config.nginx_upload_location
- assert upload_location, \
- "Request appears to have been processed by nginx_upload_module \
- but Galaxy is not configured to recgonize it"
- # Check that the file is in the right location
- local_filename = os.path.abspath( value['path'] )
- assert local_filename.startswith( upload_location ), \
- "Filename provided by nginx is not in correct directory"
- value = Bunch(
- filename = value["name"],
- local_filename = local_filename
- )
+ #if type( value ) == dict:
+ # upload_location = self.tool.app.config.nginx_upload_location
+ # assert upload_location, \
+ # "Request appears to have been processed by nginx_upload_module \
+ # but Galaxy is not configured to recgonize it"
+ # # Check that the file is in the right location
+ # local_filename = os.path.abspath( value['path'] )
+ # assert local_filename.startswith( upload_location ), \
+ # "Filename provided by nginx is not in correct directory"
+ # value = Bunch(
+ # filename = value["name"],
+ # local_filename = local_filename
+ # )
return value
def get_required_enctype( self ):
"""
@@ -330,10 +331,18 @@
return None
elif isinstance( value, unicode ) or isinstance( value, str ):
return value
+ elif isinstance( value, dict ):
+ # or should we jsonify?
+ try:
+ return value['local_filename']
+ except:
+ return None
raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
if value is None:
return None
+ elif isinstance( value, unicode ) or isinstance( value, str ):
+ return value
else:
raise Exception( "FileToolParameter cannot be persisted" )
def get_initial_value( self, trans, context ):
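
[Note: with the nginx handling commented out, the to_string/to_python changes above let a FileToolParameter round-trip through persisted tool state: a dict value (the filename/local_filename pair produced by persist_uploads) serializes to its local_filename string, and strings pass back through unchanged. A small standalone sketch of that round trip, not the real class:

    def file_param_to_string(value):
        # dict form comes from persist_uploads:
        # {'filename': ..., 'local_filename': ...}
        if value is None or isinstance(value, str):
            return value
        if isinstance(value, dict):
            return value.get('local_filename')  # None if never persisted
        raise Exception('FileToolParameter cannot be persisted')

    def file_param_to_python(value):
        if value is None or isinstance(value, str):
            return value
        raise Exception('FileToolParameter cannot be persisted')

    state = file_param_to_string({'filename': 'reads.fq',
                                  'local_filename': '/tmp/upload_abc'})
    assert file_param_to_python(state) == '/tmp/upload_abc'
]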
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 11:43:28 2009 -0400
@@ -12,6 +12,7 @@
from galaxy.datatypes import sniff
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
+from galaxy.util import json
class Group( object ):
def __init__( self ):
@@ -167,33 +168,30 @@
rval.append( rval_dict )
return rval
def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
- def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ):
+ def get_data_file_filename( data_file, override_name = None, override_info = None ):
dataset_name = override_name
dataset_info = override_info
def get_file_name( file_name ):
file_name = file_name.split( '\\' )[-1]
file_name = file_name.split( '/' )[-1]
return file_name
- if 'local_filename' in dir( data_file ):
+ try:
# Use the existing file
- return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte
- elif 'filename' in dir( data_file ):
- #create a new tempfile
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
- precreated_name = get_file_name( data_file.filename )
- if not dataset_name:
- dataset_name = precreated_name
- if not dataset_info:
- dataset_info = 'uploaded file'
- return temp_name, get_file_name( data_file.filename ), is_multi_byte, dataset_name, dataset_info
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
- self.remove_temp_file( temp_name )
- return None, None, is_multi_byte, None, None
- def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ):
+ if not dataset_name and 'filename' in data_file:
+ dataset_name = get_file_name( data_file['filename'] )
+ if not dataset_info:
+ dataset_info = 'uploaded file'
+ return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ #return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
+ except:
+ # The uploaded file should've been persisted by the upload tool action
+ return Bunch( type=None, path=None, name=None )
+ #return None, None, None, None, None
+ def get_url_paste_urls_or_filename( group_incoming, override_name = None, override_info = None ):
filenames = []
- if url_paste not in [ None, "" ]:
+ url_paste_file = group_incoming.get( 'url_paste', None )
+ if url_paste_file is not None:
+ url_paste = open( url_paste_file, 'r' ).read( 1024 )
if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
url_paste = url_paste.replace( '\r', '' ).split( '\n' )
for line in url_paste:
@@ -208,114 +206,54 @@
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
- except Exception, e:
- temp_name = None
- precreated_name = str( e )
- log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
+ yield Bunch( type='url', path=line, name=precreated_name )
+ #yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
if override_name:
dataset_name = override_name
if override_info:
dataset_info = override_info
- is_valid = False
- for line in url_paste: #Trim off empty lines from begining
- line = line.rstrip( '\r\n' )
- if line:
- is_valid = True
- break
- if is_valid:
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
- temp_name = None
- precreated_name = str( e )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
-
+ yield Bunch( type='file', path=url_paste_file, name=precreated_name )
+ #yield ( 'file', url_paste_file, precreated_name, dataset_name, dataset_info )
def get_one_filename( context ):
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
if url_paste.strip():
warnings.append( "All file contents specified in the paste box were ignored." )
else: #we need to use url_paste
- #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names:
- if temp_name:
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
break
- ###this check will cause an additional file to be retrieved and created...so lets not do that
- #try: #check to see if additional paste contents were available
- # file_names.next()
- # warnings.append( "Additional file contents were specified in the paste box, but ignored." )
- #except StopIteration:
- # pass
- return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings
-
+ return file_bunch, warnings
def get_filenames( context ):
rval = []
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
- warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ warnings = []
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
+ #rval.append( ( type, temp_name, precreated_name, space_to_tab, dataset_name, dataset_info ) )
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
return rval
- class UploadedDataset( Bunch ):
- def __init__( self, **kwd ):
- Bunch.__init__( self, **kwd )
- self.primary_file = None
- self.composite_files = odict()
- self.dbkey = None
- self.warnings = []
- self.metadata = {}
-
- self._temp_filenames = [] #store all created filenames here, delete on cleanup
- def register_temp_file( self, filename ):
- if isinstance( filename, list ):
- self._temp_filenames.extend( filename )
- else:
- self._temp_filenames.append( filename )
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- pass
- #log.warning( str( e ) )
- def clean_up_temp_files( self ):
- for filename in self._temp_filenames:
- self.remove_temp_file( filename )
-
file_type = self.get_file_type( context )
d_type = self.get_datatype( trans, context )
dbkey = context.get( 'dbkey', None )
@@ -325,51 +263,50 @@
for group_incoming in context.get( self.name, [] ):
i = int( group_incoming['__index__'] )
groups_incoming[ i ] = group_incoming
-
if d_type.composite_type is not None:
#handle uploading of composite datatypes
#Only one Dataset can be created
+ '''
dataset = UploadedDataset()
+ dataset.datatype = d_type
+ '''
+ dataset = Bunch()
+ dataset.type = 'composite'
dataset.file_type = file_type
+ dataset.dbkey = dbkey
dataset.datatype = d_type
- dataset.dbkey = dbkey
+ dataset.warnings = []
+ dataset.metadata = {}
+ dataset.composite_files = {}
#load metadata
files_metadata = context.get( self.metadata_ref, {} )
- for meta_name, meta_spec in d_type.metadata_spec.iteritems():
+ for meta_name, meta_spec in d_type.metadata_spec.iteritems():
if meta_spec.set_in_upload:
if meta_name in files_metadata:
dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
-
- temp_name = None
- precreated_name = None
- is_multi_byte = False
- space_to_tab = False
- warnings = []
dataset_name = None
dataset_info = None
if dataset.datatype.composite_type == 'auto_primary_file':
#replace sniff here with just creating an empty file
temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
- precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ dataset.primary_file = temp_name
+ dataset.space_to_tab = False
+ dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] )
+ file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
if dataset.datatype.composite_type:
precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
writable_files_offset = 1
- if temp_name is None:#remove this before finish, this should create an empty dataset
+ dataset.primary_file = file_bunch.path
+ dataset.space_to_tab = file_bunch.space_to_tab
+ dataset.precreated_name = file_bunch.precreated_name
+ dataset.name = file_bunch.precreated_name
+ dataset.warnings.extend( file_bunch.warnings )
+ if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
raise Exception( 'No primary dataset file was available for composite upload' )
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.precreated_name = precreated_name
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.warnings.extend( warnings )
- dataset.register_temp_file( temp_name )
-
keys = [ value.name for value in writable_files.values() ]
for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
key = keys[ i + writable_files_offset ]
@@ -377,37 +314,22 @@
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
dataset.composite_files[ key ] = None
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming )
- if temp_name:
- dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name )
- dataset.register_temp_file( temp_name )
+ file_bunch, warnings = get_one_filename( group_incoming )
+ if file_bunch.path:
+ dataset.composite_files[ key ] = file_bunch.__dict__
else:
dataset.composite_files[ key ] = None
if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
return [ dataset ]
else:
+ datasets = get_filenames( context[ self.name ][0] )
rval = []
- for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ):
- dataset = UploadedDataset()
+ for dataset in datasets:
dataset.file_type = file_type
- dataset.datatype = d_type
dataset.dbkey = dbkey
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.precreated_name = precreated_name
- dataset.register_temp_file( temp_name )
rval.append( dataset )
- return rval
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- log.warning( str( e ) )
-
+ return rval
class Conditional( Group ):
type = "conditional"
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/util/__init__.py Thu Aug 20 11:43:28 2009 -0400
@@ -3,7 +3,7 @@
"""
import logging
-import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys
+import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile
# Older py compatibility
try:
@@ -454,6 +454,26 @@
out_dict[ str( key ) ] = value
return out_dict
+def mkstemp_ln( src, prefix='mkstemp_ln_' ):
+ """
+ From tempfile._mkstemp_inner, generate a hard link in the same dir with a
+ random name. Created so we can persist the underlying file of a
+ NamedTemporaryFile upon its closure.
+ """
+ dir = os.path.dirname(src)
+ names = tempfile._get_candidate_names()
+ for seq in xrange(tempfile.TMP_MAX):
+ name = names.next()
+ file = os.path.join(dir, prefix + name)
+ try:
+ linked_path = os.link( src, file )
+ return (os.path.abspath(file))
+ except OSError, e:
+ if e.errno == errno.EEXIST:
+ continue # try again
+ raise
+ raise IOError, (errno.EEXIST, "No usable temporary file name found")
+
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) ) #this list is used in edit attributes and the upload tool
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) ) #this list is used in history.tmpl
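
[Note: mkstemp_ln persists a NamedTemporaryFile's data by hard-linking it under a fresh random name in the same directory, so the link survives when the temporary file is deleted on close. A rough modern equivalent, assuming errno is imported (the hunk above does not show it) and noting that os.link returns None, so the new path itself is returned; tempfile._get_candidate_names is a private CPython helper, used only to stay close to the original:

    import errno, os, tempfile

    def mkstemp_ln(src, prefix='mkstemp_ln_'):
        """Hard-link src under a random name in src's directory and
        return the new path."""
        dir = os.path.dirname(src)
        names = tempfile._get_candidate_names()
        for seq in range(tempfile.TMP_MAX):
            name = next(names)
            path = os.path.join(dir, prefix + name)
            try:
                os.link(src, path)  # returns None; return the path instead
                return os.path.abspath(path)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    continue  # name collision, try the next candidate
                raise
        raise IOError(errno.EEXIST, 'No usable temporary file name found')
]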
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 11:43:28 2009 -0400
@@ -136,6 +136,7 @@
"""
Precreate datasets for asynchronous uploading.
"""
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
def create_dataset( name, history ):
data = trans.app.model.HistoryDatasetAssociation( create_dataset = True )
data.name = name
@@ -143,6 +144,7 @@
data.history = history
data.flush()
history.add_dataset( data )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
return data
tool = self.get_toolbox().tools_by_id.get( tool_id, None )
if not tool:
diff -r 070cf5f6f928 -r a7f9325bb319 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py Thu Aug 20 11:39:32 2009 -0400
+++ b/lib/galaxy/web/framework/base.py Thu Aug 20 11:43:28 2009 -0400
@@ -212,6 +212,17 @@
else:
return None
+# For request.params, override cgi.FieldStorage.make_file to create persistent
+# tempfiles. Necessary for externalizing the upload tool. It's a little hacky
+# but for performance reasons it's way better to use Paste's tempfile than to
+# create a new one and copy.
+import cgi
+class FieldStorage( cgi.FieldStorage ):
+ def make_file(self, binary=None):
+ import tempfile
+ return tempfile.NamedTemporaryFile()
+cgi.FieldStorage = FieldStorage
+
class Request( webob.Request ):
"""
Encapsulates an HTTP request.
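
[Note: cgi.FieldStorage normally buffers large uploads in anonymous temporary files that vanish on close; overriding make_file to return a NamedTemporaryFile gives the upload a real filesystem name that persist_uploads can hard-link via mkstemp_ln instead of copying. A minimal sketch of the monkey-patch, keeping the Python 2-era make_file signature from the diff (the cgi module itself is removed in Python 3.13):

    import cgi, tempfile

    class FieldStorage(cgi.FieldStorage):
        def make_file(self, binary=None):
            # Named (rather than anonymous) tempfile, so the upload action
            # can later persist the data by hard link instead of a copy.
            return tempfile.NamedTemporaryFile()

    cgi.FieldStorage = FieldStorage  # so Paste's request parser uses ours
]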
diff -r 070cf5f6f928 -r a7f9325bb319 templates/base_panels.mako
--- a/templates/base_panels.mako Thu Aug 20 11:39:32 2009 -0400
+++ b/templates/base_panels.mako Thu Aug 20 11:43:28 2009 -0400
@@ -72,9 +72,6 @@
<script type="text/javascript">
jQuery( function() {
$("iframe#galaxy_main").load( function() {
- ##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() {
- ##$("iframe")[0].contentDocument.body.innerHTML = "HELLO"
- ##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() {
$(this).contents().find("form").each( function() {
if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
$(this).submit( function() {
diff -r 070cf5f6f928 -r a7f9325bb319 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Thu Aug 20 11:39:32 2009 -0400
+++ b/test/base/twilltestcase.py Thu Aug 20 11:43:28 2009 -0400
@@ -93,6 +93,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when uploading file %s" % ( hid, filename )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
def upload_url_paste( self, url_paste, ftype='auto', dbkey='unspecified (?)' ):
"""Pasted data in the upload utility"""
self.visit_page( "tool_runner/index?tool_id=upload1" )
@@ -112,6 +114,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when pasting %s" % ( hid, url_paste )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
# Functions associated with histories
def check_history_for_errors( self ):
diff -r 070cf5f6f928 -r a7f9325bb319 tools/data_source/upload.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/data_source/upload.py Thu Aug 20 11:43:28 2009 -0400
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+#Processes uploads from the user.
+
+# WARNING: Changes in this tool (particularly as related to parsing) may need
+# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools
+
+import urllib, sys, os, gzip, tempfile, shutil, re, gzip, zipfile
+from galaxy import eggs
+# need to import model before sniff to resolve a circular import dependency
+import galaxy.model
+from galaxy.datatypes import sniff
+from galaxy import util
+from galaxy.util.json import *
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+def stop_err( msg, ret=1 ):
+ sys.stderr.write( msg )
+ sys.exit( ret )
+
+def file_err( msg, dataset, json_file ):
+ json_file.write( to_json_string( dict( type = 'dataset',
+ ext = 'data',
+ dataset_id = dataset.dataset_id,
+ stderr = msg ) ) + "\n" )
+ try:
+ os.remove( dataset.path )
+ except:
+ pass
+
+def safe_dict(d):
+ """
+ Recursively clone json structure with UTF-8 dictionary keys
+ http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-ke…
+ """
+ if isinstance(d, dict):
+ return dict([(k.encode('utf-8'), safe_dict(v)) for k,v in d.iteritems()])
+ elif isinstance(d, list):
+ return [safe_dict(x) for x in d]
+ else:
+ return d
+
+def check_html( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open(temp_name, "U")
+ else:
+ temp = chunk
+ regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
+ regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
+ regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
+ regexp4 = re.compile( "<META[^>]*>", re.I )
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
+ if matches:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 100:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_binary( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open( temp_name, "U" )
+ else:
+ temp = chunk
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ line = line.strip()
+ if line:
+ for char in line:
+ if ord( char ) > 128:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 10:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_gzip( temp_name ):
+ temp = open( temp_name, "U" )
+ magic_check = temp.read( 2 )
+ temp.close()
+ if magic_check != util.gzip_magic:
+ return ( False, False )
+ CHUNK_SIZE = 2**15 # 32Kb
+ gzipped_file = gzip.GzipFile( temp_name )
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ gzipped_file.close()
+ if check_html( temp_name, chunk=chunk ) or check_binary( temp_name, chunk=chunk ):
+ return( True, False )
+ return ( True, True )
+
+def check_zip( temp_name ):
+ if not zipfile.is_zipfile( temp_name ):
+ return ( False, False, None )
+ zip_file = zipfile.ZipFile( temp_name, "r" )
+ # Make sure the archive consists of valid files. The current rules are:
+ # 1. Archives can only include .ab1, .scf or .txt files
+ # 2. All file extensions within an archive must be the same
+ name = zip_file.namelist()[0]
+ test_ext = name.split( "." )[1].strip().lower()
+ if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
+ return ( True, False, test_ext )
+ for name in zip_file.namelist():
+ ext = name.split( "." )[1].strip().lower()
+ if ext != test_ext:
+ return ( True, False, test_ext )
+ return ( True, True, test_ext )
+
+def add_file( dataset, json_file ):
+ data_type = None
+ line_count = None
+
+ if dataset.type == 'url':
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ except Exception, e:
+ file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
+ return
+ dataset.path = temp_name
+ dataset.is_multi_byte = is_multi_byte
+
+ # See if we have an empty file
+ if not os.path.exists( dataset.path ):
+ file_err( 'Uploaded temporary file (%s) does not exist. Please' % dataset.path, dataset, json_file )
+ return
+ if not os.path.getsize( dataset.path ) > 0:
+ file_err( 'The uploaded file is empty', dataset, json_file )
+ return
+ if 'is_multi_byte' not in dir( dataset ):
+ dataset.is_multi_byte = util.is_multi_byte( open( dataset.path, 'r' ).read( 1024 )[:100] )
+ if dataset.is_multi_byte:
+ ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
+ data_type = ext
+ else:
+ # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
+ is_gzipped, is_valid = check_gzip( dataset.path )
+ if is_gzipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_gzipped and is_valid:
+ # We need to uncompress the temp_name file
+ CHUNK_SIZE = 2**20 # 1Mb
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( dataset.path ) )
+ gzipped_file = gzip.GzipFile( dataset.path )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ file_err( 'Problem decompressing gzipped data', dataset, json_file )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ # Replace the gzipped file with the decompressed file
+ shutil.move( uncompressed, dataset.path )
+ if dataset.name.endswith( '.gz' ):
+ dataset.name = dataset.name[ :-3 ] # rstrip( '.gz' ) would strip any trailing '.', 'g' or 'z' characters, not just the suffix
+ data_type = 'gzip'
+ if not data_type:
+ # See if we have a zip archive
+ is_zipped, is_valid, test_ext = check_zip( dataset.path )
+ if is_zipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_zipped and is_valid:
+ # Currently, we force specific tools to handle this case. We also require the user
+ # to manually set the incoming file_type
+ if ( test_ext == 'ab1' or test_ext == 'scf' ) and dataset.file_type != 'binseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'", dataset, json_file )
+ return
+ elif test_ext == 'txt' and dataset.file_type != 'txtseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'", dataset, json_file )
+ return
+ if not ( dataset.file_type == 'binseq.zip' or dataset.file_type == 'txtseq.zip' ):
+ file_err( "You must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files", dataset, json_file )
+ return
+ data_type = 'zip'
+ ext = dataset.file_type
+ if not data_type:
+ if check_binary( dataset.path ):
+ if dataset.is_binary is not None:
+ data_type = 'binary'
+ ext = dataset.file_type
+ else:
+ parts = dataset.name.split( "." )
+ if len( parts ) > 1:
+ ext = parts[-1].strip().lower()
+ if not( ext == 'ab1' or ext == 'scf' ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if ext == 'ab1' and dataset.file_type != 'ab1':
+ file_err( "You must manually set the 'File Format' to 'Ab1' when uploading ab1 files.", dataset, json_file )
+ return
+ elif ext == 'scf' and dataset.file_type != 'scf':
+ file_err( "You must manually set the 'File Format' to 'Scf' when uploading scf files.", dataset, json_file )
+ return
+ data_type = 'binary'
+ if not data_type:
+ # We must have a text file
+ if check_html( dataset.path ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if data_type != 'binary' and data_type != 'zip':
+ if dataset.space_to_tab:
+ line_count = sniff.convert_newlines_sep2tabs( dataset.path )
+ else:
+ line_count = sniff.convert_newlines( dataset.path )
+ if dataset.file_type == 'auto':
+ ext = sniff.guess_ext( dataset.path )
+ else:
+ ext = dataset.file_type
+ data_type = ext
+ # Save job info for the framework
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.path,
+ ext = ext,
+ stdout = 'uploaded %s file' % data_type,
+ name = dataset.name,
+ line_count = line_count )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def add_composite_file( dataset, json_file ):
+ if dataset.composite_files:
+ os.mkdir( dataset.extra_files_path )
+ for name, value in dataset.composite_files.iteritems():
+ value = util.bunch.Bunch( **value )
+ if dataset.composite_file_paths[ value.name ] is None and not value.optional:
+ file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
+ break
+ elif dataset.composite_file_paths[value.name] is not None:
+ if not value.is_binary:
+ if value.space_to_tab: # 'uploaded_dataset' was undefined here; 'value' already wraps this composite file's definition
+ sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ else:
+ sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.primary_file,
+ stdout = 'uploaded %s file' % dataset.file_type )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def __main__():
+
+ if len( sys.argv ) != 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ sys.exit( 1 )
+
+ json_file = open( 'galaxy.json', 'w' )
+
+ for line in open( sys.argv[1], 'r' ):
+ dataset = from_json_string( line )
+ dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+
+ if dataset.type == 'composite':
+ add_composite_file( dataset, json_file )
+ else:
+ add_file( dataset, json_file )
+
+ # clean up paramfile
+ try:
+ os.remove( sys.argv[1] )
+ except:
+ pass
+
+if __name__ == '__main__':
+ __main__()
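For anyone wiring a client to this tool: upload.py speaks line-delimited JSON in both directions -- one dataset description per line of the param file named in sys.argv[1], one result record per line of galaxy.json. A minimal sketch of that contract (field names mirror the attributes used above; the id, paths and values are illustrative):

    import simplejson

    # One dataset description per line; upload.py passes each decoded dict
    # through safe_dict() and wraps it in a Bunch before dispatching on 'type'.
    dataset = dict( type = 'file',               # or 'url' / 'composite'
                    dataset_id = 42,             # illustrative
                    path = '/tmp/upload_0.dat',  # temp file staged by the web layer
                    name = 'my data',
                    file_type = 'auto',          # 'auto' triggers sniff.guess_ext()
                    space_to_tab = False,
                    is_binary = None )
    open( 'params.json', 'w' ).write( simplejson.dumps( dataset ) + "\n" )

    # After running: python upload.py params.json
    # each line of galaxy.json decodes back into one result record:
    for line in open( 'galaxy.json' ):
        info = simplejson.loads( line )
        print info[ 'dataset_id' ], info.get( 'stdout', info.get( 'stderr', '' ) )

Line-delimited JSON keeps the framework side simple: it can pick up per-dataset results as each line lands instead of waiting for the whole batch.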
diff -r 070cf5f6f928 -r a7f9325bb319 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Aug 20 11:39:32 2009 -0400
+++ b/tools/data_source/upload.xml Thu Aug 20 11:43:28 2009 -0400
@@ -1,10 +1,13 @@
<?xml version="1.0"?>
-<tool name="Upload File" id="upload1" version="1.0.2">
+<tool name="Upload File" id="upload1" version="1.0.3">
<description>
from your computer
</description>
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
+ <command interpreter="python">
+ upload.py $paramfile
+ </command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
<options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ &quot;%s%s%s&quot; % ( line, self.separator, line ) for line in obj ]">
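That new <command> block is the heart of the change: uploads now go through the job runner like any other tool, with the web layer writing the param file and the runner executing upload.py. Galaxy renders command templates with Cheetah, so the expansion is roughly (param file path illustrative):

    from Cheetah.Template import Template

    # Fill the single $paramfile variable the way the job wrapper would
    cmd = str( Template( 'upload.py $paramfile',
                         searchList = [ { 'paramfile' : '/tmp/params_42.json' } ] ) )
    print 'python ' + cmd   # -> python upload.py /tmp/params_42.json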
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/070cf5f6f928
changeset: 2588:070cf5f6f928
user: rc
date: Thu Aug 20 11:39:32 2009 -0400
description:
Merge with 5fa8803716fd7d3250a1a78f5521aa18b3e519b0
13 file(s) affected in this change:
lib/galaxy/web/controllers/admin.py
lib/galaxy/web/controllers/library.py
lib/galaxy/web/controllers/requests.py
lib/galaxy/web/controllers/requests_admin.py
lib/galaxy/web/framework/__init__.py
templates/admin/requests/grid.mako
templates/admin/requests/show_request.mako
templates/base_panels.mako
templates/history/grid.mako
templates/history/list_shared.mako
templates/requests/show_request.mako
test/base/twilltestcase.py
test/functional/test_security_and_libraries.py
diffs (truncated from 5056 to 3000 lines):
diff -r 881dd4c2de9f -r 070cf5f6f928 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Thu Aug 20 10:52:08 2009 -0400
+++ b/datatypes_conf.xml.sample Thu Aug 20 11:39:32 2009 -0400
@@ -3,6 +3,7 @@
<registration converters_path="lib/galaxy/datatypes/converters">
<datatype extension="ab1" type="galaxy.datatypes.images:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
<datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
+ <datatype extension="bam" type="galaxy.datatypes.images:Bam" mimetype="application/octet-stream"/>
<datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
<converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
@@ -49,6 +50,7 @@
<datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
<datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/>
<datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/>
+ <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/>
<datatype extension="scf" type="galaxy.datatypes.images:Scf" mimetype="application/octet-stream" display_in_upload="true"/>
<datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/>
<datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
@@ -205,5 +207,6 @@
<sniffer type="galaxy.datatypes.interval:Gff"/>
<sniffer type="galaxy.datatypes.interval:Gff3"/>
<sniffer type="galaxy.datatypes.interval:Interval"/>
+ <sniffer type="galaxy.datatypes.tabular:Sam"/>
</sniffers>
</datatypes>
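Note that adding Sam to the <sniffers> section is what makes file_type 'auto' work for SAM uploads: guess_ext() walks the registered sniffers in order and returns the first datatype whose sniff() accepts the file. A simplified sketch of that loop (the function name and fallback are illustrative; the real guess_ext() applies further text/tabular heuristics before giving up):

    def guess_ext_sketch( fname, sniff_order ):
        # First sniffer to accept the file wins, so order matters
        for datatype in sniff_order:
            try:
                if datatype.sniff( fname ):
                    return datatype.file_ext
            except Exception:
                pass
        return 'txt'   # illustrative fallback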
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/app.py
--- a/lib/galaxy/app.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/app.py Thu Aug 20 11:39:32 2009 -0400
@@ -37,6 +37,8 @@
self.toolbox = tools.ToolBox( self.config.tool_config, self.config.tool_path, self )
# Load datatype converters
self.datatypes_registry.load_datatype_converters( self.toolbox )
+ #load external metadata tool
+ self.datatypes_registry.load_external_metadata_tool( self.toolbox )
# Load datatype indexers
self.datatypes_registry.load_datatype_indexers( self.toolbox )
#Load security policy
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/coverage.py
--- a/lib/galaxy/datatypes/coverage.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/coverage.py Thu Aug 20 11:39:32 2009 -0400
@@ -5,7 +5,7 @@
import pkg_resources
pkg_resources.require( "bx-python" )
-import logging, os, sys, time, sets, tempfile, shutil
+import logging, os, sys, time, tempfile, shutil
import data
from galaxy import util
from galaxy.datatypes.sniff import *
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,4 +1,4 @@
-import logging, os, sys, time, sets, tempfile
+import logging, os, sys, time, tempfile
from galaxy import util
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Thu Aug 20 11:39:32 2009 -0400
@@ -12,7 +12,7 @@
august 20 2007
"""
-import logging, os, sys, time, sets, tempfile, shutil
+import logging, os, sys, time, tempfile, shutil
import data
from galaxy import util
from cgi import escape
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/images.py Thu Aug 20 11:39:32 2009 -0400
@@ -4,6 +4,8 @@
import data
import logging
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes import metadata
from galaxy.datatypes.sniff import *
from urllib import urlencode, quote_plus
import zipfile
@@ -187,7 +189,7 @@
return 'text/html'
def sniff( self, filename ):
"""
- Determines wether the file is in html format
+ Determines whether the file is in html format
>>> fname = get_test_fname( 'complete.bed' )
>>> Html().sniff( fname )
@@ -233,3 +235,25 @@
return dataset.peek
except:
return "peek unavailable"
+
+class Bam( data.Binary ):
+ """Class describing a BAM binary file"""
+ file_ext = "bam"
+ MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'bam','name':'bam alignments','info':'Alignments file','dbkey':dataset.dbkey})
+ dataset.peek = "Binary bam alignments file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek(self, dataset):
+ try:
+ return dataset.peek
+ except:
+ return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
+ def get_mime(self):
+ """Returns the mime type of the datatype"""
+ return 'application/octet-stream'
+
\ No newline at end of file
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/interval.py Thu Aug 20 11:39:32 2009 -0400
@@ -5,7 +5,7 @@
import pkg_resources
pkg_resources.require( "bx-python" )
-import logging, os, sys, time, sets, tempfile, shutil
+import logging, os, sys, time, tempfile, shutil
import data
from galaxy import util
from galaxy.datatypes.sniff import *
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,7 +1,7 @@
"""
Provides mapping between extensions and datatypes, mime-types, etc.
"""
-import os
+import os, tempfile
import logging
import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
import galaxy.util
@@ -18,6 +18,7 @@
self.datatype_converters = odict()
self.datatype_indexers = odict()
self.converters = []
+ self.set_external_metadata_tool = None
self.indexers = []
self.sniff_order = []
self.upload_file_formats = []
@@ -110,6 +111,7 @@
self.datatypes_by_extension = {
'ab1' : images.Ab1(),
'axt' : sequence.Axt(),
+ 'bam' : images.Bam(),
'bed' : interval.Bed(),
'binseq.zip' : images.Binseq(),
'blastxml' : xml.BlastXml(),
@@ -129,6 +131,7 @@
'qualsolid' : qualityscore.QualityScoreSOLiD(),
'qualsolexa' : qualityscore.QualityScoreSolexa(),
'qual454' : qualityscore.QualityScore454(),
+ 'sam' : tabular.Sam(),
'scf' : images.Scf(),
'tabular' : tabular.Tabular(),
'taxonomy' : tabular.Taxonomy(),
@@ -139,6 +142,7 @@
self.mimetypes_by_extension = {
'ab1' : 'application/octet-stream',
'axt' : 'text/plain',
+ 'bam' : 'application/octet-stream',
'bed' : 'text/plain',
'binseq.zip' : 'application/zip',
'blastxml' : 'text/plain',
@@ -156,6 +160,7 @@
'qualsolid' : 'text/plain',
'qualsolexa' : 'text/plain',
'qual454' : 'text/plain',
+ 'sam' : 'text/plain',
'scf' : 'application/octet-stream',
'tabular' : 'text/plain',
'taxonomy' : 'text/plain',
@@ -183,7 +188,8 @@
interval.CustomTrack(),
interval.Gff(),
interval.Gff3(),
- interval.Interval()
+ interval.Interval(),
+ tabular.Sam()
]
def append_to_sniff_order():
# Just in case any supported data types are not included in the config's sniff_order section.
@@ -251,6 +257,31 @@
self.datatype_converters[source_datatype][target_datatype] = converter
self.log.debug( "Loaded converter: %s", converter.id )
+ def load_external_metadata_tool( self, toolbox ):
+ """Adds a tool which is used to set external metadata"""
+ #we need to be able to add a job to the queue to set metadata. The queue will currently only accept jobs with an associated tool.
+ #We'll create a special tool to be used for Auto-Detecting metadata; this is less than ideal, but effective
+ #Properly building a tool without relying on parsing an XML file is near impossible...so we'll create a temporary file
+ tool_xml_text = """
+ <tool id="__SET_METADATA__" name="Set External Metadata" version="1.0.0" tool_type="set_metadata">
+ <type class="SetMetadataTool" module="galaxy.tools"/>
+ <action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/>
+ <command>$__SET_EXTERNAL_METADATA_COMMAND_LINE__</command>
+ <inputs>
+ <param format="data" name="input1" type="data" label="File to set metadata on."/>
+ <param name="__ORIGINAL_DATASET_STATE__" type="hidden" value=""/>
+ <param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value=""/>
+ </inputs>
+ </tool>
+ """
+ tmp_name = tempfile.NamedTemporaryFile()
+ tmp_name.write( tool_xml_text )
+ tmp_name.flush()
+ set_meta_tool = toolbox.load_tool( tmp_name.name )
+ toolbox.tools_by_id[ set_meta_tool.id ] = set_meta_tool
+ self.set_external_metadata_tool = set_meta_tool
+ self.log.debug( "Loaded external metadata tool: %s", self.set_external_metadata_tool.id )
+
def load_datatype_indexers( self, toolbox ):
"""Adds indexers from self.indexers to the toolbox from app"""
for elem in self.indexers:
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/datatypes/tabular.py Thu Aug 20 11:39:32 2009 -0400
@@ -11,6 +11,7 @@
from cgi import escape
from galaxy.datatypes import metadata
from galaxy.datatypes.metadata import MetadataElement
+from sniff import *
log = logging.getLogger(__name__)
@@ -236,3 +237,84 @@
out = "Can't create peek %s" % exc
return out
+class Sam( Tabular ):
+ file_ext = 'sam'
+ def __init__(self, **kwd):
+ """Initialize taxonomy datatype"""
+ Tabular.__init__( self, **kwd )
+ self.column_names = ['QNAME', 'FLAG', 'RNAME', 'POS', 'MAPQ', 'CIGAR',
+ 'MRNM', 'MPOS', 'ISIZE', 'SEQ', 'QUAL', 'OPT'
+ ]
+ def make_html_table( self, dataset, skipchars=[] ):
+ """Create HTML table, used for displaying peek"""
+ out = ['<table cellspacing="0" cellpadding="3">']
+ try:
+ # Generate column header
+ out.append( '<tr>' )
+ for i, name in enumerate( self.column_names ):
+ out.append( '<th>%s.%s</th>' % ( str( i+1 ), name ) )
+ # This data type requires at least 11 columns in the data
+ if dataset.metadata.columns - len( self.column_names ) > 0:
+ for i in range( len( self.column_names ), dataset.metadata.columns ):
+ out.append( '<th>%s</th>' % str( i+1 ) )
+ out.append( '</tr>' )
+ out.append( self.make_html_peek_rows( dataset, skipchars=skipchars ) )
+ out.append( '</table>' )
+ out = "".join( out )
+ except Exception, exc:
+ out = "Can't create peek %s" % exc
+ return out
+ def sniff( self, filename ):
+ """
+ Determines whether the file is in SAM format
+
+ A file in SAM format consists of lines of tab-separated data.
+ The following header line may be the first line:
+ @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL
+ or
+ @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+ Data in the OPT column is optional and can consist of tab-separated data
+
+ For complete details see http://samtools.sourceforge.net/SAM1.pdf
+
+ Rules for sniffing as True:
+ There must be 11 or more columns of data on each line
+ Columns 2 (FLAG), 4 (POS), 5 (MAPQ), 8 (MPOS), and 9 (ISIZE) must be numbers (9 can be negative)
+ We check at most the first 5 alignments for correct formatting.
+
+ >>> fname = get_test_fname( 'sequence.maf' )
+ >>> Sam().sniff( fname )
+ False
+ >>> fname = get_test_fname( '1.sam' )
+ >>> Sam().sniff( fname )
+ True
+ """
+ try:
+ fh = open( filename )
+ count = 0
+ while True:
+ line = fh.readline()
+ line = line.strip()
+ if not line:
+ break #EOF
+ if line:
+ if line[0] != '@':
+ linePieces = line.split('\t')
+ if len(linePieces) < 11:
+ return False
+ try:
+ check = int(linePieces[1])
+ check = int(linePieces[3])
+ check = int(linePieces[4])
+ check = int(linePieces[7])
+ check = int(linePieces[8])
+ except ValueError:
+ return False
+ count += 1
+ if count == 5:
+ return True
+ if count < 5 and count > 0:
+ return True
+ except:
+ pass
+ return False
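A quick sanity check of the sniffer against its own rules, assuming a Galaxy source tree on sys.path (file paths are throwaway):

    from galaxy.datatypes.tabular import Sam

    # 11 tab-separated columns with numeric FLAG/POS/MAPQ/MPOS/ISIZE -> True
    fields = [ 'read1', '0', 'chr1', '100', '30', '23M', '*', '0', '0', 'ACGT', 'IIII' ]
    open( '/tmp/ok.sam', 'w' ).write( '@HD\tVN:1.0\n' + '\t'.join( fields ) + '\n' )
    print Sam().sniff( '/tmp/ok.sam' )   # True: header skipped, one valid alignment

    # Fewer than 11 columns -> False
    open( '/tmp/bad.sam', 'w' ).write( 'just some text\n' )
    print Sam().sniff( '/tmp/bad.sam' )  # False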
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/datatypes/test/1.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/test/1.sam Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,97 @@
+@QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+1378_11_329 69 * 0 0 * * 0 0 AGACCGGGCGGGGTGGCGTTCGGT %##+'#######%###$#$##$(#
+1378_11_329 133 * 0 0 * * 0 0 GTTCGTGGCCGGTGGGTGTTTGGG ###$$#$#$&#####$'$#$###$
+1378_17_1788 69 * 0 0 * * 0 0 TGCCGTGTCTTGCTAACGCCGATT #'#$$#$###%%##$$$$######
+1378_17_1788 133 * 0 0 * * 0 0 TGGGTGGATGTGTTGTCGTTCATG #$#$###$#$#######$#$####
+1378_25_2035 69 * 0 0 * * 0 0 CTGCGTGTTGGTGTCTACTGGGGT #%#'##$#$##&%#%$$$%#%#'#
+1378_25_2035 133 * 0 0 * * 0 0 GTGCGTCGGGGAGGGTGCTGTCGG ######%#$%#$$###($###&&%
+1378_28_770 89 chr11.nib:1-134452384 72131356 37 17M1I5M = 72131356 0 CACACTGTGACAGACAGCGCAGC 00/02!!0//1200210!!44/1 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_28_770 181 chr11.nib:1-134452384 72131356 0 24M = 72131356 0 TTGGTGCGCGCGGTTGAGGGTTGG $$(#%%#$%#%####$%%##$###
+1378_33_1945 113 chr2.nib:1-242951149 181247988 0 23M chr12.nib:1-132349534 41710908 0 GAGAGAGAGAGAGAGAGAGAGAG PQRVUMNXYRPUXYXWXSOSZ]M XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:163148 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_33_1945 177 chr12.nib:1-132349534 41710908 0 23M chr2.nib:1-242951149 181247988 0 AGAGAGAGAGAGAGAGAGAGAGA SQQWZYURVYWX]]YXTSY]]ZM XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:163148 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_34_789 69 * 0 0 * * 0 0 ATGGTGGCTGACGCGTTTGACTGT #$##%#$##$&$#%##$##$###$
+1378_34_789 133 * 0 0 * * 0 0 GGGCTTGCGTTAGTGAGAGGTTGT ###%$%$%%###$####$###$#&
+1378_35_263 115 chr16.nib:1-88827254 19671878 0 23M = 19671877 -1 AGAGAGAGAGAGAGAGAGAGTCT 77543:<55#"4!&=964518A> XT:A:R CM:i:2 SM:i:0 AM:i:0 X0:i:4 X1:i:137 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_35_263 179 chr16.nib:1-88827254 19671877 0 23M = 19671878 1 GAGAGAGAGAGAGAGAGAGAGTC LE7402DD34FL:27AKE>;432 XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:265 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_43_186 69 * 0 0 * * 0 0 ATACTAGTTGGGACGCGTTGTGCT #$(4%$########$#$###$$$#
+1378_43_186 133 * 0 0 * * 0 0 GCTAGGGTTTGGGTTTGCGGTGGG $%#$########%##%#$###'#'
+1378_51_1671 117 chr2.nib:1-242951149 190342418 0 24M = 190342418 0 CTGGCGTTCTCGGCGTGGATGGGT #####$$##$#%#%%###%$#$##
+1378_51_1671 153 chr2.nib:1-242951149 190342418 37 16M1I6M = 190342418 0 TCTAACTTAGCCTCATAATAGCT /<<!"0///////00/!!0121/ XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_56_324 117 chr2.nib:1-242951149 80324999 0 24M = 80324999 0 TCCAGTCGCGTTGTTAGGTTCGGA #$#$$$#####%##%%###**#+/
+1378_56_324 153 chr2.nib:1-242951149 80324999 37 8M1I14M = 80324999 0 TTTAGCCCGAAATGCCTAGAGCA 4;6//11!"11100110////00 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_56_773 69 * 0 0 * * 0 0 TGTCGTGAGGTCACTTATCCCCAT &%#%##%%#####&#$%##$%##$
+1378_56_773 133 * 0 0 * * 0 0 TCTGGTCGGTTTCGGGGAGTGGAA ##%%#&$###$#$##%$####%%$
+1378_62_2027 69 * 0 0 * * 0 0 CTTCCACGATCTGCTCGCTGTGGT (#&&$##$$#$%#%$$$#$###'#
+1378_62_2027 133 * 0 0 * * 0 0 GTTGGCCTGGCCTGCCGTGCTGCG *##),/%##$)#%##1$#'%.#&#
+1378_62_2029 69 * 0 0 * * 0 0 TCTGGGCTGTCTTCGGGTCGGTGT $%$$####$##$$#)##%%#$###
+1378_62_2029 133 * 0 0 * * 0 0 GGCGGTGTGTGGTGCGGCTGTGCG /$$$=(####%####)$$%$-&%#
+1378_67_1795 81 chr16.nib:1-88827254 26739130 0 23M chrY.nib:1-57772954 57401793 0 TGGCATTCCTGTAGGCAGAGAGG AZWWZS]!"QNXZ]VQ]]]/2]] XT:A:R CM:i:2 SM:i:0 AM:i:0 X0:i:3 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_67_1795 161 chrY.nib:1-57772954 57401793 37 23M chr16.nib:1-88827254 26739130 0 GATCACCCAGGTGATGTAACTCC ]WV]]]]WW]]]]]]]]]]PU]] XT:A:U CM:i:0 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_68_466 69 * 0 0 * * 0 0 GTGATCGTCGGTGCCAGTCCCTGT #(%)+##$#$#%#+$%##$#####
+1378_68_466 133 * 0 0 * * 0 0 GTGTCATCTGAGGTAAAGCATTGT /##$09#$#.=$#$76+$%1'###
+1378_68_1692 117 chr13.nib:1-114142980 36365609 0 24M = 36365609 0 TTGAACCGGGCACGGGTCTTCTGG #$#######%###$##%&'%)###
+1378_68_1692 153 chr13.nib:1-114142980 36365609 37 10M1D13M = 36365609 0 CTGCACATACAGAATATTCATAG 0010/!"0/!!021/132231// XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:10^T13
+1378_80_664 69 * 0 0 * * 0 0 CTGCTTTGATCCCCGGTGGAGCAC 7#%###$$6#######$##$$$##
+1378_80_664 133 * 0 0 * * 0 0 TGTCTGCGTTGTATCTCTGGTGTA %##%,%$$#&$$###$#$%##'%#
+1378_85_1786 69 * 0 0 * * 0 0 ATACTATGTCGATCTGTAAAAAAA )&.)#3%@$&%-,2#&+.-%0&./
+1378_85_1786 133 * 0 0 * * 0 0 CCCTAGGAGCGTATACCGGACGAG ,'&/%/@,&1,&'/)&,6&&1)((
+1378_86_1011 69 * 0 0 * * 0 0 CTACGTTATTGCTCTGTTTGTCCT ######$%##$$$%###%#$####
+1378_86_1011 133 * 0 0 * * 0 0 AGGCGATGGGATATTATTTTACTT :$###)%##$9$###1$$#$2###
+1378_86_1789 89 chr12.nib:1-132349534 39007065 37 23M = 39007065 0 GCTTTCCATAGATGTGTAATTTC J2K]]Z5!GN?@U]]]VX]UYYP XT:A:U CM:i:1 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:0 XG:i:0 MD:Z:23
+1378_86_1789 181 chr12.nib:1-132349534 39007065 0 24M = 39007065 0 ACAACTTAAATAATCATGGACCGG 02,5$$0&6#%?*,$'#%&/15.1
+1378_91_1596 69 * 0 0 * * 0 0 TTAGCGGTTGACTATCTGCTGACA *&+'#9'(%*'#//,&<),/)'*#
+1378_91_1596 133 * 0 0 * * 0 0 GCTTTTTCATTCGGTGCCTTTGGA '>%/3%=()8'#.%?50$&5>%)%
+1378_94_1595 69 chr7.nib:1-158821424 127518258 0 24M = 127518258 0 CGTGCGACAGCCCATGTTTTCAGA -=..5,3826&*+.+#+#%%6;%#
+1378_94_1595 137 chr7.nib:1-158821424 127518258 37 23M = 127518258 0 TGAGATAAACACCTAACATGCTC M]]FN]]\V]]]Q>T]KIG:LVN XT:A:U CM:i:0 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_95_1039 69 * 0 0 * * 0 0 CGGCGTCCATCTTCGCCTTGAGAT $##.#$##$$#%$#$%%$###$)$
+1378_95_1039 133 * 0 0 * * 0 0 GTTCTGTGCCAGGTGAGGTACGGA &##,./#$&)6##+,'#$$0(##$
+1378_95_1767 65 chr11.nib:1-134452384 65333552 25 23M chr3.nib:1-199501827 123725482 0 CAACTGGTGGCATCTGGACAAAC W[[TZYY]]RO<BI7!!:!!>@2 XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_95_1767 129 chr3.nib:1-199501827 123725482 37 6M1I16M chr11.nib:1-134452384 65333552 0 ATTTATCTGTCTCATTCATTATT <AGB8B"!V]]UO/&JB4DE88E XT:A:U CM:i:2 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_96_1037 69 * 0 0 * * 0 0 ATCCCCCAAGATGCCTGTTGATTG $#$'##$$$#%$$#%###+##$#$
+1378_96_1037 133 * 0 0 * * 0 0 CTGCTGGGCCATTTGACTTACTCA '$#+#(##-%5##+*&###-.$$$
+1378_96_1764 81 chr15.nib:1-100338915 89251272 25 23M chr7.nib:1-158821424 19412615 0 AGAAATGGTCGCACCCTCTGGTT E*2ZEHX\SN]O>SYRL):LIOL XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_96_1764 161 chr7.nib:1-158821424 19412615 37 23M chr15.nib:1-100338915 89251272 0 GTATAGCCCACAACGCCTAATAT ZMBS]UW]UYR\]QPZ[SMYL7C XT:A:U CM:i:0 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_98_1574 69 * 0 0 * * 0 0 GTTCTGCCGGTGTCTGTGGCGGGC $$#+&$$####%$$$###$%#%%#
+1378_98_1574 133 * 0 0 * * 0 0 AGGCGAGTGTGGGGGTTGTTTGAG +%%$#)##%##$####%###$%$#
+1378_107_1647 69 * 0 0 * * 0 0 AGGCCTACTACGCGTCATTGATAG &#$$#$(.#%#$$####&$%##($
+1378_107_1647 133 * 0 0 * * 0 0 GGTCTGGTTCTATGTTGGTCGACT ###'$$#$$$(#%###(#$##$%#
+1378_111_829 69 chr9.nib:1-140273252 82506894 0 24M = 82506894 0 TGCGGCACTTGCTTCTTCGTATTT %#%##%#$%#$#%###$$##&#$$
+1378_111_829 137 chr9.nib:1-140273252 82506894 37 4M1I18M = 82506894 0 GATGCGTAATCTAGTAAAATAAG 0/362//00/5516500210451 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_111_1900 69 * 0 0 * * 0 0 TCCCCTCGCTCGGCTCTGTGCTGT $&%*$#(#)##$#'##%(##$#$%
+1378_111_1900 133 * 0 0 * * 0 0 GCACGCCTTTGGGCTAAGCCGTAA )$)'#%$########$'#&%$#(#
+1378_112_1483 69 * 0 0 * * 0 0 TGTCCAGCTATGCGGCTTCCTCCT %#$+#%#&#$#####%####%$##
+1378_112_1483 133 * 0 0 * * 0 0 TGGAGTGGTGTGTTTGCTGAGCCA #$#)#############$#%#%'%
+1378_125_1287 69 * 0 0 * * 0 0 TGTCTCTGGGGGGCCTGGTTAGGT $##13$'%#$###$$###$$$#&#
+1378_125_1287 133 * 0 0 * * 0 0 TGACGTGGGTTGTCCCGTGAGATT ##$%%#$###$##$$#&%##$(%%
+1378_126_468 117 chr11.nib:1-134452384 72541052 0 24M = 72541052 0 TGCCTCTATACAGATTAGTCCTCT )7,7..?97594@8=,=?813@>7
+1378_126_468 153 chr11.nib:1-134452384 72541052 0 23M = 72541052 0 AGGCAAGACTCTGTCTCAAAAAA PK5G]]PDT\]SEXY[]]]]]]] XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:4 X1:i:15713 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_127_664 69 * 0 0 * * 0 0 AGAGGTTGGTGTCTTGTCGCAGCT ##'#$######$$%######$$$#
+1378_127_664 133 * 0 0 * * 0 0 TCGCTTTGCCTATGTTTGTTCGGA #%$%#&##$%#%%###$$###)-'
+1378_129_463 97 chr8.nib:1-146274826 29931771 37 23M chr19.nib:1-63811651 5702213 0 GTAGCTCTGTTTCACATTAGGGG J>AQ[G>C?NM:GD=)*PLORIF XT:A:U CM:i:1 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:0 XG:i:0 MD:Z:23
+1378_129_463 145 chr19.nib:1-63811651 5702213 0 23M chr8.nib:1-146274826 29931771 0 AAAAAAAAAAAAAAAAAAAAAAA JOI:AHGD==@KQB78HF>KA8> XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:583698 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_129_875 69 * 0 0 * * 0 0 TTTCTATGGCTTACGCTGTCTGCC #$($##%####%$#$#####$###
+1378_129_875 133 * 0 0 * * 0 0 GACCTTTACGTATTGGGGGTTGGC ###)###+###$##$#&%##$,#$
+1378_140_1251 69 * 0 0 * * 0 0 ATCCTAGCGCGGTGTCTTGGGGAC #$%1#$$$##$##$#$#$##$%$$
+1378_140_1251 133 * 0 0 * * 0 0 TTTCCTTCGTGTGCGTGCGGAGTG #%#%$##$$$######.$$$%#%(
+1378_141_809 69 * 0 0 * * 0 0 TGTCCTCCAGTGTCTGTTGGGTGT %&,-##$$#(%###$#$$'###'#
+1378_141_809 133 * 0 0 * * 0 0 TCTCGTGGTTTCTTTTTTATGTGT ##%)##$$#####%$#$#%%#'##
+1378_144_983 69 * 0 0 * * 0 0 AGCGCCCGGTTGGTGCGGCTCGTC -$(&%*$#*#))#$$$#%%$#$##
+1378_144_983 133 * 0 0 * * 0 0 GTTCGTTCGTGGTGTACGAGGGTG #(#%#####($#%##$$#%##%#)
+1378_153_270 69 * 0 0 * * 0 0 AGTCCTTGTCCCCTGGGTTTTCCC +''$#&%$%#$##&$$($#&#$$#
+1378_153_270 133 * 0 0 * * 0 0 GGCCGTGTGCGGGTGTAGATTGGA %$##($######&##$&$$$$%##
+1378_155_1689 65 chrX.nib:1-154913754 106941539 37 23M = 106940385 -1154 ATCTCCTCTTCCTTCCATTCCAC \]]]Y]]]]]UV]]]ZYZZ]]RV XT:A:U CM:i:0 SM:i:37 AM:i:37 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_155_1689 129 chrX.nib:1-154913754 106940385 37 23M = 106941539 1154 GACTATGAGGTTTTCATTCAACA ]]]]\\]]]YW]]]WRZ]]WIOK XT:A:U CM:i:0 SM:i:37 AM:i:37 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_157_1580 69 * 0 0 * * 0 0 TGGGCCTCGGTGCCCTTGGTCTGT #%)$##'#$$$&#####%#$#$##
+1378_157_1580 133 * 0 0 * * 0 0 GGGATTGAAGGGATGTATGCTAGG #%$&%#$$'%$%#$##*#%$$$$#
+1378_161_317 69 * 0 0 * * 0 0 TTGGCCGGCAACCCCGGTACCTAA 7<,<'@)@>.)2@/')'&(?/-<(
+1378_161_317 133 * 0 0 * * 0 0 AATCCATACCCACAAAAGCAGGCC .&%','(@''?7//+&)+2.+)0)
+1378_177_735 113 chr2.nib:1-242951149 222173182 25 23M = 222173882 700 TTGTTCAGCGCCGATTGTCAATC KPNICFMS]]]Z]]]]Y]]]]]] XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:1G21
+1378_177_735 177 chr2.nib:1-242951149 222173882 37 23M = 222173182 -700 AGAATTCCTAACAAAATGTGAAG ES6-]]]]]]]]]]]]]]]]]]] XT:A:U CM:i:1 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:1 XO:i:0 XG:i:0 MD:Z:23
+1378_181_1684 69 * 0 0 * * 0 0 CGACTCCCGCATTCACGGTCAAGT &*#,##$#&$*$$#$#$$$#%$##
+1378_181_1684 133 * 0 0 * * 0 0 TTTCTGTTGTGGTTTTGTTGGGGT $##'$%'##%##$%$#$$####$*
+1378_187_1407 69 * 0 0 * * 0 0 TGGCGTCCACTCGTGGGTCTATCG $#$'%#$%$%&$%#####$#$#%#
+1378_187_1407 133 * 0 0 * * 0 0 TTGGGTGAAATCTTGTCGAGTGGA ####&##$$###$#####%##%%)
+1378_203_721 97 chr1.nib:1-247249719 245680524 25 23M chr2.nib:1-242951149 213173999 0 GTAAAATTTGTGGAGATTTAAGT ]VEFFEZ]XPW]TOVINQ,;T!! XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_203_721 145 chr2.nib:1-242951149 213173999 37 4M1I18M chr1.nib:1-247249719 245680524 0 ACCTAACAAAATTGTTCAATATG F>8AWT<AV]Q9B"+]O@IF=K] XT:A:U CM:i:2 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_206_2039 113 chr4.nib:1-191273063 103793427 0 23M chr18.nib:1-76117153 57165542 0 ACACACACACACACACACACACA NKWZVWZ]]XV[]]]]]]]]]]] XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:1292040 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_206_2039 177 chr18.nib:1-76117153 57165542 0 23M chr4.nib:1-191273063 103793427 0 CACACACACACACACACACACAC NAJ[SPT[]]]W[]]]]]]]]]] XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:1292040 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 20 11:39:32 2009 -0400
@@ -274,7 +274,7 @@
elif idata.state == idata.states.ERROR:
job_wrapper.fail( "input data %d is in error state" % ( idata.hid ) )
return JOB_INPUT_ERROR
- elif idata.state != idata.states.OK:
+ elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
# need to requeue
return JOB_WAIT
return JOB_READY
@@ -543,7 +543,7 @@
# Certain tools require tasks to be completed after job execution
# ( this used to be performed in the "exec_after_process" hook, but hooks are deprecated ).
if self.tool.tool_type is not None:
- self.tool.exec_after_process( self.queue.app, inp_data, out_data, param_dict )
+ self.tool.exec_after_process( self.queue.app, inp_data, out_data, param_dict, job = job )
# Call 'exec_after_process' hook
self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
out_data=out_data, param_dict=param_dict,
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/jobs/runners/local.py Thu Aug 20 11:39:32 2009 -0400
@@ -104,7 +104,7 @@
#run the metadata setting script here
#this is terminatable when output dataset/job is deleted
#so that long running set_meta()s can be cancelled without having to reboot the server
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally:
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(), kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
external_metadata_proc = subprocess.Popen( args = external_metadata_script,
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Aug 20 11:39:32 2009 -0400
@@ -5,8 +5,7 @@
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import os.path, os, errno
-import sha
+import os.path, os, errno, sys
import galaxy.datatypes
from galaxy.util.bunch import Bunch
from galaxy import util
@@ -14,8 +13,7 @@
import galaxy.datatypes.registry
from galaxy.datatypes.metadata import MetadataCollection
from galaxy.security import RBACAgent, get_permitted_actions
-
-
+from galaxy.util.hash_util import *
import logging
log = logging.getLogger( __name__ )
@@ -40,10 +38,10 @@
def set_password_cleartext( self, cleartext ):
"""Set 'self.password' to the digest of 'cleartext'."""
- self.password = sha.new( cleartext ).hexdigest()
+ self.password = new_secure_hash( text_type=cleartext )
def check_password( self, cleartext ):
"""Check if 'cleartext' matches 'self.password' when hashed."""
- return self.password == sha.new( cleartext ).hexdigest()
+ return self.password == new_secure_hash( text_type=cleartext )
def all_roles( self ):
roles = [ ura.role for ura in self.roles ]
for group in [ uga.group for uga in self.groups ]:
@@ -330,7 +328,8 @@
OK = 'ok',
EMPTY = 'empty',
ERROR = 'error',
- DISCARDED = 'discarded' )
+ DISCARDED = 'discarded',
+ SETTING_METADATA = 'setting_metadata' )
permitted_actions = get_permitted_actions( filter='DATASET' )
file_path = "/tmp/"
engine = None
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,15 +1,13 @@
"""
Classes encapsulating galaxy tools and tool configuration.
"""
-
import pkg_resources;
pkg_resources.require( "simplejson" )
import logging, os, string, sys, tempfile, glob, shutil
import simplejson
-import sha, hmac, binascii
-
+import binascii
from UserDict import DictMixin
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
@@ -26,6 +24,7 @@
from galaxy.util.none_like import NoneDataset
from galaxy.datatypes import sniff
from cgi import FieldStorage
+from galaxy.util.hash_util import *
log = logging.getLogger( __name__ )
@@ -211,7 +210,7 @@
value["__page__"] = self.page
value = simplejson.dumps( value )
# Make it secure
- a = hmac.new( app.config.tool_secret, value, sha ).hexdigest()
+ a = hmac_new( app.config.tool_secret, value )
b = binascii.hexlify( value )
return "%s:%s" % ( a, b )
def decode( self, value, tool, app ):
@@ -221,7 +220,7 @@
# Extract and verify hash
a, b = value.split( ":" )
value = binascii.unhexlify( b )
- test = hmac.new( app.config.tool_secret, value, sha ).hexdigest()
+ test = hmac_new( app.config.tool_secret, value )
assert a == test
# Restore from string
values = json_fix( simplejson.loads( value ) )
@@ -453,7 +452,6 @@
self.tests = None
# Determine if this tool can be used in workflows
self.is_workflow_compatible = self.check_workflow_compatible()
-
def parse_inputs( self, root ):
"""
@@ -1468,7 +1466,7 @@
out_data[ name ] = data
return out_data
- def exec_after_process( self, app, inp_data, out_data, param_dict ):
+ def exec_after_process( self, app, inp_data, out_data, param_dict, job = None ):
if self.tool_type == 'data_source':
name, data = out_data.items()[0]
data.set_size()
@@ -1572,6 +1570,18 @@
dataset.history.add( new_data )
new_data.flush()
return primary_datasets
+
+class SetMetadataTool( Tool ):
+ def exec_after_process( self, app, inp_data, out_data, param_dict, job = None ):
+ for name, dataset in inp_data.iteritems():
+ external_metadata = galaxy.datatypes.metadata.JobExternalOutputMetadataWrapper( job )
+ if external_metadata.external_metadata_set_successfully( dataset ):
+ dataset.metadata.from_JSON_dict( external_metadata.get_output_filenames_by_dataset( dataset ).filename_out )
+ # If setting external metadata has failed, how can we inform the user?
+ # For now, we'll leave the default metadata and set the state back to its original.
+ dataset.datatype.after_edit( dataset )
+ dataset.state = param_dict.get( '__ORIGINAL_DATASET_STATE__' )
+ dataset.flush()
# ---- Utility classes to be factored out -----------------------------------
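Since hmac.new( key, msg, sha ) and hmac.new( key, msg, hashlib.sha1 ) compute the same digest, swapping in hmac_new() keeps the secured-value format -- digest, colon, hexlified payload -- byte-compatible with values encoded before this change. Round trip, with an illustrative secret:

    import binascii
    from galaxy.util.hash_util import hmac_new

    secret = 'tool_secret'        # illustrative; really app.config.tool_secret
    value = '{"__page__": 0}'
    token = "%s:%s" % ( hmac_new( secret, value ), binascii.hexlify( value ) )

    # Mirror the decode path above: split, unhexlify, verify the hash
    a, b = token.split( ":" )
    assert a == hmac_new( secret, binascii.unhexlify( b ) )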
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/tools/actions/metadata.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/tools/actions/metadata.py Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,48 @@
+from __init__ import ToolAction
+from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
+
+import logging
+log = logging.getLogger( __name__ )
+
+class SetMetadataToolAction( ToolAction ):
+ """Tool action used for setting external metadata on an existing dataset"""
+
+ def execute( self, tool, trans, incoming = {}, set_output_hid = False ):
+ for name, value in incoming.iteritems():
+ if isinstance( value, trans.app.model.HistoryDatasetAssociation ):
+ dataset = value
+ dataset_name = name
+ break
+ else:
+ raise Exception( 'The dataset to set metadata on could not be determined.' )
+
+ # Create the job object
+ job = trans.app.model.Job()
+ job.session_id = trans.get_galaxy_session().id
+ job.history_id = trans.history.id
+ job.tool_id = tool.id
+ try:
+ # For backward compatibility, some tools may not have versions yet.
+ job.tool_version = tool.version
+ except:
+ job.tool_version = "1.0.0"
+ job.flush() #ensure job.id is available
+
+ #add parameters to job_parameter table
+ incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state #store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
+ external_metadata_wrapper = JobExternalOutputMetadataWrapper( job )
+ cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, exec_dir = None, tmp_dir = trans.app.config.new_file_path, dataset_files_path = trans.app.model.Dataset.file_path, output_fnames = None, config_root = None, datatypes_config = None, kwds = { 'overwrite' : True } )
+ incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
+ for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ job.add_parameter( name, value )
+ #add the dataset to job_to_input_dataset table
+ job.add_input_dataset( dataset_name, dataset )
+ #Need a special state here to show that metadata is being set and also allow the job to run
+ # i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
+ dataset.state = dataset.states.SETTING_METADATA
+ trans.app.model.flush()
+
+ # Queue the job for execution
+ trans.app.job_queue.put( job.id, tool )
+ trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return []
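For reference, this action gets driven from the dataset attribute-edit path later in this changeset (see the root.py hunk below); the invocation reduces to:

    # 'data' is the HistoryDatasetAssociation whose metadata is being redetected
    tool = trans.app.datatypes_registry.set_external_metadata_tool
    tool.tool_action.execute( tool, trans, incoming = { 'input1' : data } )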
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,4 +1,5 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+from __init__ import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
@@ -8,7 +9,7 @@
import logging
log = logging.getLogger( __name__ )
-class UploadToolAction( object ):
+class UploadToolAction( ToolAction ):
# Action for uploading files
def __init__( self ):
self.empty = False
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/util/hash_util.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/util/hash_util.py Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,28 @@
+import sys, logging
+using_24 = sys.version_info[:2] < ( 2, 5 )
+if using_24:
+ import sha
+else:
+ import hashlib
+import hmac
+
+log = logging.getLogger( __name__ )
+
+"""
+Utility functions for hashing that work across Python versions. Python 2.5
+introduced hashlib, which replaced the sha module used in 2.4 and earlier.
+"""
+def new_secure_hash( text_type=None ):
+ if using_24:
+ if text_type:
+ return sha.new( text_type ).hexdigest()
+ return sha.new()
+ else:
+ if text_type:
+ return hashlib.sha1( text_type ).hexdigest()
+ return hashlib.sha1()
+def hmac_new( key, value ):
+ if using_24:
+ return hmac.new( key, value, sha ).hexdigest()
+ else:
+ return hmac.new( key, value, hashlib.sha1 ).hexdigest()
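A quick smoke test of the shims (inputs illustrative); on 2.5+ both helpers go through hashlib.sha1, so digests match the old sha/hmac output exactly:

    from galaxy.util.hash_util import new_secure_hash, hmac_new

    print new_secure_hash( text_type='galaxy' )   # hex digest, one shot
    h = new_secure_hash()                         # incremental interface
    h.update( 'galaxy' )
    print h.hexdigest()
    print hmac_new( 'secret', '42:7' )            # drop-in for hmac.new( key, msg, sha ).hexdigest()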
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,9 +1,14 @@
-import shutil, StringIO, operator, urllib, gzip, tempfile, sets, string, sys
+import shutil, StringIO, operator, urllib, gzip, tempfile, string, sys
from datetime import datetime, timedelta
from galaxy import util, datatypes
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy.web.controllers.forms import get_all_forms, get_form_widgets
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
import logging
log = logging.getLogger( __name__ )
@@ -1206,7 +1211,6 @@
template = info_association.template
# See if we have any field contents
info = info_association.info
- log.debug("####In library_dataset_dataset_association, info.content: %s" % str( info.content))
if info:
widgets = get_form_widgets( trans, template, info.content )
else:
@@ -1225,16 +1229,16 @@
if v == trans.app.security_agent.permitted_actions.DATASET_ACCESS:
if len( in_roles ) > 1:
# Get the set of all users that are being associated with the dataset
- in_roles_set = sets.Set()
+ in_roles_set = set()
for role in in_roles:
in_roles_set.add( role )
- users_set = sets.Set()
+ users_set = set()
for role in in_roles:
for ura in role.users:
users_set.add( ura.user )
# Make sure that at least 1 user has every role being associated with the dataset
for user in users_set:
- user_roles_set = sets.Set()
+ user_roles_set = set()
for ura in user.roles:
user_roles_set.add( ura.role )
if in_roles_set.issubset( user_roles_set ):
@@ -1410,16 +1414,16 @@
if v == trans.app.security_agent.permitted_actions.DATASET_ACCESS:
if len( in_roles ) > 1:
# Get the set of all users that are being associated with the dataset
- in_roles_set = sets.Set()
+ in_roles_set = set()
for role in in_roles:
in_roles_set.add( role )
- users_set = sets.Set()
+ users_set = set()
for role in in_roles:
for ura in role.users:
users_set.add( ura.user )
# Make sure that at least 1 user has every role being associated with the dataset
for user in users_set:
- user_roles_set = sets.Set()
+ user_roles_set = set()
for ura in user.roles:
user_roles_set.add( ura.role )
if in_roles_set.issubset( user_roles_set ):
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/async.py Thu Aug 20 11:39:32 2009 -0400
@@ -6,8 +6,8 @@
from galaxy import jobs, util, datatypes, web
-import logging, urllib
-import sha, hmac
+import logging, urllib, sys
+from galaxy.util.hash_util import *
log = logging.getLogger( __name__ )
@@ -58,7 +58,7 @@
return "Data %s does not exist or has already been deleted" % data_id
if STATUS == 'OK':
- key = hmac.new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id), sha ).hexdigest()
+ key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
if key != data_secret:
return "You do not have permission to alter data %s." % data_id
# push the job into the queue
@@ -116,7 +116,7 @@
trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )
try:
- key = hmac.new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id), sha ).hexdigest()
+ key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
galaxy_url = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
params.update( { 'GALAXY_URL' :galaxy_url } )
params.update( { 'data_id' :data.id } )
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,4 +1,4 @@
-import logging, os, sets, string, shutil, re, socket, mimetypes, smtplib, urllib
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib
from galaxy.web.base.controller import *
from galaxy import util, datatypes, jobs, web, model
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/genetrack.py
--- a/lib/galaxy/web/controllers/genetrack.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/genetrack.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,11 +1,10 @@
-import time, glob, os
+import time, glob, os, sys
from itertools import cycle
-import sha
-
from mako import exceptions
from mako.template import Template
from mako.lookup import TemplateLookup
from galaxy.web.base.controller import *
+from galaxy.util.hash_util import *
try:
import pkg_resources
@@ -265,7 +264,7 @@
tmpl_name, track_maker = conf.PLOT_MAPPER[param.plot]
# check against a hash, display an image that already exists if it was previously created.
- hash = sha.new()
+ hash = new_secure_hash()
hash.update(str(dataset_id))
for key in sorted(kwds.keys()):
hash.update(str(kwds[key]))
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Aug 20 11:39:32 2009 -0400
@@ -12,7 +12,6 @@
# States for passing messages
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
-
class HistoryListGrid( grids.Grid ):
# Custom column types
@@ -70,8 +69,43 @@
def apply_default_filter( self, trans, query ):
return query.filter_by( user=trans.user, purged=False )
+class SharedHistoryListGrid( grids.Grid ):
+ # Custom column types
+ class DatasetsByStateColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, history ):
+ rval = []
+ for state in ( 'ok', 'running', 'queued', 'error' ):
+ total = sum( 1 for d in history.active_datasets if d.state == state )
+ if total:
+ rval.append( '<div class="count-box state-color-%s">%s</div>' % ( state, total ) )
+ else:
+ rval.append( '' )
+ return rval
+ class SharedByColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, history ):
+ return history.user.email
+ # Grid definition
+ title = "Histories shared with you by others"
+ model_class = model.History
+ default_sort_key = "-update_time"
+ columns = [
+ grids.GridColumn( "Name", key="name" ),
+ DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
+ grids.GridColumn( "Created", key="create_time", format=time_ago ),
+ grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+ SharedByColumn( "Shared by", key="user_id" )
+ ]
+ operations = [
+ grids.GridOperation( "Clone" ),
+ grids.GridOperation( "Unshare" )
+ ]
+ standard_filters = []
+ def build_initial_query( self, session ):
+ return session.query( self.model_class ).join( 'users_shared_with' )
+ def apply_default_filter( self, trans, query ):
+ return query.filter( model.HistoryUserShareAssociation.user == trans.user )
+
class HistoryController( BaseController ):
-
@web.expose
def index( self, trans ):
return ""
@@ -80,7 +114,8 @@
"""XML history list for functional tests"""
return trans.fill_template( "/history/list_as_xml.mako" )
- list_grid = HistoryListGrid()
+ stored_list_grid = HistoryListGrid()
+ shared_list_grid = SharedHistoryListGrid()
@web.expose
@web.require_login( "work with multiple histories" )
@@ -91,7 +126,6 @@
if 'operation' in kwargs:
history_ids = util.listify( kwargs.get( 'id', [] ) )
histories = []
- shared_by_others = []
operation = kwargs['operation'].lower()
if operation == "share":
return self.share( trans, **kwargs )
@@ -127,7 +161,7 @@
status, message = self._list_undelete( trans, histories )
trans.sa_session.flush()
# Render the list view
- return self.list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
+ return self.stored_list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
def _list_delete( self, trans, histories ):
"""Delete histories"""
n_deleted = 0
@@ -195,18 +229,38 @@
# No message
return None, None
@web.expose
- def list_shared( self, trans, **kwd ):
+ def list_shared( self, trans, **kwargs ):
"""List histories shared with current user by others"""
- params = util.Params( kwd )
- msg = util.restore_text( params.get( 'msg', '' ) )
- shared_by_others = trans.sa_session \
- .query( model.HistoryUserShareAssociation ) \
- .filter_by( user=trans.user ) \
- .join( 'history' ) \
- .filter( model.History.deleted == False ) \
- .order_by( desc( model.History.update_time ) ) \
- .all()
- return trans.fill_template( "/history/list_shared.mako", shared_by_others=shared_by_others, msg=msg, messagetype='done' )
+ msg = util.restore_text( kwargs.get( 'msg', '' ) )
+ status = message = None
+ if 'operation' in kwargs:
+ id = kwargs.get( 'id', None )
+ operation = kwargs['operation'].lower()
+ if operation == "clone":
+ if not id:
+ message = "Select a history to clone"
+ return self.shared_list_grid( trans, status='error', message=message, template='/history/grid.mako', **kwargs )
+ # When cloning shared histories, only copy active datasets
+ new_kwargs = { 'clone_choice' : 'active' }
+ return self.clone( trans, id, **new_kwargs )
+ elif operation == 'unshare':
+ if not id:
+ message = "Select a history to unshare"
+ return self.shared_list_grid( trans, status='error', message=message, template='/history/grid.mako', **kwargs )
+ ids = util.listify( id )
+ histories = []
+ for history_id in ids:
+ history = get_history( trans, history_id, check_ownership=False )
+ histories.append( history )
+ for history in histories:
+ # Current user is the user with which the histories were shared
+ association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one()
+ association.delete()
+ association.flush()
+ message = "Unshared %d shared histories" % len( ids )
+ status = 'done'
+ # Render the list view
+ return self.shared_list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -323,6 +377,9 @@
can_change, cannot_change, no_change_needed, unique_no_change_needed, send_to_err = \
self._populate_restricted( trans, user, histories, send_to_users, None, send_to_err, unique=True )
send_to_err += err_msg
+ if cannot_change and not no_change_needed and not can_change:
+ send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
+ return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
if can_change or cannot_change:
return trans.fill_template( "/history/share.mako",
histories=histories,
@@ -350,8 +407,6 @@
email=email,
err_msg=err_msg,
share_button=True ) )
- if action == "no_share":
- trans.response.send_redirect( url_for( controller='root', action='history_options' ) )
user = trans.get_user()
histories, send_to_users, send_to_err = self._get_histories_and_users( trans, user, id, email )
send_to_err = ''
@@ -629,29 +684,38 @@
@web.expose
@web.require_login( "clone shared Galaxy history" )
def clone( self, trans, id, **kwd ):
- history = get_history( trans, id, check_ownership=False )
+ """Clone a list of histories"""
params = util.Params( kwd )
+ ids = util.listify( id )
+ histories = []
+ for history_id in ids:
+ history = get_history( trans, history_id, check_ownership=False )
+ histories.append( history )
clone_choice = params.get( 'clone_choice', None )
if not clone_choice:
return trans.fill_template( "/history/clone.mako", history=history )
user = trans.get_user()
- if history.user == user:
- owner = True
+ for history in histories:
+ if history.user == user:
+ owner = True
+ else:
+ if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+ .filter_by( user=user, history=history ).count() == 0:
+ return trans.show_error_message( "The history you are attempting to clone is not owned by you or shared with you. " )
+ owner = False
+ name = "Clone of '%s'" % history.name
+ if not owner:
+ name += " shared by '%s'" % history.user.email
+ if clone_choice == 'activatable':
+ new_history = history.copy( name=name, target_user=user, activatable=True )
+ elif clone_choice == 'active':
+ name += " (active items only)"
+ new_history = history.copy( name=name, target_user=user )
+ if len( histories ) == 1:
+ msg = 'Clone with name "%s" is now included in your previously stored histories.' % new_history.name
else:
- if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter_by( user=user, history=history ).count() == 0:
- return trans.show_error_message( "The history you are attempting to clone is not owned by you or shared with you. " )
- owner = False
- name = "Clone of '%s'" % history.name
- if not owner:
- name += " shared by '%s'" % history.user.email
- if clone_choice == 'activatable':
- new_history = history.copy( name=name, target_user=user, activatable=True )
- elif clone_choice == 'active':
- name += " (active items only)"
- new_history = history.copy( name=name, target_user=user )
- # Render the list view
- return trans.show_ok_message( 'Clone with name "%s" is now included in your list of stored histories.' % new_history.name )
+ msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
+ return trans.show_ok_message( msg )
## ---- Utility methods -------------------------------------------------------
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Thu Aug 20 11:39:32 2009 -0400
@@ -465,7 +465,7 @@
msg=util.sanitize_text( msg ),
messagetype='error' ) )
# See if we have any associated templates
- info_association = folder.get_info_association()
+ info_association = ldda.get_info_association()
if info_association:
template = info_association.template
# See if we have any field contents
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Thu Aug 20 11:39:32 2009 -0400
@@ -147,16 +147,12 @@
helptext=''))
# library associated
if request.library:
- request_details.append(dict(label='Library',
- value=request.library.name,
- helptext='Associated library where the resultant \
- dataset will be stored'))
+ value = request.library.name
else:
- request_details.append(dict(label='Library',
- value=None,
- helptext='Associated library where the resultant \
- dataset will be stored'))
-
+ value = None
+ request_details.append( dict( label='Data library',
+ value=value,
+ helptext='Data library where the resultant dataset will be stored' ) )
# form fields
for index, field in enumerate(request.type.request_form.fields):
if field['required']:
@@ -489,18 +485,17 @@
else:
lib_list.add_option(lib.name, lib.id)
if lib_id == 'new':
- lib_list.add_option('Create a new library', 'new', selected=True)
+ lib_list.add_option('Create a new data library', 'new', selected=True)
else:
- lib_list.add_option('Create a new library', 'new')
- widget = dict(label='Library',
+ lib_list.add_option('Create a new data library', 'new')
+ widget = dict(label='Data library',
widget=lib_list,
- helptext='Associated library where the resultant \
- dataset will be stored.')
+ helptext='Data library where the resultant dataset will be stored.')
if lib_id == 'new':
- new_lib = dict(label='Create a new Library',
+ new_lib = dict(label='Create a new data library',
widget=TextField('new_library_name', 40,
util.restore_text( params.get( 'new_library_name', '' ) )),
- helptext='Enter a library name here to request a new library')
+ helptext='Enter a name here to request a new data library')
return [widget, new_lib]
else:
return [widget]
@@ -510,7 +505,7 @@
'''
empty_fields = []
# if not request.library:
-# empty_fields.append('Library')
+# empty_fields.append('Data library')
# check rest of the fields of the form
for index, field in enumerate(request.type.request_form.fields):
if field['required'] == 'required' and request.values.content[index] in ['', None]:
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Thu Aug 20 11:39:32 2009 -0400
@@ -544,15 +544,12 @@
helptext=''))
# library associated
if request.library:
- request_details.append(dict(label='Library',
- value=request.library.name,
- helptext='Associated library where the resultant \
- dataset will be stored'))
+ value = request.library.name
else:
- request_details.append(dict(label='Library',
- value=None,
- helptext='Associated library where the resultant \
- dataset will be stored'))
+ value = None
+ request_details.append(dict(label='Data library',
+ value=value,
+ helptext='Data library where the resultant dataset will be stored'))
# form fields
for index, field in enumerate(request.type.request_form.fields):
if field['required']:
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,7 +1,7 @@
"""
Contains the main interface in the Universe class
"""
-import logging, os, sets, string, shutil, urllib, re, socket
+import logging, os, string, shutil, urllib, re, socket
from cgi import escape, FieldStorage
from galaxy import util, datatypes, jobs, web, util
from galaxy.web.base.controller import *
@@ -60,7 +60,6 @@
trans.response.set_content_type('text/xml')
return trans.fill_template_mako( "root/history_as_xml.mako", history=history, show_deleted=util.string_as_bool( show_deleted ) )
else:
- template = "root/history.mako"
show_deleted = util.string_as_bool( show_deleted )
query = trans.sa_session.query( model.HistoryDatasetAssociation ) \
.filter( model.HistoryDatasetAssociation.history == history ) \
@@ -297,10 +296,15 @@
if name not in [ 'name', 'info', 'dbkey' ]:
if spec.get( 'default' ):
setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- data.set_meta()
- data.datatype.after_edit( data )
+ if trans.app.config.set_metadata_externally:
+ msg = 'Attributes have been queued to be updated'
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
+ else:
+ msg = 'Attributes updated'
+ data.set_meta()
+ data.datatype.after_edit( data )
trans.app.model.flush()
- return trans.show_ok_message( "Attributes updated", refresh_frames=['history'] )
+ return trans.show_ok_message( msg, refresh_frames=['history'] )
elif params.convert_data:
target_type = kwd.get("target_type", None)
if target_type:
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 11:39:32 2009 -0400
@@ -5,6 +5,7 @@
from galaxy.web.base.controller import *
from galaxy.util.bunch import Bunch
from galaxy.tools import DefaultToolState
+from galaxy.tools.parameters.basic import UnvalidatedValue
import logging
log = logging.getLogger( __name__ )
@@ -52,15 +53,84 @@
add_frame.wiki_url = trans.app.config.wiki_url
add_frame.from_noframe = True
return trans.fill_template( template, history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
-
+
+ @web.expose
+ def rerun( self, trans, id=None, from_noframe=None, **kwd ):
+ """
+ Given a HistoryDatasetAssociation id, find the job that created
+ the dataset, extract the parameters, and display the appropriate tool
+ form with parameters already filled in.
+ """
+ if not id:
+ error( "'id' parameter is required" )
+ try:
+ id = int( id )
+ except:
+ error( "Invalid value for 'id' parameter" )
+ # Get the dataset object
+ data = trans.app.model.HistoryDatasetAssociation.get( id )
+ # Get the associated job, if any. If this hda was copied from another,
+ # we need to find the job that created the original hda
+ job_hda = data
+ while job_hda.copied_from_history_dataset_association:
+ job_hda = job_hda.copied_from_history_dataset_association
+ if not job_hda.creating_job_associations:
+ error( "Could not find the job for this dataset" )
+ # Get the job object
+ job = None
+ for assoc in job_hda.creating_job_associations:
+ job = assoc.job
+ break
+ if not job:
+ raise Exception( "Failed to get job information for dataset id %d" % id )
+ # Get the tool object
+ tool_id = job.tool_id
+ # Load the tool
+ toolbox = self.get_toolbox()
+ tool = toolbox.tools_by_id.get( tool_id, None )
+ if not tool:
+ # an obsolete tool here is expected, so not an exception
+ error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
+ # Can't rerun upload, external data sources, et cetera; the
+ # workflow-compatible check will proxy this for now
+ if not tool.is_workflow_compatible:
+ error( "The '%s' tool does not currently support rerunning." % tool.name )
+ # Get the job's parameters
+ try:
+ params_objects = job.get_param_values( trans.app )
+ except:
+ raise Exception( "Failed to get parameters for dataset id %d" % id )
+ # Unpack unvalidated values to strings, they'll be validated when the
+ # form is submitted (this happens when re-running a job that was
+ # initially run by a workflow)
+ validated_params = {}
+ for name, value in params_objects.items():
+ if isinstance( value, UnvalidatedValue ):
+ validated_params[ str(name) ] = str(value)
+ else:
+ validated_params[ str(name) ] = value
+ params_objects = validated_params
+ # Create a fake tool_state for the tool, with the parameter values
+ state = tool.new_state( trans )
+ state.inputs = params_objects
+ tool_state_string = util.object_to_string(state.encode(tool, trans.app))
+ # Setup context for template
+ history = trans.get_history()
+ vars = dict( tool_state=state, errors = {} )
+ # Is the "add frame" stuff necessary here?
+ add_frame = AddFrameData()
+ add_frame.debug = trans.debug
+ if from_noframe is not None:
+ add_frame.wiki_url = trans.app.config.wiki_url
+ add_frame.from_noframe = True
+ return trans.fill_template( "tool_form.mako", history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
@web.expose
def redirect( self, trans, redirect_url=None, **kwd ):
if not redirect_url:
return trans.show_error_message( "Required URL for redirection missing" )
trans.log_event( "Redirecting to: %s" % redirect_url )
return trans.fill_template( 'root/redirect.mako', redirect_url=redirect_url )
-
-
@web.json
def upload_async_create( self, trans, tool_id=None, **kwd ):
"""
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/form_builder.py Thu Aug 20 11:39:32 2009 -0400
@@ -3,6 +3,7 @@
"""
import logging,sys
+from cgi import escape
log = logging.getLogger(__name__)
class BaseField(object):
@@ -28,7 +29,7 @@
self.value = value or ""
def get_html( self, prefix="" ):
return '<input type="text" name="%s%s" size="%d" value="%s">' \
- % ( prefix, self.name, self.size, self.value )
+ % ( prefix, self.name, self.size, escape(str(self.value), quote=True) )
def set_size(self, size):
self.size = int( size )
@@ -49,7 +50,7 @@
self.value = value or ""
def get_html( self, prefix="" ):
return '<textarea name="%s%s" rows="%d" cols="%d">%s</textarea>' \
- % ( prefix, self.name, self.rows, self.cols, self.value )
+ % ( prefix, self.name, self.rows, self.cols, escape(str(self.value), quote=True) )
def set_size(self, rows, cols):
self.rows = rows
self.cols = cols
@@ -113,7 +114,7 @@
self.name = name
self.value = value or ""
def get_html( self, prefix="" ):
- return '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, self.value )
+ return '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, escape(str(self.value), quote=True) )
class SelectField(BaseField):
"""
@@ -190,9 +191,9 @@
if len(self.options) > 2 and ctr % 2 == 1:
style = " class=\"odd_row\""
if selected:
- rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" checked>%s</div>' % ( style, prefix, self.name, value, text) )
+ rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" checked>%s</div>' % ( style, prefix, self.name, escape(str(value), quote=True), text) )
else:
- rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s">%s</div>' % ( style, prefix, self.name, value, text) )
+ rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s">%s</div>' % ( style, prefix, self.name, escape(str(value), quote=True), text) )
ctr += 1
return "\n".join( rval )
def get_html_radio( self, prefix="" ):
@@ -204,7 +205,7 @@
style = " class=\"odd_row\""
if selected: selected_text = " checked"
else: selected_text = ""
- rval.append( '<div%s><input type="radio" name="%s%s"%s value="%s"%s>%s</div>' % ( style, prefix, self.name, self.refresh_on_change_text, value, selected_text, text ) )
+ rval.append( '<div%s><input type="radio" name="%s%s"%s value="%s"%s>%s</div>' % ( style, prefix, self.name, self.refresh_on_change_text, escape(str(value), quote=True), selected_text, text ) )
ctr += 1
return "\n".join( rval )
def get_html_default( self, prefix="" ):
@@ -217,9 +218,9 @@
selected_text = " selected"
last_selected_value = value
else: selected_text = ""
- rval.append( '<option value="%s"%s>%s</option>' % ( value, selected_text, text ) )
+ rval.append( '<option value="%s"%s>%s</option>' % ( escape(str(value), quote=True), selected_text, text ) )
if last_selected_value:
- last_selected_value = ' last_selected_value="%s"' % last_selected_value
+ last_selected_value = ' last_selected_value="%s"' % escape(str(last_selected_value), quote=True)
rval.insert( 0, '<select name="%s%s"%s%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text, last_selected_value ) )
rval.append( '</select>' )
return "\n".join( rval )
@@ -326,12 +327,12 @@
if option['value'] in expanded_options:
default_state = 'expanded'
default_icon = '[-]'
- html.append( '<li><span class="toolParameterExpandableCollapsable">%s</span><input type="%s" name="%s%s" value="%s"%s">%s' % ( default_icon, self.display, prefix, self.name, option['value'], selected, option['name']) )
+ html.append( '<li><span class="toolParameterExpandableCollapsable">%s</span><input type="%s" name="%s%s" value="%s"%s">%s' % ( default_icon, self.display, prefix, self.name, escape(str(option['value']), quote=True), selected, option['name']) )
html.append( '<ul class="toolParameterExpandableCollapsable" default_state="%s">' % default_state )
recurse_options( html, option['options'], expanded_options )
html.append( '</ul>')
else:
- html.append( '<li><input type="%s" name="%s%s" value="%s"%s">%s' % ( self.display, prefix, self.name, option['value'], selected, option['name']) )
+ html.append( '<li><input type="%s" name="%s%s" value="%s"%s">%s' % ( self.display, prefix, self.name, escape(str(option['value']), quote=True), selected, option['name']) )
html.append( '</li>' )
rval = []
rval.append( '<div><ul class="toolParameterExpandableCollapsable">' )
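The form_builder hunks above all make the same change: every attribute value is routed through escape( str( value ), quote=True ) before interpolation, so quotes or angle brackets in a user-supplied value can no longer terminate the attribute early. A minimal JavaScript sketch of the same idea, with a hypothetical escapeAttr helper standing in for Python's cgi.escape:

    // Hypothetical helper mirroring cgi.escape(value, quote=True)
    function escapeAttr(value) {
        return String(value).replace(/&/g, '&amp;')   // ampersands first
                            .replace(/</g, '&lt;')
                            .replace(/>/g, '&gt;')
                            .replace(/"/g, '&quot;');
    }
    // A value like 'a "quoted" name' now renders inside the attribute
    // instead of breaking out of it:
    var html = '<input type="text" name="t" value="' + escapeAttr(value) + '">';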
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Aug 20 11:39:32 2009 -0400
@@ -93,8 +93,8 @@
"""
Exception to make throwing errors from deep in controllers easier
"""
- def __init__( self, message, type="info" ):
- self.message = message
+ def __init__( self, err_msg, type="info" ):
+ self.err_msg = err_msg
self.type = type
def error( message ):
@@ -117,7 +117,7 @@
self.security = galaxy_app.security
def handle_controller_exception( self, e, trans, **kwargs ):
if isinstance( e, MessageException ):
- return trans.show_message( e.message, e.type )
+ return trans.show_message( e.err_msg, e.type )
def make_body_iterable( self, trans, body ):
if isinstance( body, FormBuilder ):
body = trans.show_form( body )
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/web/framework/helpers/grids.py
--- a/lib/galaxy/web/framework/helpers/grids.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/web/framework/helpers/grids.py Thu Aug 20 11:39:32 2009 -0400
@@ -156,9 +156,7 @@
elif column_filter == "All":
del filter_args[self.key]
return query
-
-
-
+
class GridOperation( object ):
def __init__( self, label, key=None, condition=None, allow_multiple=True ):
self.label = label
diff -r 881dd4c2de9f -r 070cf5f6f928 lib/galaxy/webapps/reports/controllers/root.py
--- a/lib/galaxy/webapps/reports/controllers/root.py Thu Aug 20 10:52:08 2009 -0400
+++ b/lib/galaxy/webapps/reports/controllers/root.py Thu Aug 20 11:39:32 2009 -0400
@@ -1,8 +1,8 @@
-import sys, os, operator, sets, string, shutil, re, socket, urllib
+import sys, os, operator, string, shutil, re, socket, urllib, time
from galaxy import web
from cgi import escape, FieldStorage
from galaxy.webapps.reports.base.controller import *
-import logging, sets, time
+import logging
log = logging.getLogger( __name__ )
class Report( BaseController ):
diff -r 881dd4c2de9f -r 070cf5f6f928 static/jStore.Flash.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/jStore.Flash.html Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,19 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
+ <head>
+ <title>Flash External Object</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ <script type="text/javascript">
+ /**
+ * This function captures the flash_ready event. We need to relay this
+ * back to the parent so it knows flash is ready.
+ */
+ function flash_ready(){
+ parent.flash_ready();
+ }
+ </script>
+ </head>
+ <body>
+ <object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" codebase="http://fpdownload.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#versi…" width="1" height="1" id="jStoreFlash"><param name="allowScriptAccess" value="always" /><param name="movie" value="jStore.swf" /><param name="quality" value="high" /><param name="bgcolor" value="#ffcc00" /><embed src="jStore.swf" quality="high" bgcolor="#ffcc00" width="1" height="1" name="jStoreFlash" align="middle" allowScriptAccess="always" type="application/x-shockwave-flash" pluginspage="http://www.macromedia.com/go/getflashplayer" /></object>
+ </body>
+</html>
\ No newline at end of file
diff -r 881dd4c2de9f -r 070cf5f6f928 static/jStore.swf
Binary file static/jStore.swf has changed
diff -r 881dd4c2de9f -r 070cf5f6f928 static/june_2007_style/blue/history.css
--- a/static/june_2007_style/blue/history.css Thu Aug 20 10:52:08 2009 -0400
+++ b/static/june_2007_style/blue/history.css Thu Aug 20 11:39:32 2009 -0400
@@ -13,6 +13,8 @@
div.historyItem-empty .state-icon{background:url(history-states.png) no-repeat 0px -25px;}
div.historyItem-running{border-color:#AAAA66;background:#FFFFCC;}
div.historyItem-running .state-icon{background-image:url(data_running.gif);}
+div.historyItem-setting_metadata{border-color:#AAAA66;background:#FFFFCC;}
+div.historyItem-setting_metadata .state-icon{background-image:url(data_running.gif);}
div.historyItem-upload{border-color:#6666AA;background:#CCCCFF;}
div.historyItem-upload .state-icon{background-image:url(data_upload.gif);}
div.historyItem-queued{background:#EEEEEE;}
diff -r 881dd4c2de9f -r 070cf5f6f928 static/june_2007_style/history.css.tmpl
--- a/static/june_2007_style/history.css.tmpl Thu Aug 20 10:52:08 2009 -0400
+++ b/static/june_2007_style/history.css.tmpl Thu Aug 20 11:39:32 2009 -0400
@@ -72,6 +72,14 @@
}
div.historyItem-running {
+ border-color: $history_running_border;
+ background: $history_running_bg;
+ .state-icon {
+ background-image: url(data_running.gif);
+ }
+}
+
+div.historyItem-setting_metadata {
border-color: $history_running_border;
background: $history_running_bg;
.state-icon {
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/galaxy.workflow_editor.canvas.js
--- a/static/scripts/galaxy.workflow_editor.canvas.js Thu Aug 20 10:52:08 2009 -0400
+++ b/static/scripts/galaxy.workflow_editor.canvas.js Thu Aug 20 11:39:32 2009 -0400
@@ -359,6 +359,7 @@
this.nodes = {};
this.name = null;
this.has_changes = false;
+ this.active_form_has_changes = false;
}
$.extend( Workflow.prototype, {
add_node : function( node ) {
@@ -438,6 +439,14 @@
});
});
},
+ check_changes_in_active_form : function() {
+ // If active form has changed, save it
+ if (this.active_form_has_changes) {
+ this.has_changes = true;
+ $("#right-content").find("form").submit();
+ this.active_form_has_changes = false;
+ }
+ },
clear_active_node : function() {
if ( this.active_node ) {
this.active_node.make_inactive();
@@ -447,6 +456,7 @@
},
activate_node : function( node ) {
if ( this.active_node != node ) {
+ this.check_changes_in_active_form();
this.clear_active_node();
parent.show_form_for_tool( node.form_html, node );
node.make_active();
@@ -461,6 +471,7 @@
}
},
layout : function () {
+ this.check_changes_in_active_form();
// Prepare predecessor / successor tracking
var n_pred = {};
var successors = {};
@@ -502,7 +513,7 @@
var v = level_parents[k];
delete n_pred[v];
for ( var sk in successors[v] ) {
- n_pred[ sucessors[v][sk] ] -= 1;
+ n_pred[ successors[v][sk] ] -= 1;
}
}
}
@@ -805,6 +816,10 @@
self.draw_overview();
});
+ /* Disable dragging for child element of the panel so that resizing can
+ only be done by dragging the borders */
+ $("#overview-border div").bind("drag", function(e) { });
+
},
update_viewport_overlay: function() {
var cc = this.cc,
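The active_form_has_changes flag introduced above only has an effect if something sets it while the user edits the tool form in the right panel; check_changes_in_active_form() then submits that form before another node is activated or a layout runs. A plausible wiring, with hypothetical event bindings that are not part of this changeset:

    // Hypothetical binding: mark the active tool form dirty on any edit so
    // activate_node() / layout() save it via check_changes_in_active_form()
    $("#right-content").find("form :input").bind("change keyup", function() {
        workflow.active_form_has_changes = true;
    });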
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/jquery.jstore-all.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/jquery.jstore-all.js Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,748 @@
+/*!
+ * jStore - Persistent Client-Side Storage
+ *
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ *
+ * Dual licensed under:
+ * MIT: http://www.opensource.org/licenses/mit-license.php
+ * GPLv3: http://www.opensource.org/licenses/gpl-3.0.html
+ *//**
+ * Javascript Class Framework
+ *
+ * Copyright (c) 2008 John Resig (http://ejohn.org/blog/simple-javascript-inheritance/)
+ * Inspired by base2 and Prototype
+ */
+(function(){
+ var initializing = false, fnTest = /xyz/.test(function(){xyz;}) ? /\b_super\b/ : /.*/;
+
+ // The base Class implementation (does nothing)
+ this.Class = function(){};
+
+ // Create a new Class that inherits from this class
+ Class.extend = function(prop) {
+ var _super = this.prototype;
+
+ // Instantiate a base class (but only create the instance,
+ // don't run the init constructor)
+ initializing = true;
+ var prototype = new this();
+ initializing = false;
+
+ // Copy the properties over onto the new prototype
+ for (var name in prop) {
+ // Check if we're overwriting an existing function
+ prototype[name] = typeof prop[name] == "function" &&
+ typeof _super[name] == "function" && fnTest.test(prop[name]) ?
+ (function(name, fn){
+ return function() {
+ var tmp = this._super;
+
+ // Add a new ._super() method that is the same method
+ // but on the super-class
+ this._super = _super[name];
+
+ // The method only needs to be bound temporarily, so we
+ // remove it when we're done executing
+ var ret = fn.apply(this, arguments);
+ this._super = tmp;
+
+ return ret;
+ };
+ })(name, prop[name]) :
+ prop[name];
+ }
+
+ // The dummy class constructor
+ function Class() {
+ // All construction is actually done in the init method
+ if ( !initializing && this.init )
+ this.init.apply(this, arguments);
+ }
+
+ // Populate our constructed prototype object
+ Class.prototype = prototype;
+
+ // Enforce the constructor to be what we expect
+ Class.constructor = Class;
+
+ // And make this class extendable
+ Class.extend = arguments.callee;
+
+ return Class;
+ };
+})();
+/*!
+ * jStore Delegate Framework
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ this.jStoreDelegate = Class.extend({
+ init: function(parent){
+ // The Object this delegate operates for
+ this.parent = parent;
+ // Container for callbacks to dispatch.
+ // eventType => [ callback, callback, ... ]
+ this.callbacks = {};
+ },
+ bind: function(event, callback){
+ if ( !$.isFunction(callback) ) return this;
+ if ( !this.callbacks[ event ] ) this.callbacks[ event ] = [];
+
+ this.callbacks[ event ].push(callback);
+
+ return this;
+ },
+ trigger: function(){
+ var parent = this.parent,
+ args = [].slice.call(arguments),
+ event = args.shift(),
+ handlers = this.callbacks[ event ];
+
+ if ( !handlers ) return false;
+
+ $.each(handlers, function(){ this.apply(parent, args) });
+ return this;
+ }
+ });
+
+})(jQuery);/**
+ * jStore-jQuery Interface
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Setup the jStore namespace in jQuery for options storage
+ $.jStore = {};
+
+ // Seed the options in
+ $.extend($.jStore, {
+ EngineOrder: [],
+ // Engines should put their availability tests within jStore.Availability
+ Availability: {},
+ // Defined engines should enter themselves into the jStore.Engines
+ Engines: {},
+ // Instantiated engines should exist within jStore.Instances
+ Instances: {},
+ // The current engine to use for storage
+ CurrentEngine: null,
+ // Provide global settings for overwriting
+ defaults: {
+ project: null,
+ engine: null,
+ autoload: true,
+ flash: 'jStore.Flash.html'
+ },
+ // Boolean for ready state handling
+ isReady: false,
+ // Boolean for flash ready state handling
+ isFlashReady: false,
+ // An event delegate
+ delegate: new jStoreDelegate($.jStore)
+ .bind('jStore-ready', function(engine){
+ $.jStore.isReady = true;
+ if ($.jStore.defaults.autoload) engine.connect();
+ })
+ .bind('flash-ready', function(){
+ $.jStore.isFlashReady = true;
+ })
+ });
+
+ // Enable ready callback for jStore
+ $.jStore.ready = function(callback){
+ if ($.jStore.isReady) callback.apply($.jStore, [$.jStore.CurrentEngine]);
+ else $.jStore.delegate.bind('jStore-ready', callback);
+ }
+
+ // Enable failure callback registration for jStore
+ $.jStore.fail = function(callback){
+ $.jStore.delegate.bind('jStore-failure', callback);
+ }
+
+ // Enable ready callback for Flash
+ $.jStore.flashReady = function(callback){
+ if ($.jStore.isFlashReady) callback.apply($.jStore, [$.jStore.CurrentEngine]);
+ else $.jStore.delegate.bind('flash-ready', callback);
+ }
+
+ // Enable and test an engine
+ $.jStore.use = function(engine, project, identifier){
+ project = project || $.jStore.defaults.project || location.hostname.replace(/\./g, '-') || 'unknown';
+
+ var e = $.jStore.Engines[engine.toLowerCase()] || null,
+ name = (identifier ? identifier + '.' : '') + project + '.' + engine;
+
+ if ( !e ) throw 'JSTORE_ENGINE_UNDEFINED';
+
+ // Instantiate the engine
+ e = new e(project, name);
+
+ // Guard against naming conflicts
+ if ($.jStore.Instances[name]) throw 'JSTORE_JRI_CONFLICT';
+
+ // Test the engine
+ if (e.isAvailable()){
+ $.jStore.Instances[name] = e; // The Easy Way
+ if (!$.jStore.CurrentEngine){
+ $.jStore.CurrentEngine = e;
+ }
+ $.jStore.delegate.trigger('jStore-ready', e);
+ } else {
+ if (!e.autoload) // Not available
+ throw 'JSTORE_ENGINE_UNAVILABLE';
+ else { // The hard way
+ e.included(function(){
+ if (this.isAvailable()) { // Worked out
+ $.jStore.Instances[name] = this;
+ // If there is no current engine, use this one
+ if (!$.jStore.CurrentEngine){
+ $.jStore.CurrentEngine = this;
+ }
+ $.jStore.delegate.trigger('jStore-ready', this);
+ }
+ else $.jStore.delegate.trigger('jStore-failure', this);
+ }).include();
+ }
+ }
+ }
+
+ // Set the current storage engine
+ $.jStore.setCurrentEngine = function(name){
+ if (!$.jStore.Instances.length ) // If no instances exist, attempt to load one
+ return $.jStore.FindEngine();
+
+ if (!name && $.jStore.Instances.length >= 1) { // If no name is specified, use the first engine
+ $.jStore.delegate.trigger('jStore-ready', $.jStore.Instances[0]);
+ return $.jStore.CurrentEngine = $.jStore.Instances[0];
+ }
+
+ if (name && $.jStore.Instances[name]) { // If a name is specified and exists, use it
+ $.jStore.delegate.trigger('jStore-ready', $.jStore.Instances[name]);
+ return $.jStore.CurrentEngine = $.jStore.Instances[name];
+ }
+
+ throw 'JSTORE_JRI_NO_MATCH';
+ }
+
+ // Test all possible engines for straightforward usability
+ $.jStore.FindEngine = function(){
+ $.each($.jStore.EngineOrder, function(k){
+ if ($.jStore.Availability[this]()){ // Find the first, easiest option and use it.
+ $.jStore.use(this, $.jStore.defaults.project, 'default');
+ return false;
+ }
+ })
+ }
+
+ // Provide a simple interface for storing/getting values
+ $.jStore.store = function(key, value){
+ if (!$.jStore.CurrentEngine) return false;
+
+ if ( !value ) // Executing a get command
+ return $.jStore.CurrentEngine.get(key);
+ // Executing a set command
+ return $.jStore.CurrentEngine.set(key, value);
+ }
+ // Provide a simple interface for storing/getting values
+ $.jStore.remove = function(key){
+ if (!$.jStore.CurrentEngine) return false;
+
+ return $.jStore.CurrentEngine.rem(key);
+ }
+
+ // Provide a chainable interface for storing values/getting a value at the end of a chain
+ $.fn.store = function(key, value){
+ if (!$.jStore.CurrentEngine) return this;
+
+ var result = $.jStore.store(key, value);
+
+ return !value ? result : this;
+ }
+
+ // Provide a chainable interface for removing values
+ $.fn.removeStore = function(key){
+ $.jStore.remove(key);
+
+ return this;
+ }
+
+ // Provide a way for users to call for auto-loading
+ $.jStore.load = function(){
+ if ($.jStore.defaults.engine)
+ return $.jStore.use($.jStore.defaults.engine, $.jStore.defaults.project, 'default');
+
+ // Attempt to find a valid engine, and catch any exceptions if we can't
+ try {
+ $.jStore.FindEngine();
+ } catch (e) {}
+ }
+
+})(jQuery);
+/**
+ * jStore Engine Core
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ this.StorageEngine = Class.extend({
+ init: function(project, name){
+ // Configure the project name
+ this.project = project;
+ // The JRI name given by the manager
+ this.jri = name;
+ // Cache the data so we can work synchronously
+ this.data = {};
+ // The maximum limit of the storage engine
+ this.limit = -1;
+ // Third party script includes
+ this.includes = [];
+ // Create an event delegate for users to subscribe to event triggers
+ this.delegate = new jStoreDelegate(this)
+ .bind('engine-ready', function(){
+ this.isReady = true;
+ })
+ .bind('engine-included', function(){
+ this.hasIncluded = true;
+ });
+ // If enabled, the manager will check availability, then run include(), then check again
+ this.autoload = false; // This should be changed by the engines, if they have required includes
+ // When set, we're ready to transact data
+ this.isReady = false;
+ // When the includer is finished, it will set this to true
+ this.hasIncluded = false;
+ },
+ // Performs all necessary script includes
+ include: function(){
+ var self = this,
+ total = this.includes.length,
+ count = 0;
+
+ $.each(this.includes, function(){
+ $.ajax({type: 'get', url: this, dataType: 'script', cache: true,
+ success: function(){
+ count++;
+ if (count == total) self.delegate.trigger('engine-included');
+ }
+ })
+ });
+ },
+ // This should be overloaded with an actual functionality presence check
+ isAvailable: function(){
+ return false;
+ },
+ /** Event Subscription Shortcuts **/
+ ready: function(callback){
+ if (this.isReady) callback.apply(this);
+ else this.delegate.bind('engine-ready', callback);
+ return this;
+ },
+ included: function(callback){
+ if (this.hasIncluded) callback.apply(this);
+ else this.delegate.bind('engine-included', callback);
+ return this;
+ },
+ /** Cache Data Access **/
+ get: function(key){
+ return this.data[key] || null;
+ },
+ set: function(key, value){
+ this.data[key] = value;
+ return value;
+ },
+ rem: function(key){
+ var beforeDelete = this.data[key];
+ this.data[key] = null;
+ return beforeDelete;
+ }
+ });
+
+})(jQuery);
+/*!
+ * jStore DOM Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var sessionAvailability = $.jStore.Availability.session = function(){
+ return !!window.sessionStorage;
+ },
+ localAvailability = $.jStore.Availability.local = function(){
+ return !!(window.localStorage || window.globalStorage);
+ };
+
+ this.jStoreDom = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'DOM';
+
+ // Set the Database limit
+ this.limit = 5 * 1024 * 1024;
+ },
+ connect: function(){
+ // Fire our delegate to indicate we're ready for data transactions
+ this.delegate.trigger('engine-ready');
+ },
+ get: function(key){
+ var out = this.db.getItem(key);
+ // Gecko's getItem returns {value: 'the value'}, WebKit returns 'the value'
+ return out && out.value ? out.value : out
+ },
+ set: function(key, value){
+ this.db.setItem(key,value);
+ return value;
+ },
+ rem: function(key){
+ var out = this.get(key);
+ this.db.removeItem(key);
+ return out
+ }
+ })
+
+ this.jStoreLocal = jStoreDom.extend({
+ connect: function(){
+ // Gecko uses a non-standard globalStorage[ www.example.com ] DOM access object for persistent storage.
+ this.db = !window.globalStorage ? window.localStorage : window.globalStorage[location.hostname];
+ this._super();
+ },
+ isAvailable: localAvailability
+ })
+
+ this.jStoreSession = jStoreDom.extend({
+ connect: function(){
+ this.db = sessionStorage;
+ this._super();
+ },
+ isAvailable: sessionAvailability
+ })
+
+ $.jStore.Engines.local = jStoreLocal;
+ $.jStore.Engines.session = jStoreSession;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 1 ] = 'local';
+
+})(jQuery);
+/*!
+ * jStore Flash Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ * jStore.swf Copyright (c) 2008 Daniel Bulli (http://www.nuff-respec.com)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.flash = function(){
+ return !!($.jStore.hasFlash('8.0.0'));
+ }
+
+ this.jStoreFlash = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'Flash';
+
+ // Bind our flashReady function to the jStore Delegate
+ var self = this;
+ $.jStore.flashReady(function(){ self.flashReady() });
+ },
+ connect: function(){
+ var name = 'jstore-flash-embed-' + this.project;
+
+ // To make Flash Storage work on IE, we have to load up an iFrame
+ // which contains an HTML page that embeds the object using an
+ // object tag wrapping an embed tag. Of course, this is unnecessary for
+ // all browsers except for IE, which, to my knowledge, is the only browser
+ // in existence where you need to complicate your code to fix bugs. Goddamnit. :(
+ $(document.body)
+ .append('<iframe style="height:1px;width:1px;position:absolute;left:0;top:0;margin-left:-100px;" ' +
+ 'id="jStoreFlashFrame" src="' +$.jStore.defaults.flash + '"></iframe>');
+ },
+ flashReady: function(e){
+ var iFrame = $('#jStoreFlashFrame')[0];
+
+ // IE
+ if (iFrame.Document && $.isFunction(iFrame.Document['jStoreFlash'].f_get_cookie)) this.db = iFrame.Document['jStoreFlash'];
+ // Safari && Firefox
+ else if (iFrame.contentWindow && iFrame.contentWindow.document){
+ var doc = iFrame.contentWindow.document;
+ // Safari
+ if ($.isFunction($('object', $(doc))[0].f_get_cookie)) this.db = $('object', $(doc))[0];
+ // Firefox
+ else if ($.isFunction($('embed', $(doc))[0].f_get_cookie)) this.db = $('embed', $(doc))[0];
+ }
+
+ // We're ready to process data
+ if (this.db) this.delegate.trigger('engine-ready');
+ },
+ isAvailable: avilability,
+ get: function(key){
+ var out = this.db.f_get_cookie(key);
+ return out == 'null' ? null : out;
+ },
+ set: function(key, value){
+ this.db.f_set_cookie(key, value);
+ return value;
+ },
+ rem: function(key){
+ var beforeDelete = this.get(key);
+ this.db.f_delete_cookie(key);
+ return beforeDelete;
+ }
+ })
+
+ $.jStore.Engines.flash = jStoreFlash;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 2 ] = 'flash';
+
+ /**
+ * Flash Detection functions copied from the jQuery Flash Plugin
+ * Copyright (c) 2006 Luke Lutman (http://jquery.lukelutman.com/plugins/flash)
+ * Dual licensed under the MIT and GPL licenses.
+ * http://www.opensource.org/licenses/mit-license.php
+ * http://www.opensource.org/licenses/gpl-license.php
+ */
+ $.jStore.hasFlash = function(version){
+ var pv = $.jStore.flashVersion().match(/\d+/g),
+ rv = version.match(/\d+/g);
+
+ for(var i = 0; i < 3; i++) {
+ pv[i] = parseInt(pv[i] || 0);
+ rv[i] = parseInt(rv[i] || 0);
+ // player is less than required
+ if(pv[i] < rv[i]) return false;
+ // player is greater than required
+ if(pv[i] > rv[i]) return true;
+ }
+ // major version, minor version and revision match exactly
+ return true;
+ }
+
+ $.jStore.flashVersion = function(){
+ // ie
+ try {
+ try {
+ // avoid fp6 minor version lookup issues
+ // see: http://blog.deconcept.com/2006/01/11/getvariable-setvariable-crash-internet…
+ var axo = new ActiveXObject('ShockwaveFlash.ShockwaveFlash.6');
+ try { axo.AllowScriptAccess = 'always'; }
+ catch(e) { return '6,0,0'; }
+ } catch(e) {}
+ return new ActiveXObject('ShockwaveFlash.ShockwaveFlash').GetVariable('$version').replace(/\D+/g, ',').match(/^,?(.+),?$/)[1];
+ // other browsers
+ } catch(e) {
+ try {
+ if(navigator.mimeTypes["application/x-shockwave-flash"].enabledPlugin){
+ return (navigator.plugins["Shockwave Flash 2.0"] || navigator.plugins["Shockwave Flash"]).description.replace(/\D+/g, ",").match(/^,?(.+),?$/)[1];
+ }
+ } catch(e) {}
+ }
+ return '0,0,0';
+ }
+
+})(jQuery);
+
+// Callback fired when ExternalInterface is established
+function flash_ready(){
+ $.jStore.delegate.trigger('flash-ready');
+}
+/*!
+ * jStore Google Gears Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.gears = function(){
+ return !!(window.google && window.google.gears)
+ }
+
+ this.jStoreGears = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'Google Gears';
+
+ // Add required third-party scripts
+ this.includes.push('http://code.google.com/apis/gears/gears_init.js');
+
+ // Allow Autoloading on fail
+ this.autoload = true;
+ },
+ connect: function(){
+ // Create our database connection
+ var db = this.db = google.gears.factory.create('beta.database');
+ db.open( 'jstore-' + this.project );
+ db.execute( 'CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)' );
+
+ // Cache the data from the table
+ this.updateCache();
+ },
+ updateCache: function(){
+ // Read the database into our cache object
+ var result = this.db.execute( 'SELECT k,v FROM jstore' );
+ while (result.isValidRow()){
+ this.data[result.field(0)] = result.field(1);
+ result.next();
+ } result.close();
+
+ // Fire our delegate to indicate we're ready for data transactions
+ this.delegate.trigger('engine-ready');
+ },
+ isAvailable: avilability,
+ set: function(key, value){
+ // Update the database
+ var db = this.db;
+ db.execute( 'BEGIN' );
+ db.execute( 'INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)', [key,value] );
+ db.execute( 'COMMIT' );
+ return this._super(key, value);
+ },
+ rem: function(key){
+ // Update the database
+ var db = this.db;
+ db.execute( 'BEGIN' );
+ db.execute( 'DELETE FROM jstore WHERE k = ?', [key] );
+ db.execute( 'COMMIT' );
+ return this._super(key);
+ }
+ })
+
+ $.jStore.Engines.gears = jStoreGears;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 3 ] = 'gears';
+
+})(jQuery);
+/*!
+ * jStore HTML5 Specification Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.html5 = function(){
+ return !!window.openDatabase
+ }
+
+ this.jStoreHtml5 = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'HTML5';
+
+ // Set the Database limit
+ this.limit = 1024 * 200;
+ },
+ connect: function(){
+ // Create our database connection
+ var db = this.db = openDatabase('jstore-' + this.project, '1.0', this.project, this.limit);
+ if (!db) throw 'JSTORE_ENGINE_HTML5_NODB';
+ db.transaction(function(db){
+ db.executeSql( 'CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)' );
+ });
+
+ // Cache the data from the table
+ this.updateCache();
+ },
+ updateCache: function(){
+ var self = this;
+ // Read the database into our cache object
+ this.db.transaction(function(db){
+ db.executeSql( 'SELECT k,v FROM jstore', [], function(db, result){
+ var rows = result.rows, i = 0, row;
+ for (; i < rows.length; ++i){
+ row = rows.item(i);
+ self.data[row.k] = row.v;
+ }
+
+ // Fire our delegate to indicate we're ready for data transactions
+ self.delegate.trigger('engine-ready');
+ });
+ });
+ },
+ isAvailable: avilability,
+ set: function(key, value){
+ // Update the database
+ this.db.transaction(function(db){
+ db.executeSql( 'INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)', [key,value]);
+ });
+ return this._super(key, value);
+ },
+ rem: function(key){
+ // Update the database
+ this.db.transaction(function(db){
+ db.executeSql( 'DELETE FROM jstore WHERE k = ?', [key] )
+ })
+ return this._super(key);
+ }
+ })
+
+ $.jStore.Engines.html5 = jStoreHtml5;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 0 ] = 'html5';
+
+})(jQuery);
+/*!*
+ * jStore IE Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.ie = function(){
+ return !!window.ActiveXObject;
+ }
+
+ this.jStoreIE = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'IE';
+
+ // Set the storage limit
+ this.limit = 64 * 1024;
+ },
+ connect: function(){
+ // Create a hidden div to store attributes in
+ this.db = $('<div style="display:none;behavior:url(\'#default#userData\')" id="jstore-' + this.project + '"></div>')
+ .appendTo(document.body).get(0);
+ // Fire our delegate to indicate we're ready for data transactions
+ this.delegate.trigger('engine-ready');
+ },
+ isAvailable: avilability,
+ get: function(key){
+ this.db.load(this.project);
+ return this.db.getAttribute(key);
+ },
+ set: function(key, value){
+ this.db.setAttribute(key, value);
+ this.db.save(this.project);
+ return value;
+ },
+ rem: function(key){
+ var beforeDelete = this.get(key);
+ this.db.removeAttribute(key);
+ this.db.save(this.project);
+ return beforeDelete;
+ }
+ })
+
+ $.jStore.Engines.ie = jStoreIE;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 4 ] = 'ie';
+
+})(jQuery);
\ No newline at end of file
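Pulling the pieces of jquery.jstore-all.js together: the EngineOrder entries registered above make $.jStore.load() probe html5, then local, then flash, then gears, then ie, and autoload the first engine that reports itself available. A usage sketch against the API as committed (the project name 'galaxy' and the stored keys are illustrative):

    $.jStore.load();                      // probe engines in EngineOrder
    $.jStore.ready(function(engine){      // fires once an engine connects
        $.jStore.store('current_tab', 'analysis');   // set
        var tab = $.jStore.store('current_tab');     // get (omit the value)
        $.jStore.remove('current_tab');              // delete
    });
    // ...or bypass probing and force a specific engine for a project:
    $.jStore.use('local', 'galaxy');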
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/json2.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/json2.js Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,476 @@
+/*
+ http://www.JSON.org/json2.js
+ 2009-06-29
+
+ Public Domain.
+
+ NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+
+ See http://www.JSON.org/js.html
+
+ This file creates a global JSON object containing two methods: stringify
+ and parse.
+
+ JSON.stringify(value, replacer, space)
+ value any JavaScript value, usually an object or array.
+
+ replacer an optional parameter that determines how object
+ values are stringified for objects. It can be a
+ function or an array of strings.
+
+ space an optional parameter that specifies the indentation
+ of nested structures. If it is omitted, the text will
+ be packed without extra whitespace. If it is a number,
+ it will specify the number of spaces to indent at each
+ level. If it is a string (such as '\t' or ' '),
+ it contains the characters used to indent at each level.
+
+ This method produces a JSON text from a JavaScript value.
+
+ When an object value is found, if the object contains a toJSON
+ method, its toJSON method will be called and the result will be
+ stringified. A toJSON method does not serialize: it returns the
+ value represented by the name/value pair that should be serialized,
+ or undefined if nothing should be serialized. The toJSON method
+ will be passed the key associated with the value, and this will be
+ bound to the object holding the key.
+
+ For example, this would serialize Dates as ISO strings.
+
+ Date.prototype.toJSON = function (key) {
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ return this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z';
+ };
+
+ You can provide an optional replacer method. It will be passed the
+ key and value of each member, with this bound to the containing
+ object. The value that is returned from your method will be
+ serialized. If your method returns undefined, then the member will
+ be excluded from the serialization.
+
+ If the replacer parameter is an array of strings, then it will be
+ used to select the members to be serialized. It filters the results
+ such that only members with keys listed in the replacer array are
+ stringified.
+
+ Values that do not have JSON representations, such as undefined or
+ functions, will not be serialized. Such values in objects will be
+ dropped; in arrays they will be replaced with null. You can use
+ a replacer function to replace those with JSON values.
+ JSON.stringify(undefined) returns undefined.
+
+ The optional space parameter produces a stringification of the
+ value that is filled with line breaks and indentation to make it
+ easier to read.
+
+ If the space parameter is a non-empty string, then that string will
+ be used for indentation. If the space parameter is a number, then
+ the indentation will be that many spaces.
+
+ Example:
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}]);
+ // text is '["e",{"pluribus":"unum"}]'
+
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
+ // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
+
+ text = JSON.stringify([new Date()], function (key, value) {
+ return this[key] instanceof Date ?
+ 'Date(' + this[key] + ')' : value;
+ });
+ // text is '["Date(---current time---)"]'
+
+
+ JSON.parse(text, reviver)
+ This method parses a JSON text to produce an object or array.
+ It can throw a SyntaxError exception.
+
+ The optional reviver parameter is a function that can filter and
+ transform the results. It receives each of the keys and values,
+ and its return value is used instead of the original value.
+ If it returns what it received, then the structure is not modified.
+ If it returns undefined then the member is deleted.
+
+ Example:
+
+ // Parse the text. Values that look like ISO date strings will
+ // be converted to Date objects.
+
+ myData = JSON.parse(text, function (key, value) {
+ var a;
+ if (typeof value === 'string') {
+ a =
+/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
+ if (a) {
+ return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+ +a[5], +a[6]));
+ }
+ }
+ return value;
+ });
+
+ myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
+ var d;
+ if (typeof value === 'string' &&
+ value.slice(0, 5) === 'Date(' &&
+ value.slice(-1) === ')') {
+ d = new Date(value.slice(5, -1));
+ if (d) {
+ return d;
+ }
+ }
+ return value;
+ });
+
+
+ This is a reference implementation. You are free to copy, modify, or
+ redistribute.
+
+ This code should be minified before deployment.
+ See http://javascript.crockford.com/jsmin.html
+
+ USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
+ NOT CONTROL.
+*/
+
+/*jslint evil: true */
+
+/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
+ call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
+ getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
+ lastIndex, length, parse, prototype, push, replace, slice, stringify,
+ test, toJSON, toString, valueOf
+*/
+
+// Create a JSON object only if one does not already exist. We create the
+// methods in a closure to avoid creating global variables.
+
+var JSON = JSON || {};
+
+(function () {
+
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ if (typeof Date.prototype.toJSON !== 'function') {
+
+ Date.prototype.toJSON = function (key) {
+
+ return isFinite(this.valueOf()) ?
+ this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z' : null;
+ };
+
+ String.prototype.toJSON =
+ Number.prototype.toJSON =
+ Boolean.prototype.toJSON = function (key) {
+ return this.valueOf();
+ };
+ }
+
+ var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ gap,
+ indent,
+ meta = { // table of character substitutions
+ '\b': '\\b',
+ '\t': '\\t',
+ '\n': '\\n',
+ '\f': '\\f',
+ '\r': '\\r',
+ '"' : '\\"',
+ '\\': '\\\\'
+ },
+ rep;
+
+
+ function quote(string) {
+
+// If the string contains no control characters, no quote characters, and no
+// backslash characters, then we can safely slap some quotes around it.
+// Otherwise we must also replace the offending characters with safe escape
+// sequences.
+
+ escapable.lastIndex = 0;
+ return escapable.test(string) ?
+ '"' + string.replace(escapable, function (a) {
+ var c = meta[a];
+ return typeof c === 'string' ? c :
+ '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
+ }) + '"' :
+ '"' + string + '"';
+ }
+
+
+ function str(key, holder) {
+
+// Produce a string from holder[key].
+
+ var i, // The loop counter.
+ k, // The member key.
+ v, // The member value.
+ length,
+ mind = gap,
+ partial,
+ value = holder[key];
+
+// If the value has a toJSON method, call it to obtain a replacement value.
+
+ if (value && typeof value === 'object' &&
+ typeof value.toJSON === 'function') {
+ value = value.toJSON(key);
+ }
+
+// If we were called with a replacer function, then call the replacer to
+// obtain a replacement value.
+
+ if (typeof rep === 'function') {
+ value = rep.call(holder, key, value);
+ }
+
+// What happens next depends on the value's type.
+
+ switch (typeof value) {
+ case 'string':
+ return quote(value);
+
+ case 'number':
+
+// JSON numbers must be finite. Encode non-finite numbers as null.
+
+ return isFinite(value) ? String(value) : 'null';
+
+ case 'boolean':
+ case 'null':
+
+// If the value is a boolean or null, convert it to a string. Note:
+// typeof null does not produce 'null'. The case is included here in
+// the remote chance that this gets fixed someday.
+
+ return String(value);
+
+// If the type is 'object', we might be dealing with an object or an array or
+// null.
+
+ case 'object':
+
+// Due to a specification blunder in ECMAScript, typeof null is 'object',
+// so watch out for that case.
+
+ if (!value) {
+ return 'null';
+ }
+
+// Make an array to hold the partial results of stringifying this object value.
+
+ gap += indent;
+ partial = [];
+
+// Is the value an array?
+
+ if (Object.prototype.toString.apply(value) === '[object Array]') {
+
+// The value is an array. Stringify every element. Use null as a placeholder
+// for non-JSON values.
+
+ length = value.length;
+ for (i = 0; i < length; i += 1) {
+ partial[i] = str(i, value) || 'null';
+ }
+
+// Join all of the elements together, separated with commas, and wrap them in
+// brackets.
+
+ v = partial.length === 0 ? '[]' :
+ gap ? '[\n' + gap +
+ partial.join(',\n' + gap) + '\n' +
+ mind + ']' :
+ '[' + partial.join(',') + ']';
+ gap = mind;
+ return v;
+ }
+
+// If the replacer is an array, use it to select the members to be stringified.
+
+ if (rep && typeof rep === 'object') {
+ length = rep.length;
+ for (i = 0; i < length; i += 1) {
+ k = rep[i];
+ if (typeof k === 'string') {
+ v = str(k, value);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ } else {
+
+// Otherwise, iterate through all of the keys in the object.
+
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = str(k, value);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ }
+
+// Join all of the member texts together, separated with commas,
+// and wrap them in braces.
+
+ v = partial.length === 0 ? '{}' :
+ gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
+ mind + '}' : '{' + partial.join(',') + '}';
+ gap = mind;
+ return v;
+ }
+ }
+
+// If the JSON object does not yet have a stringify method, give it one.
+
+ if (typeof JSON.stringify !== 'function') {
+ JSON.stringify = function (value, replacer, space) {
+
+// The stringify method takes a value and an optional replacer, and an optional
+// space parameter, and returns a JSON text. The replacer can be a function
+// that can replace values, or an array of strings that will select the keys.
+// A default replacer method can be provided. Use of the space parameter can
+// produce text that is more easily readable.
+
+ var i;
+ gap = '';
+ indent = '';
+
+// If the space parameter is a number, make an indent string containing that
+// many spaces.
+
+ if (typeof space === 'number') {
+ for (i = 0; i < space; i += 1) {
+ indent += ' ';
+ }
+
+// If the space parameter is a string, it will be used as the indent string.
+
+ } else if (typeof space === 'string') {
+ indent = space;
+ }
+
+// If there is a replacer, it must be a function or an array.
+// Otherwise, throw an error.
+
+ rep = replacer;
+ if (replacer && typeof replacer !== 'function' &&
+ (typeof replacer !== 'object' ||
+ typeof replacer.length !== 'number')) {
+ throw new Error('JSON.stringify');
+ }
+
+// Make a fake root object containing our value under the key of ''.
+// Return the result of stringifying the value.
+
+ return str('', {'': value});
+ };
+ }
+
+
+// If the JSON object does not yet have a parse method, give it one.
+
+ if (typeof JSON.parse !== 'function') {
+ JSON.parse = function (text, reviver) {
+
+// The parse method takes a text and an optional reviver function, and returns
+// a JavaScript value if the text is a valid JSON text.
+
+ var j;
+
+ function walk(holder, key) {
+
+// The walk method is used to recursively walk the resulting structure so
+// that modifications can be made.
+
+ var k, v, value = holder[key];
+ if (value && typeof value === 'object') {
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = walk(value, k);
+ if (v !== undefined) {
+ value[k] = v;
+ } else {
+ delete value[k];
+ }
+ }
+ }
+ }
+ return reviver.call(holder, key, value);
+ }
+
+
+// Parsing happens in four stages. In the first stage, we replace certain
+// Unicode characters with escape sequences. JavaScript handles many characters
+// incorrectly, either silently deleting them, or treating them as line endings.
+
+ cx.lastIndex = 0;
+ if (cx.test(text)) {
+ text = text.replace(cx, function (a) {
+ return '\\u' +
+ ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
+ });
+ }
+
+// In the second stage, we run the text against regular expressions that look
+// for non-JSON patterns. We are especially concerned with '()' and 'new'
+// because they can cause invocation, and '=' because it can cause mutation.
+// But just to be safe, we want to reject all unexpected forms.
+
+// We split the second stage into 4 regexp operations in order to work around
+// crippling inefficiencies in IE's and Safari's regexp engines. First we
+// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
+// replace all simple value tokens with ']' characters. Third, we delete all
+// open brackets that follow a colon or comma or that begin the text. Finally,
+// we look to see that the remaining characters are only whitespace or ']' or
+// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
+
+ if (/^[\],:{}\s]*$/.
+test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
+replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
+replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
+
+// In the third stage we use the eval function to compile the text into a
+// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
+// in JavaScript: it can begin a block or an object literal. We wrap the text
+// in parens to eliminate the ambiguity.
+
+ j = eval('(' + text + ')');
+
+// In the optional fourth stage, we recursively walk the new structure, passing
+// each name/value pair to a reviver function for possible transformation.
+
+ return typeof reviver === 'function' ?
+ walk({'': j}, '') : j;
+ }
+
+// If the text is not JSON parseable, then a SyntaxError is thrown.
+
+ throw new SyntaxError('JSON.parse');
+ };
+ }
+}());
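For quick reference, a round trip through the two methods json2.js guarantees, using the optional space parameter (the object is arbitrary):

    var text = JSON.stringify({history: 42, tags: ['rna', 'seq']}, null, '\t');
    // text is '{\n\t"history": 42,\n\t"tags": [\n\t\t"rna",\n\t\t"seq"\n\t]\n}'
    var data = JSON.parse(text);   // and back to an equivalent object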
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/json_cookie.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/json_cookie.js Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,61 @@
+/*
+ JSONCookie: Uses JSON to allow the settings of multiple preferences in one cookie.
+ Kanwei Li, 2009
+
+ cookie = new JSONCookie("cookie_name"); // Pass in the name of the cookie
+
+ // Gets the value of a preference, returns optional second argument if pref not found
+ cookie.get("pref", "val_if_not_found");
+
+ cookie.set("pref", "val"); // Sets a value for the preference and saves cookie
+ cookie.unset("pref"); // Unsets the preference and saves cookie
+ cookie.clear() // Deletes the cookie
+
+*/
+
+function JSONCookie(name) {
+ this.cookie_name = name;
+
+}
+
+JSONCookie.prototype = {
+ json_data : function() {
+ var cookie = $.cookie(this.cookie_name);
+ return cookie ? JSON.parse(cookie) : null;
+ },
+
+ save : function(data) {
+ $.cookie(this.cookie_name, JSON.stringify(data));
+ },
+
+ get : function(attr, else_val) {
+ var data = this.json_data();
+ if (data && data[attr]) { return data[attr];
+ } else if (else_val) { return else_val;
+ } else { return null;
+ }
+ },
+
+ set : function(attr, val) {
+ var data = this.json_data() || {};
+ // bracket notation, so the key is the value of attr, not the string "attr"
+ data[attr] = val;
+ this.save(data);
+ },
+
+ unset : function(attr) {
+ var data = this.json_data();
+ if (data) {
+ delete data[attr];
+ }
+ this.save(data);
+ },
+
+ clear : function() {
+ this.save(null);
+ }
+
+};
\ No newline at end of file
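Because JSONCookie keeps every preference in a single cookie, the payload is just one JSON object serialized with the json2.js methods above and stored through the jQuery cookie plugin. A short sketch (the cookie name and preference keys are illustrative; $.cookie URL-encodes the stored string):

    var prefs = new JSONCookie("galaxy_prefs");
    prefs.set("show_deleted", "false");
    prefs.set("panel_width", "250");
    // cookie value is now (roughly) {"show_deleted":"false","panel_width":"250"}
    prefs.get("panel_width");           // -> "250"
    prefs.get("missing", "fallback");   // -> "fallback" (second arg default)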
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/packed/galaxy.workflow_editor.canvas.js
--- a/static/scripts/packed/galaxy.workflow_editor.canvas.js Thu Aug 20 10:52:08 2009 -0400
+++ b/static/scripts/packed/galaxy.workflow_editor.canvas.js Thu Aug 20 11:39:32 2009 -0400
@@ -1,1 +1,1 @@
-function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatype=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var b in this.datatypes){if(a.datatype=="input"){return true}if(issubtype(a.datatype,this.datatypes[b])){return true}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a)
{this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;
this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropstart",function(g){g.dragProxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g){g.dragProxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g){(new Connector(g.dragTarget.terminal,g.dropTarget.terminal)).redraw()}).bind("hov
er",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img src='../images/delete_icon.png' />").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j){var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var k=new Connector();k.dragging=true;k.connect(this.terminal,i.terminal);$.dropManage({filter:function(h){return this.terminal.can_accept(f)}}).addClass("input-terminal-active");ret
urn i}).bind("drag",function(i){var h=function(){var k=$(i.dragProxy).offsetParent().offset(),j=i.offsetX-k.left,l=i.offsetY-k.top;$(i.dragProxy).css({left:j,top:l});i.dragProxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h){h.dragProxy.terminal.connectors[0].destroy();$(h.dragProxy).remove();$.dropManage().removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b
.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(g){var d=this.element;if(g.type){this.type=g.type}this.name=g.name;this.form_html=g.form_html;this.tool_state=g.tool_state;this.tool_errors=g.tool_errors;if(this.tool_errors){d.addClass("tool-node-error")}else{d.removeClass("tool-node-error")}var c=this;var a=d.find(".toolFormBody");a.find("div").remove();var h=$("<div class='inputs'></div>").appendTo(a);$.each(g.data_inputs,function(j,b){var f=$("<div class='terminal input-terminal'></div>");c.enable_input_terminal(f,b.name,b.extensions);h.append($("<div class='form-row dataRow input-data-row' name='"+b.name+"'>"+b.label+"</div>").prepend(f))});if((g.data_inputs.length>0)&&(g.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(g.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");c.enable_output_terminal(j,b.name,b.extension);var f=b.name;if(b.extension!="input"){f=f+" ("+b.extensio
n+")"}a.append($("<div class='form-row dataRow'>"+f+"</div>").append(j))});workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var g=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=g.find("div.input-data-row");$.each(f.data_inputs,function(k,h){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,h.name,h.extensions);g.find("div[name="+h.name+"]").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){j[0].terminal.connectors[0]=i;i.handle2=j[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+h.name+"'>"+h.label+"</div>").prepend(j))});g.replaceWith(b);g.find("div.input-data-row > .terminal").each(function(){this.terminal.destr
oy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},to_simple:function(){var a={};$.each(this.nodes,function(b,d){var f={};$.each(d.input_terminals,function(g,h){f[h.name]=null;$.each(h.connectors,function(j,k){f[h.name]={id:k.ha
ndle1.node.id,output_name:k.handle1.name}})});var c={id:d.id,type:d.type,tool_id:d.tool_id,tool_state:d.tool_state,tool_errors:d.tool_errors,input_connections:f,position:$(d.element).position()};a[d.id]=c});return{steps:a}},from_simple:function(a){wf=this;var b=0;wf.name=a.name;$.each(a.steps,function(f,d){var c=prebuild_node("tool",d.name,d.tool_id);c.init_field_data(d);if(d.position){c.element.css({top:d.position.top,left:d.position.left})}c.id=d.id;wf.nodes[c.id]=c;b=Math.max(b,parseInt(f))});wf.id_counter=b+1;$.each(a.steps,function(f,d){var c=wf.nodes[f];$.each(d.input_connections,function(h,g){if(g){var i=wf.nodes[g.id];var j=new Connector();j.connect(i.output_terminals[g.output_name],c.input_terminals[h]);j.redraw()}})})},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.clear_active_node();parent.show_
form_for_tool(a.form_html,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){parent.show_form_for_tool(a.form_html,a)}},layout:function(){var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[sucessors[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var
q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(
this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='../images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img src='../images/delete_icon.png' />").click(function(b){g.destroy()}).hover(function(){$(this).attr("src","../images/delete_icon_dark.png")},function(){$(this).attr("src","../images/delete_icon.png")}));i.appendTo("#canvas-container");var
d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o){var f=$(this).offsetParent().offset(),b=o.offsetX-f.left,p=o.offsetY-f.top;$(this).css({left:b,top:p});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prot
otype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;
var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(g){var h=$(this).offset();var f=b.cc.position();c=f.top-h.top;d=f.left-h.left}).bind("drag",function(f){a(f.offsetX+d,f.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k){var j=b.cc.width(),g=b.cc.height(),f=b.oc.width(),h=b.oc.height(),i=$(this).offsetParent().offset(),m=k.offsetX-i.left,l=k.offsetY-i.top;a(-(m/f*j),-(l/h*g))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g){var i=$(this).offsetParent();var h=i.offset();var f=Math.max(i.width()-(g.offsetX-h.left),i.height()-(g.offsetY-h.top));$(this).css({wi
dth:f,height:f});b.draw_overview()})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;$.each(workflow.nodes,function(t,q){var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewp
ort_overlay()}});
\ No newline at end of file
+function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatype=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var b in this.datatypes){if(a.datatype=="input"){return true}if(issubtype(a.datatype,this.datatypes[b])){return true}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a)
{this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;
this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropstart",function(g){g.dragProxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g){g.dragProxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g){(new Connector(g.dragTarget.terminal,g.dropTarget.terminal)).redraw()}).bind("hov
er",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img src='../images/delete_icon.png' />").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j){var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var k=new Connector();k.dragging=true;k.connect(this.terminal,i.terminal);$.dropManage({filter:function(h){return this.terminal.can_accept(f)}}).addClass("input-terminal-active");ret
urn i}).bind("drag",function(i){var h=function(){var k=$(i.dragProxy).offsetParent().offset(),j=i.offsetX-k.left,l=i.offsetY-k.top;$(i.dragProxy).css({left:j,top:l});i.dragProxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h){h.dragProxy.terminal.connectors[0].destroy();$(h.dragProxy).remove();$.dropManage().removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b
.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(g){var d=this.element;if(g.type){this.type=g.type}this.name=g.name;this.form_html=g.form_html;this.tool_state=g.tool_state;this.tool_errors=g.tool_errors;if(this.tool_errors){d.addClass("tool-node-error")}else{d.removeClass("tool-node-error")}var c=this;var a=d.find(".toolFormBody");a.find("div").remove();var h=$("<div class='inputs'></div>").appendTo(a);$.each(g.data_inputs,function(j,b){var f=$("<div class='terminal input-terminal'></div>");c.enable_input_terminal(f,b.name,b.extensions);h.append($("<div class='form-row dataRow input-data-row' name='"+b.name+"'>"+b.label+"</div>").prepend(f))});if((g.data_inputs.length>0)&&(g.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(g.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");c.enable_output_terminal(j,b.name,b.extension);var f=b.name;if(b.extension!="input"){f=f+" ("+b.extensio
n+")"}a.append($("<div class='form-row dataRow'>"+f+"</div>").append(j))});workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var g=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=g.find("div.input-data-row");$.each(f.data_inputs,function(k,h){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,h.name,h.extensions);g.find("div[name="+h.name+"]").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){j[0].terminal.connectors[0]=i;i.handle2=j[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+h.name+"'>"+h.label+"</div>").prepend(j))});g.replaceWith(b);g.find("div.input-data-row > .terminal").each(function(){this.terminal.destr
oy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},to_simple:function(){var a={};$.each(this.nodes,function(b,d){var f={};$.each(d.input_terminals,function(g,h){f[h.name]=null;$.each(h.connecto
rs,function(j,k){f[h.name]={id:k.handle1.node.id,output_name:k.handle1.name}})});var c={id:d.id,type:d.type,tool_id:d.tool_id,tool_state:d.tool_state,tool_errors:d.tool_errors,input_connections:f,position:$(d.element).position()};a[d.id]=c});return{steps:a}},from_simple:function(a){wf=this;var b=0;wf.name=a.name;$.each(a.steps,function(f,d){var c=prebuild_node("tool",d.name,d.tool_id);c.init_field_data(d);if(d.position){c.element.css({top:d.position.top,left:d.position.left})}c.id=d.id;wf.nodes[c.id]=c;b=Math.max(b,parseInt(f))});wf.id_counter=b+1;$.each(a.steps,function(f,d){var c=wf.nodes[f];$.each(d.input_connections,function(h,g){if(g){var i=wf.nodes[g.id];var j=new Connector();j.connect(i.output_terminals[g.output_name],c.input_terminals[h]);j.redraw()}})})},enable_auto_save:function(){outer_this=this;$(".toolFormBody").find("input,textarea,select").each(function(){$(this).focus(function(){outer_this.active_form_has_changes=true})})},check_changes_in_active_form:functio
n(){if(this.active_form_has_changes){this.has_changes=true;$(".toolFormBody").find("form").each(function(){$(this).submit()});this.active_form_has_changes=false}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){parent.show_form_for_tool(a.form_html,a)}},layout:function(){this.check_changes_in_active_form();var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){l
evel_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this
.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></d
iv>");var h="<div><img height='16' align='middle' src='../images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img src='../images/delete_icon.png' />").click(function(b){g.destroy()}).hover(function(){$(this).attr("src","../images/delete_icon_dark.png")},function(){$(this).attr("src","../images/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o){var f=$(this).
offsetParent().offset(),b=o.offsetX-f.left,p=o.offsetY-f.top;$(this).css({left:b,top:p});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.c
ss("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(g){var h=$(this).offset();var f=b.cc.position();c=f.top-h.top;d=f.left-h.left}).bind("drag",function(f){a(f.offsetX+d,f.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_n
odes();b.draw_overview()});this.ov.bind("drag",function(k){var j=b.cc.width(),g=b.cc.height(),f=b.oc.width(),h=b.oc.height(),i=$(this).offsetParent().offset(),m=k.offsetX-i.left,l=k.offsetY-i.top;a(-(m/f*j),-(l/h*g))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g){var i=$(this).offsetParent();var h=i.offset();var f=Math.max(i.width()-(g.offsetX-h.left),i.height()-(g.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(f){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var
g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;$.each(workflow.nodes,function(t,q){var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/packed/jquery.jstore-all.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/packed/jquery.jstore-all.js Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,41 @@
+/*
+ * jStore - Persistent Client-Side Storage
+ *
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ *
+ * Dual licensed under:
+ * MIT: http://www.opensource.org/licenses/mit-license.php
+ * GPLv3: http://www.opensource.org/licenses/gpl-3.0.html
+ */
+(function(){var a=false,b=/xyz/.test(function(){xyz})?/\b_super\b/:/.*/;this.Class=function(){};Class.extend=function(g){var f=this.prototype;a=true;var e=new this();a=false;for(var d in g){e[d]=typeof g[d]=="function"&&typeof f[d]=="function"&&b.test(g[d])?(function(h,i){return function(){var k=this._super;this._super=f[h];var j=i.apply(this,arguments);this._super=k;return j}})(d,g[d]):g[d]}function c(){if(!a&&this.init){this.init.apply(this,arguments)}}c.prototype=e;c.constructor=c;c.extend=arguments.callee;return c}})();
+/*
+ * jStore Delegate Framework
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(a){this.jStoreDelegate=Class.extend({init:function(b){this.parent=b;this.callbacks={}},bind:function(b,c){if(!a.isFunction(c)){return this}if(!this.callbacks[b]){this.callbacks[b]=[]}this.callbacks[b].push(c);return this},trigger:function(){var d=this.parent,c=[].slice.call(arguments),e=c.shift(),b=this.callbacks[e];if(!b){return false}a.each(b,function(){this.apply(d,c)});return this}})})(jQuery);(function(a){a.jStore={};a.extend(a.jStore,{EngineOrder:[],Availability:{},Engines:{},Instances:{},CurrentEngine:null,defaults:{project:null,engine:null,autoload:true,flash:"jStore.Flash.html"},isReady:false,isFlashReady:false,delegate:new jStoreDelegate(a.jStore).bind("jStore-ready",function(b){a.jStore.isReady=true;if(a.jStore.defaults.autoload){b.connect()}}).bind("flash-ready",function(){a.jStore.isFlashReady=true})});a.jStore.ready=function(b){if(a.jStore.isReady){b.apply(a.jStore,[a.jStore.CurrentEngine])}else{a.jStore.delegate.bind("jStore-ready",b)}};a.jStore.fail
=function(b){a.jStore.delegate.bind("jStore-failure",b)};a.jStore.flashReady=function(b){if(a.jStore.isFlashReady){b.apply(a.jStore,[a.jStore.CurrentEngine])}else{a.jStore.delegate.bind("flash-ready",b)}};a.jStore.use=function(d,g,c){g=g||a.jStore.defaults.project||location.hostname.replace(/\./g,"-")||"unknown";var f=a.jStore.Engines[d.toLowerCase()]||null,b=(c?c+".":"")+g+"."+d;if(!f){throw"JSTORE_ENGINE_UNDEFINED"}f=new f(g,b);if(a.jStore.Instances[b]){throw"JSTORE_JRI_CONFLICT"}if(f.isAvailable()){a.jStore.Instances[b]=f;if(!a.jStore.CurrentEngine){a.jStore.CurrentEngine=f}a.jStore.delegate.trigger("jStore-ready",f)}else{if(!f.autoload){throw"JSTORE_ENGINE_UNAVILABLE"}else{f.included(function(){if(this.isAvailable()){a.jStore.Instances[b]=this;if(!a.jStore.CurrentEngine){a.jStore.CurrentEngine=this}a.jStore.delegate.trigger("jStore-ready",this)}else{a.jStore.delegate.trigger("jStore-failure",this)}}).include()}}};a.jStore.setCurrentEngine=function(b){if(!a.jStore.Instanc
es.length){return a.jStore.FindEngine()}if(!b&&a.jStore.Instances.length>=1){a.jStore.delegate.trigger("jStore-ready",a.jStore.Instances[0]);return a.jStore.CurrentEngine=a.jStore.Instances[0]}if(b&&a.jStore.Instances[b]){a.jStore.delegate.trigger("jStore-ready",a.jStore.Instances[b]);return a.jStore.CurrentEngine=a.jStore.Instances[b]}throw"JSTORE_JRI_NO_MATCH"};a.jStore.FindEngine=function(){a.each(a.jStore.EngineOrder,function(b){if(a.jStore.Availability[this]()){a.jStore.use(this,a.jStore.defaults.project,"default");return false}})};a.jStore.store=function(b,c){if(!a.jStore.CurrentEngine){return false}if(!c){return a.jStore.CurrentEngine.get(b)}return a.jStore.CurrentEngine.set(b,c)};a.jStore.remove=function(b){if(!a.jStore.CurrentEngine){return false}return a.jStore.CurrentEngine.rem(b)};a.fn.store=function(c,d){if(!a.jStore.CurrentEngine){return this}var b=a.jStore.store(c,d);return !d?b:this};a.fn.removeStore=function(b){a.jStore.remove(b);return this};a.jStore.load=f
unction(){if(a.jStore.defaults.engine){return a.jStore.use(a.jStore.defaults.engine,a.jStore.defaults.project,"default")}try{a.jStore.FindEngine()}catch(b){}}})(jQuery);(function(a){this.StorageEngine=Class.extend({init:function(c,b){this.project=c;this.jri=b;this.data={};this.limit=-1;this.includes=[];this.delegate=new jStoreDelegate(this).bind("engine-ready",function(){this.isReady=true}).bind("engine-included",function(){this.hasIncluded=true});this.autoload=false;this.isReady=false;this.hasIncluded=false},include:function(){var b=this,d=this.includes.length,c=0;a.each(this.includes,function(){a.ajax({type:"get",url:this,dataType:"script",cache:true,success:function(){c++;if(c==d){b.delegate.trigger("engine-included")}}})})},isAvailable:function(){return false},ready:function(b){if(this.isReady){b.apply(this)}else{this.delegate.bind("engine-ready",b)}return this},included:function(b){if(this.hasIncluded){b.apply(this)}else{this.delegate.bind("engine-included",b)}return th
is},get:function(b){return this.data[b]||null},set:function(b,c){this.data[b]=c;return c},rem:function(b){var c=this.data[b];this.data[b]=null;return c}})})(jQuery);
+/*
+ * jStore DOM Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(c){var b=c.jStore.Availability.session=function(){return !!window.sessionStorage},a=c.jStore.Availability.local=function(){return !!(window.localStorage||window.globalStorage)};this.jStoreDom=StorageEngine.extend({init:function(e,d){this._super(e,d);this.type="DOM";this.limit=5*1024*1024},connect:function(){this.delegate.trigger("engine-ready")},get:function(e){var d=this.db.getItem(e);return d&&d.value?d.value:d},set:function(d,e){this.db.setItem(d,e);return e},rem:function(e){var d=this.get(e);this.db.removeItem(e);return d}});this.jStoreLocal=jStoreDom.extend({connect:function(){this.db=!window.globalStorage?window.localStorage:window.globalStorage[location.hostname];this._super()},isAvailable:a});this.jStoreSession=jStoreDom.extend({connect:function(){this.db=sessionStorage;this._super()},isAvailable:b});c.jStore.Engines.local=jStoreLocal;c.jStore.Engines.session=jStoreSession;c.jStore.EngineOrder[1]="local"})(jQuery);
+/*
+ * jStore Flash Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ * jStore.swf Copyright (c) 2008 Daniel Bulli (http://www.nuff-respec.com)
+ */
+(function(b){var a=b.jStore.Availability.flash=function(){return !!(b.jStore.hasFlash("8.0.0"))};this.jStoreFlash=StorageEngine.extend({init:function(e,d){this._super(e,d);this.type="Flash";var c=this;b.jStore.flashReady(function(){c.flashReady()})},connect:function(){var c="jstore-flash-embed-"+this.project;b(document.body).append('<iframe style="height:1px;width:1px;position:absolute;left:0;top:0;margin-left:-100px;" id="jStoreFlashFrame" src="'+b.jStore.defaults.flash+'"></iframe>')},flashReady:function(f){var c=b("#jStoreFlashFrame")[0];if(c.Document&&b.isFunction(c.Document.jStoreFlash.f_get_cookie)){this.db=c.Document.jStoreFlash}else{if(c.contentWindow&&c.contentWindow.document){var d=c.contentWindow.document;if(b.isFunction(b("object",b(d))[0].f_get_cookie)){this.db=b("object",b(d))[0]}else{if(b.isFunction(b("embed",b(d))[0].f_get_cookie)){this.db=b("embed",b(d))[0]}}}}if(this.db){this.delegate.trigger("engine-ready")}},isAvailable:a,get:function(d){var c=this.db.f_g
et_cookie(d);return c=="null"?null:c},set:function(c,d){this.db.f_set_cookie(c,d);return d},rem:function(c){var d=this.get(c);this.db.f_delete_cookie(c);return d}});b.jStore.Engines.flash=jStoreFlash;b.jStore.EngineOrder[2]="flash";b.jStore.hasFlash=function(c){var e=b.jStore.flashVersion().match(/\d+/g),f=c.match(/\d+/g);for(var d=0;d<3;d++){e[d]=parseInt(e[d]||0);f[d]=parseInt(f[d]||0);if(e[d]<f[d]){return false}if(e[d]>f[d]){return true}}return true};b.jStore.flashVersion=function(){try{try{var c=new ActiveXObject("ShockwaveFlash.ShockwaveFlash.6");try{c.AllowScriptAccess="always"}catch(d){return"6,0,0"}}catch(d){}return new ActiveXObject("ShockwaveFlash.ShockwaveFlash").GetVariable("$version").replace(/\D+/g,",").match(/^,?(.+),?$/)[1]}catch(d){try{if(navigator.mimeTypes["application/x-shockwave-flash"].enabledPlugin){return(navigator.plugins["Shockwave Flash 2.0"]||navigator.plugins["Shockwave Flash"]).description.replace(/\D+/g,",").match(/^,?(.+),?$/)[1]}}catch(d){}}r
eturn"0,0,0"}})(jQuery);function flash_ready(){$.jStore.delegate.trigger("flash-ready")}
+/*
+ * jStore Google Gears Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(b){var a=b.jStore.Availability.gears=function(){return !!(window.google&&window.google.gears)};this.jStoreGears=StorageEngine.extend({init:function(d,c){this._super(d,c);this.type="Google Gears";this.includes.push("http://code.google.com/apis/gears/gears_init.js");this.autoload=true},connect:function(){var c=this.db=google.gears.factory.create("beta.database");c.open("jstore-"+this.project);c.execute("CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)");this.updateCache()},updateCache:function(){var c=this.db.execute("SELECT k,v FROM jstore");while(c.isValidRow()){this.data[c.field(0)]=c.field(1);c.next()}c.close();this.delegate.trigger("engine-ready")},isAvailable:a,set:function(d,e){var c=this.db;c.execute("BEGIN");c.execute("INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)",[d,e]);c.execute("COMMIT");return this._super(d,e)},rem:function(d){var c=this.db;c.execute("BEGIN");c.execute("DELETE FROM jstore WHERE k = ?",[d]);c.ex
ecute("COMMIT");return this._super(d)}});b.jStore.Engines.gears=jStoreGears;b.jStore.EngineOrder[3]="gears"})(jQuery);
+/*
+ * jStore HTML5 Specification Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(b){var a=b.jStore.Availability.html5=function(){return !!window.openDatabase};this.jStoreHtml5=StorageEngine.extend({init:function(d,c){this._super(d,c);this.type="HTML5";this.limit=1024*200},connect:function(){var c=this.db=openDatabase("jstore-"+this.project,"1.0",this.project,this.limit);if(!c){throw"JSTORE_ENGINE_HTML5_NODB"}c.transaction(function(d){d.executeSql("CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)")});this.updateCache()},updateCache:function(){var c=this;this.db.transaction(function(d){d.executeSql("SELECT k,v FROM jstore",[],function(f,e){var h=e.rows,g=0,j;for(;g<h.length;++g){j=h.item(g);c.data[j.k]=j.v}c.delegate.trigger("engine-ready")})})},isAvailable:a,set:function(c,d){this.db.transaction(function(e){e.executeSql("INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)",[c,d])});return this._super(c,d)},rem:function(c){this.db.transaction(function(d){d.executeSql("DELETE FROM jstore WHERE k = ?",[c])});ret
urn this._super(c)}});b.jStore.Engines.html5=jStoreHtml5;b.jStore.EngineOrder[0]="html5"})(jQuery);
+/**
+ * jStore IE Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(b){var a=b.jStore.Availability.ie=function(){return !!window.ActiveXObject};this.jStoreIE=StorageEngine.extend({init:function(d,c){this._super(d,c);this.type="IE";this.limit=64*1024},connect:function(){this.db=b('<div style="display:none;behavior:url(\'#default#userData\')" id="jstore-'+this.project+'"></div>').appendTo(document.body).get(0);this.delegate.trigger("engine-ready")},isAvailable:a,get:function(c){this.db.load(this.project);return this.db.getAttribute(c)},set:function(c,d){this.db.setAttribute(c,d);this.db.save(this.project);return d},rem:function(c){var d=this.get(c);this.db.removeAttribute(c);this.db.save(this.project);return d}});b.jStore.Engines.ie=jStoreIE;b.jStore.EngineOrder[4]="ie"})(jQuery);
\ No newline at end of file
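
The packed file above reduces to a small public surface: let jStore probe its engines in preference order (HTML5 database, DOM local storage, flash, gears, IE userData), then read and write keys once an engine reports ready. A minimal sketch with hypothetical project and key names:

    $.jStore.defaults.project = 'galaxy';  // hypothetical project name
    $.jStore.load();                       // probe engines, fires "jStore-ready"

    $.jStore.ready(function (engine) {
        // engine is the StorageEngine instance that won the probe
        $.jStore.store('left_panel', 'collapsed'); // set a value
        var state = $.jStore.store('left_panel');  // get it back (no 2nd argument)
        $.jStore.remove('left_panel');             // delete the key
    });

    $.jStore.fail(function () {
        // no persistent engine could connect; fall back to plain cookies
    });
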
diff -r 881dd4c2de9f -r 070cf5f6f928 static/scripts/packed/json2.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/packed/json2.js Thu Aug 20 11:39:32 2009 -0400
@@ -0,0 +1,1 @@
+var JSON=JSON||{};(function(){function f(n){return n<10?"0"+n:n}if(typeof Date.prototype.toJSON!=="function"){Date.prototype.toJSON=function(key){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null};String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(key){return this.valueOf()}}var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;function quote(string){escapable.lastIndex=0;return escapable.test(string)?'"'+string.replace(escapable,function(a){var c=meta[a];return typeof c==="string"?c:"\\u"+("0000"+a.charCodeAt(0)
.toString(16)).slice(-4)})+'"':'"'+string+'"'}function str(key,holder){var i,k,v,length,mind=gap,partial,value=holder[key];if(value&&typeof value==="object"&&typeof value.toJSON==="function"){value=value.toJSON(key)}if(typeof rep==="function"){value=rep.call(holder,key,value)}switch(typeof value){case"string":return quote(value);case"number":return isFinite(value)?String(value):"null";case"boolean":case"null":return String(value);case"object":if(!value){return"null"}gap+=indent;partial=[];if(Object.prototype.toString.apply(value)==="[object Array]"){length=value.length;for(i=0;i<length;i+=1){partial[i]=str(i,value)||"null"}v=partial.length===0?"[]":gap?"[\n"+gap+partial.join(",\n"+gap)+"\n"+mind+"]":"["+partial.join(",")+"]";gap=mind;return v}if(rep&&typeof rep==="object"){length=rep.length;for(i=0;i<length;i+=1){k=rep[i];if(typeof k==="string"){v=str(k,value);if(v){partial.push(quote(k)+(gap?": ":":")+v)}}}}else{for(k in value){if(Object.hasOwnProperty.call(value,k)){v=str(
k,value);if(v){partial.push(quote(k)+(gap?": ":":")+v)}}}}v=partial.length===0?"{}":gap?"{\n"+gap+partial.join(",\n"+gap)+"\n"+mind+"}":"{"+partial.join(",")+"}";gap=mind;return v}}if(typeof JSON.stringify!=="function"){JSON.stringify=function(value,replacer,space){var i;gap="";indent="";if(typeof space==="number"){for(i=0;i<space;i+=1){indent+=" "}}else{if(typeof space==="string"){indent=space}}rep=replacer;if(replacer&&typeof replacer!=="function"&&(typeof replacer!=="object"||typeof replacer.length!=="number")){throw new Error("JSON.stringify")}return str("",{"":value})}}if(typeof JSON.parse!=="function"){JSON.parse=function(text,reviver){var j;function walk(holder,key){var k,v,value=holder[key];if(value&&typeof value==="object"){for(k in value){if(Object.hasOwnProperty.call(value,k)){v=walk(value,k);if(v!==undefined){value[k]=v}else{delete value[k]}}}}return reviver.call(holder,key,value)}cx.lastIndex=0;if(cx.test(text)){text=text.replace(cx,function(a){return"\\u"+("000
0"+a.charCodeAt(0).toString(16)).slice(-4)})}if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver==="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")}}}());
\ No newline at end of file
diff -r 881dd4c2de9f -r 070cf5f6f928 templates/admin/center.mako
--- a/templates/admin/center.mako Thu Aug 20 10:52:08 2009 -0400
+++ b/templates/admin/center.mako Thu Aug 20 11:39:32 2009 -0400
@@ -17,7 +17,7 @@
<li>
<strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
each group (both private and non-private roles). Non-private roles include a link to a page that allows you to manage the users
- and groups that are associated with the role. The page also includes a view of the library datasets that are associated with the
+ and groups that are associated with the role. The page also includes a view of the data library datasets that are associated with the
role and the permissions applied to each dataset.
</li>
<p/>
@@ -32,9 +32,9 @@
<p/>
<ul>
<li>
- <strong>Manage libraries</strong> - Dataset libraries enable a Galaxy administrator to upload datasets into a library. Currently,
- only administrators can create dataset libraries, but permission to perform the following functions on the library can be granted to
- users (a library item is one of: a library, a library folder, a library dataset).
+ <strong>Manage data libraries</strong> - Data libraries enable a Galaxy administrator to upload datasets into a data library. Currently,
+ only administrators can create data libraries, but permission to perform the following functions on the data library can be granted to
+ users (a library item is one of: a data library, a library folder, a library dataset).
<p/>
<ul>
<li><strong>add library item</strong> - Role members can add library items to this library or folder</li>
@@ -42,12 +42,12 @@
<li><strong>manage library permissions</strong> - Role members can manage permissions applied to this library item</li>
</ul>
<p/>
- The default behavior is for no permissions to be applied to a library item, but applied permissions are inherited downward, so it is
- important to set desired permissions on a new library when it is created. When this is done, new folders and datasets added to the
- library will automatically inherit those permissions. In the same way, permissions can be applied to a folder, which will be
+ The default behavior is for no permissions to be applied to a data library item, but applied permissions are inherited downward, so it is
+ important to set desired permissions on a new data library when it is created. When this is done, new folders and datasets added to the
+ data library will automatically inherit those permissions. In the same way, permissions can be applied to a folder, which will be
automatically inherited by all contained datasets and sub-folders.
<p/>
- The "Libraries" menu item allows users to access the datasets in a library as long as they are not restricted from accessing them.
+ The "Data Libraries" menu item allows users to access the datasets in a data library as long as they are not restricted from accessing them.
Importing a library dataset into a history will not make a copy of the dataset, but will be a "pointer" to the dataset on disk. This
approach allows for multiple users to use a single (possibly very large) dataset file.
</li>
@@ -72,7 +72,7 @@
</ul>
</li>
</ul>
-<p><strong>Data Security and Dataset Libraries</strong></p>
+<p><strong>Data Security and Data Libraries</strong></p>
<p/>
<strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
@@ -121,8 +121,8 @@
<strong>access</strong> - users associated with the role can import this dataset into their history for analysis.
<p>
If no roles with the "access" permission are associated with a dataset, the dataset is "public" and may be accessed by
- anyone. Public library datasets will be accessible to all users (as well as anyone not logged in during a Galaxy session)
- from the list of libraries displayed when the "Libraries" menu item is selected.
+ anyone. Public data library datasets will be accessible to all users (as well as anyone not logged in during a Galaxy session)
+ from the list of data libraries displayed when the "Data Libraries" menu item is selected.
</p>
<p>
Associating a dataset with a role that includes the "access" permission restricts the set of users that can access it.
diff -r 881dd4c2de9f -r 070cf5f6f928 templates/admin/dataset_security/role.mako
--- a/templates/admin/dataset_security/role.mako Thu Aug 20 10:52:08 2009 -0400
+++ b/templates/admin/dataset_security/role.mako Thu Aug 20 11:39:32 2009 -0400
@@ -84,7 +84,7 @@
<br clear="left"/>
<br/>
%if len( library_dataset_actions ) > 0:
- <h3>Library datasets associated with role '${role.name}'</h3>
+ <h3>Data library datasets associated with role '${role.name}'</h3>
<table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
<tr>
<td>
diff -r 881dd4c2de9f -r 070cf5f6f928 templates/admin/index.mako
--- a/templates/admin/index.mako Thu Aug 20 10:52:08 2009 -0400
+++ b/templates/admin/index.mako Thu Aug 20 11:39:32 2009 -0400
@@ -89,7 +89,7 @@
</div>
<div class="toolSectionBody">
<div class="toolSectionBg">
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='browse_libraries' )}" target="galaxy_main">Manage libraries</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
</div>
</div>
<div class="toolSectionPad"></div>
diff -r 881dd4c2de9f -r 070cf5f6f928 templates/admin/library/browse_libraries.mako
--- a/templates/admin/library/browse_libraries.mako Thu Aug 20 10:52:08 2009 -0400
+++ b/templates/admin/library/browse_libraries.mako Thu Aug 20 11:39:32 2009 -0400
@@ -1,19 +1,19 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
-<%def name="title()">Browse Libraries</%def>
+<%def name="title()">Browse Data Libraries</%def>
<h2>
%if deleted:
Deleted
%endif
- Libraries
+ Data Libraries
</h2>
<ul class="manage-table-actions">
%if not deleted:
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='library', new=True )}"><span>Create a new library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='library', new=True )}"><span>Create a new data library</span></a>
</li>
<li>
<a class="action-button" href="${h.url_for( controller='admin', action='deleted_libraries' )}"><span>Manage deleted libraries</span></a>
diff -r 881dd4c2de9f -r 070cf5f6f928 templates/admin/library/browse_library.mako
--- a/templates/admin/library/browse_library.mako Thu Aug 20 10:52:08 2009 -0400
+++ b/templates/admin/library/browse_library.mako Thu Aug 20 11:39:32 2009 -0400
@@ -162,16 +162,16 @@
%if deleted:
Deleted
%endif
- Library '${library.name}'
+ Data Library '${library.name}'
</h2>
<ul class="manage-table-actions">
%if not deleted:
<li>
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/e66e1e99183c
changeset: 2585:e66e1e99183c
user: rc
date: Fri Aug 14 15:47:13 2009 -0400
description:
First pass of the functional tests for forms & requests
14 file(s) affected in this change:
lib/galaxy/web/controllers/admin.py
lib/galaxy/web/controllers/forms.py
lib/galaxy/web/controllers/requests.py
lib/galaxy/web/controllers/requests_admin.py
lib/galaxy/web/controllers/user.py
lib/galaxy/web/framework/__init__.py
templates/admin/forms/edit_form.mako
templates/admin/requests/add_states.mako
templates/admin/requests/grid.mako
templates/admin/samples/bar_codes.mako
templates/admin/samples/events.mako
templates/base_panels.mako
templates/requests/grid.mako
test/base/twilltestcase.py
diffs (493 lines):
diff -r f3d25adcace6 -r e66e1e99183c lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Wed Aug 12 10:31:33 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Fri Aug 14 15:47:13 2009 -0400
@@ -2011,7 +2011,7 @@
num_states = int( util.restore_text( params.get( 'num_states', 0 ) ))
proceed = True
for i in range( num_states ):
- if not util.restore_text( params.get( 'new_element_name_%i' % i, None ) ):
+ if not util.restore_text( params.get( 'state_name_%i' % i, None ) ):
proceed = False
break
if not proceed:
@@ -2029,8 +2029,8 @@
ss.delete()
ss.flush()
for i in range( num_states ):
- name = util.restore_text( params.get( 'new_element_name_%i' % i, None ))
- desc = util.restore_text( params.get( 'new_element_description_%i' % i, None ))
+ name = util.restore_text( params.get( 'state_name_%i' % i, None ))
+ desc = util.restore_text( params.get( 'state_desc_%i' % i, None ))
ss = trans.app.model.SampleState(name, desc, rt)
ss.flush()
msg = "The new request type named '%s' with %s state(s) has been created" % (rt.name, num_states)
diff -r f3d25adcace6 -r e66e1e99183c lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Wed Aug 12 10:31:33 2009 -0400
+++ b/lib/galaxy/web/controllers/forms.py Fri Aug 14 15:47:13 2009 -0400
@@ -175,7 +175,7 @@
'visible': True,
'required': False,
'type': BaseField.form_field_types()[0],
- 'selectlist': '' }
+ 'selectlist': [] }
self.current_form['fields'].append(empty_field)
def __get_field(self, index, **kwd):
params = util.Params( kwd )
@@ -183,10 +183,10 @@
# To reproduce, create a new form, click the "add field" button, click the
# browser back arrow, then click the "add field" button again.
# You should never attempt to "restore_text()" on a None object...
- name = util.restore_text( params.get( 'field_name_%i' % index, None ) )
- helptext = util.restore_text( params.get( 'field_helptext_%i' % index, None ) )
+ name = util.restore_text( params.get( 'field_name_%i' % index, '' ) )
+ helptext = util.restore_text( params.get( 'field_helptext_%i' % index, '' ) )
required = params.get( 'field_required_%i' % index, False )
- field_type = util.restore_text( params.get( 'field_type_%i' % index, None ) )
+ field_type = util.restore_text( params.get( 'field_type_%i' % index, '' ) )
if field_type == 'SelectField':
selectlist = self.__get_selectbox_options(index, **kwd)
return {'label': name,
@@ -434,7 +434,10 @@
if field_name in kwd:
# the user had already filled out this field and the same form is re-rendered
# due to some reason like required fields have been left out.
- value = util.restore_text( params.get( field_name, '' ) )
+ if field[ 'type' ] == 'CheckboxField':
+ value = CheckboxField.is_checked( util.restore_text( params.get( field_name, False ) ) )
+ else:
+ value = util.restore_text( params.get( field_name, '' ) )
elif contents:
# this field has a saved value
value = str(contents[ index ])
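The checkbox hunk above matters because browsers omit unchecked boxes from a POST and submit a marker value (such as 'true') for checked ones, so restoring the raw text would mis-render the field. A minimal stand-alone sketch of the pattern; the CheckboxField here is a simplified stand-in for the one in galaxy.web.form_builder:

    class CheckboxField:
        # Simplified stand-in: the real class also renders the <input> tag.
        @staticmethod
        def is_checked(value):
            return value in (True, 'True', 'true', 'on')

    params = {'field_0': 'true'}  # what a checked box posts back
    value = CheckboxField.is_checked(params.get('field_0', False))
    print(value)  # True, so the re-rendered form shows the box checked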
diff -r f3d25adcace6 -r e66e1e99183c lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Wed Aug 12 10:31:33 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Fri Aug 14 15:47:13 2009 -0400
@@ -63,6 +63,7 @@
request_grid = RequestsListGrid()
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def index( self, trans ):
return trans.fill_template( "requests/index.mako" )
@@ -75,6 +76,7 @@
authorized_libraries.append(library)
return authorized_libraries
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def list( self, trans, **kwargs ):
'''
List all request made by the current user
@@ -206,6 +208,7 @@
copy_list.add_option(s[0], i)
return copy_list
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def show_request(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -331,6 +334,7 @@
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def delete_sample(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -353,6 +357,7 @@
edit_mode=self.edit_mode)
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def toggle_request_details(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -389,6 +394,7 @@
select_reqtype.add_option(rt.name, rt.id)
return select_reqtype
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def new(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -581,6 +587,7 @@
request.flush()
return request
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def edit(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -742,10 +749,14 @@
request.flush()
kwd = {}
kwd['id'] = trans.security.encode_id(request.id)
+ kwd['status'] = 'done'
+ kwd['message'] = 'The request <b>%s</b> has been submitted.' % request.name
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
+ show_filter=trans.app.model.Request.states.SUBMITTED,
**kwd) )
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def submit_request(self, trans, **kwd):
params = util.Params( kwd )
try:
@@ -775,13 +786,16 @@
# change request's submitted field
request.state = request.states.SUBMITTED
request.flush()
- ## TODO
kwd['id'] = trans.security.encode_id(request.id)
+ kwd['status'] = 'done'
+ kwd['message'] = 'The request <b>%s</b> has been submitted.' % request.name
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
+ show_filter=trans.app.model.Request.states.SUBMITTED,
**kwd) )
@web.expose
+ @web.require_login( "create/submit sequencing requests" )
def show_events(self, trans, **kwd):
params = util.Params( kwd )
try:
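Each @web.require_login addition above guards a controller action so anonymous visitors get a clear error instead of reaching code that assumes trans.user is set. A rough sketch of the shape of such a decorator (illustrative only; Galaxy's real implementation lives in lib/galaxy/web/framework/__init__.py and also handles the redirect-to-login flow):

    def require_login(verb="perform this action"):
        def decorator(func):
            def wrapper(self, trans, *args, **kwargs):
                if not trans.user:
                    # The real decorator offers a login link; this stand-in
                    # only shows the control flow.
                    return trans.show_error_message(
                        "You must be logged in to %s." % verb)
                return func(self, trans, *args, **kwargs)
            return wrapper
        return decorator

    # usage, mirroring the hunks above:
    # @web.expose
    # @web.require_login( "create/submit sequencing requests" )
    # def new(self, trans, **kwd): ...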
diff -r f3d25adcace6 -r e66e1e99183c lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Aug 12 10:31:33 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Fri Aug 14 15:47:13 2009 -0400
@@ -172,6 +172,8 @@
@web.require_admin
def bar_codes(self, trans, **kwd):
params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
request_id = params.get( 'request_id', None )
if request_id:
request = trans.app.model.Request.get( int( request_id ))
@@ -192,7 +194,9 @@
bc))
return trans.fill_template( '/admin/samples/bar_codes.mako',
samples_list=[s for s in request.samples],
- user=request.user, request=request, widgets=widgets)
+ user=request.user, request=request, widgets=widgets,
+ messagetype=messagetype,
+ msg=msg)
@web.expose
@web.require_admin
def save_bar_codes(self, trans, **kwd):
@@ -256,9 +260,10 @@
sample.bar_code = bar_code
sample.flush()
return trans.response.send_redirect( web.url_for( controller='requests_admin',
- action='list',
- operation='show_request',
- id=trans.security.encode_id(request.id)) )
+ action='bar_codes',
+ request_id=request.id,
+ msg='Bar codes have been saved for this request',
+ messagetype='done'))
def __set_request_state(self, request):
# check if all the samples of the current request are in the final state
complete = True
diff -r f3d25adcace6 -r e66e1e99183c lib/galaxy/web/controllers/user.py
--- a/lib/galaxy/web/controllers/user.py Wed Aug 12 10:31:33 2009 -0400
+++ b/lib/galaxy/web/controllers/user.py Fri Aug 14 15:47:13 2009 -0400
@@ -229,6 +229,8 @@
def manage_addresses(self, trans, **kwd):
if trans.user:
params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
show_filter = util.restore_text( params.get( 'show_filter', 'Active' ) )
if show_filter == 'All':
addresses = [address for address in trans.user.addresses]
@@ -238,7 +240,9 @@
addresses = [address for address in trans.user.addresses if not address.deleted]
return trans.fill_template( 'user/address.mako',
addresses=addresses,
- show_filter=show_filter)
+ show_filter=show_filter,
+ msg=msg,
+ messagetype=messagetype)
else:
# User not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change your default permitted actions." )
diff -r f3d25adcace6 -r e66e1e99183c lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Wed Aug 12 10:31:33 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Fri Aug 14 15:47:13 2009 -0400
@@ -641,6 +641,7 @@
self.name = name
self.action = action
self.submit_text = submit_text
+ #self.submit_name = submit_text+"_button"
self.inputs = []
def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
diff -r f3d25adcace6 -r e66e1e99183c templates/admin/forms/edit_form.mako
--- a/templates/admin/forms/edit_form.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/admin/forms/edit_form.mako Fri Aug 14 15:47:13 2009 -0400
@@ -70,7 +70,7 @@
</%def>
<div class="toolForm">
- <div class="toolFormTitle">Edit form definition '${form.name}'</div>
+ <div class="toolFormTitle">Edit form definition "${form.name}"</div>
<form id="edit_form" name="edit_form" action="${h.url_for( controller='forms', action='edit', form_id=form.id, num_fields=len(form.fields) )}" method="post" >
%for label, input in form_details:
<div class="form-row">
diff -r f3d25adcace6 -r e66e1e99183c templates/admin/requests/add_states.mako
--- a/templates/admin/requests/add_states.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/admin/requests/add_states.mako Fri Aug 14 15:47:13 2009 -0400
@@ -12,9 +12,9 @@
%for element_count in range( num_states ):
<div class="form-row">
<label>${1+element_count}) State name:</label>
- <input type="text" name="new_element_name_${element_count}" value="" size="40"/>
+ <input type="text" name="state_name_${element_count}" value="" size="40"/>
<label>State help text (optional):</label>
- <input type="text" name="new_element_description_${element_count}" value="" size="40"/>
+ <input type="text" name="state_desc_${element_count}" value="" size="40"/>
</div>
<div style="clear: both"></div>
%endfor
diff -r f3d25adcace6 -r e66e1e99183c templates/admin/requests/grid.mako
--- a/templates/admin/requests/grid.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/admin/requests/grid.mako Fri Aug 14 15:47:13 2009 -0400
@@ -76,7 +76,7 @@
<div class="grid-header">
<h2>${grid.title}</h2>
- ##%if len(query.all()):
+ %if len(trans.user.requests):
##<span class="title">Filter:</span>
%for i, filter in enumerate( grid.standard_filters ):
%if i > 0:
@@ -96,7 +96,7 @@
%endif
%endif
%endfor
- ##%endif
+ %endif
</div>
diff -r f3d25adcace6 -r e66e1e99183c templates/admin/samples/bar_codes.mako
--- a/templates/admin/samples/bar_codes.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/admin/samples/bar_codes.mako Fri Aug 14 15:47:13 2009 -0400
@@ -40,7 +40,7 @@
</tbody>
</table>
<div class="form-row">
- <input type="submit" name="save_new_sample_type" value="Save"/>
+ <input type="submit" name="save_bar_codes" value="Save"/>
</div>
</form>
</div>
\ No newline at end of file
diff -r f3d25adcace6 -r e66e1e99183c templates/admin/samples/events.mako
--- a/templates/admin/samples/events.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/admin/samples/events.mako Fri Aug 14 15:47:13 2009 -0400
@@ -56,7 +56,7 @@
</div>
%endfor
<div class="form-row">
- <input type="submit" name="add_event" value="Save"/>
+ <input type="submit" name="add_event_button" value="Save"/>
</div>
</form>
</div>
diff -r f3d25adcace6 -r e66e1e99183c templates/base_panels.mako
--- a/templates/base_panels.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/base_panels.mako Fri Aug 14 15:47:13 2009 -0400
@@ -150,12 +150,12 @@
${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
- %if trans.request_types():
+ %if trans.user and trans.request_types():
<td class="tab">
<a>Lab</a>
<div class="submenu">
<ul>
- <li><a target="requests" href="${h.url_for( controller='requests', action='index' )}">Sequencing Requests</a></li>
+ <li><a href="${h.url_for( controller='requests', action='index' )}">Sequencing Requests</a></li>
</ul>
</div>
</td>
diff -r f3d25adcace6 -r e66e1e99183c templates/requests/grid.mako
--- a/templates/requests/grid.mako Wed Aug 12 10:31:33 2009 -0400
+++ b/templates/requests/grid.mako Fri Aug 14 15:47:13 2009 -0400
@@ -76,7 +76,7 @@
<div class="grid-header">
<h2>${grid.title}</h2>
- ##%if len(query.all()):
+ %if len(trans.user.requests):
##<span class="title">Filter:</span>
%for i, filter in enumerate( grid.standard_filters ):
%if i > 0:
@@ -88,7 +88,7 @@
<span class="filter"><a href="${h.url_for( controller='requests', action='list', show_filter=filter.label )}">${filter.label}</a></span>
%endif
%endfor
- ##%endif
+ %endif
</div>
<ul class="manage-table-actions">
@@ -98,7 +98,6 @@
<span>New request</span></a>
</li>
</ul>
-
%if not len(query.all()):
There are no request(s).
@@ -215,4 +214,4 @@
</tfoot>
</table>
</form>
-%endif
\ No newline at end of file
+%endif
diff -r f3d25adcace6 -r e66e1e99183c test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Aug 12 10:31:33 2009 -0400
+++ b/test/base/twilltestcase.py Fri Aug 14 15:47:13 2009 -0400
@@ -968,7 +968,7 @@
self.home()
# Form stuff
- def create_form( self, name='Form One', description='This is Form One', num_fields=1 ):
+ def create_form( self, name='Form One', desc='This is Form One', num_fields=1 ):
"""
Create a new form definition. Testing framework is still limited to only testing
one instance for each repeat. This has to do with the 'flat' nature of defining
@@ -984,7 +984,7 @@
self.visit_url( "%s/forms/new" % self.url )
self.check_page_for_string( 'Create a new form definition' )
tc.fv( "1", "name", name ) # form field 1 is the field named name...
- tc.fv( "1", "description", description ) # form field 1 is the field named name...
+ tc.fv( "1", "description", desc ) # form field 1 is the field named name...
tc.submit( "create_form_button" )
for index in range( num_fields ):
field_name = 'field_name_%i' % index
@@ -997,7 +997,119 @@
check_str = "The form '%s' has been updated with the changes." % name
self.check_page_for_string( check_str )
self.home()
-
+ def edit_form( self, form_id, form_name, new_form_name="Form One's Name (Renamed)", new_form_desc="This is Form One's description (Re-described)"):
+ """
+ Edit form details; name & description
+ """
+ self.home()
+ self.visit_url( "%s/forms/edit?form_id=%i&show_form=True" % (self.url, form_id) )
+ self.check_page_for_string( 'Edit form definition "%s"' % form_name )
+ tc.fv( "1", "name", new_form_name )
+ tc.fv( "1", "description", new_form_desc )
+ tc.submit( "save_changes_button" )
+ self.check_page_for_string( "The form '%s' has been updated with the changes." % new_form_name )
+ self.home()
+ def form_add_field( self, form_id, form_name, field_index, fields):
+ """
+ Add new fields to the form definition
+ """
+ self.home()
+ self.visit_url( "%s/forms/edit?form_id=%i&show_form=True" % (self.url, form_id) )
+ self.check_page_for_string( 'Edit form definition "%s"' % form_name)
+ for i, field in enumerate(fields):
+ index = i+field_index
+ tc.submit( "add_field_button" )
+ tc.fv( "1", "field_name_%i" % index, field['name'] )
+ tc.fv( "1", "field_helptext_%i" % index, field['desc'] )
+ tc.fv( "1", "field_type_%i" % index, field['type'] )
+ tc.fv( "1", "field_required_%i" % index, field['required'] )
+# if field['type'] == 'SelectField':
+# for option_index, option in enumerate(field['selectlist']):
+# self.visit_url( "%s/forms/edit?select_box_options=add&form_id=%i&field_index=%i" % \
+# (self.url, form_id, index))
+# #data = self.last_page()
+# #file( "rc.html", 'wb' ).write(data)
+# tc.fv( "1", "field_%i_option_%i" % (index, option_index), option )
+ tc.submit( "save_changes_button" )
+ check_str = "The form '%s' has been updated with the changes." % form_name
+ self.check_page_for_string( check_str )
+ self.home()
+ def form_remove_field( self, form_id, form_name, field_name):
+ """
+ Remove a field from the form definition
+ """
+ self.home()
+ self.visit_url( "%s/forms/edit?form_id=%i&show_form=True" % (self.url, form_id) )
+ self.check_page_for_string( 'Edit form definition "%s"' % form_name)
+ tc.submit( "remove_button" )
+ tc.submit( "save_changes_button" )
+ check_str = "The form '%s' has been updated with the changes." % form_name
+ self.check_page_for_string( check_str )
+ self.home()
+ # Requests stuff
+ def create_request_type( self, name, desc, request_form_id, sample_form_id, states ):
+ self.home()
+ self.visit_url( "%s/admin/request_type?create=True" % self.url )
+ self.check_page_for_string( 'Create a new request type' )
+ tc.fv( "1", "name", name )
+ tc.fv( "1", "description", desc )
+ tc.fv( "1", "request_form_id", request_form_id )
+ tc.fv( "1", "sample_form_id", sample_form_id )
+ tc.fv( "1", "num_states", str( len( states ) ) )
+ tc.submit( "define_states_button" )
+ self.check_page_for_string( "Create %i states for the '%s' request type" % ( len(states), name ))
+ for index, state in enumerate(states):
+ tc.fv("1", "state_name_%i" % index, state[0])
+ tc.fv("1", "state_desc_%i" % index, state[1])
+ tc.submit( "save_request_type" )
+ self.check_page_for_string( "Request type <b>%s</b> has been created" % name )
+ def create_request( self, request_type_id, name, desc, library_id, fields ):
+ self.home()
+ self.visit_url( "%s/requests/new?create=True&select_request_type=%i" % (self.url, request_type_id) )
+ self.check_page_for_string( 'Add a new request' )
+ tc.fv( "1", "name", name )
+ tc.fv( "1", "desc", desc )
+ tc.fv( "1", "library_id", str(library_id) )
+ for index, field_value in enumerate(fields):
+ tc.fv( "1", "field_%i" % index, field_value )
+ tc.submit( "create_request_button" )
+ def add_samples( self, request_id, request_name, samples ):
+ self.home()
+ self.visit_url( "%s/requests/list?sort=-create_time&operation=show_request&id=%s" % ( self.url, self.security.encode_id( request_id ) ))
+ self.check_page_for_string( 'Sequencing Request "%s"' % request_name )
+ for sample_index, sample in enumerate(samples):
+ tc.submit( "add_sample_button" )
+ sample_name, fields = sample
+ tc.fv( "1", "sample_%i_name" % sample_index, sample_name )
+ for field_index, field_value in enumerate(fields):
+ tc.fv( "1", "sample_%i_field_%i" % ( sample_index, field_index ), field_value )
+ tc.submit( "save_samples_button" )
+ def submit_request( self, request_id, request_name ):
+ self.home()
+ self.visit_url( "%s/requests/submit_request?id=%i" % ( self.url, request_id ))
+ self.check_page_for_string( 'The request <b>%s</b> has been submitted.' % request_name )
+ def add_bar_codes( self, request_id, request_name, bar_codes ):
+ self.home()
+ self.visit_url( "%s/requests_admin/bar_codes?request_id=%i" % (self.url, request_id) )
+ self.check_page_for_string( 'Bar codes for Samples of Request "%s"' % request_name )
+ for index, bar_code in enumerate(bar_codes):
+ tc.fv( "1", "sample_%i_bar_code" % index, bar_code )
+ tc.submit( "save_bar_codes" )
+ def change_sample_state( self, sample_name, sample_id, new_state_id, comment='' ):
+ self.home()
+ self.visit_url( "%s/requests_admin/show_events?sample_id=%i" % (self.url, sample_id) )
+ self.check_page_for_string( 'Events for Sample "%s"' % sample_name )
+ tc.fv( "1", "select_state", str(new_state_id) )
+ tc.fv( "1", "comment", comment )
+ tc.submit( "add_event_button" )
+ # Address stuff
+ def create_address( self, address ):
+ self.home()
+ self.visit_url( "%s/user/new_address" % self.url )
+ self.check_page_for_string( 'New address' )
+ for name, value in address.iteritems():
+ tc.fv( "1", name, value )
+ tc.submit( "Save_button" )
# Library stuff
def create_library( self, name='Library One', description='This is Library One' ):
"""Create a new library"""
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/98fa3b9cd980
changeset: 2586:98fa3b9cd980
user: rc
date: Thu Aug 20 10:46:52 2009 -0400
description:
Added functional tests for requests & forms
LIMS features:
- admin can now edit other users' requests
- admin can now submit requests on behalf of other users
11 file(s) affected in this change:
lib/galaxy/model/migrate/versions/0009_request_table.py
lib/galaxy/web/controllers/library.py
lib/galaxy/web/controllers/requests.py
lib/galaxy/web/controllers/requests_admin.py
templates/admin/requests/edit_request.mako
templates/admin/requests/grid.mako
templates/admin/requests/new_request.mako
templates/admin/requests/show_request.mako
templates/requests/grid.mako
templates/requests/show_request.mako
test/functional/test_forms_and_requests.py
diffs (1566 lines):
diff -r e66e1e99183c -r 98fa3b9cd980 lib/galaxy/model/migrate/versions/0009_request_table.py
--- a/lib/galaxy/model/migrate/versions/0009_request_table.py Fri Aug 14 15:47:13 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0009_request_table.py Thu Aug 20 10:46:52 2009 -0400
@@ -9,6 +9,7 @@
from migrate.changeset import *
import sys, logging
from galaxy.model.custom_types import *
+from sqlalchemy.exceptions import *
log = logging.getLogger( __name__ )
log.setLevel(logging.DEBUG)
diff -r e66e1e99183c -r 98fa3b9cd980 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Fri Aug 14 15:47:13 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Thu Aug 20 10:46:52 2009 -0400
@@ -1161,3 +1161,13 @@
edit_info=True,
msg=util.sanitize_text( msg ),
messagetype='done' ) )
+
+
+def get_authorized_libs(trans, user):
+ all_libraries = trans.app.model.Library.filter(trans.app.model.Library.table.c.deleted == False).order_by(trans.app.model.Library.name).all()
+ authorized_libraries = []
+ for library in all_libraries:
+ if trans.app.security_agent.allow_action(user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library) \
+ or trans.app.security_agent.allow_action(user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library):
+ authorized_libraries.append(library)
+ return authorized_libraries
\ No newline at end of file
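Moving get_authorized_libs to module level in library.py lets both request controllers share one permission check; the only difference is whose permissions are consulted. The two call sites, as they appear in the hunks below:

    # In the user-facing requests controller:
    libraries = get_authorized_libs(trans, trans.user)
    # In requests_admin, when acting on behalf of a selected user:
    libraries = get_authorized_libs(trans, user)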
diff -r e66e1e99183c -r 98fa3b9cd980 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Fri Aug 14 15:47:13 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Thu Aug 20 10:46:52 2009 -0400
@@ -9,6 +9,7 @@
from datetime import datetime, timedelta
from cgi import escape, FieldStorage
from galaxy.web.controllers.forms import get_form_widgets
+from galaxy.web.controllers.library import get_authorized_libs
log = logging.getLogger( __name__ )
@@ -66,15 +67,7 @@
@web.require_login( "create/submit sequencing requests" )
def index( self, trans ):
return trans.fill_template( "requests/index.mako" )
-
- def get_authorized_libs(self, trans):
- all_libraries = trans.app.model.Library.filter(trans.app.model.Library.table.c.deleted == False).order_by(trans.app.model.Library.name).all()
- authorized_libraries = []
- for library in all_libraries:
- if trans.app.security_agent.allow_action(trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library) \
- or trans.app.security_agent.allow_action(trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=library):
- authorized_libraries.append(library)
- return authorized_libraries
+
@web.expose
@web.require_login( "create/submit sequencing requests" )
def list( self, trans, **kwargs ):
@@ -140,7 +133,6 @@
Shows the request details
'''
request = trans.app.model.Request.get(id)
- libraries = self.get_authorized_libs(trans)
# list of widgets to be rendered on the request form
request_details = []
# main details
@@ -424,8 +416,8 @@
if params.get('create_request_button', False) == 'Save':
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
- msg=msg ,
- messagetype='done') )
+ message=msg ,
+ status='done') )
elif params.get('create_request_samples_button', False) == 'Add samples':
new_kwd = {}
new_kwd['id'] = trans.security.encode_id(request.id)
@@ -468,7 +460,7 @@
helptext='(Optional)'))
# libraries selectbox
- libraries = self.get_authorized_libs(trans)
+ libraries = get_authorized_libs(trans, trans.user)
libui = self.__library_ui(libraries, **kwd)
widgets = widgets + libui
widgets = widgets + get_form_widgets(trans, request_type.request_form, contents=[], **kwd)
@@ -517,8 +509,8 @@
Validates the request entered by the user
'''
empty_fields = []
- if not request.library:
- empty_fields.append('Library')
+# if not request.library:
+# empty_fields.append('Library')
# check rest of the fields of the form
for index, field in enumerate(request.type.request_form.fields):
if field['required'] == 'required' and request.values.content[index] in ['', None]:
@@ -664,7 +656,7 @@
helptext='(Optional)'))
# libraries selectbox
- libraries = self.get_authorized_libs(trans)
+ libraries = get_authorized_libs(trans, trans.user)
libui = self.__library_ui(libraries, request, **kwd)
widgets = widgets + libui
widgets = widgets + get_form_widgets(trans, request.type.request_form, request.values.content, **kwd)
@@ -700,6 +692,8 @@
kwd['id'] = trans.security.encode_id(request.id)
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
+ status='done',
+ message='The request <b>%s</b> has been deleted.' % request.name,
**kwd) )
def __undelete_request(self, trans, id):
try:
@@ -719,6 +713,8 @@
kwd['id'] = trans.security.encode_id(request.id)
return trans.response.send_redirect( web.url_for( controller='requests',
action='list',
+ status='done',
+ message='The request <b>%s</b> has been undeleted.' % request.name,
**kwd) )
def __submit(self, trans, id):
try:
diff -r e66e1e99183c -r 98fa3b9cd980 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Fri Aug 14 15:47:13 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Thu Aug 20 10:46:52 2009 -0400
@@ -7,19 +7,20 @@
import logging, tempfile, zipfile, tarfile, os, sys
from galaxy.web.form_builder import *
from datetime import datetime, timedelta
+from galaxy.web.controllers.forms import get_form_widgets
+from galaxy.web.controllers.library import get_authorized_libs
log = logging.getLogger( __name__ )
-
-# States for passing messages
-SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
class RequestsListGrid( grids.Grid ):
title = "Sequencing Requests"
model_class = model.Request
default_sort_key = "-create_time"
+ show_filter = model.Request.states.SUBMITTED
columns = [
grids.GridColumn( "Name", key="name",
- link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) )),
+ link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ),
+ attach_popup=True ),
grids.GridColumn( "Description", key="desc"),
grids.GridColumn( "Sample(s)", method='number_of_samples',
link=( lambda item: iff( item.deleted, None, dict( operation="show_request", id=item.id ) ) ), ),
@@ -30,15 +31,18 @@
]
operations = [
-# grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
-# grids.GridOperation( "Samples", allow_multiple=False, condition=( lambda item: not item.deleted ) ),
-# grids.GridOperation( "Delete", condition=( lambda item: not item.deleted ) ),
-# grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
+ grids.GridOperation( "Submit", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() and item.samples ) ),
+ grids.GridOperation( "Edit", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() ) ),
+ grids.GridOperation( "Delete", allow_multiple=False, condition=( lambda item: not item.deleted and item.unsubmitted() ) ),
+ grids.GridOperation( "Undelete", condition=( lambda item: item.deleted ) ),
]
standard_filters = [
+ grids.GridColumnFilter( model.Request.states.UNSUBMITTED,
+ args=dict( state=model.Request.states.UNSUBMITTED, deleted=False ) ),
grids.GridColumnFilter( model.Request.states.SUBMITTED,
args=dict( state=model.Request.states.SUBMITTED, deleted=False ) ),
grids.GridColumnFilter( model.Request.states.COMPLETE, args=dict( state=model.Request.states.COMPLETE, deleted=False ) ),
+ grids.GridColumnFilter( "Deleted", args=dict( deleted=True ) ),
grids.GridColumnFilter( "All", args=dict( deleted=False ) )
]
def get_user(self, trans, request):
@@ -48,9 +52,9 @@
def get_request_type(self, trans, request):
request_type = trans.app.model.RequestType.get(request.request_type_id)
return request_type.name
- def apply_default_filter( self, trans, query ):
- return query.filter(or_(self.model_class.state==self.model_class.states.SUBMITTED,
- self.model_class.state==self.model_class.states.COMPLETE))
+# def apply_default_filter( self, trans, query ):
+# return query.filter(or_(self.model_class.state==self.model_class.states.SUBMITTED,
+# self.model_class.state==self.model_class.states.COMPLETE))
def number_of_samples(self, trans, request):
return str(len(request.samples))
@@ -75,14 +79,256 @@
if operation == "show_request":
id = trans.security.decode_id(kwargs['id'])
return self.__show_request(trans, id)
-
+ elif operation == "submit":
+ id = trans.security.decode_id(kwargs['id'])
+ return self.__submit(trans, id)
+ elif operation == "edit":
+ id = trans.security.decode_id(kwargs['id'])
+ return self.__edit_request(trans, id)
+ elif operation == "delete":
+ id = trans.security.decode_id(kwargs['id'])
+ return self.__delete_request(trans, id)
+ elif operation == "undelete":
+ id = trans.security.decode_id(kwargs['id'])
+ return self.__undelete_request(trans, id)
if 'show_filter' in kwargs.keys():
if kwargs['show_filter'] == 'All':
- self.request_grid.default_filter = dict(deleted=False)
+ self.request_grid.default_filter = {}
+ elif kwargs['show_filter'] == 'Deleted':
+ self.request_grid.default_filter = dict(deleted=True)
else:
- self.request_grid.default_filter = dict(state=kwargs['show_filter'], deleted=False)
+ self.request_grid.default_filter = dict(state=kwargs['show_filter'], deleted=False)
+ self.request_grid.show_filter = kwargs.get('show_filter', trans.app.model.Request.states.SUBMITTED)
# Render the list view
return self.request_grid( trans, template='/admin/requests/grid.mako', **kwargs )
+ @web.expose
+ @web.require_admin
+ def edit(self, trans, **kwd):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ try:
+ request = trans.app.model.Request.get(int(params.get('request_id', None)))
+ except:
+ return trans.response.send_redirect( web.url_for( controller='requests',
+ action='list',
+ status='error',
+ message="Invalid request ID",
+ **kwd) )
+ if params.get('show', False) == 'True':
+ return self.__edit_request(trans, request.id, **kwd)
+ elif params.get('save_changes_request_button', False) == 'Save changes' \
+ or params.get('edit_samples_button', False) == 'Edit samples':
+ request_type = trans.app.model.RequestType.get(int(params.select_request_type))
+ if not util.restore_text(params.get('name', '')):
+ msg = 'Please enter the <b>Name</b> of the request'
+ kwd['messagetype'] = 'error'
+ kwd['msg'] = msg
+ kwd['show'] = 'True'
+ return trans.response.send_redirect( web.url_for( controller='requests',
+ action='edit',
+ **kwd) )
+ request = self.__save_request(trans, request, **kwd)
+ msg = 'The changes made to the request named %s have been saved' % request.name
+ if params.get('save_changes_request_button', False) == 'Save changes':
+ return trans.response.send_redirect( web.url_for( controller='requests',
+ action='list',
+ message=msg ,
+ status='done') )
+ elif params.get('edit_samples_button', False) == 'Edit samples':
+ new_kwd = {}
+ new_kwd['request_id'] = request.id
+ new_kwd['edit_samples_button'] = 'Edit samples'
+ return trans.response.send_redirect( web.url_for( controller='requests',
+ action='show_request',
+ msg=msg ,
+ messagetype='done',
+ **new_kwd) )
+ elif params.get('refresh', False) == 'true':
+ return self.__edit_request(trans, request.id, **kwd)
+ def __edit_request(self, trans, id, **kwd):
+ try:
+ request = trans.app.model.Request.get(id)
+ except:
+ msg = "Invalid request ID"
+ log.warn( msg )
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message=msg) )
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ select_request_type = self.__select_request_type(trans, request.type.id)
+ # list of widgets to be rendered on the request form
+ widgets = []
+ if util.restore_text( params.get( 'name', '' ) ):
+ name = util.restore_text( params.get( 'name', '' ) )
+ else:
+ name = request.name
+ widgets.append(dict(label='Name',
+ widget=TextField('name', 40, name),
+ helptext='(Required)'))
+ if util.restore_text( params.get( 'desc', '' ) ):
+ desc = util.restore_text( params.get( 'desc', '' ) )
+ else:
+ desc = request.desc
+ widgets.append(dict(label='Description',
+ widget=TextField('desc', 40, desc),
+ helptext='(Optional)'))
+ # libraries selectbox
+ libraries = get_authorized_libs(trans, trans.user)
+ libui = self.__library_ui(libraries, request, **kwd)
+ widgets = widgets + libui
+ widgets = widgets + get_form_widgets(trans, request.type.request_form, request.values.content, **kwd)
+ return trans.fill_template( '/admin/requests/edit_request.mako',
+ select_request_type=select_request_type,
+ request_type=request.type,
+ request=request,
+ widgets=widgets,
+ msg=msg,
+ messagetype=messagetype)
+ return self.__show_request_form(trans)
+ def __delete_request(self, trans, id):
+ try:
+ request = trans.app.model.Request.get(id)
+ except:
+ msg = "Invalid request ID"
+ log.warn( msg )
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message=msg,
+ **kwd) )
+ # change request's submitted field
+ if not request.unsubmitted():
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message='This request cannot be deleted as it has already been submitted',
+ **kwd) )
+ request.deleted = True
+ request.flush()
+ kwd = {}
+ kwd['id'] = trans.security.encode_id(request.id)
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='done',
+ message='The request <b>%s</b> has been deleted.' % request.name,
+ **kwd) )
+ def __undelete_request(self, trans, id):
+ try:
+ request = trans.app.model.Request.get(id)
+ except:
+ msg = "Invalid request ID"
+ log.warn( msg )
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message=msg,
+ **kwd) )
+ # change request's submitted field
+ request.deleted = False
+ request.flush()
+ kwd = {}
+ kwd['id'] = trans.security.encode_id(request.id)
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='done',
+ message='The request <b>%s</b> has been undeleted.' % request.name,
+ **kwd) )
+ def __submit(self, trans, id):
+ try:
+ request = trans.app.model.Request.get(id)
+ except:
+ msg = "Invalid request ID"
+ log.warn( msg )
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message=msg,
+ **kwd) )
+ msg = self.__validate(trans, request)
+ if msg:
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='edit',
+ messagetype = 'error',
+ msg=msg,
+ request_id=request.id,
+ show='True') )
+ # get the new state
+ new_state = request.type.states[0]
+ for s in request.samples:
+ event = trans.app.model.SampleEvent(s, new_state, 'Samples submitted to the system')
+ event.flush()
+ # change request's submitted field
+ request.state = request.states.SUBMITTED
+ request.flush()
+ kwd = {}
+ kwd['id'] = trans.security.encode_id(request.id)
+ kwd['status'] = 'done'
+ kwd['message'] = 'The request <b>%s</b> has been submitted.' % request.name
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ show_filter=trans.app.model.Request.states.SUBMITTED,
+ **kwd) )
+ @web.expose
+ @web.require_admin
+ def submit_request(self, trans, **kwd):
+ params = util.Params( kwd )
+ try:
+ id = int(params.get('id', False))
+ request = trans.app.model.Request.get(id)
+ except:
+ msg = "Invalid request ID"
+ log.warn( msg )
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message=msg,
+ **kwd) )
+ msg = self.__validate(trans, request)
+ if msg:
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='edit',
+ messagetype = 'error',
+ msg=msg,
+ request_id=request.id,
+ show='True') )
+ # get the new state
+ new_state = request.type.states[0]
+ for s in request.samples:
+ event = trans.app.model.SampleEvent(s, new_state, 'Samples submitted to the system')
+ event.flush()
+ # change request's submitted field
+ request.state = request.states.SUBMITTED
+ request.flush()
+ kwd['id'] = trans.security.encode_id(request.id)
+ kwd['status'] = 'done'
+ kwd['message'] = 'The request <b>%s</b> has been submitted.' % request.name
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ show_filter=trans.app.model.Request.states.SUBMITTED,
+ **kwd) )
+ def __copy_sample(self):
+ copy_list = SelectField('copy_sample')
+ copy_list.add_option('None', -1, selected=True)
+ for i, s in enumerate(self.current_samples):
+ copy_list.add_option(s[0], i)
+ return copy_list
+ def __update_samples(self, request, **kwd):
+ params = util.Params( kwd )
+ num_samples = len(self.current_samples)
+ self.current_samples = []
+ for s in request.samples:
+ self.current_samples.append([s.name, s.values.content])
+ for index in range(num_samples-len(request.samples)):
+ sample_index = index + len(request.samples)
+ sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
+ sample_values = []
+ for field_index in range(len(request.type.sample_form.fields)):
+ sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) ))
+ self.current_samples.append([sample_name, sample_values])
def __show_request(self, trans, id):
try:
request = trans.app.model.Request.get(id)
@@ -93,6 +339,7 @@
message="Invalid request ID",
**kwd) )
self.current_samples = []
+ self.edit_mode = False
for s in request.samples:
self.current_samples.append([s.name, s.values.content])
self.details_state = 'Show request details'
@@ -100,8 +347,160 @@
request=request,
request_details=self.request_details(trans, id),
current_samples = self.current_samples,
- details_state=self.details_state)
+ sample_copy=self.__copy_sample(),
+ details_state=self.details_state,
+ edit_mode=self.edit_mode)
@web.expose
+ @web.require_admin
+ def show_request(self, trans, **kwd):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ try:
+ request = trans.app.model.Request.get(int(params.get('request_id', None)))
+ except:
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message="Invalid request ID",
+ **kwd) )
+ if params.get('import_samples_button', False) == 'Import samples':
+ try:
+ file_obj = params.get('file_data', '')
+ import csv
+ reader = csv.reader(file_obj.file)
+ for row in reader:
+ self.current_samples.append([row[0], row[1:]])
+ return trans.fill_template( '/admin/requests/show_request.mako',
+ request=request,
+ request_details=self.request_details(trans, request.id),
+ current_samples=self.current_samples,
+ sample_copy=self.__copy_sample(),
+ details_state=self.details_state,
+ edit_mode=self.edit_mode)
+ except:
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ status='error',
+ message='Error in importing <b>%s</b> samples file' % file_obj.file,
+ **kwd))
+ elif params.get('add_sample_button', False) == 'Add New':
+ # save the all (saved+unsaved) sample info in 'current_samples'
+ self.__update_samples(request, **kwd)
+ # add an empty or filled sample
+ # if the user has selected a sample no. to copy then copy the contents
+ # of the src sample to the new sample else an empty sample
+ src_sample_index = int(params.get( 'copy_sample', -1 ))
+ if src_sample_index == -1:
+ # empty sample
+ self.current_samples.append(['Sample_%i' % (len(self.current_samples)+1),['' for field in request.type.sample_form.fields]])
+ else:
+ self.current_samples.append([self.current_samples[src_sample_index][0]+'_%i' % (len(self.current_samples)+1),
+ [val for val in self.current_samples[src_sample_index][1]]])
+ return trans.fill_template( '/admin/requests/show_request.mako',
+ request=request,
+ request_details=self.request_details(trans, request.id),
+ current_samples=self.current_samples,
+ sample_copy=self.__copy_sample(),
+ details_state=self.details_state,
+ edit_mode=self.edit_mode)
+ elif params.get('save_samples_button', False) == 'Save':
+ # update current_samples
+ self.__update_samples(request, **kwd)
+ # check for duplicate sample names
+ msg = ''
+ for index in range(len(self.current_samples)-len(request.samples)):
+ sample_index = index + len(request.samples)
+ sample_name = self.current_samples[sample_index][0]
+ if not sample_name.strip():
+ msg = 'Please enter the name of sample number %i' % sample_index
+ break
+ count = 0
+ for i in range(len(self.current_samples)):
+ if sample_name == self.current_samples[i][0]:
+ count = count + 1
+ if count > 1:
+ msg = "This request has <b>%i</b> samples with the name <b>%s</b>.\nSamples belonging to a request must have unique names." % (count, sample_name)
+ break
+ if msg:
+ return trans.fill_template( '/admin/requests/show_request.mako',
+ request=request,
+ request_details=self.request_details(trans, request.id),
+ current_samples = self.current_samples,
+ sample_copy=self.__copy_sample(), details_state=self.details_state,
+ messagetype='error', msg=msg)
+ # save all the new/unsaved samples entered by the user
+ if not self.edit_mode:
+ for index in range(len(self.current_samples)-len(request.samples)):
+ sample_index = index + len(request.samples)
+ sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
+ sample_values = []
+ for field_index in range(len(request.type.sample_form.fields)):
+ sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) ))
+ form_values = trans.app.model.FormValues(request.type.sample_form, sample_values)
+ form_values.flush()
+ s = trans.app.model.Sample(sample_name, '', request, form_values)
+ s.flush()
+ else:
+ for index in range(len(self.current_samples)):
+ sample_index = index
+ sample_name = self.current_samples[sample_index][0]
+ new_sample_name = util.restore_text( params.get( 'sample_%i_name' % sample_index, '' ) )
+ sample_values = []
+ for field_index in range(len(request.type.sample_form.fields)):
+ sample_values.append(util.restore_text( params.get( 'sample_%i_field_%i' % (sample_index, field_index), '' ) ))
+ sample = request.has_sample(sample_name)
+ if sample:
+ form_values = trans.app.model.FormValues.get(sample.values.id)
+ form_values.content = sample_values
+ form_values.flush()
+ sample.name = new_sample_name
+ sample.flush()
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ operation='show_request',
+ id=trans.security.encode_id(request.id)) )
+ elif params.get('edit_samples_button', False) == 'Edit samples':
+ self.edit_mode = True
+ return trans.fill_template( '/admin/requests/show_request.mako',
+ request=request,
+ request_details=self.request_details(trans, request.id),
+ current_samples=self.current_samples,
+ sample_copy=self.__copy_sample(),
+ details_state=self.details_state,
+ edit_mode=self.edit_mode)
+ elif params.get('cancel_changes_button', False) == 'Cancel':
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ operation='show_request',
+ id=trans.security.encode_id(request.id)) )
+
+
+ @web.expose
+ @web.require_admin
+ def delete_sample(self, trans, **kwd):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ request = trans.app.model.Request.get(int(params.get('request_id', 0)))
+ sample_index = int(params.get('sample_id', 0))
+ sample_name = self.current_samples[sample_index][0]
+ s = request.has_sample(sample_name)
+ if s:
+ s.delete()
+ s.flush()
+ request.flush()
+ del self.current_samples[sample_index]
+ return trans.fill_template( '/admin/requests/show_request.mako',
+ request=request,
+ request_details=self.request_details(trans, request.id),
+ current_samples = self.current_samples,
+ sample_copy=self.__copy_sample(),
+ details_state=self.details_state,
+ edit_mode=self.edit_mode)
+
+ @web.expose
+ @web.require_admin
def toggle_request_details(self, trans, **kwd):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -144,10 +543,16 @@
value=str(request.user.email),
helptext=''))
# library associated
- request_details.append(dict(label='Library',
- value=trans.app.model.Library.get(request.library_id).name,
- helptext='Associated library where the resultant \
- dataset will be stored'))
+ if request.library:
+ request_details.append(dict(label='Library',
+ value=request.library.name,
+ helptext='Associated library where the resultant \
+ dataset will be stored'))
+ else:
+ request_details.append(dict(label='Library',
+ value=None,
+ helptext='Associated library where the resultant \
+ dataset will be stored'))
# form fields
for index, field in enumerate(request.type.request_form.fields):
if field['required']:
@@ -168,6 +573,253 @@
value=request.values.content[index],
helptext=field['helptext']+' ('+req+')'))
return request_details
+
+ def __select_request_type(self, trans, rtid):
+ rt_ids = ['none']
+ for rt in trans.app.model.RequestType.query().all():
+ if not rt.deleted:
+ rt_ids.append(str(rt.id))
+ select_reqtype = SelectField('select_request_type',
+ refresh_on_change=True,
+ refresh_on_change_values=rt_ids[1:])
+ if rtid == 'none':
+ select_reqtype.add_option('Select one', 'none', selected=True)
+ else:
+ select_reqtype.add_option('Select one', 'none')
+ for rt in trans.app.model.RequestType.query().all():
+ if not rt.deleted:
+ if rtid == rt.id:
+ select_reqtype.add_option(rt.name, rt.id, selected=True)
+ else:
+ select_reqtype.add_option(rt.name, rt.id)
+ return select_reqtype
+ @web.expose
+ @web.require_admin
+ def new(self, trans, **kwd):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ if params.get('select_request_type', False) == 'True':
+ return trans.fill_template( '/admin/requests/new_request.mako',
+ select_request_type=self.__select_request_type(trans, 'none'),
+ widgets=[],
+ msg=msg,
+ messagetype=messagetype)
+ elif params.get('create', False) == 'True':
+ if params.get('create_request_button', False) == 'Save' \
+ or params.get('create_request_samples_button', False) == 'Add samples':
+ request_type = trans.app.model.RequestType.get(int(params.select_request_type))
+ if not util.restore_text(params.get('name', '')) \
+ or util.restore_text(params.get('select_user', '')) == unicode('none'):
+ msg = 'Please enter the <b>Name</b> of the request and the <b>user</b> on behalf of whom this request will be submitted before saving this request'
+ kwd['create'] = 'True'
+ kwd['messagetype'] = 'error'
+ kwd['msg'] = msg
+ kwd['create_request_button'] = None
+ kwd['create_request_samples_button'] = None
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='new',
+ **kwd) )
+ request = self.__save_request(trans, None, **kwd)
+ msg = 'The new request named %s has been created' % request.name
+ if params.get('create_request_button', False) == 'Save':
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ show_filter=trans.app.model.Request.states.UNSUBMITTED,
+ message=msg ,
+ status='done') )
+ elif params.get('create_request_samples_button', False) == 'Add samples':
+ new_kwd = {}
+ new_kwd['id'] = trans.security.encode_id(request.id)
+ new_kwd['operation'] = 'show_request'
+ new_kwd['add_sample'] = True
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
+ action='list',
+ message=msg ,
+ status='done',
+ **new_kwd) )
+ else:
+ return self.__show_request_form(trans, **kwd)
+ elif params.get('refresh', False) == 'true':
+ return self.__show_request_form(trans, **kwd)
+ def __show_request_form(self, trans, **kwd):
+ params = util.Params( kwd )
+ msg = util.restore_text( params.get( 'msg', '' ) )
+ messagetype = params.get( 'messagetype', 'done' )
+ try:
+ request_type = trans.app.model.RequestType.get(int(params.select_request_type))
+ except:
+ return trans.fill_template( '/admin/requests/new_request.mako',
+ select_request_type=self.__select_request_type(trans, 'none'),
+ widgets=[],
+ msg=msg,
+ messagetype=messagetype)
+ form_values = None
+ select_request_type = self.__select_request_type(trans, request_type.id)
+ # user
+ user_id = params.get( 'select_user', 'none' )
+ try:
+ user = trans.app.model.User.get(int(user_id))
+ except:
+ user = None
+ # list of widgets to be rendered on the request form
+ widgets = []
+ widgets.append(dict(label='Select user',
+ widget=self.__select_user(trans, user_id),
+ helptext='The request would be submitted on behalf of this user (Required)'))
+ widgets.append(dict(label='Name',
+ widget=TextField('name', 40,
+ util.restore_text( params.get( 'name', '' ) )),
+ helptext='(Required)'))
+ widgets.append(dict(label='Description',
+ widget=TextField('desc', 40,
+ util.restore_text( params.get( 'desc', '' ) )),
+ helptext='(Optional)'))
+ # libraries selectbox
+ if not user:
+ libraries = []
+ else:
+ libraries = get_authorized_libs(trans, user)
+ libui = self.__library_ui(libraries, **kwd)
+ widgets = widgets + libui
+ widgets = widgets + get_form_widgets(trans, request_type.request_form, contents=[], **kwd)
+ return trans.fill_template( '/admin/requests/new_request.mako',
+ select_request_type=select_request_type,
+ request_type=request_type,
+ widgets=widgets,
+ msg=msg,
+ messagetype=messagetype)
+ def __select_user(self, trans, userid):
+ user_ids = ['none']
+ for user in trans.app.model.User.query().all():
+ if not user.deleted:
+ user_ids.append(str(user.id))
+ select_user = SelectField('select_user',
+ refresh_on_change=True,
+ refresh_on_change_values=user_ids[1:])
+ if userid == 'none':
+ select_user.add_option('Select one', 'none', selected=True)
+ else:
+ select_user.add_option('Select one', 'none')
+ for user in trans.app.model.User.query().all():
+ if not user.deleted:
+ if userid == str(user.id):
+ select_user.add_option(user.email, user.id, selected=True)
+ else:
+ select_user.add_option(user.email, user.id)
+ return select_user
+
+ def __library_ui(self, libraries, request=None, **kwd):
+ params = util.Params( kwd )
+ lib_id = params.get( 'library_id', 'none' )
+ lib_list = SelectField('library_id', refresh_on_change=True,
+ refresh_on_change_values=['new'])
+ if request and lib_id == 'none':
+ if request.library:
+ lib_id = str(request.library.id)
+ if lib_id == 'none':
+ lib_list.add_option('Select one', 'none', selected=True)
+ else:
+ lib_list.add_option('Select one', 'none')
+ for lib in libraries:
+ if str(lib.id) == lib_id:
+ lib_list.add_option(lib.name, lib.id, selected=True)
+ else:
+ lib_list.add_option(lib.name, lib.id)
+ if lib_id == 'new':
+ lib_list.add_option('Create a new library', 'new', selected=True)
+ else:
+ lib_list.add_option('Create a new library', 'new')
+ widget = dict(label='Library',
+ widget=lib_list,
+ helptext='Associated library where the resultant \
+ dataset will be stored.')
+ if lib_id == 'new':
+ new_lib = dict(label='Create a new Library',
+ widget=TextField('new_library_name', 40,
+ util.restore_text( params.get( 'new_library_name', '' ) )),
+ helptext='Enter a library name here to request a new library')
+ return [widget, new_lib]
+ else:
+ return [widget]
+ def __validate(self, trans, request):
+ '''
+ Validates the request entered by the user
+ '''
+ empty_fields = []
+# if not request.library:
+# empty_fields.append('Library')
+ # check rest of the fields of the form
+ for index, field in enumerate(request.type.request_form.fields):
+ if field['required'] == 'required' and request.values.content[index] in ['', None]:
+ empty_fields.append(field['label'])
+ if empty_fields:
+ msg = 'Fill the following fields of the request <b>%s</b> before submitting<br/>' % request.name
+ for ef in empty_fields:
+ msg = msg + '<b>' +ef + '</b><br/>'
+ return msg
+ return None
+ def __save_request(self, trans, request=None, **kwd):
+ '''
+ This method saves a new request if request_id is None.
+ '''
+ params = util.Params( kwd )
+ request_type = trans.app.model.RequestType.get(int(params.select_request_type))
+ if request:
+ user = request.user
+ else:
+ user = trans.app.model.User.get(int(params.get('select_user', '')))
+ name = util.restore_text(params.get('name', ''))
+ desc = util.restore_text(params.get('desc', ''))
+ # library
+ try:
+ library = trans.app.model.Library.get(int(params.get('library_id', None)))
+ except:
+ library = None
+ # fields
+ values = []
+ for index, field in enumerate(request_type.request_form.fields):
+ if field['type'] == 'AddressField':
+ value = util.restore_text(params.get('field_%i' % index, ''))
+ if value == 'new':
+ # save this new address in the list of this user's addresses
+ user_address = trans.app.model.UserAddress( user=trans.user )
+ user_address.desc = util.restore_text(params.get('field_%i_short_desc' % index, ''))
+ user_address.name = util.restore_text(params.get('field_%i_name' % index, ''))
+ user_address.institution = util.restore_text(params.get('field_%i_institution' % index, ''))
+ user_address.address = util.restore_text(params.get('field_%i_address1' % index, ''))+' '+util.restore_text(params.get('field_%i_address2' % index, ''))
+ user_address.city = util.restore_text(params.get('field_%i_city' % index, ''))
+ user_address.state = util.restore_text(params.get('field_%i_state' % index, ''))
+ user_address.postal_code = util.restore_text(params.get('field_%i_postal_code' % index, ''))
+ user_address.country = util.restore_text(params.get('field_%i_country' % index, ''))
+ user_address.phone = util.restore_text(params.get('field_%i_phone' % index, ''))
+ user_address.flush()
+ trans.user.refresh()
+ values.append(int(user_address.id))
+ elif value == unicode('none'):
+ values.append('')
+ else:
+ values.append(int(value))
+ else:
+ values.append(util.restore_text(params.get('field_%i' % index, '')))
+ form_values = trans.app.model.FormValues(request_type.request_form, values)
+ form_values.flush()
+ if not request:
+ request = trans.app.model.Request(name, desc, request_type,
+ user, form_values,
+ library=library,
+ state=trans.app.model.Request.states.UNSUBMITTED)
+ request.flush()
+ else:
+ request.name = name
+ request.desc = desc
+ request.type = request_type
+ request.user = user
+ request.values = form_values
+ request.library = library
+ request.state = trans.app.model.Request.states.UNSUBMITTED
+ request.flush()
+ return request
@web.expose
@web.require_admin
def bar_codes(self, trans, **kwd):
@@ -178,7 +830,7 @@
if request_id:
request = trans.app.model.Request.get( int( request_id ))
if not request:
- return trans.response.send_redirect( web.url_for( controller='requests',
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='list',
status='error',
message="Invalid request ID",
@@ -204,7 +856,7 @@
try:
request = trans.app.model.Request.get(int(params.get('request_id', None)))
except:
- return trans.response.send_redirect( web.url_for( controller='requests',
+ return trans.response.send_redirect( web.url_for( controller='requests_admin',
action='list',
status='error',
message="Invalid request ID",
diff -r e66e1e99183c -r 98fa3b9cd980 templates/admin/requests/edit_request.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/admin/requests/edit_request.mako Thu Aug 20 10:46:52 2009 -0400
@@ -0,0 +1,88 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if msg:
+ ${render_msg( msg, messagetype )}
+%endif
+
+<script type="text/javascript">
+$( function() {
+ $( "select[refresh_on_change='true']").change( function() {
+ var refresh = false;
+ var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
+ if ( refresh_on_change_values ) {
+ refresh_on_change_values = refresh_on_change_values.value.split( ',' );
+ var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
+ for( i= 0; i < refresh_on_change_values.length; i++ ) {
+ if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
+ refresh = true;
+ break;
+ }
+ }
+ }
+ else {
+ refresh = true;
+ }
+ if ( refresh ){
+ $( "#edit_request" ).submit();
+ }
+ });
+});
+</script>
+
+<br/>
+<br/>
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='list', operation='show_request', id=trans.security.encode_id(request.id) )}">
+ <span>Browse this request</span></a>
+ </li>
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='list')}">
+ <span>Browse requests</span></a>
+ </li>
+</ul>
+
+<div class="toolForm">
+ <div class="toolFormTitle">Edit request "${request.name}" from ${request.user.email}</div>
+ %if len(select_request_type.options) == 1:
+ There are no request types created for a new request.
+ %else:
+ <div class="toolFormBody">
+ <form name="edit_request" id="edit_request" action="${h.url_for( controller='requests_admin', action='edit', request_id=request.id)}" method="post" >
+ <div class="form-row">
+ <label>
+ Select Request Type:
+ </label>
+ ${select_request_type.get_html()}
+ </div>
+
+ %if select_request_type.get_selected() != ('Select one', 'none'):
+ %for i, field in enumerate(widgets):
+ <div class="form-row">
+ <label>${field['label']}</label>
+ ${field['widget'].get_html()}
+ %if field['label'] == 'Library' and new_library:
+ ${new_library.get_html()}
+ %endif
+ <div class="toolParamHelp" style="clear: both;">
+ ${field['helptext']}
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ %endfor
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="hidden" name="refresh" value="true" size="40"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="save_changes_request_button" value="Save changes"/>
+ ##<input type="submit" name="edit_samples_button" value="Edit samples"/>
+ </div>
+ %endif
+ </form>
+ </div>
+</div>
+%endif
\ No newline at end of file
diff -r e66e1e99183c -r 98fa3b9cd980 templates/admin/requests/grid.mako
--- a/templates/admin/requests/grid.mako Fri Aug 14 15:47:13 2009 -0400
+++ b/templates/admin/requests/grid.mako Thu Aug 20 10:46:52 2009 -0400
@@ -82,23 +82,22 @@
%if i > 0:
<span>|</span>
%endif
- %if 'state' in grid.default_filter:
- %if grid.default_filter['state'] == filter.label:
- <span class="filter"><a href="${h.url_for( controller='requests_admin', action='list', show_filter=filter.label )}"><b>${filter.label}</b></a></span>
- %else:
- <span class="filter"><a href="${h.url_for( controller='requests_admin', action='list', show_filter=filter.label )}">${filter.label}</a></span>
- %endif
+ %if grid.show_filter == filter.label:
+ <span class="filter"><a href="${h.url_for( controller='requests_admin', action='list', show_filter=filter.label )}"><b>${filter.label}</b></a></span>
%else:
- %if filter.label == 'All':
- <span class="filter"><a href="${h.url_for( controller='requests_admin', action='list', show_filter=filter.label )}"><b>${filter.label}</b></a></span>
- %else:
- <span class="filter"><a href="${h.url_for( controller='requests_admin', action='list', show_filter=filter.label )}">${filter.label}</a></span>
- %endif
+ <span class="filter"><a href="${h.url_for( controller='requests_admin', action='list', show_filter=filter.label )}">${filter.label}</a></span>
%endif
%endfor
%endif
</div>
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='new', select_request_type=True )}">
+ <img src="${h.url_for('/static/images/silk/add.png')}" />
+ <span>Create a new request</span></a>
+ </li>
+</ul>
%if not len(query.all()):
There are no request(s).
diff -r e66e1e99183c -r 98fa3b9cd980 templates/admin/requests/new_request.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/admin/requests/new_request.mako Thu Aug 20 10:46:52 2009 -0400
@@ -0,0 +1,84 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+
+%if msg:
+ ${render_msg( msg, messagetype )}
+%endif
+
+<script type="text/javascript">
+$( function() {
+ $( "select[refresh_on_change='true']").change( function() {
+ var refresh = false;
+ var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
+ if ( refresh_on_change_values ) {
+ refresh_on_change_values = refresh_on_change_values.value.split( ',' );
+ var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
+ for( i= 0; i < refresh_on_change_values.length; i++ ) {
+ if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
+ refresh = true;
+ break;
+ }
+ }
+ }
+ else {
+ refresh = true;
+ }
+ if ( refresh ){
+ $( "#new_request" ).submit();
+ }
+ });
+});
+</script>
+
+<br/>
+<br/>
+<ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='list')}">
+ <span>Browse requests</span></a>
+ </li>
+</ul>
+
+<div class="toolForm">
+ <div class="toolFormTitle">Add a new request</div>
+ %if len(select_request_type.options) == 1:
+ There are no request types created for a new request.
+ %else:
+ <div class="toolFormBody">
+ <form name="new_request" id="new_request" action="${h.url_for( controller='requests_admin', action='new', create=True )}" method="post" >
+ <div class="form-row">
+ <label>
+ Select Request Type
+ </label>
+ ${select_request_type.get_html()}
+ </div>
+
+ %if select_request_type.get_selected() != ('Select one', 'none'):
+ %for i, field in enumerate(widgets):
+ <div class="form-row">
+ <label>${field['label']}</label>
+ ${field['widget'].get_html()}
+ %if field['label'] == 'Library' and new_library:
+ ${new_library.get_html()}
+ %endif
+ <div class="toolParamHelp" style="clear: both;">
+ ${field['helptext']}
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ %endfor
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="hidden" name="refresh" value="true" size="40"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <input type="submit" name="create_request_button" value="Save"/>
+ <input type="submit" name="create_request_samples_button" value="Add samples"/>
+ </div>
+ %endif
+ </form>
+ </div>
+</div>
+%endif
\ No newline at end of file
diff -r e66e1e99183c -r 98fa3b9cd980 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Fri Aug 14 15:47:13 2009 -0400
+++ b/templates/admin/requests/show_request.mako Thu Aug 20 10:46:52 2009 -0400
@@ -12,18 +12,49 @@
</div>
<ul class="manage-table-actions">
- <li>
- <a class="action-button" href="${h.url_for( controller='requests_admin', action='bar_codes', request_id=request.id)}">
- <span>Bar codes</span></a>
- </li>
+ %if request.unsubmitted() and request.samples:
+ <li>
+ <a class="action-button" confirm="More samples cannot be added to this request once it is submitted. Click OK to submit." href="${h.url_for( controller='requests_admin', action='submit_request', id=request.id)}">
+ <span>Submit request</span></a>
+ </li>
+ %endif
+ %if request.submitted() and request.samples:
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='bar_codes', request_id=request.id)}">
+ <span>Bar codes</span></a>
+ </li>
+ %endif
</ul>
+
+<%def name="render_sample_form( index, sample_name, sample_values )">
+ <td>
+ <input type="text" name=sample_${index}_name value="${sample_name}" size="10"/>
+ <div class="toolParamHelp" style="clear: both;">
+ <i>${' (required)' }</i>
+ </div>
+ </td>
+ <td>
+ </td>
+ %for field_index, field in enumerate(request.type.sample_form.fields):
+ <td>
+ <input type="text" name=sample_${index}_field_${field_index} value="${sample_values[field_index]}" size="7"/>
+ <div class="toolParamHelp" style="clear: both;">
+ <i>${'('+field['required']+')' }</i>
+ </div>
+ </td>
+ %endfor
+</%def>
<%def name="render_sample( index, sample )">
<td>
${sample.name}
</td>
<td>
- <a href="${h.url_for( controller='requests_admin', action='show_events', sample_id=sample.id)}">${sample.current_state().name}</a>
+ %if sample.request.unsubmitted():
+ Unsubmitted
+ %else:
+ <a href="${h.url_for( controller='requests_admin', action='show_events', sample_id=sample.id)}">${sample.current_state().name}</a>
+ %endif
</td>
%for field_index, field in enumerate(request.type.sample_form.fields):
<td>
@@ -34,10 +65,10 @@
%endif
</td>
%endfor
+
</%def>
<div class="toolForm">
- ##<div class="toolFormTitle">Request Details: '${request_details[0]['value']}'</div>
<div class="form-row">
<a href="${h.url_for( controller='requests_admin', action='toggle_request_details', request_id=request.id )}">${details_state}</a>
</div>
@@ -57,13 +88,21 @@
</div>
<div style="clear: both"></div>
%endfor
+ <div class="form-row">
+ <ul class="manage-table-actions">
+ <li>
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='edit', show=True, request_id=request.id)}">
+ <span>Edit request details</span></a>
+ </li>
+ </ul>
+ </div>
%endif
</div>
</div>
<div class="toolForm">
##<div class="toolFormTitle">Samples (${len(request.samples)})</div>
- <form id="edit_form" name="edit_form" action="${h.url_for( controller='requests', action='show_request', request_id=request.id )}" method="post" >
+ <form id="edit_form" name="edit_form" action="${h.url_for( controller='requests_admin', action='show_request' )}" enctype="multipart/form-data" method="post" >
<div class="form-row">
%if current_samples:
<table class="grid">
@@ -80,22 +119,88 @@
</div>
</th>
%endfor
+ <th></th>
</tr>
<thead>
<tbody>
+ <%
+ request.refresh()
+ %>
%for sample_index, sample in enumerate(current_samples):
- <tr>
- <td>${sample_index+1}</td>
- ${render_sample( sample_index, request.samples[sample_index] )}
- </tr>
+ %if edit_mode:
+ <tr>
+ <td>${sample_index+1}</td>
+ ${render_sample_form( sample_index, sample[0], sample[1])}
+ </tr>
+ %else:
+ <tr>
+ <td>${sample_index+1}</td>
+ %if sample_index in range(len(request.samples)):
+ ${render_sample( sample_index, request.samples[sample_index] )}
+ %else:
+ ${render_sample_form( sample_index, sample[0], sample[1])}
+ %endif
+ <td>
+ %if request.unsubmitted():
+ <a class="action-button" href="${h.url_for( controller='requests_admin', action='delete_sample', request_id=request.id, sample_id=sample_index)}">
+ <img src="${h.url_for('/static/images/delete_icon.png')}" />
+ <span></span></a>
+ %endif
+ </td>
+ </tr>
+ %endif
%endfor
</tbody>
</table>
%else:
<label>There are no samples.</label>
%endif
-
</div>
- ##</div>
+ %if not edit_mode:
+ <table class="grid">
+ <tbody>
+ <tr>
+ <div class="form-row">
+ <td>
+ %if current_samples:
+ <input type="submit" name="edit_samples_button" value="Edit samples"/>
+ %endif
+ </td>
+ %if request.unsubmitted():
+ <td>
+ <label>Import from csv file</label>
+ <input type="file" name="file_data" />
+ <input type="submit" name="import_samples_button" value="Import samples"/>
+ </td>
+ <td>
+ %if current_samples:
+ <label>Copy from sample</label>
+ ${sample_copy.get_html()}
+ %endif
+ <input type="submit" name="add_sample_button" value="Add New"/>
+ </td>
+ %endif
+ </div>
+ </tr>
+ </tbody>
+ </table>
+ %endif
+ %if request.samples or current_samples:
+ <div class="form-row">
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="hidden" name="refresh" value="true" size="40"/>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ %if edit_mode:
+ <input type="submit" name="save_samples_button" value="Save"/>
+ <input type="submit" name="cancel_changes_button" value="Cancel"/>
+ %elif request.unsubmitted():
+ <input type="submit" name="save_samples_button" value="Save"/>
+ %endif
+ </div>
+ %endif
+ <input type="hidden" name="request_id" value="${request.id}" />
</form>
</div>
diff -r e66e1e99183c -r 98fa3b9cd980 templates/requests/grid.mako
--- a/templates/requests/grid.mako Fri Aug 14 15:47:13 2009 -0400
+++ b/templates/requests/grid.mako Thu Aug 20 10:46:52 2009 -0400
@@ -95,7 +95,7 @@
<li>
<a class="action-button" href="${h.url_for( controller='requests', action='new', select_request_type=True )}">
<img src="${h.url_for('/static/images/silk/add.png')}" />
- <span>New request</span></a>
+ <span>Create a new request</span></a>
</li>
</ul>
diff -r e66e1e99183c -r 98fa3b9cd980 templates/requests/show_request.mako
--- a/templates/requests/show_request.mako Fri Aug 14 15:47:13 2009 -0400
+++ b/templates/requests/show_request.mako Thu Aug 20 10:46:52 2009 -0400
@@ -68,7 +68,6 @@
</%def>
<div class="toolForm">
- ##<div class="toolFormTitle">Request Details: '${request_details[0]['value']}'</div>
<div class="form-row">
<a href="${h.url_for( controller='requests', action='toggle_request_details', request_id=request.id )}">${details_state}</a>
</div>
@@ -217,7 +216,6 @@
%endif
</div>
%endif
- ##</div>
- <input type="hidden" name="request_id" value="${request.id}" />
+ <input type="hidden" name="request_id" value="${request.id}" />
</form>
</div>
diff -r e66e1e99183c -r 98fa3b9cd980 test/functional/test_forms_and_requests.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/functional/test_forms_and_requests.py Thu Aug 20 10:46:52 2009 -0400
@@ -0,0 +1,212 @@
+import galaxy.model
+from galaxy.model.orm import *
+from base.twilltestcase import *
+
+not_logged_in_as_admin_security_msg = 'You must be logged in as an administrator to access this feature.'
+logged_in_as_admin_security_msg = 'You must be an administrator to access this feature.'
+not_logged_in_security_msg = 'You must be logged in to create/submit sequencing requests'
+form_one_name = "Request Form"
+form_two_name = "Sample Form"
+request_type_name = 'Test Requestype'
+sample_states = [ ( 'New', 'Sample entered into the system' ),
+ ( 'Received', 'Sample tube received' ),
+ ( 'Done', 'Sequence run complete' ) ]
+address1 = dict( short_desc="Office",
+ name="James Bond",
+ institution="MI6" ,
+ address1="MI6 Headquaters",
+ address2="",
+ city="London",
+ state="London",
+ postal_code="007",
+ country="United Kingdom",
+ phone="007-007-0007" )
+
+
+def get_latest_form(form_name):
+ fdc_list = galaxy.model.FormDefinitionCurrent.filter( galaxy.model.FormDefinitionCurrent.table.c.deleted==False )\
+ .order_by( galaxy.model.FormDefinitionCurrent.table.c.create_time.desc() )
+ for fdc in fdc_list:
+ if form_name == fdc.latest_form.name:
+ return fdc.latest_form
+ return None
+
+
+class TestFormsAndRequests( TwillTestCase ):
+ def test_000_create_form( self ):
+ """Testing creating a new form and editing it"""
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ # create a form
+ global form_one_name
+ name = form_one_name
+ desc = "This is Form One's description"
+ self.create_form( name=name, desc=desc )
+ self.home()
+ self.visit_page( 'forms/manage' )
+ self.check_page_for_string( name )
+ self.check_page_for_string( desc )
+ # Get the form_definition object for later tests
+ form_one = galaxy.model.FormDefinition.filter( and_( galaxy.model.FormDefinition.table.c.name==name,
+ galaxy.model.FormDefinition.table.c.desc==desc ) ).all()[-1]
+ assert form_one is not None, 'Problem retrieving form named "%s" from the database' % name
+ # edit form & add few more fields
+ new_name = "Request Form (Renamed)"
+ new_desc = "This is Form One's Re-described"
+ self.edit_form( form_one.id, form_one.name, new_form_name=new_name, new_form_desc=new_desc )
+ self.home()
+ self.visit_page( 'forms/manage' )
+ self.check_page_for_string( new_name )
+ self.check_page_for_string( new_desc )
+ form_one_name = new_name
+ def test_005_add_form_fields( self ):
+ """Testing adding fields to a form definition"""
+ fields = [dict(name='Test field name one',
+ desc='Test field description one',
+ type='TextField',
+ required='required'),
+ dict(name='Test field name two',
+ desc='Test field description two',
+ type='AddressField',
+ required='optional')]
+ form_one = get_latest_form(form_one_name)
+ self.form_add_field(form_one.id, form_one.name, field_index=len(form_one.fields), fields=fields)
+ form_one_latest = get_latest_form(form_one_name)
+ assert len(form_one_latest.fields) == len(form_one.fields)+len(fields)
+# The following test has been commented out because it causes:
+#TwillException: multiple matches to "remove_button"
+# def test_010_remove_form_fields( self ):
+# """Testing removing fields from a form definition"""
+# form_one = get_latest_form(form_one_name)
+# self.form_remove_field( form_one.id, form_one.name, 'Test field name one' )
+# form_one_latest = get_latest_form(form_one_name)
+# assert len(form_one_latest.fields) == len(form_one.fields)-1
+ def test_015_create_sample_form( self ):
+ """Testing creating another form (for samples)"""
+ global form_two_name
+ name = form_two_name
+ desc = "This is Form One's description"
+ self.create_form( name=name, desc=desc )
+ self.home()
+ self.visit_page( 'forms/manage' )
+ self.check_page_for_string( name )
+ self.check_page_for_string( desc )
+ def test_020_create_request_type( self ):
+ """Testing creating a new requestype"""
+ request_form = get_latest_form(form_one_name)
+ sample_form = get_latest_form(form_two_name)
+ self.create_request_type(request_type_name, "test request type",
+ str(request_form.id), str(sample_form.id), sample_states )
+ global request_type
+ request_type = galaxy.model.RequestType.filter( and_( galaxy.model.RequestType.table.c.name==request_type_name ) ).all()[-1]
+ assert request_type is not None, 'Problem retrieving request type named "%s" from the database' % request_type_name
+ def test_025_create_address( self ):
+ """Testing address creation"""
+ #self.create_address( user_address1 )
+ #self.check_page_for_string( 'Address <b>%s</b> has been added' % user_address1[ 'short_desc' ] )
+ ## TODO: FIX HACK
+ ## the user address creation should be done as a test.
+ global user_address
+ user_address = galaxy.model.UserAddress()
+ user_address.user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ).first()
+ user_address.desc = address1[ 'short_desc' ]
+ user_address.name = address1[ 'name' ]
+ user_address.institution = address1[ 'institution' ]
+ user_address.address = address1[ 'address1' ]+' '+address1[ 'address2' ]
+ user_address.city = address1[ 'city' ]
+ user_address.state = address1[ 'state' ]
+ user_address.postal_code = address1[ 'postal_code' ]
+ user_address.country = address1[ 'country' ]
+ user_address.phone = address1[ 'phone' ]
+ user_address.flush()
+ user_address.user.refresh()
+ def test_030_create_request( self ):
+ """Testing creating and submitting a request"""
+ # first create a library for the request so that it can be submitted later
+ lib_name = 'TestLib001'
+ self.create_library( lib_name, '' )
+ self.visit_page( 'admin/browse_libraries' )
+ self.check_page_for_string( lib_name )
+ # Get the library object for later tests
+ global library_one
+ library_one = galaxy.model.Library.filter( and_( galaxy.model.Library.table.c.name==lib_name,
+ galaxy.model.Library.table.c.deleted==False ) ).first()
+ assert library_one is not None, 'Problem retrieving library named "%s" from the database' % lib_name
+ global admin_user
+ admin_user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ).first()
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ # Get the admin user's private role for later use
+ global admin_user_private_role
+ admin_user_private_role = None
+ for role in admin_user.all_roles():
+ if role.name == admin_user.email and role.description == 'Private Role for %s' % admin_user.email:
+ admin_user_private_role = role
+ break
+ if not admin_user_private_role:
+ raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
+ # Set permissions on the library for later testing
+ permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
+ permissions_out = []
+ # Grant all library permission actions to the admin user's private role
+ # so that user can add to and manage items in this library.
+ self.set_library_permissions( str( library_one.id ), library_one.name, str( admin_user_private_role.id ), permissions_in, permissions_out )
+ # set field values
+ fields = ['field one value', 'field two value', str(user_address.id)]
+ # create the request
+ request_name, request_desc = 'Request One', 'Request One Description'
+ self.create_request(request_type.id, request_name, request_desc, library_one.id, fields)
+ self.check_page_for_string( request_name )
+ self.check_page_for_string( request_desc )
+ global request_one
+ request_one = galaxy.model.Request.filter( and_( galaxy.model.Request.table.c.name==request_name,
+ galaxy.model.Request.table.c.deleted==False ) ).first()
+ # check if the request's state is now set to 'unsubmitted'
+ assert request_one.state == request_one.states.UNSUBMITTED, "The state of the request '%s' should be set to '%s'" % ( request_one.name, request_one.states.UNSUBMITTED )
+
+ # sample fields
+ samples = [ ( 'Sample One', [ 'S1 Field 0 Value' ] ),
+ ( 'Sample Two', [ 'S2 Field 0 Value' ] ) ]
+ # add samples to this request
+ self.add_samples( request_one.id, request_one.name, samples )
+ for sample_name, fields in samples:
+ self.check_page_for_string( sample_name )
+ self.check_page_for_string( 'Unsubmitted' )
+ for field_value in fields:
+ self.check_page_for_string( field_value )
+ # submit the request
+ self.submit_request( request_one.id, request_one.name )
+ request_one.refresh()
+ # check if the request's state is now set to 'submitted'
+ assert request_one.state == request_one.states.SUBMITTED, "The state of the request '%s' should be set to '%s'" % ( request_one.name, request_one.states.SUBMITTED )
+ def test_035_request_lifecycle( self ):
+ """Testing request lifecycle as it goes through all the states"""
+ # goto admin manage requests page
+ self.home()
+ self.visit_page( 'requests_admin/list' )
+ self.check_page_for_string( request_one.name )
+ self.visit_url( "%s/requests_admin/list?sort=-create_time&operation=show_request&id=%s" \
+ % ( self.url, self.security.encode_id( request_one.id ) ))
+ self.check_page_for_string( 'Sequencing Request "%s"' % request_one.name )
+ # set bar codes for the samples
+ bar_codes = [ '1234567890', '0987654321' ]
+ self.add_bar_codes( request_one.id, request_one.name, bar_codes )
+ self.check_page_for_string( 'Bar codes has been saved for this request' )
+ # change the states of all the samples of this request
+ for sample in request_one.samples:
+ self.change_sample_state( sample.name, sample.id, request_type.states[1].id )
+ self.check_page_for_string( request_type.states[1].name )
+ self.check_page_for_string( request_type.states[1].desc )
+ self.change_sample_state( sample.name, sample.id, request_type.states[2].id )
+ self.check_page_for_string( request_type.states[2].name )
+ self.check_page_for_string( request_type.states[2].desc )
+ self.home()
+ request_one.refresh()
+ # check if the request's state is now set to 'complete'
+ assert request_one.state == request_one.states.COMPLETE, "The state of the request '%s' should be set to '%s'" % ( request_one.name, request_one.states.COMPLETE )
+
+
+
+
+
+
+
\ No newline at end of file
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/5fa8803716fd
changeset: 2582:5fa8803716fd
user: James Taylor <james(a)jamestaylor.org>
date: Wed Aug 19 18:07:55 2009 -0400
description:
Tweak workflow save changes to use more specific '#right-content'
2 file(s) affected in this change:
static/scripts/galaxy.workflow_editor.canvas.js
templates/workflow/editor.mako
diffs (49 lines):
diff -r 3353b15d0fb5 -r 5fa8803716fd static/scripts/galaxy.workflow_editor.canvas.js
--- a/static/scripts/galaxy.workflow_editor.canvas.js Wed Aug 19 17:51:46 2009 -0400
+++ b/static/scripts/galaxy.workflow_editor.canvas.js Wed Aug 19 18:07:55 2009 -0400
@@ -439,25 +439,11 @@
});
});
},
- enable_auto_save : function() {
- // Implements auto-saving based on whether the inputs change. We consider
- // "changed" to be when a field is accessed and not necessarily modified
- // because of an issue where "onchange" is not triggered when activating
- // another node, or saving the workflow.
- outer_this = this;
- $(".toolFormBody").find("input,textarea,select").each( function() {
- $(this).focus( function() {
- outer_this.active_form_has_changes = true;
- });
- });
- },
check_changes_in_active_form : function() {
// If active form has changed, save it
if (this.active_form_has_changes) {
this.has_changes = true;
- $(".toolFormBody").find("form").each( function() {
- $(this).submit();
- });
+ $("#right-content").find("form").submit();
this.active_form_has_changes = false;
}
},
diff -r 3353b15d0fb5 -r 5fa8803716fd templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Wed Aug 19 17:51:46 2009 -0400
+++ b/templates/workflow/editor.mako Wed Aug 19 18:07:55 2009 -0400
@@ -303,6 +303,15 @@
$(this).remove();
make_popupmenu( b, options );
});
+ // Implements auto-saving based on whether the inputs change. We consider
+ // "changed" to be when a field is accessed and not necessarily modified
+ // because of an issue where "onchange" is not triggered when activating
+ // another node, or saving the workflow.
+ $(this).find("input,textarea,select").each( function() {
+ $(this).focus( function() {
+ workflow.active_form_has_changes = true;
+ });
+ });
});
}
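
In effect, the two hunks above move the "dirty" tracking out of enable_auto_save() and into the editor template, and narrow the save target from every form under .toolFormBody to the single form in the right-hand panel. A condensed sketch of the resulting flow, assuming jQuery and the global workflow object created by galaxy.workflow_editor.canvas.js:

    // Mark the active form dirty when any of its inputs gains focus; "focus"
    // is used because "change" does not fire when activating another node or
    // saving the workflow.
    $("#right-content").find("input,textarea,select").focus(function(){
        workflow.active_form_has_changes = true;
    });

    // On node switch or layout, flush pending edits by submitting only the
    // form inside the right-hand panel:
    if (workflow.active_form_has_changes) {
        workflow.has_changes = true;
        $("#right-content").find("form").submit();
        workflow.active_form_has_changes = false;
    }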
details: http://www.bx.psu.edu/hg/galaxy/rev/3353b15d0fb5
changeset: 2581:3353b15d0fb5
user: James Taylor <james(a)jamestaylor.org>
date: Wed Aug 19 17:51:46 2009 -0400
description:
Merge.
0 file(s) affected in this change:
diffs (2275 lines):
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py Wed Aug 19 11:08:58 2009 -0400
+++ b/lib/galaxy/web/form_builder.py Wed Aug 19 17:51:46 2009 -0400
@@ -3,6 +3,7 @@
"""
import logging,sys
+from cgi import escape
log = logging.getLogger(__name__)
class BaseField(object):
@@ -28,7 +29,7 @@
self.value = value or ""
def get_html( self, prefix="" ):
return '<input type="text" name="%s%s" size="%d" value="%s">' \
- % ( prefix, self.name, self.size, self.value )
+ % ( prefix, self.name, self.size, escape(str(self.value), quote=True) )
def set_size(self, size):
self.size = int( size )
@@ -49,7 +50,7 @@
self.value = value or ""
def get_html( self, prefix="" ):
return '<textarea name="%s%s" rows="%d" cols="%d">%s</textarea>' \
- % ( prefix, self.name, self.rows, self.cols, self.value )
+ % ( prefix, self.name, self.rows, self.cols, escape(str(self.value), quote=True) )
def set_size(self, rows, cols):
self.rows = rows
self.cols = cols
@@ -113,7 +114,7 @@
self.name = name
self.value = value or ""
def get_html( self, prefix="" ):
- return '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, self.value )
+ return '<input type="hidden" name="%s%s" value="%s">' % ( prefix, self.name, escape(str(self.value), quote=True) )
class SelectField(BaseField):
"""
@@ -190,9 +191,9 @@
if len(self.options) > 2 and ctr % 2 == 1:
style = " class=\"odd_row\""
if selected:
- rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" checked>%s</div>' % ( style, prefix, self.name, value, text) )
+ rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" checked>%s</div>' % ( style, prefix, self.name, escape(str(value), quote=True), text) )
else:
- rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s">%s</div>' % ( style, prefix, self.name, value, text) )
+ rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s">%s</div>' % ( style, prefix, self.name, escape(str(value), quote=True), text) )
ctr += 1
return "\n".join( rval )
def get_html_radio( self, prefix="" ):
@@ -204,7 +205,7 @@
style = " class=\"odd_row\""
if selected: selected_text = " checked"
else: selected_text = ""
- rval.append( '<div%s><input type="radio" name="%s%s"%s value="%s"%s>%s</div>' % ( style, prefix, self.name, self.refresh_on_change_text, value, selected_text, text ) )
+ rval.append( '<div%s><input type="radio" name="%s%s"%s value="%s"%s>%s</div>' % ( style, prefix, self.name, self.refresh_on_change_text, escape(str(value), quote=True), selected_text, text ) )
ctr += 1
return "\n".join( rval )
def get_html_default( self, prefix="" ):
@@ -217,9 +218,9 @@
selected_text = " selected"
last_selected_value = value
else: selected_text = ""
- rval.append( '<option value="%s"%s>%s</option>' % ( value, selected_text, text ) )
+ rval.append( '<option value="%s"%s>%s</option>' % ( escape(str(value), quote=True), selected_text, text ) )
if last_selected_value:
- last_selected_value = ' last_selected_value="%s"' % last_selected_value
+ last_selected_value = ' last_selected_value="%s"' % escape(str(last_selected_value), quote=True)
rval.insert( 0, '<select name="%s%s"%s%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text, last_selected_value ) )
rval.append( '</select>' )
return "\n".join( rval )
@@ -326,12 +327,12 @@
if option['value'] in expanded_options:
default_state = 'expanded'
default_icon = '[-]'
- html.append( '<li><span class="toolParameterExpandableCollapsable">%s</span><input type="%s" name="%s%s" value="%s"%s">%s' % ( default_icon, self.display, prefix, self.name, option['value'], selected, option['name']) )
+ html.append( '<li><span class="toolParameterExpandableCollapsable">%s</span><input type="%s" name="%s%s" value="%s"%s">%s' % ( default_icon, self.display, prefix, self.name, escape(str(option['value']), quote=True), selected, option['name']) )
html.append( '<ul class="toolParameterExpandableCollapsable" default_state="%s">' % default_state )
recurse_options( html, option['options'], expanded_options )
html.append( '</ul>')
else:
- html.append( '<li><input type="%s" name="%s%s" value="%s"%s">%s' % ( self.display, prefix, self.name, option['value'], selected, option['name']) )
+ html.append( '<li><input type="%s" name="%s%s" value="%s"%s">%s' % ( self.display, prefix, self.name, escape(str(option['value']), quote=True), selected, option['name']) )
html.append( '</li>' )
rval = []
rval.append( '<div><ul class="toolParameterExpandableCollapsable">' )
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/jStore.Flash.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/jStore.Flash.html Wed Aug 19 17:51:46 2009 -0400
@@ -0,0 +1,19 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
+ <head>
+ <title>Flash External Object</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ <script type="text/javascript">
+ /**
+ * This function captures the flash_ready event. We need to relay this
+ * back to the parent so it knows flash is ready.
+ */
+ function flash_ready(){
+ parent.flash_ready();
+ }
+ </script>
+ </head>
+ <body>
+ <object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" codebase="http://fpdownload.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#versi…" width="1" height="1" id="jStoreFlash"><param name="allowScriptAccess" value="always" /><param name="movie" value="jStore.swf" /><param name="quality" value="high" /><param name="bgcolor" value="#ffcc00" /><embed src="jStore.swf" quality="high" bgcolor="#ffcc00" width="1" height="1" name="jStoreFlash" align="middle" allowScriptAccess="always" type="application/x-shockwave-flash" pluginspage="http://www.macromedia.com/go/getflashplayer" /></object>
+ </body>
+</html>
\ No newline at end of file
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/jStore.swf
Binary file static/jStore.swf has changed
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/galaxy.workflow_editor.canvas.js
--- a/static/scripts/galaxy.workflow_editor.canvas.js Wed Aug 19 11:08:58 2009 -0400
+++ b/static/scripts/galaxy.workflow_editor.canvas.js Wed Aug 19 17:51:46 2009 -0400
@@ -359,6 +359,7 @@
this.nodes = {};
this.name = null;
this.has_changes = false;
+ this.active_form_has_changes = false;
}
$.extend( Workflow.prototype, {
add_node : function( node ) {
@@ -438,6 +439,28 @@
});
});
},
+ enable_auto_save : function() {
+ // Implements auto-saving based on whether the inputs change. We consider
+ // "changed" to be when a field is accessed and not necessarily modified
+ // because of an issue where "onchange" is not triggered when activating
+ // another node, or saving the workflow.
+ outer_this = this;
+ $(".toolFormBody").find("input,textarea,select").each( function() {
+ $(this).focus( function() {
+ outer_this.active_form_has_changes = true;
+ });
+ });
+ },
+ check_changes_in_active_form : function() {
+ // If active form has changed, save it
+ if (this.active_form_has_changes) {
+ this.has_changes = true;
+ $(".toolFormBody").find("form").each( function() {
+ $(this).submit();
+ });
+ this.active_form_has_changes = false;
+ }
+ },
clear_active_node : function() {
if ( this.active_node ) {
this.active_node.make_inactive();
@@ -447,6 +470,7 @@
},
activate_node : function( node ) {
if ( this.active_node != node ) {
+ this.check_changes_in_active_form();
this.clear_active_node();
parent.show_form_for_tool( node.form_html, node );
node.make_active();
@@ -461,6 +485,7 @@
}
},
layout : function () {
+ this.check_changes_in_active_form();
// Prepare predecessor / successor tracking
var n_pred = {};
var successors = {};
@@ -502,7 +527,7 @@
var v = level_parents[k];
delete n_pred[v];
for ( var sk in successors[v] ) {
- n_pred[ sucessors[v][sk] ] -= 1;
+ n_pred[ successors[v][sk] ] -= 1;
}
}
}
@@ -805,6 +830,10 @@
self.draw_overview();
});
+ /* Disable dragging for child element of the panel so that resizing can
+ only be done by dragging the borders */
+ $("#overview-border div").bind("drag", function(e) { });
+
},
update_viewport_overlay: function() {
var cc = this.cc,
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/jquery.jstore-all.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/jquery.jstore-all.js Wed Aug 19 17:51:46 2009 -0400
@@ -0,0 +1,748 @@
+/*!
+ * jStore - Persistent Client-Side Storage
+ *
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ *
+ * Dual licensed under:
+ * MIT: http://www.opensource.org/licenses/mit-license.php
+ * GPLv3: http://www.opensource.org/licenses/gpl-3.0.html
+ *//**
+ * Javascript Class Framework
+ *
+ * Copyright (c) 2008 John Resig (http://ejohn.org/blog/simple-javascript-inheritance/)
+ * Inspired by base2 and Prototype
+ */
+(function(){
+ var initializing = false, fnTest = /xyz/.test(function(){xyz;}) ? /\b_super\b/ : /.*/;
+
+ // The base Class implementation (does nothing)
+ this.Class = function(){};
+
+ // Create a new Class that inherits from this class
+ Class.extend = function(prop) {
+ var _super = this.prototype;
+
+ // Instantiate a base class (but only create the instance,
+ // don't run the init constructor)
+ initializing = true;
+ var prototype = new this();
+ initializing = false;
+
+ // Copy the properties over onto the new prototype
+ for (var name in prop) {
+ // Check if we're overwriting an existing function
+ prototype[name] = typeof prop[name] == "function" &&
+ typeof _super[name] == "function" && fnTest.test(prop[name]) ?
+ (function(name, fn){
+ return function() {
+ var tmp = this._super;
+
+ // Add a new ._super() method that is the same method
+ // but on the super-class
+ this._super = _super[name];
+
+ // The method only need to be bound temporarily, so we
+ // remove it when we're done executing
+ var ret = fn.apply(this, arguments);
+ this._super = tmp;
+
+ return ret;
+ };
+ })(name, prop[name]) :
+ prop[name];
+ }
+
+ // The dummy class constructor
+ function Class() {
+ // All construction is actually done in the init method
+ if ( !initializing && this.init )
+ this.init.apply(this, arguments);
+ }
+
+ // Populate our constructed prototype object
+ Class.prototype = prototype;
+
+ // Enforce the constructor to be what we expect
+ Class.constructor = Class;
+
+ // And make this class extendable
+ Class.extend = arguments.callee;
+
+ return Class;
+ };
+})();
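
For reference, a minimal usage sketch of the Class framework defined above (Animal/Dog are illustrative names, not part of this changeset):

    // Subclasses are created with Class.extend(); an overriding method can
    // reach the parent implementation through this._super().
    var Animal = Class.extend({
        init: function(name){ this.name = name; },
        speak: function(){ return this.name + ' makes a sound'; }
    });
    var Dog = Animal.extend({
        speak: function(){ return this._super() + ' (woof)'; }
    });
    new Dog('Rex').speak(); // "Rex makes a sound (woof)"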
+/*!
+ * jStore Delegate Framework
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ this.jStoreDelegate = Class.extend({
+ init: function(parent){
+ // The Object this delgate operates for
+ this.parent = parent;
+ // Container for callbacks to dispatch.
+ // eventType => [ callback, callback, ... ]
+ this.callbacks = {};
+ },
+ bind: function(event, callback){
+ if ( !$.isFunction(callback) ) return this;
+ if ( !this.callbacks[ event ] ) this.callbacks[ event ] = [];
+
+ this.callbacks[ event ].push(callback);
+
+ return this;
+ },
+ trigger: function(){
+ var parent = this.parent,
+ args = [].slice.call(arguments),
+ event = args.shift(),
+ handlers = this.callbacks[ event ];
+
+ if ( !handlers ) return false;
+
+ $.each(handlers, function(){ this.apply(parent, args) });
+ return this;
+ }
+ });
+
+})(jQuery);/**
+ * jStore-jQuery Interface
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Setup the jStore namespace in jQuery for options storage
+ $.jStore = {};
+
+ // Seed the options in
+ $.extend($.jStore, {
+ EngineOrder: [],
+ // Engines should put their availability tests within jStore.Availability
+ Availability: {},
+ // Defined engines should enter themselves into the jStore.Engines
+ Engines: {},
+ // Instanciated engines should exist within jStore.Instances
+ Instances: {},
+ // The current engine to use for storage
+ CurrentEngine: null,
+ // Provide global settings for overwriting
+ defaults: {
+ project: null,
+ engine: null,
+ autoload: true,
+ flash: 'jStore.Flash.html'
+ },
+ // Boolean for ready state handling
+ isReady: false,
+ // Boolean for flash ready state handling
+ isFlashReady: false,
+ // An event delegate
+ delegate: new jStoreDelegate($.jStore)
+ .bind('jStore-ready', function(engine){
+ $.jStore.isReady = true;
+ if ($.jStore.defaults.autoload) engine.connect();
+ })
+ .bind('flash-ready', function(){
+ $.jStore.isFlashReady = true;
+ })
+ });
+
+ // Enable ready callback for jStore
+ $.jStore.ready = function(callback){
+ if ($.jStore.isReady) callback.apply($.jStore, [$.jStore.CurrentEngine]);
+ else $.jStore.delegate.bind('jStore-ready', callback);
+ }
+
+ // Enable failure callback registration for jStore
+ $.jStore.fail = function(callback){
+ $.jStore.delegate.bind('jStore-failure', callback);
+ }
+
+ // Enable ready callback for Flash
+ $.jStore.flashReady = function(callback){
+ if ($.jStore.isFlashReady) callback.apply($.jStore, [$.jStore.CurrentEngine]);
+ else $.jStore.delegate.bind('flash-ready', callback);
+ }
+
+ // Enable and test an engine
+ $.jStore.use = function(engine, project, identifier){
+ project = project || $.jStore.defaults.project || location.hostname.replace(/\./g, '-') || 'unknown';
+
+ var e = $.jStore.Engines[engine.toLowerCase()] || null,
+ name = (identifier ? identifier + '.' : '') + project + '.' + engine;
+
+ if ( !e ) throw 'JSTORE_ENGINE_UNDEFINED';
+
+ // Instanciate the engine
+ e = new e(project, name);
+
+ // Prevent against naming conflicts
+ if ($.jStore.Instances[name]) throw 'JSTORE_JRI_CONFLICT';
+
+ // Test the engine
+ if (e.isAvailable()){
+ $.jStore.Instances[name] = e; // The Easy Way
+ if (!$.jStore.CurrentEngine){
+ $.jStore.CurrentEngine = e;
+ }
+ $.jStore.delegate.trigger('jStore-ready', e);
+ } else {
+ if (!e.autoload) // Not available
+ throw 'JSTORE_ENGINE_UNAVILABLE';
+ else { // The hard way
+ e.included(function(){
+ if (this.isAvailable()) { // Worked out
+ $.jStore.Instances[name] = this;
+ // If there is no current engine, use this one
+ if (!$.jStore.CurrentEngine){
+ $.jStore.CurrentEngine = this;
+ }
+ $.jStore.delegate.trigger('jStore-ready', this);
+ }
+ else $.jStore.delegate.trigger('jStore-failure', this);
+ }).include();
+ }
+ }
+ }
+
+ // Set the current storage engine
+ $.jStore.setCurrentEngine = function(name){
+ if (!$.jStore.Instances.length ) // If no instances exist, attempt to load one
+ return $.jStore.FindEngine();
+
+ if (!name && $.jStore.Instances.length >= 1) { // If no name is specified, use the first engine
+ $.jStore.delegate.trigger('jStore-ready', $.jStore.Instances[0]);
+ return $.jStore.CurrentEngine = $.jStore.Instances[0];
+ }
+
+ if (name && $.jStore.Instances[name]) { // If a name is specified and exists, use it
+ $.jStore.delegate.trigger('jStore-ready', $.jStore.Instances[name]);
+ return $.jStore.CurrentEngine = $.jStore.Instances[name];
+ }
+
+ throw 'JSTORE_JRI_NO_MATCH';
+ }
+
+ // Test all possible engines for straightforward useability
+ $.jStore.FindEngine = function(){
+ $.each($.jStore.EngineOrder, function(k){
+ if ($.jStore.Availability[this]()){ // Find the first, easiest option and use it.
+ $.jStore.use(this, $.jStore.defaults.project, 'default');
+ return false;
+ }
+ })
+ }
+
+ // Provide a simple interface for storing/getting values
+ $.jStore.store = function(key, value){
+ if (!$.jStore.CurrentEngine) return false;
+
+ if ( !value ) // Executing a get command
+ return $.jStore.CurrentEngine.get(key);
+ // Executing a set command
+ return $.jStore.CurrentEngine.set(key, value);
+ }
+ // Provide a simple interface for storing/getting values
+ $.jStore.remove = function(key){
+ if (!$.jStore.CurrentEngine) return false;
+
+ return $.jStore.CurrentEngine.rem(key);
+ }
+
+ // Provide a chainable interface for storing values/getting a value at the end of a chain
+ $.fn.store = function(key, value){
+ if (!$.jStore.CurrentEngine) return this;
+
+ var result = $.jStore.store(key, value);
+
+ return !value ? result : this;
+ }
+
+ // Provide a chainable interface for removing values
+ $.fn.removeStore = function(key){
+ $.jStore.remove(key);
+
+ return this;
+ }
+
+ // Provide a way for users to call for auto-loading
+ $.jStore.load = function(){
+ if ($.jStore.defaults.engine)
+ return $.jStore.use($.jStore.defaults.engine, $.jStore.defaults.project, 'default');
+
+ // Attempt to find a valid engine, and catch any exceptions if we can't
+ try {
+ $.jStore.FindEngine();
+ } catch (e) {}
+ }
+
+})(jQuery);
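
A brief usage sketch of the interface just defined, assuming jQuery plus this plugin are loaded and at least one engine (html5, local, flash, gears, or ie) is available in the browser; the key name is illustrative only:

    // Runs once an engine has been selected (and, with autoload, connected):
    $.jStore.ready(function(engine){
        $.jStore.store('galaxy.lastTool', 'upload1'); // set a value
        var v = $.jStore.store('galaxy.lastTool');    // get -> 'upload1'
        $.jStore.remove('galaxy.lastTool');           // remove it
    });
    $.jStore.load(); // autodetect the first available engine and connect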
+/**
+ * jStore Engine Core
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ this.StorageEngine = Class.extend({
+ init: function(project, name){
+ // Configure the project name
+ this.project = project;
+ // The JRI name given by the manager
+ this.jri = name;
+ // Cache the data so we can work synchronously
+ this.data = {};
+ // The maximum limit of the storage engine
+ this.limit = -1;
+ // Third party script includes
+ this.includes = [];
+ // Create an event delegate for users to subscribe to event triggers
+ this.delegate = new jStoreDelegate(this)
+ .bind('engine-ready', function(){
+ this.isReady = true;
+ })
+ .bind('engine-included', function(){
+ this.hasIncluded = true;
+ });
+ // If enabled, the manager will check availability, then run include(), then check again
+ this.autoload = false; // This should be changed by the engines, if they have required includes
+ // When set, we're ready to transact data
+ this.isReady = false;
+ // When the includer is finished, it will set this to true
+ this.hasIncluded = false;
+ },
+ // Performs all necessary script includes
+ include: function(){
+ var self = this,
+ total = this.includes.length,
+ count = 0;
+
+ $.each(this.includes, function(){
+ $.ajax({type: 'get', url: this, dataType: 'script', cache: true,
+ success: function(){
+ count++;
+ if (count == total) self.delegate.trigger('engine-included');
+ }
+ })
+ });
+ },
+ // This should be overloaded with an actual functionality presence check
+ isAvailable: function(){
+ return false;
+ },
+ /** Event Subscription Shortcuts **/
+ ready: function(callback){
+ if (this.isReady) callback.apply(this);
+ else this.delegate.bind('engine-ready', callback);
+ return this;
+ },
+ included: function(callback){
+ if (this.hasIncluded) callback.apply(this);
+ else this.delegate.bind('engine-included', callback);
+ return this;
+ },
+ /** Cache Data Access **/
+ get: function(key){
+ return this.data[key] || null;
+ },
+ set: function(key, value){
+ this.data[key] = value;
+ return value;
+ },
+ rem: function(key){
+ var beforeDelete = this.data[key];
+ this.data[key] = null;
+ return beforeDelete;
+ }
+ });
+
+})(jQuery);
+/*!
+ * jStore DOM Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var sessionAvailability = $.jStore.Availability.session = function(){
+ return !!window.sessionStorage;
+ },
+ localAvailability = $.jStore.Availability.local = function(){
+ return !!(window.localStorage || window.globalStorage);
+ };
+
+ this.jStoreDom = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'DOM';
+
+ // Set the Database limit
+ this.limit = 5 * 1024 * 1024;
+ },
+ connect: function(){
+ // Fire our delegate to indicate we're ready for data transactions
+ this.delegate.trigger('engine-ready');
+ },
+ get: function(key){
+ var out = this.db.getItem(key);
+ // Gecko's getItem returns {value: 'the value'}, WebKit returns 'the value'
+ return out && out.value ? out.value : out
+ },
+ set: function(key, value){
+ this.db.setItem(key,value);
+ return value;
+ },
+ rem: function(key){
+ var out = this.get(key);
+ this.db.removeItem(key);
+ return out
+ }
+ })
+
+ this.jStoreLocal = jStoreDom.extend({
+ connect: function(){
+ // Gecko uses a non-standard globalStorage[ www.example.com ] DOM access object for persistant storage.
+ this.db = !window.globalStorage ? window.localStorage : window.globalStorage[location.hostname];
+ this._super();
+ },
+ isAvailable: localAvailability
+ })
+
+ this.jStoreSession = jStoreDom.extend({
+ connect: function(){
+ this.db = sessionStorage;
+ this._super();
+ },
+ isAvailable: sessionAvailability
+ })
+
+ $.jStore.Engines.local = jStoreLocal;
+ $.jStore.Engines.session = jStoreSession;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 1 ] = 'local';
+
+})(jQuery);
+/*!
+ * jStore Flash Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ * jStore.swf Copyright (c) 2008 Daniel Bulli (http://www.nuff-respec.com)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.flash = function(){
+ return !!($.jStore.hasFlash('8.0.0'));
+ }
+
+ this.jStoreFlash = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'Flash';
+
+ // Bind our flashReady function to the jStore Delegate
+ var self = this;
+ $.jStore.flashReady(function(){ self.flashReady() });
+ },
+ connect: function(){
+ var name = 'jstore-flash-embed-' + this.project;
+
+ // To make Flash Storage work on IE, we have to load up an iFrame
+ // which contains an HTML page that embeds the object using an
+ // object tag wrapping an embed tag. Of course, this is unnecessary for
+ // all browsers except for IE, which, to my knowledge, is the only browser
+ // in existance where you need to complicate your code to fix bugs. Goddamnit. :(
+ $(document.body)
+ .append('<iframe style="height:1px;width:1px;position:absolute;left:0;top:0;margin-left:-100px;" ' +
+ 'id="jStoreFlashFrame" src="' +$.jStore.defaults.flash + '"></iframe>');
+ },
+ flashReady: function(e){
+ var iFrame = $('#jStoreFlashFrame')[0];
+
+ // IE
+ if (iFrame.Document && $.isFunction(iFrame.Document['jStoreFlash'].f_get_cookie)) this.db = iFrame.Document['jStoreFlash'];
+ // Safari && Firefox
+ else if (iFrame.contentWindow && iFrame.contentWindow.document){
+ var doc = iFrame.contentWindow.document;
+ // Safari
+ if ($.isFunction($('object', $(doc))[0].f_get_cookie)) this.db = $('object', $(doc))[0];
+ // Firefox
+ else if ($.isFunction($('embed', $(doc))[0].f_get_cookie)) this.db = $('embed', $(doc))[0];
+ }
+
+ // We're ready to process data
+ if (this.db) this.delegate.trigger('engine-ready');
+ },
+ isAvailable: avilability,
+ get: function(key){
+ var out = this.db.f_get_cookie(key);
+ return out == 'null' ? null : out;
+ },
+ set: function(key, value){
+ this.db.f_set_cookie(key, value);
+ return value;
+ },
+ rem: function(key){
+ var beforeDelete = this.get(key);
+ this.db.f_delete_cookie(key);
+ return beforeDelete;
+ }
+ })
+
+ $.jStore.Engines.flash = jStoreFlash;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 2 ] = 'flash';
+
+ /**
+ * Flash Detection functions copied from the jQuery Flash Plugin
+ * Copyright (c) 2006 Luke Lutman (http://jquery.lukelutman.com/plugins/flash)
+ * Dual licensed under the MIT and GPL licenses.
+ * http://www.opensource.org/licenses/mit-license.php
+ * http://www.opensource.org/licenses/gpl-license.php
+ */
+ $.jStore.hasFlash = function(version){
+ var pv = $.jStore.flashVersion().match(/\d+/g),
+ rv = version.match(/\d+/g);
+
+ for(var i = 0; i < 3; i++) {
+ pv[i] = parseInt(pv[i] || 0);
+ rv[i] = parseInt(rv[i] || 0);
+ // player is less than required
+ if(pv[i] < rv[i]) return false;
+ // player is greater than required
+ if(pv[i] > rv[i]) return true;
+ }
+ // major version, minor version and revision match exactly
+ return true;
+ }
+
+ $.jStore.flashVersion = function(){
+ // ie
+ try {
+ try {
+ // avoid fp6 minor version lookup issues
+ // see: http://blog.deconcept.com/2006/01/11/getvariable-setvariable-crash-internet…
+ var axo = new ActiveXObject('ShockwaveFlash.ShockwaveFlash.6');
+ try { axo.AllowScriptAccess = 'always'; }
+ catch(e) { return '6,0,0'; }
+ } catch(e) {}
+ return new ActiveXObject('ShockwaveFlash.ShockwaveFlash').GetVariable('$version').replace(/\D+/g, ',').match(/^,?(.+),?$/)[1];
+ // other browsers
+ } catch(e) {
+ try {
+ if(navigator.mimeTypes["application/x-shockwave-flash"].enabledPlugin){
+ return (navigator.plugins["Shockwave Flash 2.0"] || navigator.plugins["Shockwave Flash"]).description.replace(/\D+/g, ",").match(/^,?(.+),?$/)[1];
+ }
+ } catch(e) {}
+ }
+ return '0,0,0';
+ }
+
+})(jQuery);
+
+// Callback fired when ExternalInterface is established
+function flash_ready(){
+ $.jStore.delegate.trigger('flash-ready');
+}
+/*!
+ * jStore Google Gears Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.gears = function(){
+ return !!(window.google && window.google.gears)
+ }
+
+ this.jStoreGears = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'Google Gears';
+
+ // Add required third-party scripts
+ this.includes.push('http://code.google.com/apis/gears/gears_init.js');
+
+ // Allow Autoloading on fail
+ this.autoload = true;
+ },
+ connect: function(){
+ // Create our database connection
+ var db = this.db = google.gears.factory.create('beta.database');
+ db.open( 'jstore-' + this.project );
+ db.execute( 'CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)' );
+
+ // Cache the data from the table
+ this.updateCache();
+ },
+ updateCache: function(){
+ // Read the database into our cache object
+ var result = this.db.execute( 'SELECT k,v FROM jstore' );
+ while (result.isValidRow()){
+ this.data[result.field(0)] = result.field(1);
+ result.next();
+ } result.close();
+
+ // Fire our delegate to indicate we're ready for data transactions
+ this.delegate.trigger('engine-ready');
+ },
+ isAvailable: avilability,
+ set: function(key, value){
+ // Update the database
+ var db = this.db;
+ db.execute( 'BEGIN' );
+ db.execute( 'INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)', [key,value] );
+ db.execute( 'COMMIT' );
+ return this._super(key, value);
+ },
+ rem: function(key){
+ // Update the database
+ var db = this.db;
+ db.execute( 'BEGIN' );
+ db.execute( 'DELETE FROM jstore WHERE k = ?', [key] );
+ db.execute( 'COMMIT' );
+ return this._super(key);
+ }
+ })
+
+ $.jStore.Engines.gears = jStoreGears;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 3 ] = 'gears';
+
+})(jQuery);
+/*!
+ * jStore HTML5 Specification Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.html5 = function(){
+ return !!window.openDatabase
+ }
+
+ this.jStoreHtml5 = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'HTML5';
+
+ // Set the Database limit
+ this.limit = 1024 * 200;
+ },
+ connect: function(){
+ // Create our database connection
+ var db = this.db = openDatabase('jstore-' + this.project, '1.0', this.project, this.limit);
+ if (!db) throw 'JSTORE_ENGINE_HTML5_NODB';
+ db.transaction(function(db){
+ db.executeSql( 'CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)' );
+ });
+
+ // Cache the data from the table
+ this.updateCache();
+ },
+ updateCache: function(){
+ var self = this;
+ // Read the database into our cache object
+ this.db.transaction(function(db){
+ db.executeSql( 'SELECT k,v FROM jstore', [], function(db, result){
+ var rows = result.rows, i = 0, row;
+ for (; i < rows.length; ++i){
+ row = rows.item(i);
+ self.data[row.k] = row.v;
+ }
+
+ // Fire our delegate to indicate we're ready for data transactions
+ self.delegate.trigger('engine-ready');
+ });
+ });
+ },
+ isAvailable: avilability,
+ set: function(key, value){
+ // Update the database
+ this.db.transaction(function(db){
+ db.executeSql( 'INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)', [key,value]);
+ });
+ return this._super(key, value);
+ },
+ rem: function(key){
+ // Update the database
+ this.db.transaction(function(db){
+ db.executeSql( 'DELETE FROM jstore WHERE k = ?', [key] )
+ })
+ return this._super(key);
+ }
+ })
+
+ $.jStore.Engines.html5 = jStoreHtml5;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 0 ] = 'html5';
+
+})(jQuery);
+/*!*
+ * jStore IE Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function($){
+
+ // Set up a static test function for this instance
+ var avilability = $.jStore.Availability.ie = function(){
+ return !!window.ActiveXObject;
+ }
+
+ this.jStoreIE = StorageEngine.extend({
+ init: function(project, name){
+ // Call the parental init object
+ this._super(project, name);
+
+ // The type of storage engine
+ this.type = 'IE';
+
+ // Allow Autoloading on fail
+ this.limit = 64 * 1024;
+ },
+ connect: function(){
+ // Create a hidden div to store attributes in
+ this.db = $('<div style="display:none;behavior:url(\'#default#userData\')" id="jstore-' + this.project + '"></div>')
+ .appendTo(document.body).get(0);
+ // Fire our delegate to indicate we're ready for data transactions
+ this.delegate.trigger('engine-ready');
+ },
+ isAvailable: avilability,
+ get: function(key){
+ this.db.load(this.project);
+ return this.db.getAttribute(key);
+ },
+ set: function(key, value){
+ this.db.setAttribute(key, value);
+ this.db.save(this.project);
+ return value;
+ },
+ rem: function(key){
+ var beforeDelete = this.get(key);
+ this.db.removeAttribute(key);
+ this.db.save(this.project);
+ return beforeDelete;
+ }
+ })
+
+ $.jStore.Engines.ie = jStoreIE;
+
+ // Store the ordering preference
+ $.jStore.EngineOrder[ 4 ] = 'ie';
+
+})(jQuery);
\ No newline at end of file
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/json2.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/json2.js Wed Aug 19 17:51:46 2009 -0400
@@ -0,0 +1,476 @@
+/*
+ http://www.JSON.org/json2.js
+ 2009-06-29
+
+ Public Domain.
+
+ NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
+
+ See http://www.JSON.org/js.html
+
+ This file creates a global JSON object containing two methods: stringify
+ and parse.
+
+ JSON.stringify(value, replacer, space)
+ value any JavaScript value, usually an object or array.
+
+ replacer an optional parameter that determines how object
+ values are stringified for objects. It can be a
+ function or an array of strings.
+
+ space an optional parameter that specifies the indentation
+ of nested structures. If it is omitted, the text will
+ be packed without extra whitespace. If it is a number,
+ it will specify the number of spaces to indent at each
+ level. If it is a string (such as '\t' or ' '),
+ it contains the characters used to indent at each level.
+
+ This method produces a JSON text from a JavaScript value.
+
+ When an object value is found, if the object contains a toJSON
+ method, its toJSON method will be called and the result will be
+ stringified. A toJSON method does not serialize: it returns the
+ value represented by the name/value pair that should be serialized,
+ or undefined if nothing should be serialized. The toJSON method
+ will be passed the key associated with the value, and this will be
+ bound to the object holding the key.
+
+ For example, this would serialize Dates as ISO strings.
+
+ Date.prototype.toJSON = function (key) {
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ return this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z';
+ };
+
+ You can provide an optional replacer method. It will be passed the
+ key and value of each member, with this bound to the containing
+ object. The value that is returned from your method will be
+ serialized. If your method returns undefined, then the member will
+ be excluded from the serialization.
+
+ If the replacer parameter is an array of strings, then it will be
+ used to select the members to be serialized. It filters the results
+ such that only members with keys listed in the replacer array are
+ stringified.
+
+ Values that do not have JSON representations, such as undefined or
+ functions, will not be serialized. Such values in objects will be
+ dropped; in arrays they will be replaced with null. You can use
+ a replacer function to replace those with JSON values.
+ JSON.stringify(undefined) returns undefined.
+
+ The optional space parameter produces a stringification of the
+ value that is filled with line breaks and indentation to make it
+ easier to read.
+
+ If the space parameter is a non-empty string, then that string will
+ be used for indentation. If the space parameter is a number, then
+ the indentation will be that many spaces.
+
+ Example:
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}]);
+ // text is '["e",{"pluribus":"unum"}]'
+
+
+ text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t');
+ // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]'
+
+ text = JSON.stringify([new Date()], function (key, value) {
+ return this[key] instanceof Date ?
+ 'Date(' + this[key] + ')' : value;
+ });
+ // text is '["Date(---current time---)"]'
+
+
+ JSON.parse(text, reviver)
+ This method parses a JSON text to produce an object or array.
+ It can throw a SyntaxError exception.
+
+ The optional reviver parameter is a function that can filter and
+ transform the results. It receives each of the keys and values,
+ and its return value is used instead of the original value.
+ If it returns what it received, then the structure is not modified.
+ If it returns undefined then the member is deleted.
+
+ Example:
+
+ // Parse the text. Values that look like ISO date strings will
+ // be converted to Date objects.
+
+ myData = JSON.parse(text, function (key, value) {
+ var a;
+ if (typeof value === 'string') {
+ a =
+/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
+ if (a) {
+ return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4],
+ +a[5], +a[6]));
+ }
+ }
+ return value;
+ });
+
+ myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) {
+ var d;
+ if (typeof value === 'string' &&
+ value.slice(0, 5) === 'Date(' &&
+ value.slice(-1) === ')') {
+ d = new Date(value.slice(5, -1));
+ if (d) {
+ return d;
+ }
+ }
+ return value;
+ });
+
+
+ This is a reference implementation. You are free to copy, modify, or
+ redistribute.
+
+ This code should be minified before deployment.
+ See http://javascript.crockford.com/jsmin.html
+
+ USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO
+ NOT CONTROL.
+*/
+
+/*jslint evil: true */
+
+/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply,
+ call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours,
+ getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join,
+ lastIndex, length, parse, prototype, push, replace, slice, stringify,
+ test, toJSON, toString, valueOf
+*/
+
+// Create a JSON object only if one does not already exist. We create the
+// methods in a closure to avoid creating global variables.
+
+var JSON = JSON || {};
+
+(function () {
+
+ function f(n) {
+ // Format integers to have at least two digits.
+ return n < 10 ? '0' + n : n;
+ }
+
+ if (typeof Date.prototype.toJSON !== 'function') {
+
+ Date.prototype.toJSON = function (key) {
+
+ return isFinite(this.valueOf()) ?
+ this.getUTCFullYear() + '-' +
+ f(this.getUTCMonth() + 1) + '-' +
+ f(this.getUTCDate()) + 'T' +
+ f(this.getUTCHours()) + ':' +
+ f(this.getUTCMinutes()) + ':' +
+ f(this.getUTCSeconds()) + 'Z' : null;
+ };
+
+ String.prototype.toJSON =
+ Number.prototype.toJSON =
+ Boolean.prototype.toJSON = function (key) {
+ return this.valueOf();
+ };
+ }
+
+ var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,
+ gap,
+ indent,
+ meta = { // table of character substitutions
+ '\b': '\\b',
+ '\t': '\\t',
+ '\n': '\\n',
+ '\f': '\\f',
+ '\r': '\\r',
+ '"' : '\\"',
+ '\\': '\\\\'
+ },
+ rep;
+
+
+ function quote(string) {
+
+// If the string contains no control characters, no quote characters, and no
+// backslash characters, then we can safely slap some quotes around it.
+// Otherwise we must also replace the offending characters with safe escape
+// sequences.
+
+ escapable.lastIndex = 0;
+ return escapable.test(string) ?
+ '"' + string.replace(escapable, function (a) {
+ var c = meta[a];
+ return typeof c === 'string' ? c :
+ '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
+ }) + '"' :
+ '"' + string + '"';
+ }
+
+
+ function str(key, holder) {
+
+// Produce a string from holder[key].
+
+ var i, // The loop counter.
+ k, // The member key.
+ v, // The member value.
+ length,
+ mind = gap,
+ partial,
+ value = holder[key];
+
+// If the value has a toJSON method, call it to obtain a replacement value.
+
+ if (value && typeof value === 'object' &&
+ typeof value.toJSON === 'function') {
+ value = value.toJSON(key);
+ }
+
+// If we were called with a replacer function, then call the replacer to
+// obtain a replacement value.
+
+ if (typeof rep === 'function') {
+ value = rep.call(holder, key, value);
+ }
+
+// What happens next depends on the value's type.
+
+ switch (typeof value) {
+ case 'string':
+ return quote(value);
+
+ case 'number':
+
+// JSON numbers must be finite. Encode non-finite numbers as null.
+
+ return isFinite(value) ? String(value) : 'null';
+
+ case 'boolean':
+ case 'null':
+
+// If the value is a boolean or null, convert it to a string. Note:
+// typeof null does not produce 'null'. The case is included here in
+// the remote chance that this gets fixed someday.
+
+ return String(value);
+
+// If the type is 'object', we might be dealing with an object or an array or
+// null.
+
+ case 'object':
+
+// Due to a specification blunder in ECMAScript, typeof null is 'object',
+// so watch out for that case.
+
+ if (!value) {
+ return 'null';
+ }
+
+// Make an array to hold the partial results of stringifying this object value.
+
+ gap += indent;
+ partial = [];
+
+// Is the value an array?
+
+ if (Object.prototype.toString.apply(value) === '[object Array]') {
+
+// The value is an array. Stringify every element. Use null as a placeholder
+// for non-JSON values.
+
+ length = value.length;
+ for (i = 0; i < length; i += 1) {
+ partial[i] = str(i, value) || 'null';
+ }
+
+// Join all of the elements together, separated with commas, and wrap them in
+// brackets.
+
+ v = partial.length === 0 ? '[]' :
+ gap ? '[\n' + gap +
+ partial.join(',\n' + gap) + '\n' +
+ mind + ']' :
+ '[' + partial.join(',') + ']';
+ gap = mind;
+ return v;
+ }
+
+// If the replacer is an array, use it to select the members to be stringified.
+
+ if (rep && typeof rep === 'object') {
+ length = rep.length;
+ for (i = 0; i < length; i += 1) {
+ k = rep[i];
+ if (typeof k === 'string') {
+ v = str(k, value);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ } else {
+
+// Otherwise, iterate through all of the keys in the object.
+
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = str(k, value);
+ if (v) {
+ partial.push(quote(k) + (gap ? ': ' : ':') + v);
+ }
+ }
+ }
+ }
+
+// Join all of the member texts together, separated with commas,
+// and wrap them in braces.
+
+ v = partial.length === 0 ? '{}' :
+ gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' +
+ mind + '}' : '{' + partial.join(',') + '}';
+ gap = mind;
+ return v;
+ }
+ }
+
+// If the JSON object does not yet have a stringify method, give it one.
+
+ if (typeof JSON.stringify !== 'function') {
+ JSON.stringify = function (value, replacer, space) {
+
+// The stringify method takes a value and an optional replacer, and an optional
+// space parameter, and returns a JSON text. The replacer can be a function
+// that can replace values, or an array of strings that will select the keys.
+// A default replacer method can be provided. Use of the space parameter can
+// produce text that is more easily readable.
+
+ var i;
+ gap = '';
+ indent = '';
+
+// If the space parameter is a number, make an indent string containing that
+// many spaces.
+
+ if (typeof space === 'number') {
+ for (i = 0; i < space; i += 1) {
+ indent += ' ';
+ }
+
+// If the space parameter is a string, it will be used as the indent string.
+
+ } else if (typeof space === 'string') {
+ indent = space;
+ }
+
+// If there is a replacer, it must be a function or an array.
+// Otherwise, throw an error.
+
+ rep = replacer;
+ if (replacer && typeof replacer !== 'function' &&
+ (typeof replacer !== 'object' ||
+ typeof replacer.length !== 'number')) {
+ throw new Error('JSON.stringify');
+ }
+
+// Make a fake root object containing our value under the key of ''.
+// Return the result of stringifying the value.
+
+ return str('', {'': value});
+ };
+ }
+
+
+// If the JSON object does not yet have a parse method, give it one.
+
+ if (typeof JSON.parse !== 'function') {
+ JSON.parse = function (text, reviver) {
+
+// The parse method takes a text and an optional reviver function, and returns
+// a JavaScript value if the text is a valid JSON text.
+
+ var j;
+
+ function walk(holder, key) {
+
+// The walk method is used to recursively walk the resulting structure so
+// that modifications can be made.
+
+ var k, v, value = holder[key];
+ if (value && typeof value === 'object') {
+ for (k in value) {
+ if (Object.hasOwnProperty.call(value, k)) {
+ v = walk(value, k);
+ if (v !== undefined) {
+ value[k] = v;
+ } else {
+ delete value[k];
+ }
+ }
+ }
+ }
+ return reviver.call(holder, key, value);
+ }
+
+
+// Parsing happens in four stages. In the first stage, we replace certain
+// Unicode characters with escape sequences. JavaScript handles many characters
+// incorrectly, either silently deleting them, or treating them as line endings.
+
+ cx.lastIndex = 0;
+ if (cx.test(text)) {
+ text = text.replace(cx, function (a) {
+ return '\\u' +
+ ('0000' + a.charCodeAt(0).toString(16)).slice(-4);
+ });
+ }
+
+// In the second stage, we run the text against regular expressions that look
+// for non-JSON patterns. We are especially concerned with '()' and 'new'
+// because they can cause invocation, and '=' because it can cause mutation.
+// But just to be safe, we want to reject all unexpected forms.
+
+// We split the second stage into 4 regexp operations in order to work around
+// crippling inefficiencies in IE's and Safari's regexp engines. First we
+// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we
+// replace all simple value tokens with ']' characters. Third, we delete all
+// open brackets that follow a colon or comma or that begin the text. Finally,
+// we look to see that the remaining characters are only whitespace or ']' or
+// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval.
+
+ if (/^[\],:{}\s]*$/.
+test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@').
+replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']').
+replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) {
+
+// In the third stage we use the eval function to compile the text into a
+// JavaScript structure. The '{' operator is subject to a syntactic ambiguity
+// in JavaScript: it can begin a block or an object literal. We wrap the text
+// in parens to eliminate the ambiguity.
+
+ j = eval('(' + text + ')');
+
+// In the optional fourth stage, we recursively walk the new structure, passing
+// each name/value pair to a reviver function for possible transformation.
+
+ return typeof reviver === 'function' ?
+ walk({'': j}, '') : j;
+ }
+
+// If the text is not JSON parseable, then a SyntaxError is thrown.
+
+ throw new SyntaxError('JSON.parse');
+ };
+ }
+}());
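
A short usage sketch of the two entry points json2.js provides, mirroring the examples in its header comment (not part of the changeset):

// Stringify with tab indentation, then parse back; the reviver turns
// ISO date strings into Date objects as the structure is walked.
var text = JSON.stringify({ pluribus: 'unum', when: new Date() }, null, '\t');
var data = JSON.parse(text, function (key, value) {
    var a = typeof value === 'string' &&
        /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value);
    return a ? new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], +a[5], +a[6])) : value;
});
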
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/json_cookie.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/json_cookie.js Wed Aug 19 17:51:46 2009 -0400
@@ -0,0 +1,61 @@
+/*
+ JSONCookie: Uses JSON to allow setting multiple preferences in one cookie.
+ Kanwei Li, 2009
+
+ var cookie = new JSONCookie("cookie_name"); // Pass in the name of the cookie
+
+ // Gets the value of a preference, returns optional second argument if pref not found
+ cookie.get("pref", "val_if_not_found");
+
+ cookie.set("pref", "val"); // Sets a value for the preference and saves cookie
+ cookie.unset("pref"); // Unsets the preference and saves cookie
+ cookie.clear(); // Deletes the cookie
+
+*/
+
+function JSONCookie(name) {
+ this.cookie_name = name;
+}
+
+
+JSONCookie.prototype = {
+ json_data : function() {
+ var cookie = $.cookie(this.cookie_name);
+ return cookie ? JSON.parse(cookie) : null;
+ },
+
+ save : function(data) {
+ $.cookie(this.cookie_name, JSON.stringify(data));
+ },
+
+ get : function(attr, else_val) {
+ var data = this.json_data();
+ if (data && data[attr]) { return data[attr];
+ } else if (else_val) { return else_val;
+ } else { return null;
+ }
+ },
+
+ set : function(attr, val) {
+ var data = this.json_data();
+ if (!data) {
+ data = {};
+ }
+ // key the entry by the attr argument; { attr : val } would store a key literally named "attr"
+ data[attr] = val;
+ this.save(data);
+ },
+
+ unset : function(attr) {
+ var data = this.json_data();
+ if (data) {
+ delete data[attr];
+ }
+ this.save(data);
+ },
+
+ clear : function() {
+ this.save(null);
+ }
+
+};
\ No newline at end of file
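
A quick sketch of JSONCookie in use (assumes the jquery.cookie plugin supplies $.cookie and json2.js supplies JSON; the cookie name is illustrative):

var prefs = new JSONCookie('galaxy_prefs');
prefs.set('sidebar', 'collapsed'); // cookie now holds {"sidebar":"collapsed"}
prefs.get('sidebar');              // -> "collapsed"
prefs.get('missing', 'fallback');  // -> "fallback", since the pref is unset
prefs.unset('sidebar');            // removes the single preference
prefs.clear();                     // deletes the stored data
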
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/packed/galaxy.workflow_editor.canvas.js
--- a/static/scripts/packed/galaxy.workflow_editor.canvas.js Wed Aug 19 11:08:58 2009 -0400
+++ b/static/scripts/packed/galaxy.workflow_editor.canvas.js Wed Aug 19 17:51:46 2009 -0400
@@ -1,1 +1,1 @@
-function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatype=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var b in this.datatypes){if(a.datatype=="input"){return true}if(issubtype(a.datatype,this.datatypes[b])){return true}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a)
{this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;
this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropstart",function(g){g.dragProxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g){g.dragProxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g){(new Connector(g.dragTarget.terminal,g.dropTarget.terminal)).redraw()}).bind("hov
er",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img src='../images/delete_icon.png' />").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j){var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var k=new Connector();k.dragging=true;k.connect(this.terminal,i.terminal);$.dropManage({filter:function(h){return this.terminal.can_accept(f)}}).addClass("input-terminal-active");ret
urn i}).bind("drag",function(i){var h=function(){var k=$(i.dragProxy).offsetParent().offset(),j=i.offsetX-k.left,l=i.offsetY-k.top;$(i.dragProxy).css({left:j,top:l});i.dragProxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h){h.dragProxy.terminal.connectors[0].destroy();$(h.dragProxy).remove();$.dropManage().removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b
.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(g){var d=this.element;if(g.type){this.type=g.type}this.name=g.name;this.form_html=g.form_html;this.tool_state=g.tool_state;this.tool_errors=g.tool_errors;if(this.tool_errors){d.addClass("tool-node-error")}else{d.removeClass("tool-node-error")}var c=this;var a=d.find(".toolFormBody");a.find("div").remove();var h=$("<div class='inputs'></div>").appendTo(a);$.each(g.data_inputs,function(j,b){var f=$("<div class='terminal input-terminal'></div>");c.enable_input_terminal(f,b.name,b.extensions);h.append($("<div class='form-row dataRow input-data-row' name='"+b.name+"'>"+b.label+"</div>").prepend(f))});if((g.data_inputs.length>0)&&(g.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(g.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");c.enable_output_terminal(j,b.name,b.extension);var f=b.name;if(b.extension!="input"){f=f+" ("+b.extensio
n+")"}a.append($("<div class='form-row dataRow'>"+f+"</div>").append(j))});workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var g=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=g.find("div.input-data-row");$.each(f.data_inputs,function(k,h){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,h.name,h.extensions);g.find("div[name="+h.name+"]").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){j[0].terminal.connectors[0]=i;i.handle2=j[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+h.name+"'>"+h.label+"</div>").prepend(j))});g.replaceWith(b);g.find("div.input-data-row > .terminal").each(function(){this.terminal.destr
oy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},to_simple:function(){var a={};$.each(this.nodes,function(b,d){var f={};$.each(d.input_terminals,function(g,h){f[h.name]=null;$.each(h.connectors,function(j,k){f[h.name]={id:k.ha
ndle1.node.id,output_name:k.handle1.name}})});var c={id:d.id,type:d.type,tool_id:d.tool_id,tool_state:d.tool_state,tool_errors:d.tool_errors,input_connections:f,position:$(d.element).position()};a[d.id]=c});return{steps:a}},from_simple:function(a){wf=this;var b=0;wf.name=a.name;$.each(a.steps,function(f,d){var c=prebuild_node("tool",d.name,d.tool_id);c.init_field_data(d);if(d.position){c.element.css({top:d.position.top,left:d.position.left})}c.id=d.id;wf.nodes[c.id]=c;b=Math.max(b,parseInt(f))});wf.id_counter=b+1;$.each(a.steps,function(f,d){var c=wf.nodes[f];$.each(d.input_connections,function(h,g){if(g){var i=wf.nodes[g.id];var j=new Connector();j.connect(i.output_terminals[g.output_name],c.input_terminals[h]);j.redraw()}})})},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.clear_active_node();parent.show_
form_for_tool(a.form_html,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){parent.show_form_for_tool(a.form_html,a)}},layout:function(){var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[sucessors[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var
q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(
this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='../images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img src='../images/delete_icon.png' />").click(function(b){g.destroy()}).hover(function(){$(this).attr("src","../images/delete_icon_dark.png")},function(){$(this).attr("src","../images/delete_icon.png")}));i.appendTo("#canvas-container");var
d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o){var f=$(this).offsetParent().offset(),b=o.offsetX-f.left,p=o.offsetY-f.top;$(this).css({left:b,top:p});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prot
otype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;
var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(g){var h=$(this).offset();var f=b.cc.position();c=f.top-h.top;d=f.left-h.left}).bind("drag",function(f){a(f.offsetX+d,f.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k){var j=b.cc.width(),g=b.cc.height(),f=b.oc.width(),h=b.oc.height(),i=$(this).offsetParent().offset(),m=k.offsetX-i.left,l=k.offsetY-i.top;a(-(m/f*j),-(l/h*g))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g){var i=$(this).offsetParent();var h=i.offset();var f=Math.max(i.width()-(g.offsetX-h.left),i.height()-(g.offsetY-h.top));$(this).css({wi
dth:f,height:f});b.draw_overview()})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;$.each(workflow.nodes,function(t,q){var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewp
ort_overlay()}});
\ No newline at end of file
+function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatype=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var b in this.datatypes){if(a.datatype=="input"){return true}if(issubtype(a.datatype,this.datatypes[b])){return true}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a)
{this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;
this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropstart",function(g){g.dragProxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g){g.dragProxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g){(new Connector(g.dragTarget.terminal,g.dropTarget.terminal)).redraw()}).bind("hov
er",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img src='../images/delete_icon.png' />").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j){var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var k=new Connector();k.dragging=true;k.connect(this.terminal,i.terminal);$.dropManage({filter:function(h){return this.terminal.can_accept(f)}}).addClass("input-terminal-active");ret
urn i}).bind("drag",function(i){var h=function(){var k=$(i.dragProxy).offsetParent().offset(),j=i.offsetX-k.left,l=i.offsetY-k.top;$(i.dragProxy).css({left:j,top:l});i.dragProxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h){h.dragProxy.terminal.connectors[0].destroy();$(h.dragProxy).remove();$.dropManage().removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b
.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(g){var d=this.element;if(g.type){this.type=g.type}this.name=g.name;this.form_html=g.form_html;this.tool_state=g.tool_state;this.tool_errors=g.tool_errors;if(this.tool_errors){d.addClass("tool-node-error")}else{d.removeClass("tool-node-error")}var c=this;var a=d.find(".toolFormBody");a.find("div").remove();var h=$("<div class='inputs'></div>").appendTo(a);$.each(g.data_inputs,function(j,b){var f=$("<div class='terminal input-terminal'></div>");c.enable_input_terminal(f,b.name,b.extensions);h.append($("<div class='form-row dataRow input-data-row' name='"+b.name+"'>"+b.label+"</div>").prepend(f))});if((g.data_inputs.length>0)&&(g.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(g.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");c.enable_output_terminal(j,b.name,b.extension);var f=b.name;if(b.extension!="input"){f=f+" ("+b.extensio
n+")"}a.append($("<div class='form-row dataRow'>"+f+"</div>").append(j))});workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var g=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=g.find("div.input-data-row");$.each(f.data_inputs,function(k,h){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,h.name,h.extensions);g.find("div[name="+h.name+"]").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){j[0].terminal.connectors[0]=i;i.handle2=j[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+h.name+"'>"+h.label+"</div>").prepend(j))});g.replaceWith(b);g.find("div.input-data-row > .terminal").each(function(){this.terminal.destr
oy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},to_simple:function(){var a={};$.each(this.nodes,function(b,d){var f={};$.each(d.input_terminals,function(g,h){f[h.name]=null;$.each(h.connecto
rs,function(j,k){f[h.name]={id:k.handle1.node.id,output_name:k.handle1.name}})});var c={id:d.id,type:d.type,tool_id:d.tool_id,tool_state:d.tool_state,tool_errors:d.tool_errors,input_connections:f,position:$(d.element).position()};a[d.id]=c});return{steps:a}},from_simple:function(a){wf=this;var b=0;wf.name=a.name;$.each(a.steps,function(f,d){var c=prebuild_node("tool",d.name,d.tool_id);c.init_field_data(d);if(d.position){c.element.css({top:d.position.top,left:d.position.left})}c.id=d.id;wf.nodes[c.id]=c;b=Math.max(b,parseInt(f))});wf.id_counter=b+1;$.each(a.steps,function(f,d){var c=wf.nodes[f];$.each(d.input_connections,function(h,g){if(g){var i=wf.nodes[g.id];var j=new Connector();j.connect(i.output_terminals[g.output_name],c.input_terminals[h]);j.redraw()}})})},enable_auto_save:function(){outer_this=this;$(".toolFormBody").find("input,textarea,select").each(function(){$(this).focus(function(){outer_this.active_form_has_changes=true})})},check_changes_in_active_form:functio
n(){if(this.active_form_has_changes){this.has_changes=true;$(".toolFormBody").find("form").each(function(){$(this).submit()});this.active_form_has_changes=false}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){parent.show_form_for_tool(a.form_html,a)}},layout:function(){this.check_changes_in_active_form();var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){l
evel_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this
.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></d
iv>");var h="<div><img height='16' align='middle' src='../images/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img src='../images/delete_icon.png' />").click(function(b){g.destroy()}).hover(function(){$(this).attr("src","../images/delete_icon_dark.png")},function(){$(this).attr("src","../images/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o){var f=$(this).
offsetParent().offset(),b=o.offsetX-f.left,p=o.offsetY-f.top;$(this).css({left:b,top:p});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.c
ss("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(g){var h=$(this).offset();var f=b.cc.position();c=f.top-h.top;d=f.left-h.left}).bind("drag",function(f){a(f.offsetX+d,f.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_n
odes();b.draw_overview()});this.ov.bind("drag",function(k){var j=b.cc.width(),g=b.cc.height(),f=b.oc.width(),h=b.oc.height(),i=$(this).offsetParent().offset(),m=k.offsetX-i.left,l=k.offsetY-i.top;a(-(m/f*j),-(l/h*g))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g){var i=$(this).offsetParent();var h=i.offset();var f=Math.max(i.width()-(g.offsetX-h.left),i.height()-(g.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(f){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var
g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;$.each(workflow.nodes,function(t,q){var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
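
Unpacked, the change to this minified file adds auto-save bookkeeping to Workflow; a readable sketch reconstructed from the packed source above (formatting and the local variable name are approximations):

// Mark the workflow dirty whenever a field of the active tool form
// gains focus; check_changes_in_active_form() then submits the form
// before the active node switches or a re-layout runs.
enable_auto_save: function () {
    var self = this;
    $('.toolFormBody').find('input,textarea,select').each(function () {
        $(this).focus(function () { self.active_form_has_changes = true; });
    });
},
check_changes_in_active_form: function () {
    if (this.active_form_has_changes) {
        this.has_changes = true;
        $('.toolFormBody').find('form').each(function () { $(this).submit(); });
        this.active_form_has_changes = false;
    }
}
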
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/packed/jquery.jstore-all.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/packed/jquery.jstore-all.js Wed Aug 19 17:51:46 2009 -0400
@@ -0,0 +1,41 @@
+/*
+ * jStore - Persistent Client-Side Storage
+ *
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ *
+ * Dual licensed under:
+ * MIT: http://www.opensource.org/licenses/mit-license.php
+ * GPLv3: http://www.opensource.org/licenses/gpl-3.0.html
+ */
+(function(){var a=false,b=/xyz/.test(function(){xyz})?/\b_super\b/:/.*/;this.Class=function(){};Class.extend=function(g){var f=this.prototype;a=true;var e=new this();a=false;for(var d in g){e[d]=typeof g[d]=="function"&&typeof f[d]=="function"&&b.test(g[d])?(function(h,i){return function(){var k=this._super;this._super=f[h];var j=i.apply(this,arguments);this._super=k;return j}})(d,g[d]):g[d]}function c(){if(!a&&this.init){this.init.apply(this,arguments)}}c.prototype=e;c.constructor=c;c.extend=arguments.callee;return c}})();
+/*
+ * jStore Delegate Framework
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(a){this.jStoreDelegate=Class.extend({init:function(b){this.parent=b;this.callbacks={}},bind:function(b,c){if(!a.isFunction(c)){return this}if(!this.callbacks[b]){this.callbacks[b]=[]}this.callbacks[b].push(c);return this},trigger:function(){var d=this.parent,c=[].slice.call(arguments),e=c.shift(),b=this.callbacks[e];if(!b){return false}a.each(b,function(){this.apply(d,c)});return this}})})(jQuery);(function(a){a.jStore={};a.extend(a.jStore,{EngineOrder:[],Availability:{},Engines:{},Instances:{},CurrentEngine:null,defaults:{project:null,engine:null,autoload:true,flash:"jStore.Flash.html"},isReady:false,isFlashReady:false,delegate:new jStoreDelegate(a.jStore).bind("jStore-ready",function(b){a.jStore.isReady=true;if(a.jStore.defaults.autoload){b.connect()}}).bind("flash-ready",function(){a.jStore.isFlashReady=true})});a.jStore.ready=function(b){if(a.jStore.isReady){b.apply(a.jStore,[a.jStore.CurrentEngine])}else{a.jStore.delegate.bind("jStore-ready",b)}};a.jStore.fail
=function(b){a.jStore.delegate.bind("jStore-failure",b)};a.jStore.flashReady=function(b){if(a.jStore.isFlashReady){b.apply(a.jStore,[a.jStore.CurrentEngine])}else{a.jStore.delegate.bind("flash-ready",b)}};a.jStore.use=function(d,g,c){g=g||a.jStore.defaults.project||location.hostname.replace(/\./g,"-")||"unknown";var f=a.jStore.Engines[d.toLowerCase()]||null,b=(c?c+".":"")+g+"."+d;if(!f){throw"JSTORE_ENGINE_UNDEFINED"}f=new f(g,b);if(a.jStore.Instances[b]){throw"JSTORE_JRI_CONFLICT"}if(f.isAvailable()){a.jStore.Instances[b]=f;if(!a.jStore.CurrentEngine){a.jStore.CurrentEngine=f}a.jStore.delegate.trigger("jStore-ready",f)}else{if(!f.autoload){throw"JSTORE_ENGINE_UNAVILABLE"}else{f.included(function(){if(this.isAvailable()){a.jStore.Instances[b]=this;if(!a.jStore.CurrentEngine){a.jStore.CurrentEngine=this}a.jStore.delegate.trigger("jStore-ready",this)}else{a.jStore.delegate.trigger("jStore-failure",this)}}).include()}}};a.jStore.setCurrentEngine=function(b){if(!a.jStore.Instanc
es.length){return a.jStore.FindEngine()}if(!b&&a.jStore.Instances.length>=1){a.jStore.delegate.trigger("jStore-ready",a.jStore.Instances[0]);return a.jStore.CurrentEngine=a.jStore.Instances[0]}if(b&&a.jStore.Instances[b]){a.jStore.delegate.trigger("jStore-ready",a.jStore.Instances[b]);return a.jStore.CurrentEngine=a.jStore.Instances[b]}throw"JSTORE_JRI_NO_MATCH"};a.jStore.FindEngine=function(){a.each(a.jStore.EngineOrder,function(b){if(a.jStore.Availability[this]()){a.jStore.use(this,a.jStore.defaults.project,"default");return false}})};a.jStore.store=function(b,c){if(!a.jStore.CurrentEngine){return false}if(!c){return a.jStore.CurrentEngine.get(b)}return a.jStore.CurrentEngine.set(b,c)};a.jStore.remove=function(b){if(!a.jStore.CurrentEngine){return false}return a.jStore.CurrentEngine.rem(b)};a.fn.store=function(c,d){if(!a.jStore.CurrentEngine){return this}var b=a.jStore.store(c,d);return !d?b:this};a.fn.removeStore=function(b){a.jStore.remove(b);return this};a.jStore.load=f
unction(){if(a.jStore.defaults.engine){return a.jStore.use(a.jStore.defaults.engine,a.jStore.defaults.project,"default")}try{a.jStore.FindEngine()}catch(b){}}})(jQuery);(function(a){this.StorageEngine=Class.extend({init:function(c,b){this.project=c;this.jri=b;this.data={};this.limit=-1;this.includes=[];this.delegate=new jStoreDelegate(this).bind("engine-ready",function(){this.isReady=true}).bind("engine-included",function(){this.hasIncluded=true});this.autoload=false;this.isReady=false;this.hasIncluded=false},include:function(){var b=this,d=this.includes.length,c=0;a.each(this.includes,function(){a.ajax({type:"get",url:this,dataType:"script",cache:true,success:function(){c++;if(c==d){b.delegate.trigger("engine-included")}}})})},isAvailable:function(){return false},ready:function(b){if(this.isReady){b.apply(this)}else{this.delegate.bind("engine-ready",b)}return this},included:function(b){if(this.hasIncluded){b.apply(this)}else{this.delegate.bind("engine-included",b)}return th
is},get:function(b){return this.data[b]||null},set:function(b,c){this.data[b]=c;return c},rem:function(b){var c=this.data[b];this.data[b]=null;return c}})})(jQuery);
+/*
+ * jStore DOM Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(c){var b=c.jStore.Availability.session=function(){return !!window.sessionStorage},a=c.jStore.Availability.local=function(){return !!(window.localStorage||window.globalStorage)};this.jStoreDom=StorageEngine.extend({init:function(e,d){this._super(e,d);this.type="DOM";this.limit=5*1024*1024},connect:function(){this.delegate.trigger("engine-ready")},get:function(e){var d=this.db.getItem(e);return d&&d.value?d.value:d},set:function(d,e){this.db.setItem(d,e);return e},rem:function(e){var d=this.get(e);this.db.removeItem(e);return d}});this.jStoreLocal=jStoreDom.extend({connect:function(){this.db=!window.globalStorage?window.localStorage:window.globalStorage[location.hostname];this._super()},isAvailable:a});this.jStoreSession=jStoreDom.extend({connect:function(){this.db=sessionStorage;this._super()},isAvailable:b});c.jStore.Engines.local=jStoreLocal;c.jStore.Engines.session=jStoreSession;c.jStore.EngineOrder[1]="local"})(jQuery);
+/*
+ * jStore Flash Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ * jStore.swf Copyright (c) 2008 Daniel Bulli (http://www.nuff-respec.com)
+ */
+(function(b){var a=b.jStore.Availability.flash=function(){return !!(b.jStore.hasFlash("8.0.0"))};this.jStoreFlash=StorageEngine.extend({init:function(e,d){this._super(e,d);this.type="Flash";var c=this;b.jStore.flashReady(function(){c.flashReady()})},connect:function(){var c="jstore-flash-embed-"+this.project;b(document.body).append('<iframe style="height:1px;width:1px;position:absolute;left:0;top:0;margin-left:-100px;" id="jStoreFlashFrame" src="'+b.jStore.defaults.flash+'"></iframe>')},flashReady:function(f){var c=b("#jStoreFlashFrame")[0];if(c.Document&&b.isFunction(c.Document.jStoreFlash.f_get_cookie)){this.db=c.Document.jStoreFlash}else{if(c.contentWindow&&c.contentWindow.document){var d=c.contentWindow.document;if(b.isFunction(b("object",b(d))[0].f_get_cookie)){this.db=b("object",b(d))[0]}else{if(b.isFunction(b("embed",b(d))[0].f_get_cookie)){this.db=b("embed",b(d))[0]}}}}if(this.db){this.delegate.trigger("engine-ready")}},isAvailable:a,get:function(d){var c=this.db.f_g
et_cookie(d);return c=="null"?null:c},set:function(c,d){this.db.f_set_cookie(c,d);return d},rem:function(c){var d=this.get(c);this.db.f_delete_cookie(c);return d}});b.jStore.Engines.flash=jStoreFlash;b.jStore.EngineOrder[2]="flash";b.jStore.hasFlash=function(c){var e=b.jStore.flashVersion().match(/\d+/g),f=c.match(/\d+/g);for(var d=0;d<3;d++){e[d]=parseInt(e[d]||0);f[d]=parseInt(f[d]||0);if(e[d]<f[d]){return false}if(e[d]>f[d]){return true}}return true};b.jStore.flashVersion=function(){try{try{var c=new ActiveXObject("ShockwaveFlash.ShockwaveFlash.6");try{c.AllowScriptAccess="always"}catch(d){return"6,0,0"}}catch(d){}return new ActiveXObject("ShockwaveFlash.ShockwaveFlash").GetVariable("$version").replace(/\D+/g,",").match(/^,?(.+),?$/)[1]}catch(d){try{if(navigator.mimeTypes["application/x-shockwave-flash"].enabledPlugin){return(navigator.plugins["Shockwave Flash 2.0"]||navigator.plugins["Shockwave Flash"]).description.replace(/\D+/g,",").match(/^,?(.+),?$/)[1]}}catch(d){}}r
eturn"0,0,0"}})(jQuery);function flash_ready(){$.jStore.delegate.trigger("flash-ready")}
+/*
+ * jStore Google Gears Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(b){var a=b.jStore.Availability.gears=function(){return !!(window.google&&window.google.gears)};this.jStoreGears=StorageEngine.extend({init:function(d,c){this._super(d,c);this.type="Google Gears";this.includes.push("http://code.google.com/apis/gears/gears_init.js");this.autoload=true},connect:function(){var c=this.db=google.gears.factory.create("beta.database");c.open("jstore-"+this.project);c.execute("CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)");this.updateCache()},updateCache:function(){var c=this.db.execute("SELECT k,v FROM jstore");while(c.isValidRow()){this.data[c.field(0)]=c.field(1);c.next()}c.close();this.delegate.trigger("engine-ready")},isAvailable:a,set:function(d,e){var c=this.db;c.execute("BEGIN");c.execute("INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)",[d,e]);c.execute("COMMIT");return this._super(d,e)},rem:function(d){var c=this.db;c.execute("BEGIN");c.execute("DELETE FROM jstore WHERE k = ?",[d]);c.ex
ecute("COMMIT");return this._super(d)}});b.jStore.Engines.gears=jStoreGears;b.jStore.EngineOrder[3]="gears"})(jQuery);
+/*
+ * jStore HTML5 Specification Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(b){var a=b.jStore.Availability.html5=function(){return !!window.openDatabase};this.jStoreHtml5=StorageEngine.extend({init:function(d,c){this._super(d,c);this.type="HTML5";this.limit=1024*200},connect:function(){var c=this.db=openDatabase("jstore-"+this.project,"1.0",this.project,this.limit);if(!c){throw"JSTORE_ENGINE_HTML5_NODB"}c.transaction(function(d){d.executeSql("CREATE TABLE IF NOT EXISTS jstore (k TEXT UNIQUE NOT NULL PRIMARY KEY, v TEXT NOT NULL)")});this.updateCache()},updateCache:function(){var c=this;this.db.transaction(function(d){d.executeSql("SELECT k,v FROM jstore",[],function(f,e){var h=e.rows,g=0,j;for(;g<h.length;++g){j=h.item(g);c.data[j.k]=j.v}c.delegate.trigger("engine-ready")})})},isAvailable:a,set:function(c,d){this.db.transaction(function(e){e.executeSql("INSERT OR REPLACE INTO jstore(k, v) VALUES (?, ?)",[c,d])});return this._super(c,d)},rem:function(c){this.db.transaction(function(d){d.executeSql("DELETE FROM jstore WHERE k = ?",[c])});ret
urn this._super(c)}});b.jStore.Engines.html5=jStoreHtml5;b.jStore.EngineOrder[0]="html5"})(jQuery);
+/**
+ * jStore IE Storage Engine
+ * Copyright (c) 2009 Eric Garside (http://eric.garside.name)
+ */
+(function(b){var a=b.jStore.Availability.ie=function(){return !!window.ActiveXObject};this.jStoreIE=StorageEngine.extend({init:function(d,c){this._super(d,c);this.type="IE";this.limit=64*1024},connect:function(){this.db=b('<div style="display:none;behavior:url(\'#default#userData\')" id="jstore-'+this.project+'"></div>').appendTo(document.body).get(0);this.delegate.trigger("engine-ready")},isAvailable:a,get:function(c){this.db.load(this.project);return this.db.getAttribute(c)},set:function(c,d){this.db.setAttribute(c,d);this.db.save(this.project);return d},rem:function(c){var d=this.get(c);this.db.removeAttribute(c);this.db.save(this.project);return d}});b.jStore.Engines.ie=jStoreIE;b.jStore.EngineOrder[4]="ie"})(jQuery);
\ No newline at end of file
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 static/scripts/packed/json2.js
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/static/scripts/packed/json2.js Wed Aug 19 17:51:46 2009 -0400
@@ -0,0 +1,1 @@
+var JSON=JSON||{};(function(){function f(n){return n<10?"0"+n:n}if(typeof Date.prototype.toJSON!=="function"){Date.prototype.toJSON=function(key){return isFinite(this.valueOf())?this.getUTCFullYear()+"-"+f(this.getUTCMonth()+1)+"-"+f(this.getUTCDate())+"T"+f(this.getUTCHours())+":"+f(this.getUTCMinutes())+":"+f(this.getUTCSeconds())+"Z":null};String.prototype.toJSON=Number.prototype.toJSON=Boolean.prototype.toJSON=function(key){return this.valueOf()}}var cx=/[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,escapable=/[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g,gap,indent,meta={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},rep;function quote(string){escapable.lastIndex=0;return escapable.test(string)?'"'+string.replace(escapable,function(a){var c=meta[a];return typeof c==="string"?c:"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})+'"':'"'+string+'"'}function str(key,holder){var i,k,v,length,mind=gap,partial,value=holder[key];if(value&&typeof value==="object"&&typeof value.toJSON==="function"){value=value.toJSON(key)}if(typeof rep==="function"){value=rep.call(holder,key,value)}switch(typeof value){case"string":return quote(value);case"number":return isFinite(value)?String(value):"null";case"boolean":case"null":return String(value);case"object":if(!value){return"null"}gap+=indent;partial=[];if(Object.prototype.toString.apply(value)==="[object Array]"){length=value.length;for(i=0;i<length;i+=1){partial[i]=str(i,value)||"null"}v=partial.length===0?"[]":gap?"[\n"+gap+partial.join(",\n"+gap)+"\n"+mind+"]":"["+partial.join(",")+"]";gap=mind;return v}if(rep&&typeof rep==="object"){length=rep.length;for(i=0;i<length;i+=1){k=rep[i];if(typeof k==="string"){v=str(k,value);if(v){partial.push(quote(k)+(gap?": ":":")+v)}}}}else{for(k in value){if(Object.hasOwnProperty.call(value,k)){v=str(k,value);if(v){partial.push(quote(k)+(gap?": ":":")+v)}}}}v=partial.length===0?"{}":gap?"{\n"+gap+partial.join(",\n"+gap)+"\n"+mind+"}":"{"+partial.join(",")+"}";gap=mind;return v}}if(typeof JSON.stringify!=="function"){JSON.stringify=function(value,replacer,space){var i;gap="";indent="";if(typeof space==="number"){for(i=0;i<space;i+=1){indent+=" "}}else{if(typeof space==="string"){indent=space}}rep=replacer;if(replacer&&typeof replacer!=="function"&&(typeof replacer!=="object"||typeof replacer.length!=="number")){throw new Error("JSON.stringify")}return str("",{"":value})}}if(typeof JSON.parse!=="function"){JSON.parse=function(text,reviver){var j;function walk(holder,key){var k,v,value=holder[key];if(value&&typeof value==="object"){for(k in value){if(Object.hasOwnProperty.call(value,k)){v=walk(value,k);if(v!==undefined){value[k]=v}else{delete value[k]}}}}return reviver.call(holder,key,value)}cx.lastIndex=0;if(cx.test(text)){text=text.replace(cx,function(a){return"\\u"+("0000"+a.charCodeAt(0).toString(16)).slice(-4)})}if(/^[\],:{}\s]*$/.test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,"@").replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,"]").replace(/(?:^|:|,)(?:\s*\[)+/g,""))){j=eval("("+text+")");return typeof reviver==="function"?walk({"":j},""):j}throw new SyntaxError("JSON.parse")}}}());
\ No newline at end of file
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 templates/root/history.mako
--- a/templates/root/history.mako Wed Aug 19 11:08:58 2009 -0400
+++ b/templates/root/history.mako Wed Aug 19 17:51:46 2009 -0400
@@ -15,219 +15,242 @@
<meta http-equiv="Pragma" content="no-cache">
${h.css( "base", "history" )}
-${h.js( "jquery", "jquery.cookie", "cookie_set" )}
-
+${h.js( "jquery", "json2", "jquery.jstore-all" )}
+
<script type="text/javascript">
- $( document ).ready( function() {
- initShowHide();
- setupHistoryItem( $("div.historyItemWrapper") );
- // Collapse all
- $("#top-links").append( "| " ).append( $("<a href='#'>${_('collapse all')}</a>").click( function() {
- $( "div.historyItemBody:visible" ).each( function() {
- if ( $.browser.mozilla )
- {
- $(this).find( "pre.peek" ).css( "overflow", "hidden" );
+$(function() {
+ // Load jStore for local storage
+ $.extend(jQuery.jStore.defaults, { project: 'galaxy', flash: '/static/jStore.Flash.html' })
+ $.jStore.load(); // Auto-select best storage
+
+ $.jStore.ready(function(engine) {
+ engine.ready(function() {
+ // Init stuff that requires the local storage to be running
+ initShowHide();
+ setupHistoryItem( $("div.historyItemWrapper") );
+ });
+ });
+
+ // Generate 'collapse all' link
+ $("#top-links").append( "| " ).append( $("<a href='#'>${_('collapse all')}</a>").click( function() {
+ $( "div.historyItemBody:visible" ).each( function() {
+ if ( $.browser.mozilla ) {
+ $(this).find( "pre.peek" ).css( "overflow", "hidden" );
+ }
+ $(this).slideUp( "fast" );
+ });
+ $.jStore.remove("history_expand_state");
+ }));
+
+ $("#history-rename").click( function() {
+ var old_name = $("#history-name").text()
+ var t = $("<input type='text' value='" + old_name + "'></input>" );
+ t.blur( function() {
+ $(this).remove();
+ $("#history-name").show();
+ });
+ t.keyup( function( e ) {
+ if ( e.keyCode == 27 ) {
+ // Escape key
+ $(this).trigger( "blur" );
+ } else if ( e.keyCode == 13 ) {
+ // Enter key
+ new_value = this.value;
+ $(this).trigger( "blur" );
+ $.ajax({
+ url: "${h.url_for( controller='history', action='rename_async', id=history.id )}",
+ data: { "_": true, new_name: new_value },
+ error: function() { alert( "Rename failed" ) },
+ success: function() {
+ $("#history-name").text( new_value );
+ }
+ });
+ }
+ });
+ $("#history-name").hide();
+ $("#history-name-area").append( t );
+ t.focus();
+ return false;
+ });
+ // Updater
+ updater({
+ %for i, data in enumerate( reversed( datasets ) ):
+ %if data.visible and data.state not in [ "deleted", "empty", "error", "ok" ]:
+ %if i > 0:
+ ,
+ %endif
+ "${data.id}": "${data.state}"
+ %endif
+ %endfor
+ });
+});
+// Functionized so AJAX'd datasets can call them
+// Get shown/hidden state from cookie
+function initShowHide() {
+
+ // Load saved state and show as neccesary
+ try {
+ var stored = $.jStore.store("history_expand_state");
+ if (stored) {
+ var st = JSON.parse(stored);
+ for (var id in st) {
+ $("#" + id + " div.historyItemBody" ).show();
+ }
+ }
+ } catch(err) {
+ // Something was wrong with values in storage, so clear storage
+ $.jStore.remove("history_expand_state");
+ }
+
+ // If Mozilla, hide scrollbars in hidden items since they cause animation bugs
+ if ( $.browser.mozilla ) {
+ $( "div.historyItemBody" ).each( function() {
+ if ( ! $(this).is( ":visible" ) ) $(this).find( "pre.peek" ).css( "overflow", "hidden" );
+ })
+ }
+}
+// Add show/hide link and delete link to a history item
+function setupHistoryItem( query ) {
+ query.each( function() {
+ var id = this.id;
+ var body = $(this).children( "div.historyItemBody" );
+ var peek = body.find( "pre.peek" )
+ $(this).children( ".historyItemTitleBar" ).find( ".historyItemTitle" ).wrap( "<a href='#'></a>" ).click( function() {
+ if ( body.is(":visible") ) {
+ // Hiding stuff here
+ if ( $.browser.mozilla ) { peek.css( "overflow", "hidden" ) }
+ body.slideUp( "fast" );
+
+ // Save setting
+ var stored = $.jStore.store("history_expand_state")
+ var prefs = stored ? JSON.parse(stored) : null
+ if (prefs) {
+ delete prefs[id];
+ $.jStore.store("history_expand_state", JSON.stringify(prefs));
}
- $(this).slideUp( "fast" );
- })
- var state = new CookieSet( "galaxy.history.expand_state" );
- state.removeAll().save();
- return false;
- }));
- $("#history-rename").click( function() {
- var old_name = $("#history-name").text()
- var t = $("<input type='text' value='" + old_name + "'></input>" );
- t.blur( function() {
- $(this).remove();
- $("#history-name").show();
- });
- t.keyup( function( e ) {
- if ( e.keyCode == 27 ) {
- // Escape key
- $(this).trigger( "blur" );
- } else if ( e.keyCode == 13 ) {
- // Enter key
- new_value = this.value;
- $(this).trigger( "blur" );
- $.ajax({
- url: "${h.url_for( controller='history', action='rename_async', id=history.id )}",
- data: { "_": true, new_name: new_value },
- error: function() { alert( "Rename failed" ) },
- success: function() {
- $("#history-name").text( new_value );
- }
- });
- }
- });
- $("#history-name").hide();
- $("#history-name-area").append( t );
- t.focus();
- return false;
- });
- // Updater
- updater({
- %for i, data in enumerate( reversed( datasets ) ):
- %if data.visible and data.state not in [ "deleted", "empty", "error", "ok" ]:
- %if i > 0:
- ,
- %endif
- "${data.id}": "${data.state}"
- %endif
- %endfor
+ }
+ else {
+ // Showing stuff here
+ body.slideDown( "fast", function() {
+ if ( $.browser.mozilla ) { peek.css( "overflow", "auto" ); }
+ });
+
+ // Save setting
+ var stored = $.jStore.store("history_expand_state")
+ var prefs = stored ? JSON.parse(stored) : new Object;
+ prefs[id] = true;
+ $.jStore.store("history_expand_state", JSON.stringify(prefs));
+ }
+ return false;
});
- })
- //' Functionized so AJAX'd datasets can call them
- // Get shown/hidden state from cookie
- function initShowHide() {
- // $( "div.historyItemBody" ).hide();
- // Load saved state and show as neccesary
- var state = new CookieSet( "galaxy.history.expand_state" );
- for ( id in state.store ) {
- if ( id ) {
- $( "#" + id + " div.historyItemBody" ).show();
- }
- }
- // If Mozilla, hide scrollbars in hidden items since they cause animation bugs
- if ( $.browser.mozilla ) {
- $( "div.historyItemBody" ).each( function() {
- if ( ! $(this).is( ":visible" ) ) $(this).find( "pre.peek" ).css( "overflow", "hidden" );
- })
- }
- delete state;
- }
- // Add show/hide link and delete link to a history item
- function setupHistoryItem( query ) {
- query.each( function() {
- var id = this.id;
- var body = $(this).children( "div.historyItemBody" );
- var peek = body.find( "pre.peek" )
- $(this).children( ".historyItemTitleBar" ).find( ".historyItemTitle" ).wrap( "<a href='#'></a>" ).click( function() {
- if ( body.is(":visible") ) {
- if ( $.browser.mozilla ) { peek.css( "overflow", "hidden" ) }
- body.slideUp( "fast" );
- ## other instances of this could be editing the cookie, refetch
- var state = new CookieSet( "galaxy.history.expand_state" );
- state.remove( id ); state.save();
- delete state;
- }
- else {
- body.slideDown( "fast", function() {
- if ( $.browser.mozilla ) { peek.css( "overflow", "auto" ); }
- });
- var state = new CookieSet( "galaxy.history.expand_state" );
- state.add( id ); state.save();
- delete state;
- }
+ // Delete link
+ $(this).find( "div.historyItemButtons > .delete" ).each( function() {
+ var data_id = this.id.split( "-" )[1];
+ $(this).click( function() {
+ $( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
+ $.ajax({
+ url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { alert( "Delete failed" ) },
+ success: function() {
+ %if show_deleted:
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
+ %else:
+ $( "#historyItem-" + data_id ).fadeOut( "fast", function() {
+ $( "#historyItemContainer-" + data_id ).remove();
+ if ( $( "div.historyItemContainer" ).length < 1 ) {
+ $( "#emptyHistoryMessage" ).show();
+ }
+ });
+ %endif
+ }
+ });
return false;
});
- // Delete link
- $(this).find( "div.historyItemButtons > .delete" ).each( function() {
- var data_id = this.id.split( "-" )[1];
- $(this).click( function() {
- $( '#historyItem-' + data_id + "> div.historyItemTitleBar" ).addClass( "spinner" );
- $.ajax({
- url: "${h.url_for( action='delete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Delete failed" ) },
- success: function() {
- %if show_deleted:
- var to_update = {};
- to_update[data_id] = "none";
- updater( to_update );
- %else:
- $( "#historyItem-" + data_id ).fadeOut( "fast", function() {
- $( "#historyItemContainer-" + data_id ).remove();
- if ( $( "div.historyItemContainer" ).length < 1 ) {
- $( "#emptyHistoryMessage" ).show();
- }
- });
- %endif
- }
- });
- return false;
+ });
+ // Undelete link
+ $(this).find( "a.historyItemUndelete" ).each( function() {
+ var data_id = this.id.split( "-" )[1];
+ $(this).click( function() {
+ $( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
+ $.ajax({
+ url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
+ error: function() { alert( "Undelete failed" ) },
+ success: function() {
+ var to_update = {};
+ to_update[data_id] = "none";
+ updater( to_update );
+ }
});
- });
- // Undelete link
- $(this).find( "a.historyItemUndelete" ).each( function() {
- var data_id = this.id.split( "-" )[1];
- $(this).click( function() {
- $( '#historyItem-' + data_id + " > div.historyItemTitleBar" ).addClass( "spinner" );
- $.ajax({
- url: "${h.url_for( controller='dataset', action='undelete_async', id='XXX' )}".replace( 'XXX', data_id ),
- error: function() { alert( "Undelete failed" ) },
- success: function() {
- var to_update = {};
- to_update[data_id] = "none";
- updater( to_update );
- }
- });
- return false;
- });
+ return false;
});
});
- };
- // Looks for changes in dataset state using an async request. Keeps
- // calling itself (via setTimeout) until all datasets are in a terminal
- // state.
- var updater = function ( tracked_datasets ) {
- // Check if there are any items left to track
- var empty = true;
- for ( i in tracked_datasets ) {
- empty = false;
- break;
+ });
+};
+// Looks for changes in dataset state using an async request. Keeps
+// calling itself (via setTimeout) until all datasets are in a terminal
+// state.
+var updater = function ( tracked_datasets ) {
+ // Check if there are any items left to track
+ var empty = true;
+ for ( i in tracked_datasets ) {
+ empty = false;
+ break;
+ }
+ if ( ! empty ) {
+ // console.log( "Updater running in 3 seconds" );
+ setTimeout( function() { updater_callback( tracked_datasets ) }, 3000 );
+ } else {
+ // console.log( "Updater finished" );
+ }
+};
+var updater_callback = function ( tracked_datasets ) {
+ // Build request data
+ var ids = []
+ var states = []
+ var force_history_refresh = false
+ $.each( tracked_datasets, function ( id, state ) {
+ ids.push( id );
+ states.push( state );
+ });
+ // Make ajax call
+ $.ajax( {
+ type: "POST",
+ url: "${h.url_for( controller='root', action='history_item_updates' )}",
+ dataType: "json",
+ data: { ids: ids.join( "," ), states: states.join( "," ) },
+ success : function ( data ) {
+ $.each( data, function( id, val ) {
+ // Replace HTML
+ var container = $("#historyItemContainer-" + id);
+ container.html( val.html );
+ setupHistoryItem( container.children( ".historyItemWrapper" ) );
+ initShowHide();
+ // If new state was terminal, stop tracking
+ if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
+ if ( val.force_history_refresh ){
+ force_history_refresh = true;
+ }
+ delete tracked_datasets[ parseInt(id) ];
+ } else {
+ tracked_datasets[ parseInt(id) ] = val.state;
+ }
+ });
+ if ( force_history_refresh ) {
+ parent.frames.galaxy_history.location.reload();
+ }
+ else {
+ // Keep going (if there are still any items to track)
+ updater( tracked_datasets );
+ }
+ },
+ error: function() {
+ // Just retry, like the old method, should try to be smarter
+ updater( tracked_datasets );
}
- if ( ! empty ) {
- // console.log( "Updater running in 3 seconds" );
- setTimeout( function() { updater_callback( tracked_datasets ) }, 3000 );
- } else {
- // console.log( "Updater finished" );
- }
- };
- var updater_callback = function ( tracked_datasets ) {
- // Build request data
- var ids = []
- var states = []
- var force_history_refresh = false
- $.each( tracked_datasets, function ( id, state ) {
- ids.push( id );
- states.push( state );
- });
- // Make ajax call
- $.ajax( {
- type: "POST",
- url: "${h.url_for( controller='root', action='history_item_updates' )}",
- dataType: "json",
- data: { ids: ids.join( "," ), states: states.join( "," ) },
- success : function ( data ) {
- $.each( data, function( id, val ) {
- // Replace HTML
- var container = $("#historyItemContainer-" + id);
- container.html( val.html );
- setupHistoryItem( container.children( ".historyItemWrapper" ) );
- initShowHide();
- // If new state was terminal, stop tracking
- if (( val.state == "ok") || ( val.state == "error") || ( val.state == "empty") || ( val.state == "deleted" ) || ( val.state == "discarded" )) {
- if ( val.force_history_refresh ){
- force_history_refresh = true;
- }
- delete tracked_datasets[ parseInt(id) ];
- } else {
- tracked_datasets[ parseInt(id) ] = val.state;
- }
- });
- if ( force_history_refresh ) {
- parent.frames.galaxy_history.location.reload();
- }
- else {
- // Keep going (if there are still any items to track)
- updater( tracked_datasets );
- }
- },
- error: function() {
- // Just retry, like the old method, should try to be smarter
- updater( tracked_datasets );
- }
- });
- };
+ });
+};
</script>
<style>
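
A note on the updater contract the template code above relies on: the client POSTs comma-separated dataset ids and their last-seen states to the history_item_updates action, and expects back a JSON object keyed by dataset id whose values carry html, state, and force_history_refresh. A minimal server-side sketch under those assumptions (lookup_state and render_html are hypothetical stand-ins for the real database and template machinery, and returning only changed items is just one reasonable choice):

import json

TERMINAL_STATES = set( [ "ok", "error", "empty", "deleted", "discarded" ] )

def build_history_item_updates( ids, states, lookup_state, render_html ):
    # ids and states arrive as comma-separated strings, as sent by updater_callback()
    rval = {}
    for id, last_state in zip( ids.split( "," ), states.split( "," ) ):
        current = lookup_state( id )
        if current == last_state:
            continue  # unchanged, no need to re-render this item
        rval[ id ] = dict( state = current,
                           html = render_html( id ),
                           force_history_refresh = False )
    return json.dumps( rval )

The client keeps polling on a 3 second timer until every tracked dataset reaches one of TERMINAL_STATES.
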
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Wed Aug 19 11:08:58 2009 -0400
+++ b/templates/workflow/editor.mako Wed Aug 19 17:51:46 2009 -0400
@@ -31,6 +31,7 @@
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.hover.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.form.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.json.js')}"> </script>
+ <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.jstore-all.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.base.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.workflow_editor.canvas.js')}"> </script>
@@ -47,6 +48,7 @@
canvas_manager = null;
// jQuery onReady
$( function() {
+
if ( window.lt_ie_7 ) {
show_modal(
"Browser not supported",
@@ -54,8 +56,14 @@
);
return;
}
+
+ // Load jStore for local storage
+ $.extend(jQuery.jStore.defaults, { project: 'galaxy', flash: '/static/jStore.Flash.html' })
+ $.jStore.load(); // Auto-select best storage
+
// Canvas overview management
canvas_manager = new CanvasManager( $("#canvas-viewport"), $("#overview") );
+
// Initialize workflow state
reset();
// Load the datatype info
@@ -121,17 +129,46 @@
canvas_manager.draw_overview();
});
- /* Lets the viewport be toggled visible and invisible, adjusting the arrows accordingly */
+ $.jStore.ready(function(engine) {
+ engine.ready(function() {
+ // On load, set the size to the pref stored in local storage if it exists
+ overview_size = $.jStore.store("overview-size");
+ if (overview_size) {
+ $("#overview-border").css( {
+ width: overview_size,
+ height: overview_size
+ });
+ }
+
+ // Show viewport on load unless pref says it's off
+ $.jStore.store("overview-off") ? hide_overview() : show_overview()
+ });
+ });
+
+ // Stores the size of the overview into local storage when it's resized
+ $("#overview-border").bind( "dragend", function( e ) {
+ var op = $(this).offsetParent();
+ var opo = op.offset();
+ var new_size = Math.max( op.width() - ( e.offsetX - opo.left ),
+ op.height() - ( e.offsetY - opo.top ) );
+ $.jStore.store("overview-size", new_size + "px");
+ });
+
+ function show_overview() {
+ $.jStore.remove("overview-off");
+ $("#overview-border").css("right", "0px");
+ $("#close-viewport").css("background-position", "0px 0px");
+ }
+
+ function hide_overview() {
+ $.jStore.store("overview-off", true);
+ $("#overview-border").css("right", "20000px");
+ $("#close-viewport").css("background-position", "12px 0px");
+ }
+
+ // Lets the overview be toggled visible and invisible, adjusting the arrows accordingly
$("#close-viewport").click( function() {
- if ( $("#overview-border").css("right") == "0px" ) {
- $("#overview-border").css("right", "20000px");
- $("#close-viewport").css("background-position", "12px 0px");
-
- } else {
- $("#overview-border").css("right", "0px");
- $("#close-viewport").css("background-position", "0px 0px");
- }
-
+ $("#overview-border").css("right") == "0px" ? hide_overview() : show_overview();
});
// Unload handler
@@ -245,10 +282,6 @@
beforeSubmit: function( data ) {
data.push( { name: 'tool_state', value: node.tool_state } );
data.push( { name: '_', value: "true" } );
- $("#tool-form-save-button").each( function() {
- this.value = "Saving...";
- this.disabled = true;
- });
}
}).each( function() {
form = this;
@@ -275,6 +308,7 @@
var close_editor = function() {
<% next_url = h.url_for( controller='workflow', action='index' ) %>
+ workflow.check_changes_in_active_form();
if ( workflow && workflow.has_changes ) {
do_close = function() {
window.onbeforeunload = undefined;
@@ -297,39 +331,46 @@
var save_current_workflow = function ( success_callback ) {
show_modal( "Saving workflow", "progress" );
- $.ajax( {
- url: "${h.url_for( action='save_workflow' )}",
- type: "POST",
- data: {
- id: "${trans.security.encode_id( workflow_id )}",
- workflow_data: $.toJSON( workflow.to_simple() ),
- "_": "true"
- },
- dataType: 'json',
- success: function( data ) {
- var body = $("<div></div>").text( data.message );
- if ( data.errors ) {
- body.addClass( "warningmark" )
- var errlist = $( "<ul/>" );
- $.each( data.errors, function( i, v ) {
- $("<li></li>").text( v ).appendTo( errlist );
- });
- body.append( errlist );
- } else {
- body.addClass( "donemark" );
+ workflow.check_changes_in_active_form();
+ // We bind to ajaxStop because of auto-saving, since the form submission ajax
+ // call needs to be completed so that the new data is saved
+ $(document).bind('ajaxStop.save_workflow', function() {
+ $(document).unbind('ajaxStop.save_workflow');
+ $.ajax( {
+ url: "${h.url_for( action='save_workflow' )}",
+ type: "POST",
+ data: {
+ id: "${trans.security.encode_id( workflow_id )}",
+ workflow_data: function() { return $.toJSON( workflow.to_simple() ) },
+ "_": "true"
+ },
+ dataType: 'json',
+ success: function( data ) {
+ var body = $("<div></div>").text( data.message );
+ if ( data.errors ) {
+ body.addClass( "warningmark" )
+ var errlist = $( "<ul/>" );
+ $.each( data.errors, function( i, v ) {
+ $("<li></li>").text( v ).appendTo( errlist );
+ });
+ body.append( errlist );
+ } else {
+ body.addClass( "donemark" );
+ }
+ workflow.name = data.name;
+ workflow.has_changes = false;
+ workflow.stored = true;
+ if ( success_callback ) {
+ success_callback();
+ }
+ if ( data.errors ) {
+ show_modal( "Saving workflow", body, { "Ok" : hide_modal } );
+ } else {
+ hide_modal();
+ }
}
- workflow.name = data.name;
- workflow.has_changes = false;
- workflow.stored = true;
- if ( success_callback ) {
- success_callback();
- }
- if ( data.errors ) {
- show_modal( "Saving workflow", body, { "Ok" : hide_modal } );
- } else {
- hide_modal();
- }
- }
+ });
+ $(document).unbind('ajaxStop.save_workflow'); // IE7 needs it here
});
}
@@ -642,7 +683,7 @@
<div id="canvas-viewport" style="width: 100%; height: 100%; position: absolute; overflow: hidden; background: #EEEEEE; background: white url(${h.url_for('/static/images/light_gray_grid.gif')}) repeat;">
<div id="canvas-container" style="position: absolute; width: 100%; height: 100%;"></div>
</div>
- <div id="overview-border" style="position: absolute; width: 150px; height: 150px; right: 0px; bottom: 0px; border-top: solid gray 1px; border-left: solid grey 1px; padding: 7px 0 0 7px; background: #EEEEEE no-repeat url(${h.url_for('/static/images/resizable.png')}); z-index: 20000; overflow: hidden; max-width: 300px; max-height: 300px; min-width: 50px; min-height: 50px">
+ <div id="overview-border" style="position: absolute; width: 150px; height: 150px; right: 20000px; bottom: 0px; border-top: solid gray 1px; border-left: solid grey 1px; padding: 7px 0 0 7px; background: #EEEEEE no-repeat url(${h.url_for('/static/images/resizable.png')}); z-index: 20000; overflow: hidden; max-width: 300px; max-height: 300px; min-width: 50px; min-height: 50px">
<div style="position: relative; overflow: hidden; width: 100%; height: 100%; border-top: solid gray 1px; border-left: solid grey 1px;">
<div id="overview" style="position: absolute;">
<canvas width="0" height="0" style="background: white; width: 100%; height: 100%;" id="overview-canvas"></canvas>
@@ -650,7 +691,7 @@
</div>
</div>
</div>
- <div id="close-viewport" style="border-left: 1px solid #999; border-top: 1px solid #999; background: #ddd url(${h.url_for('/static/images/overview_arrows.png')}); position: absolute; right: 0px; bottom: 0px; width: 12px; height: 12px; z-index: 25000;"></div>
+ <div id="close-viewport" style="border-left: 1px solid #999; border-top: 1px solid #999; background: #ddd url(${h.url_for('/static/images/overview_arrows.png')}) 12px 0px; position: absolute; right: 0px; bottom: 0px; width: 12px; height: 12px; z-index: 25000;"></div>
</div>
</%def>
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 templates/workflow/editor_generic_form.mako
--- a/templates/workflow/editor_generic_form.mako Wed Aug 19 11:08:58 2009 -0400
+++ b/templates/workflow/editor_generic_form.mako Wed Aug 19 17:51:46 2009 -0400
@@ -15,7 +15,7 @@
${input.label}:
</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <input type="${input.type}" name="${input.name}" value="${input.value}" size="40">
+ <input type="${input.type}" name="${input.name | h}" value="${input.value | h}" size="30">
</div>
%if input.error:
<div style="float: left; color: red; font-weight: bold; padding-top: 1px; padding-bottom: 3px;">
@@ -33,11 +33,14 @@
</div>
%endfor
- <div class="form-row"><input type="submit" value="${form.submit_text}"></div>
%else:
<div class="form-row"><i>No options</i></div>
%endif
</table>
</form>
</div>
-</div>
\ No newline at end of file
+</div>
+
+<script type="text/javascript">
+ workflow.enable_auto_save();
+</script>
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 templates/workflow/editor_tool_form.mako
--- a/templates/workflow/editor_tool_form.mako Wed Aug 19 11:08:58 2009 -0400
+++ b/templates/workflow/editor_tool_form.mako Wed Aug 19 17:51:46 2009 -0400
@@ -93,7 +93,7 @@
<div style="clear: both"></div>
</div>
</%def>
-
+
<div class="toolForm">
<div class="toolFormTitle">Tool: ${tool.name}</div>
<div class="toolFormBody">
@@ -105,9 +105,11 @@
%endif
${do_inputs( inputs, values, errors, "" )}
%endfor
- <div class="form-row">
- <input type="submit" id="tool-form-save-button" value="Save"></input>
- </div>
</form>
</div>
-</div>
\ No newline at end of file
+</div>
+
+<script type="text/javascript">
+ workflow.enable_auto_save();
+</script>
+
diff -r 7d48dc7e60b4 -r 3353b15d0fb5 tools/new_operations/cluster.xml
--- a/tools/new_operations/cluster.xml Wed Aug 19 11:08:58 2009 -0400
+++ b/tools/new_operations/cluster.xml Wed Aug 19 17:51:46 2009 -0400
@@ -74,7 +74,7 @@
**Syntax**
- **Maximum distance** is greatest distance in base pairs allowed between intervals that will be considered "clustered". **Negative** values for distance are allowed, and are useful for clustering intervals that overlap.
-- **Minimum intervals per cluster** allow a threshold to be set on the minimum number of intervals to be considered a cluster. Any area with less than this minimum will not be included in the ouput.
+- **Minimum intervals per cluster** allow a threshold to be set on the minimum number of intervals to be considered a cluster. Any area with less than this minimum will not be included in the output.
- **Merge clusters into single intervals** outputs intervals that span the entire cluster.
- **Find cluster intervals; preserve comments and order** filters out non-cluster intervals while maintaining the original ordering and comments in the file.
- **Find cluster intervals; output grouped by clusters** filters out non-cluster intervals, but outputs the cluster intervals so that they are grouped together. Comments and original ordering in the file are lost.
details: http://www.bx.psu.edu/hg/galaxy/rev/f3d25adcace6
changeset: 2584:f3d25adcace6
user: rc
date: Wed Aug 12 10:31:33 2009 -0400
description:
Improved the form rendering method
- now supports AddressField
4 file(s) affected in this change:
lib/galaxy/web/controllers/admin.py
lib/galaxy/web/controllers/forms.py
lib/galaxy/web/controllers/library.py
lib/galaxy/web/controllers/requests.py
diffs (225 lines):
diff -r 2630316ff75e -r f3d25adcace6 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Tue Aug 11 16:56:47 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Wed Aug 12 10:31:33 2009 -0400
@@ -759,11 +759,7 @@
# See if we have any field contents
info = info_association.info
if info:
- field_contents = {}
- for index, value in enumerate( info.content ):
- key = 'field_%i' % index
- field_contents[ key ] = value
- widgets = get_form_widgets( trans, template, field_contents )
+ widgets = get_form_widgets( trans, template, info.content )
else:
widgets = get_form_widgets( trans, template )
else:
@@ -960,11 +956,7 @@
# See if we have any field contents
info = info_association.info
if info:
- field_contents = {}
- for index, value in enumerate( info.content ):
- key = 'field_%i' % index
- field_contents[ key ] = value
- widgets = get_form_widgets( trans, template, field_contents )
+ widgets = get_form_widgets( trans, template, info.content )
else:
widgets = get_form_widgets( trans, template )
else:
@@ -1216,11 +1208,7 @@
info = info_association.info
log.debug("####In library_dataset_dataset_association, info.content: %s" % str( info.content))
if info:
- field_contents = {}
- for index, value in enumerate( info.content ):
- key = 'field_%i' % index
- field_contents[ key ] = value
- widgets = get_form_widgets( trans, template, field_contents )
+ widgets = get_form_widgets( trans, template, info.content )
else:
widgets = get_form_widgets( trans, template )
else:
diff -r 2630316ff75e -r f3d25adcace6 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Tue Aug 11 16:56:47 2009 -0400
+++ b/lib/galaxy/web/controllers/forms.py Wed Aug 12 10:31:33 2009 -0400
@@ -419,22 +419,34 @@
else:
fdc_list = trans.app.model.FormDefinitionCurrent.query().all()
return [ fdc.latest_form for fdc in fdc_list ]
-def get_form_widgets( trans, form, contents={} ):
+
+
+def get_form_widgets( trans, form, contents=[], **kwd ):
'''
Return the list of widgets that comprise a form definition,
including field contents if any.
'''
+ params = util.Params( kwd )
widgets = []
for index, field in enumerate( form.fields ):
field_name = 'field_%i' % index
- if field_name in contents:
- value = contents[ field_name ]
- elif field[ 'type' ] == 'CheckboxField':
- # Since we do not have contents, set checkbox value to False
- value = False
+ # determine the value of the field
+ if field_name in kwd:
+ # the user had already filled out this field and the same form is re-rendered
+ # due to some reason like required fields have been left out.
+ value = util.restore_text( params.get( field_name, '' ) )
+ elif contents:
+ # this field has a saved value
+ value = str(contents[ index ])
else:
- # Set other field types to empty string
- value = ''
+ # if none of the above, then leave the field empty
+ if field[ 'type' ] == 'CheckboxField':
+ # Since we do not have contents, set checkbox value to False
+ value = False
+ else:
+ # Set other field types to empty string
+ value = ''
+ # create the field widget
field_widget = eval( field[ 'type' ] )( field_name )
if field[ 'type' ] == 'TextField':
field_widget.set_size( 40 )
@@ -442,6 +454,10 @@
elif field[ 'type' ] == 'TextArea':
field_widget.set_size( 3, 40 )
field_widget.value = value
+ elif field['type'] == 'AddressField':
+ field_widget.user = trans.user
+ field_widget.value = value
+ field_widget.params = params
elif field[ 'type' ] == 'SelectField':
for option in field[ 'selectlist' ]:
if option == value:
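
The net effect of the reworked get_form_widgets() is a three-level precedence for each field's value: a value the user just re-posted wins, then a previously saved value from contents, then a type-appropriate empty default. A standalone sketch of just that precedence (the function name and arguments are illustrative, not Galaxy's API):

def resolve_field_value( index, field_type, posted, saved ):
    # posted: dict of re-submitted request params; saved: list of stored
    # field contents, or an empty list when nothing has been saved yet
    field_name = 'field_%i' % index
    if field_name in posted:
        # the same form was re-rendered, e.g. a required field was left out
        return posted[ field_name ]
    if saved:
        # this field has a saved value
        return str( saved[ index ] )
    # no value anywhere: checkboxes default to False, everything else to ''
    if field_type == 'CheckboxField':
        return False
    return ''
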
diff -r 2630316ff75e -r f3d25adcace6 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Tue Aug 11 16:56:47 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Wed Aug 12 10:31:33 2009 -0400
@@ -137,11 +137,7 @@
# See if we have any field contents
info = library.info_association[0].info
if info:
- field_contents = {}
- for index, value in enumerate( info.content ):
- key = 'field_%i' % index
- field_contents[ key ] = value
- widgets = get_form_widgets( trans, template, field_contents )
+ widgets = get_form_widgets( trans, template, info.content )
else:
widgets = get_form_widgets( trans, template )
else:
@@ -475,11 +471,7 @@
# See if we have any field contents
info = info_association.info
if info:
- field_contents = {}
- for index, value in enumerate( info.content ):
- key = 'field_%i' % index
- field_contents[ key ] = value
- widgets = get_form_widgets( trans, template, field_contents )
+ widgets = get_form_widgets( trans, template, info.content )
else:
widgets = get_form_widgets( trans, template )
else:
@@ -996,11 +988,7 @@
# See if we have any field contents
info = info_association.info
if info:
- field_contents = {}
- for index, value in enumerate( info.content ):
- key = 'field_%i' % index
- field_contents[ key ] = value
- widgets = get_form_widgets( trans, template, field_contents )
+ widgets = get_form_widgets( trans, template, info.content )
else:
widgets = get_form_widgets( trans, template )
else:
diff -r 2630316ff75e -r f3d25adcace6 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Tue Aug 11 16:56:47 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Wed Aug 12 10:31:33 2009 -0400
@@ -8,6 +8,7 @@
from galaxy.web.form_builder import *
from datetime import datetime, timedelta
from cgi import escape, FieldStorage
+from galaxy.web.controllers.forms import get_form_widgets
log = logging.getLogger( __name__ )
@@ -464,9 +465,7 @@
libraries = self.get_authorized_libs(trans)
libui = self.__library_ui(libraries, **kwd)
widgets = widgets + libui
- widgets = self.__create_form(trans, request_type.request_form_id, widgets,
- form_values, **kwd)
- title = 'Add a new request of type: %s' % request_type.name
+ widgets = widgets + get_form_widgets(trans, request_type.request_form, contents=[], **kwd)
return trans.fill_template( '/requests/new_request.mako',
select_request_type=select_request_type,
request_type=request_type,
@@ -507,51 +506,6 @@
return [widget, new_lib]
else:
return [widget]
-
- def __create_form(self, trans, form_id, widgets=[], form_values=None, **kwd):
- # TODO: RC - replace this method by importing as follows:
- # from galaxy.web.controllers.forms import get_form_widgets
- params = util.Params( kwd )
- form = trans.app.model.FormDefinition.get(form_id)
- # form fields
- for index, field in enumerate(form.fields):
- # value of the field
- if field['type'] == 'CheckboxField':
- value = util.restore_text( params.get( 'field_%i' % index, False ) )
- else:
- value = util.restore_text( params.get( 'field_%i' % index, '' ) )
- if not value:
- if form_values:
- value = str(form_values.content[index])
- # create the field
- fw = eval(field['type'])('field_%i' % index)
- if field['type'] == 'TextField':
- fw.set_size(40)
- fw.value = value
- elif field['type'] == 'TextArea':
- fw.set_size(3, 40)
- fw.value = value
- elif field['type'] == 'AddressField':
- fw.user = trans.user
- fw.value = value
- fw.params = params
- elif field['type'] == 'SelectField':
- for option in field['selectlist']:
- if option == value:
- fw.add_option(option, option, selected=True)
- else:
- fw.add_option(option, option)
- elif field['type'] == 'CheckboxField':
- fw.checked = value
- # require/optional
- if field['required'] == 'required':
- req = 'Required'
- else:
- req = 'Optional'
- widgets.append(dict(label=field['label'],
- widget=fw,
- helptext=field['helptext']+' ('+req+')'))
- return widgets
def __validate(self, trans, request):
'''
Validates the request entered by the user
@@ -706,8 +660,7 @@
libraries = self.get_authorized_libs(trans)
libui = self.__library_ui(libraries, request, **kwd)
widgets = widgets + libui
- widgets = self.__create_form(trans, request.type.request_form_id, widgets,
- request.values, **kwd)
+ widgets = widgets + get_form_widgets(trans, request.type.request_form, request.values.content, **kwd)
return trans.fill_template( '/requests/edit_request.mako',
select_request_type=select_request_type,
request_type=request.type,
details: http://www.bx.psu.edu/hg/galaxy/rev/f6e0863862ef
changeset: 2583:f6e0863862ef
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Aug 20 10:49:54 2009 -0400
description:
Real Job(tm) upload support
12 file(s) affected in this change:
lib/galaxy/jobs/__init__.py
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/tools/parameters/grouping.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/tool_runner.py
lib/galaxy/web/framework/base.py
templates/base_panels.mako
test/base/twilltestcase.py
tools/data_source/upload.py
tools/data_source/upload.xml
diffs (1505 lines):
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 20 10:49:54 2009 -0400
@@ -6,6 +6,8 @@
from galaxy.datatypes.tabular import *
from galaxy.datatypes.interval import *
from galaxy.datatypes import metadata
+from galaxy.util.json import from_json_string
+from galaxy.util.expressions import ExpressionContext
import pkg_resources
pkg_resources.require( "PasteDeploy" )
@@ -18,6 +20,12 @@
# States for running a job. These are NOT the same as data states
JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_OK, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ok', 'ready', 'deleted', 'admin_deleted'
+
+# This file, if created in the job's working directory, will be used for
+# setting advanced metadata properties on the job and its associated outputs.
+# This interface is currently experimental, is only used by the upload tool,
+# and should eventually become API'd
+TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
class JobManager( object ):
"""
@@ -320,6 +328,7 @@
self.working_directory = \
os.path.join( self.app.config.job_working_directory, str( self.job_id ) )
self.output_paths = None
+ self.tool_provided_job_metadata = None
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job ) #wrapper holding the info required to restore and clean up from files used for setting metadata externally
def get_param_dict( self ):
@@ -422,6 +431,8 @@
dataset.blurb = 'tool error'
dataset.info = message
dataset.set_size()
+ if dataset.ext == 'auto':
+ dataset.extension = 'data'
dataset.flush()
job.state = model.Job.states.ERROR
job.command_line = self.command_line
@@ -486,16 +497,28 @@
except ( IOError, OSError ):
self.fail( "Job %s's output dataset(s) could not be read" % job.id )
return
+ job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
for dataset_assoc in job.output_datasets:
+ context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
+ if context.get( 'path', None ):
+ # The tool can set an alternate output path for the dataset.
+ try:
+ shutil.move( context['path'], dataset.file_name )
+ except ( IOError, OSError ):
+ if not context['stderr']:
+ context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = stdout + stderr
+ dataset.info = context['stdout'] + context['stderr']
dataset.set_size()
- if stderr:
+ if context['stderr']:
dataset.blurb = "error"
elif dataset.has_data():
+ # If the tool was expected to set the extension, attempt to retrieve it
+ if dataset.ext == 'auto':
+ dataset.extension = context.get( 'ext', 'data' )
#if a dataset was copied, it won't appear in our dictionary:
#either use the metadata from originating output dataset, or call set_meta on the copies
#it would be quicker to just copy the metadata from the originating output dataset,
@@ -510,18 +533,39 @@
#the metadata that was stored to disk for use via the external process,
#and the changes made by the user will be lost, without warning or notice
dataset.metadata.from_JSON_dict( self.external_output_metadata.get_output_filenames_by_dataset( dataset ).filename_out )
- if self.tool.is_multi_byte:
- dataset.set_multi_byte_peek()
- else:
- dataset.set_peek()
+ try:
+ assert context.get( 'line_count', None ) is not None
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek( line_count=context['line_count'] )
+ else:
+ dataset.set_peek( line_count=context['line_count'] )
+ except:
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek()
+ else:
+ dataset.set_peek()
+ try:
+ # set the name if provided by the tool
+ dataset.name = context['name']
+ except:
+ pass
else:
dataset.blurb = "empty"
+ if dataset.ext == 'auto':
+ dataset.extension = 'txt'
dataset.flush()
- if stderr:
+ if context['stderr']:
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
- dataset_assoc.dataset.dataset.flush()
+ # If any of the rest of the finish method below raises an
+ # exception, the fail method will run and set the datasets to
+ # ERROR. The user will never see that the datasets are in error if
+ # they were flushed as OK here, since upon doing so, the history
+ # panel stops checking for updates. So allow the
+ # mapping.context.current.flush() at the bottom of this method set
+ # the state instead.
+ #dataset_assoc.dataset.dataset.flush()
# Save stdout and stderr
if len( stdout ) > 32768:
@@ -591,7 +635,8 @@
return self.output_paths
class DatasetPath( object ):
- def __init__( self, real_path, false_path = None ):
+ def __init__( self, dataset_id, real_path, false_path = None ):
+ self.dataset_id = dataset_id
self.real_path = real_path
self.false_path = false_path
def __str__( self ):
@@ -605,10 +650,55 @@
self.output_paths = []
for name, data in [ ( da.name, da.dataset.dataset ) for da in job.output_datasets ]:
false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % data.id ) )
- self.output_paths.append( DatasetPath( data.file_name, false_path ) )
+ self.output_paths.append( DatasetPath( data.id, data.file_name, false_path ) )
else:
- self.output_paths = [ DatasetPath( da.dataset.file_name ) for da in job.output_datasets ]
+ self.output_paths = [ DatasetPath( da.dataset.dataset.id, da.dataset.file_name ) for da in job.output_datasets ]
return self.output_paths
+
+ def get_output_file_id( self, file ):
+ if self.output_paths is None:
+ self.get_output_fnames()
+ for dp in self.output_paths:
+ if self.app.config.outputs_to_working_directory and os.path.basename( dp.false_path ) == file:
+ return dp.dataset_id
+ elif os.path.basename( dp.real_path ) == file:
+ return dp.dataset_id
+ return None
+
+ def get_tool_provided_job_metadata( self ):
+ if self.tool_provided_job_metadata is not None:
+ return self.tool_provided_job_metadata
+
+ # Look for JSONified job metadata
+ self.tool_provided_job_metadata = []
+ meta_file = os.path.join( self.working_directory, TOOL_PROVIDED_JOB_METADATA_FILE )
+ if os.path.exists( meta_file ):
+ for line in open( meta_file, 'r' ):
+ try:
+ line = from_json_string( line )
+ assert 'type' in line
+ except:
+ log.exception( '(%s) Got JSON data from tool, but data is improperly formatted or no "type" key in data' % self.job_id )
+ log.debug( 'Offending data was: %s' % line )
+ continue
+ # Set the dataset id if it's a dataset entry and isn't set.
+ # This isn't insecure. We loop the job's output datasets in
+ # the finish method, so if a tool writes out metadata for a
+ # dataset id that it doesn't own, it'll just be ignored.
+ if line['type'] == 'dataset' and 'dataset_id' not in line:
+ try:
+ line['dataset_id'] = self.get_output_file_id( line['dataset'] )
+ except KeyError:
+ log.warning( '(%s) Tool provided job dataset-specific metadata without specifying a dataset' % self.job_id )
+ continue
+ self.tool_provided_job_metadata.append( line )
+ return self.tool_provided_job_metadata
+
+ def get_dataset_finish_context( self, job_context, dataset ):
+ for meta in self.get_tool_provided_job_metadata():
+ if meta['type'] == 'dataset' and meta['dataset_id'] == dataset.id:
+ return ExpressionContext( meta, job_context )
+ return job_context
def check_output_sizes( self ):
sizes = []
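
The galaxy.json interface introduced above is line-oriented: the tool appends one JSON object per line to TOOL_PROVIDED_JOB_METADATA_FILE in its working directory, every object must carry a 'type' key, and a 'dataset' entry may name its output file so get_output_file_id() can map it back to a dataset id. Entries can also override per-dataset values such as ext, name, line_count, and path, which finish() reads through the layered ExpressionContext. A sketch of the producing side (the keys match those read above; the values and the helper name are invented for illustration):

import os
from galaxy.util.json import to_json_string

def write_job_metadata( working_directory, entries ):
    # one JSON object per line; lines without a 'type' key are skipped
    # (with a logged warning) by get_tool_provided_job_metadata()
    out = open( os.path.join( working_directory, 'galaxy.json' ), 'a' )
    for entry in entries:
        assert 'type' in entry
        out.write( to_json_string( entry ) + '\n' )
    out.close()

write_job_metadata( '.', [ dict( type = 'dataset',
                                 dataset = 'galaxy_dataset_1.dat',
                                 ext = 'tabular',
                                 name = 'My uploaded file',
                                 line_count = 42 ) ] )
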
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Aug 20 10:49:54 2009 -0400
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil
+import logging, os, string, sys, tempfile, glob, shutil, types
import simplejson
import binascii
from UserDict import DictMixin
@@ -415,6 +415,7 @@
output.metadata_source = data_elem.get("metadata_source", "")
output.parent = data_elem.get("parent", None)
output.label = util.xml_text( data_elem, "label" )
+ output.count = int( data_elem.get("count", 1) )
output.filters = data_elem.findall( 'filter' )
self.outputs[ output.name ] = output
# Any extra generated config files for the tool
@@ -816,7 +817,11 @@
# If we've completed the last page we can execute the tool
elif state.page == self.last_page:
out_data = self.execute( trans, incoming=params )
- return 'tool_executed.mako', dict( out_data=out_data )
+ try:
+ assert type( out_data ) is types.DictType
+ return 'tool_executed.mako', dict( out_data=out_data )
+ except:
+ return 'message.mako', dict( message_type='error', message=out_data, refresh_frames=[] )
# Otherwise move on to the next page
else:
state.page += 1
@@ -824,15 +829,26 @@
self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
else:
- if filter( lambda x: isinstance( x, FieldStorage ) and x.file, state.inputs.values() ):
+ try:
+ self.find_fieldstorage( state.inputs )
+ except InterruptedUpload:
# If inputs contain a file it won't persist. Most likely this
# is an interrupted upload. We should probably find a more
# standard method of determining an incomplete POST.
return self.handle_interrupted( trans, state.inputs )
- else:
- # Just a refresh, render the form with updated state and errors.
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ except:
+ pass
+ # Just a refresh, render the form with updated state and errors.
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ def find_fieldstorage( self, x ):
+ if isinstance( x, FieldStorage ):
+ raise InterruptedUpload( None )
+ elif type( x ) is types.DictType:
+ [ self.find_fieldstorage( y ) for y in x.values() ]
+ elif type( x ) is types.ListType:
+ [ self.find_fieldstorage( y ) for y in x ]
+
def handle_interrupted( self, trans, inputs ):
"""
Upon handling inputs, if it appears that we have received an incomplete
@@ -1704,3 +1720,6 @@
return value
else:
return incoming.get( key, default )
+
+class InterruptedUpload( Exception ):
+ pass
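
find_fieldstorage() above is a depth-first walk over the tool state that uses the InterruptedUpload exception purely for early exit as soon as any live FieldStorage is found. The same shape in self-contained form (FakeFieldStorage stands in for cgi.FieldStorage, which is awkward to construct by hand):

class InterruptedUpload( Exception ):
    pass

class FakeFieldStorage( object ):
    pass

def find_fieldstorage( x ):
    # raise as soon as a (fake) FieldStorage appears anywhere in the structure
    if isinstance( x, FakeFieldStorage ):
        raise InterruptedUpload( None )
    elif isinstance( x, dict ):
        [ find_fieldstorage( y ) for y in x.values() ]
    elif isinstance( x, list ):
        [ find_fieldstorage( y ) for y in x ]

try:
    find_fieldstorage( { 'files': [ { 'file_data': FakeFieldStorage() } ] } )
except InterruptedUpload:
    print 'interrupted upload detected'
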
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 20 10:49:54 2009 -0400
@@ -1,8 +1,10 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+from cgi import FieldStorage
from __init__ import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
+from galaxy.util.json import to_json_string
import sys, traceback
@@ -11,14 +13,28 @@
class UploadToolAction( ToolAction ):
# Action for uploading files
- def __init__( self ):
- self.empty = False
- self.line_count = None
- def remove_tempfile( self, filename ):
- try:
- os.unlink( filename )
- except:
- log.exception( 'failure removing temporary file: %s' % filename )
+ def persist_uploads( self, incoming ):
+ if 'files' in incoming:
+ new_files = []
+ temp_files = []
+ for upload_dataset in incoming['files']:
+ f = upload_dataset['file_data']
+ if isinstance( f, FieldStorage ):
+ # very small files can be StringIOs
+ if 'name' in dir( f.file ) and f.file.name != '<fdopen>':
+ local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
+ f.file.close()
+ else:
+ local_filename = datatypes.sniff.stream_to_file( f.file, prefix="strio_upload_file_" )[0]
+ upload_dataset['file_data'] = dict( filename = f.filename,
+ local_filename = local_filename )
+ if upload_dataset['url_paste'].strip() != '':
+ upload_dataset['url_paste'] = datatypes.sniff.stream_to_file( StringIO.StringIO( upload_dataset['url_paste'] ), prefix="strio_url_paste_" )[0]
+ else:
+ upload_dataset['url_paste'] = None
+ new_files.append( upload_dataset )
+ incoming['files'] = new_files
+ return incoming
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
dataset_upload_inputs = []
for input_name, input in tool.inputs.iteritems():
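
persist_uploads() above exists so the job can be queued at all: live cgi.FieldStorage objects cannot be serialized into job parameters, so each is replaced with a plain dict pointing at a stable file on disk. A rough standalone illustration of that swap, using tempfile and shutil rather than Galaxy's mkstemp_ln and stream_to_file helpers:

import os, shutil, tempfile

def persist_upload( filename, file_obj ):
    # copy the (possibly in-memory) upload to a named temp file and
    # return a JSON-serializable description of it
    fd, local_filename = tempfile.mkstemp( prefix = 'upload_file_data_' )
    out = os.fdopen( fd, 'wb' )
    shutil.copyfileobj( file_obj, out )
    out.close()
    return dict( filename = filename, local_filename = local_filename )

import StringIO
print persist_upload( 'a.bed', StringIO.StringIO( 'chr1\t10\t20\n' ) )
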
@@ -42,330 +58,100 @@
log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
else:
self.precreated_datasets.append( data )
+
data_list = []
+
+ incoming = self.persist_uploads( incoming )
+
+ json_file = tempfile.mkstemp()
+ json_file_path = json_file[1]
+ json_file = os.fdopen( json_file[0], 'w' )
for dataset_upload_input in dataset_upload_inputs:
uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming )
for uploaded_dataset in uploaded_datasets:
- precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name )
- dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset, metadata = uploaded_dataset.metadata, uploaded_dataset = uploaded_dataset )
- #dataset state is now set, we should not do anything else to this dataset
- data_list.append( dataset )
- #clean up extra temp names
- uploaded_dataset.clean_up_temp_files()
-
+ data = self.get_precreated_dataset( uploaded_dataset.name )
+ if not data:
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
+ data.name = uploaded_dataset.name
+ data.state = data.states.QUEUED
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.add_dataset( data, genome_build = uploaded_dataset.dbkey )
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ else:
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.genome_build = uploaded_dataset.dbkey
+ if uploaded_dataset.type == 'composite':
+ # we need to init metadata before the job is dispatched
+ data.init_meta()
+ for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
+ setattr( data.metadata, meta_name, meta_value )
+ data.flush()
+ json = dict( file_type = uploaded_dataset.file_type,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ metadata = uploaded_dataset.metadata,
+ primary_file = uploaded_dataset.primary_file,
+ extra_files_path = data.extra_files_path,
+ composite_file_paths = uploaded_dataset.composite_files,
+ composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+ else:
+ try:
+ is_binary = uploaded_dataset.datatype.is_binary
+ except:
+ is_binary = None
+ json = dict( file_type = uploaded_dataset.file_type,
+ name = uploaded_dataset.name,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ is_binary = is_binary,
+ space_to_tab = uploaded_dataset.space_to_tab,
+ path = uploaded_dataset.path )
+ json_file.write( to_json_string( json ) + '\n' )
+ data_list.append( data )
+ json_file.close()
+
#cleanup unclaimed precreated datasets:
for data in self.precreated_datasets:
log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
data.state = data.states.ERROR
data.info = 'No file contents were available.'
- if data_list:
- trans.app.model.flush()
+ if not data_list:
+ try:
+ os.remove( json_file_path )
+ except:
+ pass
+ return 'No data was entered in the upload form, please go back and choose data to upload.'
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
job.history_id = trans.history.id
job.tool_id = tool.id
- try:
- # For backward compatibility, some tools may not have versions yet.
- job.tool_version = tool.version
- except:
- job.tool_version = "1.0.1"
+ job.tool_version = tool.version
job.state = trans.app.model.Job.states.UPLOAD
job.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
+
+ for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ job.add_parameter( name, value )
+ job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
+ for i, dataset in enumerate( data_list ):
+ job.add_output_dataset( i, dataset )
+ trans.app.model.flush()
- #if we could make a 'real' job here, then metadata could be set before job.finish() is called
- hda = data_list[0] #only our first hda is being added as output for the job, why?
- job.state = trans.app.model.Job.states.OK
- file_size_str = datatypes.data.nice_size( hda.dataset.file_size )
- job.info = "%s, size: %s" % ( hda.info, file_size_str )
- job.add_output_dataset( hda.name, hda )
- job.flush()
- log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) )
- trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id )
- return dict( output=hda )
-
- def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None):
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( create_dataset=True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
- data.name = err_code
- data.extension = "txt"
- data.dbkey = "?"
- data.info = err_msg
- data.file_size = 0
- data.state = data.states.EMPTY
- data.flush()
- if precreated_dataset is None:
- trans.history.add_dataset( data )
- trans.app.model.flush()
- # Indicate job failure by setting state and info
- job.state = trans.app.model.Job.states.ERROR
- job.info = err_msg
- job.add_output_dataset( data.name, data )
- job.flush()
- log.info( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ) )
- trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id )
- return dict( output=data )
-
- def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None, metadata = {}, uploaded_dataset = None ):
- def dataset_no_data_error( data, message = 'there was an error uploading your file' ):
- data.info = "No data: %s." % message
- data.state = data.states.ERROR
- if data.extension is None:
- data.extension = 'data'
- return data
- data_type = None
-
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
-
- # See if we have an empty file
- if not os.path.getsize( temp_name ) > 0:
- return dataset_no_data_error( data, message = 'you attempted to upload an empty file' )
- #raise BadFileException( "you attempted to upload an empty file." )
- if is_multi_byte:
- ext = sniff.guess_ext( temp_name, is_multi_byte=True )
- else:
- if not data_type: #at this point data_type is always None (just initialized above), so this is always True...lots of cleanup needed here
- # See if we have a gzipped file, which, if it passes our restrictions,
- # we'll decompress on the fly.
- is_gzipped, is_valid = self.check_gzip( temp_name )
- if is_gzipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_gzipped and is_valid:
- # We need to uncompress the temp_name file
- CHUNK_SIZE = 2**20 # 1Mb
- fd, uncompressed = tempfile.mkstemp()
- gzipped_file = gzip.GzipFile( temp_name )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- return dataset_no_data_error( data, message = 'problem decompressing gzipped data' )
- #raise BadFileException( 'problem decompressing gzipped data.' )
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the decompressed file
- shutil.move( uncompressed, temp_name )
- file_name = file_name.rstrip( '.gz' )
- data_type = 'gzip'
- ext = ''
- if not data_type:
- # See if we have a zip archive
- is_zipped, is_valid, test_ext = self.check_zip( temp_name )
- if is_zipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_zipped and is_valid:
- # Currently, we force specific tools to handle this case. We also require the user
- # to manually set the incoming file_type
- if ( test_ext == 'ab1' or test_ext == 'scf' ) and file_type != 'binseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
- elif test_ext == 'txt' and file_type != 'txtseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
- if not ( file_type == 'binseq.zip' or file_type == 'txtseq.zip' ):
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files" )
- #raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
- data_type = 'zip'
- ext = file_type
- if not data_type:
- if self.check_binary( temp_name ):
- if uploaded_dataset and uploaded_dataset.datatype and uploaded_dataset.datatype.is_binary:
- #we need a more generalized way of checking if a binary upload is of the right format for a datatype...magic number, etc
- data_type = 'binary'
- ext = uploaded_dataset.file_type
- else:
- parts = file_name.split( "." )
- if len( parts ) > 1:
- ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- if ext == 'ab1' and file_type != 'ab1':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
- elif ext == 'scf' and file_type != 'scf':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Scf' when uploading scf files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
- data_type = 'binary'
- if not data_type:
- # We must have a text file
- if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- #if data_type != 'binary' and data_type != 'zip' and not trans.app.datatypes_registry.get_datatype_by_extension( ext ).is_binary:
- if data_type != 'binary' and data_type != 'zip':
- if space_to_tab:
- self.line_count = sniff.convert_newlines_sep2tabs( temp_name )
- else:
- self.line_count = sniff.convert_newlines( temp_name )
- if file_type == 'auto':
- ext = sniff.guess_ext( temp_name, sniff_order=trans.app.datatypes_registry.sniff_order )
- else:
- ext = file_type
- data_type = ext
- if info is None:
- info = 'uploaded %s file' %data_type
- data.extension = ext
- data.name = file_name
- data.dbkey = dbkey
- data.info = info
- data.flush()
- shutil.move( temp_name, data.file_name )
- dataset_state = data.states.OK #don't set actual state here, only set to OK when finished setting attributes of the dataset
- data.set_size()
- data.init_meta()
- #need to set metadata, has to be done after extention is set
- for meta_name, meta_value in metadata.iteritems():
- setattr( data.metadata, meta_name, meta_value )
- if self.line_count is not None:
- try:
- if is_multi_byte:
- data.set_multi_byte_peek( line_count=self.line_count )
- else:
- data.set_peek( line_count=self.line_count )
- except:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
- else:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
-
- # validate incomming data
- # Commented by greg on 3/14/07
- # for error in data.datatype.validate( data ):
- # data.add_validation_error(
- # model.ValidationError( message=str( error ), err_type=error.__class__.__name__, attributes=util.object_to_string( error.__dict__ ) ) )
- if data.missing_meta():
- data.datatype.set_meta( data )
- dbkey_to_store = dbkey
- if type( dbkey_to_store ) == type( [] ):
- dbkey_to_store = dbkey[0]
- if precreated_dataset is not None:
- trans.history.genome_build = dbkey_to_store
- else:
- trans.history.add_dataset( data, genome_build=dbkey_to_store )
- #set up composite files
- if uploaded_dataset is not None:
- composite_files = data.datatype.get_composite_files( data )
- if composite_files:
- os.mkdir( data.extra_files_path ) #make extra files path
- for name, value in composite_files.iteritems():
- if uploaded_dataset.composite_files[ value.name ] is None and not value.optional:
- data.info = "A required composite data file was not provided (%s)" % name
- dataset_state = data.states.ERROR
- break
- elif uploaded_dataset.composite_files[ value.name] is not None:
- if not value.is_binary:
- if uploaded_dataset.composite_files[ value.name ].space_to_tab:
- sniff.convert_newlines_sep2tabs( uploaded_dataset.composite_files[ value.name ].filename )
- else:
- sniff.convert_newlines( uploaded_dataset.composite_files[ value.name ].filename )
- shutil.move( uploaded_dataset.composite_files[ value.name ].filename, os.path.join( data.extra_files_path, name ) )
- if data.datatype.composite_type == 'auto_primary_file':
- #now that metadata was set above, we should create the primary file as required
- open( data.file_name, 'wb+' ).write( data.datatype.generate_primary_file( dataset = data ) )
- data.state = dataset_state #Always set dataset state LAST
- trans.app.model.flush()
- trans.log_event( "Added dataset %d to history %d" %( data.id, trans.history.id ), tool_id="upload" )
- return data
-
- def check_gzip( self, temp_name ):
- temp = open( temp_name, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != util.gzip_magic:
- return ( False, False )
- CHUNK_SIZE = 2**15 # 32Kb
- gzipped_file = gzip.GzipFile( temp_name )
- chunk = gzipped_file.read( CHUNK_SIZE )
- gzipped_file.close()
- if self.check_html( temp_name, chunk=chunk ) or self.check_binary( temp_name, chunk=chunk ):
- return( True, False )
- return ( True, True )
-
- def check_zip( self, temp_name ):
- if not zipfile.is_zipfile( temp_name ):
- return ( False, False, None )
- zip_file = zipfile.ZipFile( temp_name, "r" )
- # Make sure the archive consists of valid files. The current rules are:
- # 1. Archives can only include .ab1, .scf or .txt files
- # 2. All file extensions within an archive must be the same
- name = zip_file.namelist()[0]
- test_ext = name.split( "." )[1].strip().lower()
- if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
- return ( True, False, test_ext )
- for name in zip_file.namelist():
- ext = name.split( "." )[1].strip().lower()
- if ext != test_ext:
- return ( True, False, test_ext )
- return ( True, True, test_ext )
-
- def check_html( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open(temp_name, "U")
- else:
- temp = chunk
- regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
- regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
- regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
- regexp4 = re.compile( "<META[^>]*>", re.I )
- lineno = 0
- for line in temp:
- lineno += 1
- matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
- if matches:
- if chunk is None:
- temp.close()
- return True
- if lineno > 100:
- break
- if chunk is None:
- temp.close()
- return False
- def check_binary( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open( temp_name, "U" )
- else:
- temp = chunk
- lineno = 0
- for line in temp:
- lineno += 1
- line = line.strip()
- if line:
- if util.is_multi_byte( line ):
- return False
- for char in line:
- if ord( char ) > 128:
- if chunk is None:
- temp.close()
- return True
- if lineno > 10:
- break
- if chunk is None:
- temp.close()
- return False
+ # Queue the job for execution
+ trans.app.job_queue.put( job.id, tool )
+ trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return dict( [ ( i, v ) for i, v in enumerate( data_list ) ] )
def get_precreated_dataset( self, name ):
"""
@@ -378,7 +164,3 @@
return self.precreated_datasets.pop( names.index( name ) )
else:
return None
-
-class BadFileException( Exception ):
- pass
-
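
The upload action above no longer creates datasets synchronously: it records the tool parameters, points the job at a JSON "paramfile", and queues the job. For orientation, one line of that paramfile might look like the sketch below; the field names mirror the attributes read by tools/data_source/upload.py later in this changeset, while the concrete values are invented:

    {"type": "file", "dataset_id": 42, "name": "reads.fastq", "path": "/tmp/upload_file_data_XYZ", "file_type": "auto", "dbkey": "?", "space_to_tab": false, "is_binary": null}
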
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Aug 20 10:49:54 2009 -0400
@@ -304,21 +304,22 @@
def get_html_field( self, trans=None, value=None, other_values={} ):
return form_builder.FileField( self.name, ajax = self.ajax, value = value )
def from_html( self, value, trans=None, other_values={} ):
+ # TODO: Fix nginx upload module support
# Middleware or proxies may encode files in special ways (TODO: this
# should be pluggable)
- if type( value ) == dict:
- upload_location = self.tool.app.config.nginx_upload_location
- assert upload_location, \
- "Request appears to have been processed by nginx_upload_module \
- but Galaxy is not configured to recgonize it"
- # Check that the file is in the right location
- local_filename = os.path.abspath( value['path'] )
- assert local_filename.startswith( upload_location ), \
- "Filename provided by nginx is not in correct directory"
- value = Bunch(
- filename = value["name"],
- local_filename = local_filename
- )
+ #if type( value ) == dict:
+ # upload_location = self.tool.app.config.nginx_upload_location
+ # assert upload_location, \
+ # "Request appears to have been processed by nginx_upload_module \
+ # but Galaxy is not configured to recognize it"
+ # # Check that the file is in the right location
+ # local_filename = os.path.abspath( value['path'] )
+ # assert local_filename.startswith( upload_location ), \
+ # "Filename provided by nginx is not in correct directory"
+ # value = Bunch(
+ # filename = value["name"],
+ # local_filename = local_filename
+ # )
return value
def get_required_enctype( self ):
"""
@@ -330,10 +331,18 @@
return None
elif isinstance( value, unicode ) or isinstance( value, str ):
return value
+ elif isinstance( value, dict ):
+ # or should we jsonify?
+ try:
+ return value['local_filename']
+ except:
+ return None
raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
if value is None:
return None
+ elif isinstance( value, unicode ) or isinstance( value, str ):
+ return value
else:
raise Exception( "FileToolParameter cannot be persisted" )
def get_initial_value( self, trans, context ):
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 10:49:54 2009 -0400
@@ -12,6 +12,7 @@
from galaxy.datatypes import sniff
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
+from galaxy.util import json
class Group( object ):
def __init__( self ):
@@ -167,33 +168,30 @@
rval.append( rval_dict )
return rval
def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
- def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ):
+ def get_data_file_filename( data_file, override_name = None, override_info = None ):
dataset_name = override_name
dataset_info = override_info
def get_file_name( file_name ):
file_name = file_name.split( '\\' )[-1]
file_name = file_name.split( '/' )[-1]
return file_name
- if 'local_filename' in dir( data_file ):
+ try:
# Use the existing file
- return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte
- elif 'filename' in dir( data_file ):
- #create a new tempfile
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
- precreated_name = get_file_name( data_file.filename )
- if not dataset_name:
- dataset_name = precreated_name
- if not dataset_info:
- dataset_info = 'uploaded file'
- return temp_name, get_file_name( data_file.filename ), is_multi_byte, dataset_name, dataset_info
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
- self.remove_temp_file( temp_name )
- return None, None, is_multi_byte, None, None
- def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ):
+ if not dataset_name and 'filename' in data_file:
+ dataset_name = get_file_name( data_file['filename'] )
+ if not dataset_info:
+ dataset_info = 'uploaded file'
+ return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ #return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
+ except:
+ # The uploaded file should've been persisted by the upload tool action
+ return Bunch( type=None, path=None, name=None )
+ #return None, None, None, None, None
+ def get_url_paste_urls_or_filename( group_incoming, override_name = None, override_info = None ):
filenames = []
- if url_paste not in [ None, "" ]:
+ url_paste_file = group_incoming.get( 'url_paste', None )
+ if url_paste_file is not None:
+ url_paste = open( url_paste_file, 'r' ).read( 1024 )
if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
url_paste = url_paste.replace( '\r', '' ).split( '\n' )
for line in url_paste:
@@ -208,114 +206,54 @@
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
- except Exception, e:
- temp_name = None
- precreated_name = str( e )
- log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
+ yield Bunch( type='url', path=line, name=precreated_name )
+ #yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
if override_name:
dataset_name = override_name
if override_info:
dataset_info = override_info
- is_valid = False
- for line in url_paste: #Trim off empty lines from begining
- line = line.rstrip( '\r\n' )
- if line:
- is_valid = True
- break
- if is_valid:
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
- temp_name = None
- precreated_name = str( e )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
-
+ yield Bunch( type='file', path=url_paste_file, name=precreated_name )
+ #yield ( 'file', url_paste_file, precreated_name, dataset_name, dataset_info )
def get_one_filename( context ):
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
if url_paste.strip():
warnings.append( "All file contents specified in the paste box were ignored." )
else: #we need to use url_paste
- #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names:
- if temp_name:
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
break
- ###this check will cause an additional file to be retrieved and created...so lets not do that
- #try: #check to see if additional paste contents were available
- # file_names.next()
- # warnings.append( "Additional file contents were specified in the paste box, but ignored." )
- #except StopIteration:
- # pass
- return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings
-
+ return file_bunch, warnings
def get_filenames( context ):
rval = []
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
- warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ warnings = []
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
+ #rval.append( ( type, temp_name, precreated_name, space_to_tab, dataset_name, dataset_info ) )
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
return rval
- class UploadedDataset( Bunch ):
- def __init__( self, **kwd ):
- Bunch.__init__( self, **kwd )
- self.primary_file = None
- self.composite_files = odict()
- self.dbkey = None
- self.warnings = []
- self.metadata = {}
-
- self._temp_filenames = [] #store all created filenames here, delete on cleanup
- def register_temp_file( self, filename ):
- if isinstance( filename, list ):
- self._temp_filenames.extend( filename )
- else:
- self._temp_filenames.append( filename )
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- pass
- #log.warning( str( e ) )
- def clean_up_temp_files( self ):
- for filename in self._temp_filenames:
- self.remove_temp_file( filename )
-
file_type = self.get_file_type( context )
d_type = self.get_datatype( trans, context )
dbkey = context.get( 'dbkey', None )
@@ -325,51 +263,50 @@
for group_incoming in context.get( self.name, [] ):
i = int( group_incoming['__index__'] )
groups_incoming[ i ] = group_incoming
-
if d_type.composite_type is not None:
#handle uploading of composite datatypes
#Only one Dataset can be created
+ '''
dataset = UploadedDataset()
+ dataset.datatype = d_type
+ '''
+ dataset = Bunch()
+ dataset.type = 'composite'
dataset.file_type = file_type
+ dataset.dbkey = dbkey
dataset.datatype = d_type
- dataset.dbkey = dbkey
+ dataset.warnings = []
+ dataset.metadata = {}
+ dataset.composite_files = {}
#load metadata
files_metadata = context.get( self.metadata_ref, {} )
- for meta_name, meta_spec in d_type.metadata_spec.iteritems():
+ for meta_name, meta_spec in d_type.metadata_spec.iteritems():
if meta_spec.set_in_upload:
if meta_name in files_metadata:
dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
-
- temp_name = None
- precreated_name = None
- is_multi_byte = False
- space_to_tab = False
- warnings = []
dataset_name = None
dataset_info = None
if dataset.datatype.composite_type == 'auto_primary_file':
#replace sniff here with just creating an empty file
temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
- precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ dataset.primary_file = temp_name
+ dataset.space_to_tab = False
+ dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] )
+ file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
if dataset.datatype.composite_type:
precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
writable_files_offset = 1
- if temp_name is None:#remove this before finish, this should create an empty dataset
+ dataset.primary_file = file_bunch.path
+ dataset.space_to_tab = file_bunch.space_to_tab
+ dataset.precreated_name = file_bunch.precreated_name
+ dataset.name = file_bunch.precreated_name
+ dataset.warnings.extend( file_bunch.warnings )
+ if dataset.primary_file is None: #TODO: remove this before finishing; it should create an empty dataset instead
raise Exception( 'No primary dataset file was available for composite upload' )
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.precreated_name = precreated_name
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.warnings.extend( warnings )
- dataset.register_temp_file( temp_name )
-
keys = [ value.name for value in writable_files.values() ]
for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
key = keys[ i + writable_files_offset ]
@@ -377,37 +314,22 @@
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
dataset.composite_files[ key ] = None
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming )
- if temp_name:
- dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name )
- dataset.register_temp_file( temp_name )
+ file_bunch, warnings = get_one_filename( group_incoming )
+ if file_bunch.path:
+ dataset.composite_files[ key ] = file_bunch.__dict__
else:
dataset.composite_files[ key ] = None
if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
return [ dataset ]
else:
+ datasets = get_filenames( context[ self.name ][0] )
rval = []
- for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ):
- dataset = UploadedDataset()
+ for dataset in datasets:
dataset.file_type = file_type
- dataset.datatype = d_type
dataset.dbkey = dbkey
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.precreated_name = precreated_name
- dataset.register_temp_file( temp_name )
rval.append( dataset )
- return rval
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- log.warning( str( e ) )
-
+ return rval
class Conditional( Group ):
type = "conditional"
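
A plain (non-composite) upload is now described by a lightweight Bunch instead of an UploadedDataset instance. A minimal sketch of the shape get_uploaded_datasets() returns, with invented values:

    from galaxy.util.bunch import Bunch

    file_bunch = Bunch( type='file', path='/tmp/upload_file_data_XYZ', name='reads.fastq' )
    file_bunch.space_to_tab = False
    # the caller stamps on the common fields afterwards, per the loop above
    file_bunch.file_type = 'auto'
    file_bunch.dbkey = '?'
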
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/util/__init__.py Thu Aug 20 10:49:54 2009 -0400
@@ -3,7 +3,7 @@
"""
import logging
-import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys
+import threading, random, string, re, binascii, pickle, time, datetime, math, os, sys, tempfile, errno
# Older py compatibility
try:
@@ -454,6 +454,26 @@
out_dict[ str( key ) ] = value
return out_dict
+def mkstemp_ln( src, prefix='mkstemp_ln_' ):
+ """
+ From tempfile._mkstemp_inner, generate a hard link in the same dir with a
+ random name. Created so we can persist the underlying file of a
+ NamedTemporaryFile upon its closure.
+ """
+ dir = os.path.dirname(src)
+ names = tempfile._get_candidate_names()
+ for seq in xrange(tempfile.TMP_MAX):
+ name = names.next()
+ file = os.path.join(dir, prefix + name)
+ try:
+ # create the hard link; os.link itself returns None
+ os.link( src, file )
+ return os.path.abspath( file )
+ except OSError, e:
+ if e.errno == errno.EEXIST:
+ continue # try again
+ raise
+ raise IOError, (errno.EEXIST, "No usable temporary file name found")
+
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) ) #this list is used in edit attributes and the upload tool
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) ) #this list is used in history.tmpl
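
mkstemp_ln() exists so that data parsed into a NamedTemporaryFile during the request (see the FieldStorage.make_file override below) can be kept without copying it. A minimal usage sketch, assuming a filesystem that permits hard links:

    import tempfile
    from galaxy.util import mkstemp_ln

    upload = tempfile.NamedTemporaryFile( prefix='upload_file_data_' )
    upload.write( 'uploaded bytes' )
    upload.flush()
    # hard-link the data under a fresh random name in the same directory
    persisted = mkstemp_ln( upload.name, prefix='upload_file_data_' )
    upload.close() # unlinks upload.name; persisted still references the inode
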
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 10:49:54 2009 -0400
@@ -136,6 +136,7 @@
"""
Precreate datasets for asynchronous uploading.
"""
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
def create_dataset( name, history ):
data = trans.app.model.HistoryDatasetAssociation( create_dataset = True )
data.name = name
@@ -143,6 +144,7 @@
data.history = history
data.flush()
history.add_dataset( data )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
return data
tool = self.get_toolbox().tools_by_id.get( tool_id, None )
if not tool:
diff -r 5fa8803716fd -r f6e0863862ef lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py Wed Aug 19 18:07:55 2009 -0400
+++ b/lib/galaxy/web/framework/base.py Thu Aug 20 10:49:54 2009 -0400
@@ -212,6 +212,17 @@
else:
return None
+# For request.params, override cgi.FieldStorage.make_file to create persistent
+# tempfiles. Necessary for externalizing the upload tool. It's a little hacky
+# but for performance reasons it's way better to use Paste's tempfile than to
+# create a new one and copy.
+import cgi
+class FieldStorage( cgi.FieldStorage ):
+ def make_file(self, binary=None):
+ import tempfile
+ return tempfile.NamedTemporaryFile()
+cgi.FieldStorage = FieldStorage
+
class Request( webob.Request ):
"""
Encapsulates an HTTP request.
diff -r 5fa8803716fd -r f6e0863862ef templates/base_panels.mako
--- a/templates/base_panels.mako Wed Aug 19 18:07:55 2009 -0400
+++ b/templates/base_panels.mako Thu Aug 20 10:49:54 2009 -0400
@@ -72,9 +72,6 @@
<script type="text/javascript">
jQuery( function() {
$("iframe#galaxy_main").load( function() {
- ##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() {
- ##$("iframe")[0].contentDocument.body.innerHTML = "HELLO"
- ##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() {
$(this).contents().find("form").each( function() {
if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
$(this).submit( function() {
diff -r 5fa8803716fd -r f6e0863862ef test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Aug 19 18:07:55 2009 -0400
+++ b/test/base/twilltestcase.py Thu Aug 20 10:49:54 2009 -0400
@@ -93,6 +93,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when uploading file %s" % ( hid, filename )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
def upload_url_paste( self, url_paste, ftype='auto', dbkey='unspecified (?)' ):
"""Pasted data in the upload utility"""
self.visit_page( "tool_runner/index?tool_id=upload1" )
@@ -112,6 +114,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when pasting %s" % ( hid, url_paste )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
# Functions associated with histories
def check_history_for_errors( self ):
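
The new wait() calls block until the now-asynchronous upload job finishes. The helper itself is not part of this diff; roughly (details assumed here, including the state markers searched for), it polls the history page until nothing is queued or running:

    import time

    def wait( self, maxseconds=120 ):
        slept = 0
        while slept < maxseconds:
            self.visit_page( 'history' )
            page = self.last_page()
            if page.find( 'state-queued' ) < 0 and page.find( 'state-running' ) < 0:
                return
            time.sleep( 3 )
            slept += 3
        raise AssertionError( 'Upload did not finish in %d seconds' % maxseconds )
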
diff -r 5fa8803716fd -r f6e0863862ef tools/data_source/upload.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/data_source/upload.py Thu Aug 20 10:49:54 2009 -0400
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+#Processes uploads from the user.
+
+# WARNING: Changes in this tool (particularly as related to parsing) may need
+# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools
+
+import urllib, sys, os, gzip, tempfile, shutil, re, zipfile
+from galaxy import eggs
+# need to import model before sniff to resolve a circular import dependency
+import galaxy.model
+from galaxy.datatypes import sniff
+from galaxy import util
+from galaxy.util.json import *
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+def stop_err( msg, ret=1 ):
+ sys.stderr.write( msg )
+ sys.exit( ret )
+
+def file_err( msg, dataset, json_file ):
+ json_file.write( to_json_string( dict( type = 'dataset',
+ ext = 'data',
+ dataset_id = dataset.dataset_id,
+ stderr = msg ) ) + "\n" )
+ try:
+ os.remove( dataset.path )
+ except:
+ pass
+
+def safe_dict(d):
+ """
+ Recursively clone json structure with UTF-8 dictionary keys
+ http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-ke…
+ """
+ if isinstance(d, dict):
+ return dict([(k.encode('utf-8'), safe_dict(v)) for k,v in d.iteritems()])
+ elif isinstance(d, list):
+ return [safe_dict(x) for x in d]
+ else:
+ return d
+
+def check_html( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open(temp_name, "U")
+ else:
+ temp = chunk
+ regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
+ regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
+ regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
+ regexp4 = re.compile( "<META[^>]*>", re.I )
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
+ if matches:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 100:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_binary( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open( temp_name, "U" )
+ else:
+ temp = chunk
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ line = line.strip()
+ if line:
+ for char in line:
+ if ord( char ) > 128:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 10:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_gzip( temp_name ):
+ temp = open( temp_name, "U" )
+ magic_check = temp.read( 2 )
+ temp.close()
+ if magic_check != util.gzip_magic:
+ return ( False, False )
+ CHUNK_SIZE = 2**15 # 32Kb
+ gzipped_file = gzip.GzipFile( temp_name )
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ gzipped_file.close()
+ if check_html( temp_name, chunk=chunk ) or check_binary( temp_name, chunk=chunk ):
+ return( True, False )
+ return ( True, True )
+
+def check_zip( temp_name ):
+ if not zipfile.is_zipfile( temp_name ):
+ return ( False, False, None )
+ zip_file = zipfile.ZipFile( temp_name, "r" )
+ # Make sure the archive consists of valid files. The current rules are:
+ # 1. Archives can only include .ab1, .scf or .txt files
+ # 2. All file extensions within an archive must be the same
+ name = zip_file.namelist()[0]
+ test_ext = name.split( "." )[1].strip().lower()
+ if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
+ return ( True, False, test_ext )
+ for name in zip_file.namelist():
+ ext = name.split( "." )[1].strip().lower()
+ if ext != test_ext:
+ return ( True, False, test_ext )
+ return ( True, True, test_ext )
+
+def add_file( dataset, json_file ):
+ data_type = None
+ line_count = None
+
+ if dataset.type == 'url':
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ except Exception, e:
+ file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
+ return
+ dataset.path = temp_name
+ dataset.is_multi_byte = is_multi_byte
+
+ # See if we have an empty file
+ if not os.path.exists( dataset.path ):
+ file_err( 'Uploaded temporary file (%s) does not exist' % dataset.path, dataset, json_file )
+ return
+ if not os.path.getsize( dataset.path ) > 0:
+ file_err( 'The uploaded file is empty', dataset, json_file )
+ return
+ if 'is_multi_byte' not in dir( dataset ):
+ dataset.is_multi_byte = util.is_multi_byte( open( dataset.path, 'r' ).read( 1024 )[:100] )
+ if dataset.is_multi_byte:
+ ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
+ data_type = ext
+ else:
+ # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
+ is_gzipped, is_valid = check_gzip( dataset.path )
+ if is_gzipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_gzipped and is_valid:
+ # We need to uncompress the temp_name file
+ CHUNK_SIZE = 2**20 # 1Mb
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( dataset.path ) )
+ gzipped_file = gzip.GzipFile( dataset.path )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ file_err( 'Problem decompressing gzipped data', dataset, json_file )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ # Replace the gzipped file with the decompressed file
+ shutil.move( uncompressed, dataset.path )
+ dataset.name = dataset.name.rstrip( '.gz' )
+ data_type = 'gzip'
+ if not data_type:
+ # See if we have a zip archive
+ is_zipped, is_valid, test_ext = check_zip( dataset.path )
+ if is_zipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_zipped and is_valid:
+ # Currently, we force specific tools to handle this case. We also require the user
+ # to manually set the incoming file_type
+ if ( test_ext == 'ab1' or test_ext == 'scf' ) and dataset.file_type != 'binseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'", dataset, json_file )
+ return
+ elif test_ext == 'txt' and dataset.file_type != 'txtseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'", dataset, json_file )
+ return
+ if not ( dataset.file_type == 'binseq.zip' or dataset.file_type == 'txtseq.zip' ):
+ file_err( "You must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files", dataset, json_file )
+ return
+ data_type = 'zip'
+ ext = dataset.file_type
+ if not data_type:
+ if check_binary( dataset.path ):
+ if dataset.is_binary is not None:
+ data_type = 'binary'
+ ext = dataset.file_type
+ else:
+ parts = dataset.name.split( "." )
+ if len( parts ) > 1:
+ ext = parts[1].strip().lower()
+ if not( ext == 'ab1' or ext == 'scf' ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if ext == 'ab1' and dataset.file_type != 'ab1':
+ file_err( "You must manually set the 'File Format' to 'Ab1' when uploading ab1 files.", dataset, json_file )
+ return
+ elif ext == 'scf' and dataset.file_type != 'scf':
+ file_err( "You must manually set the 'File Format' to 'Scf' when uploading scf files.", dataset, json_file )
+ return
+ data_type = 'binary'
+ if not data_type:
+ # We must have a text file
+ if check_html( dataset.path ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if data_type != 'binary' and data_type != 'zip':
+ if dataset.space_to_tab:
+ line_count = sniff.convert_newlines_sep2tabs( dataset.path )
+ else:
+ line_count = sniff.convert_newlines( dataset.path )
+ if dataset.file_type == 'auto':
+ ext = sniff.guess_ext( dataset.path )
+ else:
+ ext = dataset.file_type
+ data_type = ext
+ # Save job info for the framework
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.path,
+ ext = ext,
+ stdout = 'uploaded %s file' % data_type,
+ name = dataset.name,
+ line_count = line_count )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def add_composite_file( dataset, json_file ):
+ if dataset.composite_files:
+ os.mkdir( dataset.extra_files_path )
+ for name, value in dataset.composite_files.iteritems():
+ value = util.bunch.Bunch( **value )
+ if dataset.composite_file_paths[ value.name ] is None and not value.optional:
+ file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
+ break
+ elif dataset.composite_file_paths[value.name] is not None:
+ if not value.is_binary:
+ if dataset.composite_file_paths[ value.name ].get( 'space_to_tab', False ):
+ sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ else:
+ sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.primary_file,
+ stdout = 'uploaded %s file' % dataset.file_type )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def __main__():
+
+ if len( sys.argv ) != 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ sys.exit( 1 )
+
+ json_file = open( 'galaxy.json', 'w' )
+
+ for line in open( sys.argv[1], 'r' ):
+ dataset = from_json_string( line )
+ dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+
+ if dataset.type == 'composite':
+ add_composite_file( dataset, json_file )
+ else:
+ add_file( dataset, json_file )
+
+ # clean up paramfile
+ try:
+ os.remove( sys.argv[1] )
+ except:
+ pass
+
+if __name__ == '__main__':
+ __main__()
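
End to end, the tool now runs as a regular Galaxy job and reports its results through galaxy.json in the job's working directory, one JSON line per dataset. A hypothetical successful run, with invented file names and values:

    $ python upload.py params.json
    $ cat galaxy.json
    {"type": "dataset", "dataset_id": 42, "path": "/tmp/upload_file_data_XYZ", "ext": "fastq", "stdout": "uploaded fastq file", "name": "reads.fastq", "line_count": 4000}
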
diff -r 5fa8803716fd -r f6e0863862ef tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Wed Aug 19 18:07:55 2009 -0400
+++ b/tools/data_source/upload.xml Thu Aug 20 10:49:54 2009 -0400
@@ -1,10 +1,13 @@
<?xml version="1.0"?>
-<tool name="Upload File" id="upload1" version="1.0.2">
+<tool name="Upload File" id="upload1" version="1.0.3">
<description>
from your computer
</description>
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
+ <command interpreter="python">
+ upload.py $paramfile
+ </command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
<options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ "%s%s%s" % ( line, self.separator, line ) for line in obj ]">
details: http://www.bx.psu.edu/hg/galaxy/rev/5b6146705e80
changeset: 2577:5b6146705e80
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 13 13:44:04 2009 -0400
description:
Merge trunk
14 file(s) affected in this change:
templates/admin/library/create_info_template.mako
templates/admin/library/edit_info_template.mako
templates/admin/library/info_permissions.mako
templates/admin/library/info_template_permissions.mako
templates/admin/library/new_info.mako
templates/admin/library/new_info_template.mako
templates/history/grid.mako
templates/history/list_shared.mako
templates/library/create_info_template.mako
templates/library/edit_info_template.mako
templates/library/info_permissions.mako
templates/library/info_template_permissions.mako
templates/library/new_info.mako
templates/library/new_info_template.mako
diffs (truncated from 7699 to 3000 lines):
diff -r f1f2d1de5f3e -r 5b6146705e80 cron/updateucsc.sh.sample
--- a/cron/updateucsc.sh.sample Thu Aug 13 13:43:25 2009 -0400
+++ b/cron/updateucsc.sh.sample Thu Aug 13 13:44:04 2009 -0400
@@ -9,8 +9,20 @@
export PYTHONPATH=${GALAXY}/lib
# setup directories
-mkdir ${GALAXY}/tool-data/shared/ucsc/new
-mkdir ${GALAXY}/tool-data/shared/ucsc/chrom/new
+echo "Creating required directories."
+DIRS="
+${GALAXY}/tool-data/shared/ucsc/new
+${GALAXY}/tool-data/shared/ucsc/chrom
+${GALAXY}/tool-data/shared/ucsc/chrom/new
+"
+for dir in $DIRS; do
+ if [ ! -d $dir ]; then
+ echo "Creating $dir"
+ mkdir $dir
+ else
+ echo "$dir already exists, continuing."
+ fi
+done
date
echo "Updating UCSC shared data tables."
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/app.py
--- a/lib/galaxy/app.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/app.py Thu Aug 13 13:44:04 2009 -0400
@@ -37,6 +37,8 @@
self.toolbox = tools.ToolBox( self.config.tool_config, self.config.tool_path, self )
# Load datatype converters
self.datatypes_registry.load_datatype_converters( self.toolbox )
+ #load external metadata tool
+ self.datatypes_registry.load_external_metadata_tool( self.toolbox )
# Load datatype indexers
self.datatypes_registry.load_datatype_indexers( self.toolbox )
#Load security policy
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,7 +1,7 @@
"""
Provides mapping between extensions and datatypes, mime-types, etc.
"""
-import os
+import os, tempfile
import logging
import data, tabular, interval, images, sequence, qualityscore, genetics, xml, coverage, tracks, chrominfo
import galaxy.util
@@ -18,6 +18,7 @@
self.datatype_converters = odict()
self.datatype_indexers = odict()
self.converters = []
+ self.set_external_metadata_tool = None
self.indexers = []
self.sniff_order = []
self.upload_file_formats = []
@@ -251,6 +252,31 @@
self.datatype_converters[source_datatype][target_datatype] = converter
self.log.debug( "Loaded converter: %s", converter.id )
+ def load_external_metadata_tool( self, toolbox ):
+ """Adds a tool which is used to set external metadata"""
+ #we need to be able to add a job to the queue to set metadata. The queue will currently only accept jobs with an associated tool.
+ #We'll create a special tool to be used for Auto-Detecting metadata; this is less than ideal, but effective
+ #Properly building a tool without relying on parsing an XML file is near impossible...so we'll create a temporary file
+ tool_xml_text = """
+ <tool id="__SET_METADATA__" name="Set External Metadata" version="1.0.0" tool_type="set_metadata">
+ <type class="SetMetadataTool" module="galaxy.tools"/>
+ <action module="galaxy.tools.actions.metadata" class="SetMetadataToolAction"/>
+ <command>$__SET_EXTERNAL_METADATA_COMMAND_LINE__</command>
+ <inputs>
+ <param format="data" name="input1" type="data" label="File to set metadata on."/>
+ <param name="__ORIGINAL_DATASET_STATE__" type="hidden" value=""/>
+ <param name="__SET_EXTERNAL_METADATA_COMMAND_LINE__" type="hidden" value=""/>
+ </inputs>
+ </tool>
+ """
+ tmp_name = tempfile.NamedTemporaryFile()
+ tmp_name.write( tool_xml_text )
+ tmp_name.flush()
+ set_meta_tool = toolbox.load_tool( tmp_name.name )
+ toolbox.tools_by_id[ set_meta_tool.id ] = set_meta_tool
+ self.set_external_metadata_tool = set_meta_tool
+ self.log.debug( "Loaded external metadata tool: %s", self.set_external_metadata_tool.id )
+
def load_datatype_indexers( self, toolbox ):
"""Adds indexers from self.indexers to the toolbox from app"""
for elem in self.indexers:
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/datatypes/sequence.py
--- a/lib/galaxy/datatypes/sequence.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/datatypes/sequence.py Thu Aug 13 13:44:04 2009 -0400
@@ -334,28 +334,37 @@
"""
#these metadata values are not accessible by users, always overwrite
+ try:
+ maf_reader = bx.align.maf.Reader( open( dataset.file_name ) )
+ except:
+ return #not a maf file
species = []
species_chromosomes = {}
indexes = bx.interval_index_file.Indexes()
- try:
- maf_reader = bx.align.maf.Reader( open( dataset.file_name ) )
- while True:
- pos = maf_reader.file.tell()
- block = maf_reader.next()
- if block is None: break
- for c in block.components:
- spec = c.src
- chrom = None
- if "." in spec:
- spec, chrom = spec.split( ".", 1 )
- if spec not in species:
- species.append(spec)
- species_chromosomes[spec] = []
- if chrom and chrom not in species_chromosomes[spec]:
- species_chromosomes[spec].append( chrom )
- indexes.add( c.src, c.forward_strand_start, c.forward_strand_end, pos, max=c.src_size )
- except: #bad MAF file
- pass
+ while True:
+ pos = maf_reader.file.tell()
+ block = maf_reader.next()
+ if block is None: break
+ for c in block.components:
+ spec = c.src
+ chrom = None
+ if "." in spec:
+ spec, chrom = spec.split( ".", 1 )
+ if spec not in species:
+ species.append(spec)
+ species_chromosomes[spec] = []
+ if chrom and chrom not in species_chromosomes[spec]:
+ species_chromosomes[spec].append( chrom )
+ forward_strand_start = c.forward_strand_start
+ forward_strand_end = c.forward_strand_end
+ try:
+ forward_strand_start = int( forward_strand_start )
+ forward_strand_end = int( forward_strand_end )
+ except ValueError:
+ continue #start and end are not integers, can't add component to index, goto next component
+ if forward_strand_end > forward_strand_start:
+ #require positive length; i.e. certain lines have start = end = 0 and cannot be indexed
+ indexes.add( c.src, forward_strand_start, forward_strand_end, pos, max=c.src_size )
dataset.metadata.species = species
#only overwrite the contents if our newly determined chromosomes don't match stored
chrom_file = dataset.metadata.species_chromosomes
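
For reference, the bx-python interval index built in the loop above is persisted elsewhere with the same small API; a sketch with invented coordinates:

    import bx.interval_index_file

    indexes = bx.interval_index_file.Indexes()
    # src, forward-strand start/end, file offset of the MAF block, chromosome size
    indexes.add( 'hg18.chr1', 100, 200, 0, max=247249719 )
    indexes.write( open( 'example.maf.index', 'w' ) )
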
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 13 13:44:04 2009 -0400
@@ -274,7 +274,7 @@
elif idata.state == idata.states.ERROR:
job_wrapper.fail( "input data %d is in error state" % ( idata.hid ) )
return JOB_INPUT_ERROR
- elif idata.state != idata.states.OK:
+ elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
# need to requeue
return JOB_WAIT
return JOB_READY
@@ -543,7 +543,7 @@
# Certain tools require tasks to be completed after job execution
# ( this used to be performed in the "exec_after_process" hook, but hooks are deprecated ).
if self.tool.tool_type is not None:
- self.tool.exec_after_process( self.queue.app, inp_data, out_data, param_dict )
+ self.tool.exec_after_process( self.queue.app, inp_data, out_data, param_dict, job = job )
# Call 'exec_after_process' hook
self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
out_data=out_data, param_dict=param_dict,
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/jobs/runners/local.py Thu Aug 13 13:44:04 2009 -0400
@@ -104,7 +104,7 @@
#run the metadata setting script here
#this is terminatable when output dataset/job is deleted
#so that long running set_meta()s can be cancelled without having to reboot the server
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally:
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(), kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
log.debug( 'executing external set_meta script for job %d: %s' % ( job_wrapper.job_id, external_metadata_script ) )
external_metadata_proc = subprocess.Popen( args = external_metadata_script,
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/__init__.py Thu Aug 13 13:44:04 2009 -0400
@@ -310,30 +310,6 @@
raise "Invalid LibraryDatasetDatasetAssociation specified: %s" % library_item.__class__.__name__
self.role = role
-class LibraryItemInfoPermissions( object ):
- def __init__( self, action, library_item, role ):
- # LIBRARY_ADD -> Not Applicable
- # LIBRARY_MODIFY -> Can modify LibraryItemInfoElement.contents
- # LIBRARY_MANAGE -> Can change permissions on LibraryItemInfo
- self.action = action
- if isinstance( library_item, LibraryItemInfo ):
- self.library_item_info = library_item
- else:
- raise "Invalid LibraryItemInfo specified: %s" % library_item.__class__.__name__
- self.role = role
-
-class LibraryItemInfoTemplatePermissions( object ):
- def __init__( self, action, library_item, role ):
- # LIBRARY_ADD -> Not Applicable
- # LIBRARY_MODIFY -> Can add or delete LibraryItemInfoTemplateElements
- # LIBRARY_MANAGE -> Can change permissions on LibraryItemInfoTemplate
- self.action = action
- if isinstance( library_item, LibraryItemInfoTemplate ):
- self.library_item_info_template = library_item
- else:
- raise "Invalid LibraryItemInfoTemplate specified: %s" % library_item.__class__.__name__
- self.role = role
-
class DefaultUserPermissions( object ):
def __init__( self, user, action, role ):
self.user = user
@@ -354,7 +330,8 @@
OK = 'ok',
EMPTY = 'empty',
ERROR = 'error',
- DISCARDED = 'discarded' )
+ DISCARDED = 'discarded',
+ SETTING_METADATA = 'setting_metadata' )
permitted_actions = get_permitted_actions( filter='DATASET' )
file_path = "/tmp/"
engine = None
@@ -701,12 +678,10 @@
self.name = name or "Unnamed library"
self.description = description
self.root_folder = root_folder
- def get_library_item_info_templates( self, template_list=[], restrict=False ):
- # We only want the next available template in the inheritable hierarchy, so we'll only extend
- # template_list if it is empty
- if not template_list and self.library_info_template_associations:
- template_list.extend( [ lita.library_item_info_template for lita in self.library_info_template_associations if lita.library_item_info_template not in template_list ] )
- return template_list
+ def get_info_association( self, restrict=False ):
+ if self.info_association:
+ return self.info_association[0]
+ return None
class LibraryFolder( object ):
def __init__( self, name=None, description=None, item_count=0, order_id=None ):
@@ -725,18 +700,19 @@
folder.parent_id = self.id
folder.order_id = self.item_count
self.item_count += 1
- def get_library_item_info_templates( self, template_list=[], restrict=False ):
- # If restrict is True, we'll return only those templates directly associated with this Folder
- # We only want the next available template in the inheritable hierarchy, so we'll only extend
- # template_list if it is empty
- if not template_list and self.library_folder_info_template_associations:
- template_list.extend( [ lfita.library_item_info_template for lfita in self.library_folder_info_template_associations if lfita.library_item_info_template not in template_list ] )
- if not template_list and restrict not in [ 'True', True ] and self.parent:
- self.parent.get_library_item_info_templates( template_list )
- elif not template_list and restrict not in [ 'True', True, 'folder' ] and self.library_root:
- for library_root in self.library_root:
- library_root.get_library_item_info_templates( template_list )
- return template_list
+ def get_info_association( self, restrict=False ):
+ # If restrict is True, we will return this folder's info_association whether it
+ # exists or not. If restrict is False, we'll return the next available info_association
+ # in the inheritable hierarchy
+ if self.info_association:
+ return self.info_association[0]
+ if restrict:
+ return None
+ if self.parent:
+ return self.parent.get_info_association()
+ if self.library_root:
+ return self.library_root[0].get_info_association()
+ return None
@property
def active_components( self ):
return list( self.active_folders ) + list( self.active_datasets )
@@ -796,15 +772,6 @@
if not purged and self.purged:
raise Exception( "Cannot unpurge once purged" )
purged = property( get_purged, set_purged )
- def get_library_item_info_templates( self, template_list=[], restrict=False ):
- # If restrict is True, we'll return only those templates directly associated with this LibraryDataset
- # We only want the next available template in the inheritable hierarchy, so we'll only extend
- # template_list if it is empty
- if not template_list and self.library_dataset_info_template_associations:
- template_list.extend( [ ldita.library_item_info_template for ldita in self.library_dataset_info_template_associations if ldita.library_item_info_template not in template_list ] )
- if not template_list and restrict not in [ 'True', True ]:
- self.folder.get_library_item_info_templates( template_list, restrict )
- return template_list
class LibraryDatasetDatasetAssociation( DatasetInstance ):
def __init__( self,
@@ -867,105 +834,34 @@
return ldda
def clear_associated_files( self, metadata_safe = False, purge = False ):
return
- def get_library_item_info_templates( self, template_list=[], restrict=False ):
- # If restrict is True, we'll return only those templates directly associated with this LibraryDatasetDatasetAssociation
- # We only want the next available template in the inheritable hierarchy, so we'll only extend
- # template_list if it is empty
- if not template_list and self.library_dataset_dataset_info_template_associations:
- template_list.extend( [ lddita.library_item_info_template for lddita in self.library_dataset_dataset_info_template_associations if lddita.library_item_info_template not in template_list ] )
- if not template_list:
- self.library_dataset.get_library_item_info_templates( template_list, restrict )
- return template_list
-
-class LibraryInfoTemplateAssociation( object ):
- pass
-
-class LibraryFolderInfoTemplateAssociation( object ):
- pass
-
-class LibraryDatasetInfoTemplateAssociation( object ):
- pass
-
-class LibraryDatasetDatasetInfoTemplateAssociation( object ):
- pass
-
-class LibraryItemInfoTemplate( object ):
- def add_element( self, element = None, name = None, description = None ):
- if element:
- raise "undefined"
- else:
- new_elem = LibraryItemInfoTemplateElement()
- new_elem.name = name
- new_elem.description = description
- new_elem.order_id = self.item_count
- self.item_count += 1
- self.flush()
- new_elem.library_item_info_template_id = self.id
- new_elem.flush()
- return new_elem
-
-class LibraryItemInfoTemplateElement( object ):
- pass
+ def get_info_association( self, restrict=False ):
+ # If restrict is True, we will return this ldda's info_association whether it
+ # exists or not. If restrict is False, we'll return the next available info_association
+ # in the inheritable hierarchy
+ if self.info_association:
+ return self.info_association[0]
+ if restrict:
+ return None
+ return self.library_dataset.folder.get_info_association()
class LibraryInfoAssociation( object ):
- def __init__( self, user=None ):
- self.user = user
- def set_library_item( self, library_item ):
- if isinstance( library_item, Library ):
- self.library = library_item
- else:
- raise "Invalid Library specified: %s" % library_item.__class__.__name__
+ def __init__( self, library, form_definition, info ):
+ self.library = library
+ self.template = form_definition
+ self.info = info
class LibraryFolderInfoAssociation( object ):
- def __init__( self, user=None ):
- self.user = user
- def set_library_item( self, library_item ):
- if isinstance( library_item, LibraryFolder ):
- self.folder = library_item
- else:
- raise "Invalid Library specified: %s" % library_item.__class__.__name__
-
-class LibraryDatasetInfoAssociation( object ):
- def __init__( self, user=None ):
- self.user = user
- def set_library_item( self, library_item ):
- if isinstance( library_item, LibraryDataset ):
- self.library_dataset = library_item
- else:
- raise "Invalid Library specified: %s" % library_item.__class__.__name__
+ def __init__( self, folder, form_definition, info ):
+ self.folder = folder
+ self.template = form_definition
+ self.info = info
class LibraryDatasetDatasetInfoAssociation( object ):
- def __init__( self, user=None ):
- self.user = user
- def set_library_item( self, library_item ):
- if isinstance( library_item, LibraryDatasetDatasetAssociation ):
- self.library_dataset_dataset_association = library_item
- else:
- raise "Invalid Library specified: %s" % library_item.__class__.__name__
+ def __init__( self, library_dataset_dataset_association, form_definition, info ):
+ self.library_dataset_dataset_association = library_dataset_dataset_association
+ self.template = form_definition
+ self.info = info
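Each of the new constructors takes the owning item plus a FormDefinition (the template) and a FormValues object (the saved field contents). A hedged usage sketch, assuming hypothetical, already-flushed form_def and form_values objects:

    # form_def and form_values are hypothetical here; in practice they come
    # from the forms machinery elsewhere in the application.
    assoc = LibraryFolderInfoAssociation( folder, form_def, form_values )
    assoc.flush()  # assign_mapper gives mapped classes a flush() method
    # The folder should now resolve its own template without consulting its parents:
    assert folder.get_info_association( restrict=True ) is assoc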
-class LibraryItemInfo( object ):
- def __init__( self, user=None ):
- self.user = user
- def get_element_by_template_element( self, template_element, create_element=False ):
- for element in self.elements:
- if element.library_item_info_template_element == template_element:
- return element
- if create_element:
- # Template elements may have been added to the template after the
- # library item initially inherited it, so we'll add the additional
- # element to the library item
- element = LibraryItemInfoElement()
- element.library_item_info_template_element = template_element
- element.library_item_info = self
- element.flush()
- self.elements.append( element )
- return element
- else:
- return None
-
-class LibraryItemInfoElement( object ):
- pass
-
class ValidationError( object ):
def __init__( self, message=None, err_type=None, attributes=None ):
self.message = message
@@ -1102,8 +998,7 @@
raise
# Return filename inside hashed directory
return os.path.abspath( os.path.join( path, "metadata_%d.dat" % self.id ) )
-
-
+
class FormDefinition( object ):
def __init__(self, name=None, desc=None, fields=[], current_form=None):
self.name = name
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/mapping.py Thu Aug 13 13:44:04 2009 -0400
@@ -216,22 +216,6 @@
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
-LibraryItemInfoPermissions.table = Table( "library_item_info_permissions", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "action", TEXT ),
- Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), nullable=True, index=True ),
- Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
-
-LibraryItemInfoTemplatePermissions.table = Table( "library_item_info_template_permissions", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "action", TEXT ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), nullable=True, index=True ),
- Column( "role_id", Integer, ForeignKey( "role.id" ), index=True ) )
-
DefaultUserPermissions.table = Table( "default_user_permissions", metadata,
Column( "id", Integer, primary_key=True ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
@@ -299,105 +283,23 @@
Column( "purged", Boolean, index=True, default=False ),
Column( "genome_build", TrimmedString( 40 ) ) )
-LibraryItemInfoTemplateElement.table = Table( "library_item_info_template_element", metadata,
+LibraryInfoAssociation.table = Table( 'library_info_association', metadata,
Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "optional", Boolean, index=True, default=True ),
- Column( "deleted", Boolean, index=True, default=False ),
- Column( "name", TEXT ),
- Column( "description", TEXT ),
- Column( "type", TEXT, default='string' ),
- Column( "order_id", Integer ),
- Column( "options", JSONType() ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), index=True ) )
-
-LibraryItemInfoTemplate.table = Table( "library_item_info_template", metadata,
+ Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
+ Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
+LibraryFolderInfoAssociation.table = Table( 'library_folder_info_association', metadata,
Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "optional", Boolean, index=True, default=True ),
- Column( "deleted", Boolean, index=True, default=False ),
- Column( "name", TEXT ),
- Column( "description", TEXT ),
- Column( "item_count", Integer, default=0 ) )
+ Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+ Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
-LibraryInfoTemplateAssociation.table = Table( "library_info_template_association", metadata,
+LibraryDatasetDatasetInfoAssociation.table = Table( 'library_dataset_dataset_info_association', metadata,
Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), index=True ) )
-
-LibraryFolderInfoTemplateAssociation.table = Table( "library_folder_info_template_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), index=True ) )
-
-LibraryDatasetInfoTemplateAssociation.table = Table( "library_dataset_info_template_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), index=True ) )
-
-LibraryDatasetDatasetInfoTemplateAssociation.table = Table( "library_dataset_dataset_info_template_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), index=True ) )
-
-LibraryItemInfoElement.table = Table( "library_item_info_element", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "contents", JSONType() ),
- Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
- Column( "library_item_info_template_element_id", Integer, ForeignKey( "library_item_info_template_element.id" ), index=True ) )
-
-LibraryItemInfo.table = Table( "library_item_info", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "deleted", Boolean, index=True, default=False ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ),
- Column( "library_item_info_template_id", Integer, ForeignKey( "library_item_info_template.id" ), nullable=True, index=True )
- )
-
-LibraryInfoAssociation.table = Table( "library_info_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_id", Integer, ForeignKey( "library.id" ), nullable=True, index=True ),
- Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
-
-LibraryFolderInfoAssociation.table = Table( "library_folder_info_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
- Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
-
-LibraryDatasetInfoAssociation.table = Table( "library_dataset_info_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_dataset_id", Integer, ForeignKey( "library_dataset.id" ), nullable=True, index=True ),
- Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
-
-LibraryDatasetDatasetInfoAssociation.table = Table( "library_dataset_dataset_info_association", metadata,
- Column( "id", Integer, primary_key=True ),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
- Column( "library_item_info_id", Integer, ForeignKey( "library_item_info.id" ), index=True ),
- Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), nullable=True, index=True ) )
+ Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
Job.table = Table( "job", metadata,
Column( "id", Integer, primary_key=True ),
@@ -545,7 +447,6 @@
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
-
FormDefinitionCurrent.table = Table('form_definition_current', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
@@ -553,20 +454,17 @@
Column( "latest_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "deleted", Boolean, index=True, default=False ))
-# new table to store all the forms which is created by the admin
FormDefinition.table = Table('form_definition', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
Column( "desc", TEXT ),
- Column( "form_definition_current_id",
+ Column( "form_definition_current_id",
Integer,
- ForeignKey( "form_definition_current.id",
- name='for_def_form_def_current_id_fk',
- use_alter=True),
+ ForeignKey( "form_definition_current.id", name='for_def_form_def_current_id_fk', use_alter=True ),
index=True ),
- Column( "fields", JSONType()))
+ Column( "fields", JSONType() ) )
RequestType.table = Table('request_type', metadata,
Column( "id", Integer, primary_key=True),
@@ -598,15 +496,8 @@
Column( "state", TrimmedString( 255 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
-RequestState_table = Table('request_state', metadata,
- Column( "id", Integer, primary_key=True),
- Column( "create_time", DateTime, default=now ),
- Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "name", TrimmedString( 255 ), nullable=False ),
- Column( "desc", TEXT ))
-
Sample.table = Table('sample', metadata,
- Column( "id", Integer, primary_key=True),
+ Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
@@ -616,10 +507,8 @@
Column( "bar_code", TrimmedString( 255 ), index=True ),
Column( "deleted", Boolean, index=True, default=False ) )
-# new table to store all the possible sample states and the sample type it
-# belongs to
SampleState.table = Table('sample_state', metadata,
- Column( "id", Integer, primary_key=True),
+ Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "name", TrimmedString( 255 ), nullable=False ),
@@ -627,14 +516,12 @@
Column( "request_type_id", Integer, ForeignKey( "request_type.id" ), index=True ) )
SampleEvent.table = Table('sample_event', metadata,
- Column( "id", Integer, primary_key=True),
+ Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "sample_id", Integer, ForeignKey( "sample.id" ), index=True ),
Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
Column( "comment", TEXT ) )
-
-
# With the tables defined we can define the mappers and setup the
# relationships between the model objects.
@@ -652,7 +539,8 @@
assign_mapper( context, FormValues, FormValues.table,
properties=dict( form_definition=relation( FormDefinition,
primaryjoin=( FormValues.table.c.form_definition_id == FormDefinition.table.c.id ) )
- ) )
+ )
+)
assign_mapper( context, Request, Request.table,
properties=dict( values=relation( FormValues,
@@ -863,25 +751,20 @@
)
)
-assign_mapper( context, LibraryItemInfoPermissions, LibraryItemInfoPermissions.table,
+assign_mapper( context, Library, Library.table,
properties=dict(
- library_item_info = relation( LibraryItemInfo, backref="actions" ),
- role=relation( Role, backref="library_item_info_actions" )
- )
+ root_folder=relation( LibraryFolder, backref=backref( "library_root" ) )
+ )
)
-assign_mapper( context, LibraryItemInfoTemplatePermissions, LibraryItemInfoTemplatePermissions.table,
- properties=dict(
- library_item_info_template = relation( LibraryItemInfoTemplate, backref="actions" ),
- role=relation( Role, backref="library_item_info_template_actions" )
- )
-)
-
-assign_mapper( context, Library, Library.table,
- properties=dict(
- root_folder=relation( LibraryFolder,
- backref=backref( "library_root" ) )
- ) )
+assign_mapper( context, LibraryInfoAssociation, LibraryInfoAssociation.table,
+ properties=dict( library=relation( Library,
+ primaryjoin=( LibraryInfoAssociation.table.c.library_id == Library.table.c.id ), backref="info_association" ),
+ template=relation( FormDefinition,
+ primaryjoin=( LibraryInfoAssociation.table.c.form_definition_id == FormDefinition.table.c.id ) ),
+ info=relation( FormValues,
+ primaryjoin=( LibraryInfoAssociation.table.c.form_values_id == FormValues.table.c.id ) )
+ ) )
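With these relations in place, callers reach a template and its stored values directly through the association; the admin controller change later in this diff does exactly this. A minimal sketch:

    info_association = library.get_info_association()
    if info_association:
        template = info_association.template   # a FormDefinition
        info = info_association.info           # a FormValues row
        if info:
            # info.content is the jsonified list of field contents
            for index, value in enumerate( info.content ):
                print 'field_%i = %s' % ( index, value )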
assign_mapper( context, LibraryFolder, LibraryFolder.table,
properties=dict(
@@ -900,6 +783,15 @@
lazy=False,
viewonly=True )
) )
+
+assign_mapper( context, LibraryFolderInfoAssociation, LibraryFolderInfoAssociation.table,
+ properties=dict( folder=relation( LibraryFolder,
+ primaryjoin=( LibraryFolderInfoAssociation.table.c.library_folder_id == LibraryFolder.table.c.id ), backref="info_association" ),
+ template=relation( FormDefinition,
+ primaryjoin=( LibraryFolderInfoAssociation.table.c.form_definition_id == FormDefinition.table.c.id ) ),
+ info=relation( FormValues,
+ primaryjoin=( LibraryFolderInfoAssociation.table.c.form_values_id == FormValues.table.c.id ) )
+ ) )
assign_mapper( context, LibraryDataset, LibraryDataset.table,
properties=dict(
@@ -931,65 +823,14 @@
primaryjoin=( ( LibraryDatasetDatasetAssociation.table.c.parent_id == LibraryDatasetDatasetAssociation.table.c.id ) & ( LibraryDatasetDatasetAssociation.table.c.visible == True ) ) )
) )
-assign_mapper( context, LibraryItemInfoTemplateElement, LibraryItemInfoTemplateElement.table,
- properties=dict( library_item_info_template=relation( LibraryItemInfoTemplate, backref="elements" ),
- ) )
-
-assign_mapper( context, LibraryItemInfoTemplate, LibraryItemInfoTemplate.table )
-
-assign_mapper( context, LibraryInfoTemplateAssociation, LibraryInfoTemplateAssociation.table,
- properties=dict( library=relation( Library, backref="library_info_template_associations" ),
- library_item_info_template = relation( LibraryItemInfoTemplate, backref="library_info_template_associations" ),
- ) )
-
-assign_mapper( context, LibraryFolderInfoTemplateAssociation, LibraryFolderInfoTemplateAssociation.table,
- properties=dict( folder=relation( LibraryFolder, backref="library_folder_info_template_associations" ),
- library_item_info_template = relation( LibraryItemInfoTemplate, backref="library_folder_info_template_associations" ),
- ) )
-
-assign_mapper( context, LibraryDatasetInfoTemplateAssociation, LibraryDatasetInfoTemplateAssociation.table,
- properties=dict( library_dataset=relation( LibraryDataset, backref="library_dataset_info_template_associations" ),
- library_item_info_template = relation( LibraryItemInfoTemplate, backref="library_dataset_info_template_associations" ),
- ) )
-
-assign_mapper( context, LibraryDatasetDatasetInfoTemplateAssociation, LibraryDatasetDatasetInfoTemplateAssociation.table,
- properties=dict( library_dataset_dataset_association = relation( LibraryDatasetDatasetAssociation, backref="library_dataset_dataset_info_template_associations" ),
- library_item_info_template = relation( LibraryItemInfoTemplate, backref="library_dataset_dataset_info_template_associations" ),
- ) )
-
-assign_mapper( context, LibraryItemInfoElement, LibraryItemInfoElement.table,
- properties=dict( library_item_info=relation( LibraryItemInfo, backref="elements" ),
- library_item_info_template_element=relation( LibraryItemInfoTemplateElement )
- ) )
-
-assign_mapper( context, LibraryItemInfo, LibraryItemInfo.table,
- properties=dict( library_item_info_template=relation( LibraryItemInfoTemplate, backref="library_item_infos" ),
- user=relation( User.mapper )
- ) )
-
-assign_mapper( context, LibraryInfoAssociation, LibraryInfoAssociation.table,
- properties=dict( library=relation( Library, backref="library_info_associations" ),
- library_item_info = relation( LibraryItemInfo, backref="library_info_associations" ),
- user=relation( User.mapper )
- ) )
-
-assign_mapper( context, LibraryFolderInfoAssociation, LibraryFolderInfoAssociation.table,
- properties=dict( folder=relation( LibraryFolder, backref="library_folder_info_associations" ),
- library_item_info = relation( LibraryItemInfo, backref="library_folder_info_associations" ),
- user=relation( User.mapper )
- ) )
-
-assign_mapper( context, LibraryDatasetInfoAssociation, LibraryDatasetInfoAssociation.table,
- properties=dict( library_dataset=relation( LibraryDataset, backref="library_dataset_info_associations" ),
- library_item_info = relation( LibraryItemInfo, backref="library_dataset_info_associations" ),
- user=relation( User.mapper )
- ) )
-
assign_mapper( context, LibraryDatasetDatasetInfoAssociation, LibraryDatasetDatasetInfoAssociation.table,
- properties=dict( library_dataset_dataset_association = relation( LibraryDatasetDatasetAssociation, backref="library_dataset_dataset_info_associations" ),
- library_item_info = relation( LibraryItemInfo, backref="library_dataset_dataset_info_associations" ),
- user=relation( User.mapper )
- ) )
+ properties=dict( library_dataset_dataset_association=relation( LibraryDatasetDatasetAssociation,
+ primaryjoin=( LibraryDatasetDatasetInfoAssociation.table.c.library_dataset_dataset_association_id == LibraryDatasetDatasetAssociation.table.c.id ), backref="info_association" ),
+ template=relation( FormDefinition,
+ primaryjoin=( LibraryDatasetDatasetInfoAssociation.table.c.form_definition_id == FormDefinition.table.c.id ) ),
+ info=relation( FormValues,
+ primaryjoin=( LibraryDatasetDatasetInfoAssociation.table.c.form_values_id == FormValues.table.c.id ) )
+ ) )
assign_mapper( context, JobToInputDatasetAssociation, JobToInputDatasetAssociation.table,
properties=dict( job=relation( Job ), dataset=relation( HistoryDatasetAssociation, lazy=False ) ) )
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py
--- a/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0006_change_qual_datatype.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,3 +1,7 @@
+"""
+This migration script changes certain values in the history_dataset_association.extension
+column, specifically 'qual' is changed to 'qual454'.
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from migrate import *
@@ -13,9 +17,17 @@
metadata = MetaData( migrate_engine )
db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+
+def display_migration_details():
+ print "========================================"
+ print "This migration script changes certain values in the history_dataset_association.extension"
+ print "column, specifically 'qual' is chaged to be 'qual454'."
+ print "========================================"
+
HistoryDatasetAssociation_table = Table( "history_dataset_association", metadata, autoload=True )
-
+
def upgrade():
+ display_migration_details()
# Load existing tables
metadata.reflect()
    # Add an index to the history_dataset_association table
@@ -42,6 +54,5 @@
except Exception, e:
log.debug( "Dropping index 'ix_hda_extension' to history_dataset_association table failed: %s" % ( str( e ) ) )
-
def downgrade():
pass
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0007_sharing_histories.py
--- a/lib/galaxy/model/migrate/versions/0007_sharing_histories.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0007_sharing_histories.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,3 +1,8 @@
+"""
+This migration script creates the new history_user_share_association table, and adds
+a new boolean type column to the history table. This provides support for sharing
+histories in the same way that workflows are shared.
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from migrate import *
@@ -14,6 +19,13 @@
metadata = MetaData( migrate_engine )
+def display_migration_details():
+ print "========================================"
+ print "This migration script creates the new history_user_share_association table, and adds"
+ print "a new boolean type column to the history table. This provides support for sharing"
+ print "histories in the same way that workflows are shared."
+ print "========================================"
+
HistoryUserShareAssociation_table = Table( "history_user_share_association", metadata,
Column( "id", Integer, primary_key=True ),
Column( "history_id", Integer, ForeignKey( "history.id" ), index=True ),
@@ -21,6 +33,7 @@
)
def upgrade():
+ display_migration_details()
# Load existing tables
metadata.reflect()
# Create the history_user_share_association table
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0008_galaxy_forms.py
--- a/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0008_galaxy_forms.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,3 +1,14 @@
+"""
+This migration script adds the following new tables for supporting Galaxy forms:
+1) form_definition_current
+2) form_definition
+3) form_values
+4) request_type
+5) request
+6) sample
+7) sample_state
+8) sample_event
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exceptions import *
@@ -22,15 +33,26 @@
metadata = MetaData( migrate_engine )
db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+def display_migration_details():
+ print "========================================"
+ print "This migration script adds the following new tables for supporting Galaxy forms:"
+ print "1) form_definition_current"
+ print "2) form_definition"
+ print "3) form_values"
+ print "4) request_type"
+ print "5) request"
+ print "6) sample"
+ print "7) sample_state"
+ print "8) sample_event"
+ print "========================================"
FormDefinitionCurrent_table = Table('form_definition_current', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
- Column( "latest_form_id", Integer,
- #ForeignKey( "form_definition.id", use_alter=True, name='form_definition_current_latest_form_id_fk'),
- index=True ),
+ Column( "latest_form_id", Integer, index=True ),
Column( "deleted", Boolean, index=True, default=False ))
+
FormDefinition_table = Table('form_definition', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
@@ -55,7 +77,7 @@
Column( "desc", TEXT ),
Column( "request_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
Column( "sample_form_id", Integer, ForeignKey( "form_definition.id" ), index=True ) )
-# request table
+
Request_table = Table('request', metadata,
Column( "id", Integer, primary_key=True),
Column( "create_time", DateTime, default=now ),
@@ -94,13 +116,10 @@
Column( "sample_state_id", Integer, ForeignKey( "sample_state.id" ), index=True ),
Column( "comment", TEXT ) )
-
-
-
def upgrade():
+ display_migration_details()
# Load existing tables
metadata.reflect()
-
# Add all of the new tables above
# metadata.create_all()
try:
@@ -145,8 +164,6 @@
SampleEvent_table.create()
except Exception, e:
log.debug( "Creating sample_event table failed: %s" % str( e ) )
-
-
def downgrade():
# Load existing tables
@@ -183,7 +200,3 @@
SampleEvent_table.drop()
except Exception, e:
log.debug( "Dropping sample_event table failed: %s" % str( e ) )
-
-
-
-
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0009_request_table.py
--- a/lib/galaxy/model/migrate/versions/0009_request_table.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0009_request_table.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,3 +1,8 @@
+"""
+This migration script adds a new column to 2 tables:
+1) a new boolean type column named 'submitted' to the 'request' table
+2) a new string type column named 'bar_code' to the 'sample' table
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from migrate import *
@@ -15,11 +20,17 @@
metadata = MetaData( migrate_engine )
+def display_migration_details():
+ print "========================================"
+ print "This migration script adds a new column to 2 tables:"
+ print "1) a new boolean type column named 'submitted' to the 'request' table"
+ print "2) a new string type column named 'bar_code' to the 'sample' table"
+ print "========================================"
def upgrade():
+ display_migration_details()
# Load existing tables
metadata.reflect()
-
# Add 1 column to the request table
try:
Request_table = Table( "request", metadata, autoload=True )
@@ -33,7 +44,6 @@
assert col is Request_table.c.submitted
except Exception, e:
log.debug( "Adding column 'submitted' to request table failed: %s" % ( str( e ) ) )
-
# Add 1 column to the sample table
try:
Sample_table = Table( "sample", metadata, autoload=True )
@@ -49,4 +59,4 @@
log.debug( "Adding column 'bar_code' to sample table failed: %s" % ( str( e ) ) )
def downgrade():
- pass
\ No newline at end of file
+ pass
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py
--- a/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0010_hda_display_at_authz_table.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,3 +1,13 @@
+"""
+This migration script adds the history_dataset_association_display_at_authorization table,
+which allows 'private' datasets to be displayed at external sites without making them public.
+If using mysql, this script will display the following error, which is corrected in the next
+migration script:
+
+history_dataset_association_display_at_authorization table failed: (OperationalError)
+(1059, "Identifier name 'ix_history_dataset_association_display_at_authorization_update_time'
+is too long
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exceptions import *
@@ -22,6 +32,17 @@
metadata = MetaData( migrate_engine )
db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+def display_migration_details():
+ print "========================================"
+ print "This migration script adds the history_dataset_association_display_at_authorization table, which"
+ print "allows 'private' datasets to be displayed at external sites without making them public."
+ print ""
+ print "If using mysql, this script will display the following error, which is corrected in the next migration"
+ print "script: history_dataset_association_display_at_authorization table failed: (OperationalError)"
+ print "(1059, 'Identifier name 'ix_history_dataset_association_display_at_authorization_update_time'"
+ print "is too long."
+ print "========================================"
+
HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -31,13 +52,14 @@
Column( "site", TrimmedString( 255 ) ) )
def upgrade():
+ display_migration_details()
# Load existing tables
metadata.reflect()
try:
HistoryDatasetAssociationDisplayAtAuthorization_table.create()
except Exception, e:
log.debug( "Creating history_dataset_association_display_at_authorization table failed: %s" % str( e ) )
-
+
def downgrade():
# Load existing tables
metadata.reflect()
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py
--- a/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,3 +1,8 @@
+"""
+This script fixes a problem introduced in 0010_hda_display_at_authz_table.py. MySQL has a
+name length limit and thus the index "ix_hdadaa_history_dataset_association_id" has to be
+manually created.
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exceptions import *
@@ -22,6 +27,13 @@
metadata = MetaData( migrate_engine )
db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+def display_migration_details():
+ print "========================================"
+ print "This script fixes a problem introduced in the previous migration script ( 9->10 ). MySQL"
+ print "has a name length limit and thus the index 'ix_hdadaa_history_dataset_association_id' has"
+ print "to be manually created."
+ print "========================================"
+
HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -31,6 +43,7 @@
Column( "site", TrimmedString( 255 ) ) )
def upgrade():
+ display_migration_details()
if migrate_engine.name == 'mysql':
# Load existing tables
metadata.reflect()
@@ -39,7 +52,7 @@
i.create()
except Exception, e:
log.debug( "Adding index 'ix_hdadaa_history_dataset_association_id' to table 'history_dataset_association_display_at_authorization' table failed: %s" % str( e ) )
-
+
def downgrade():
if migrate_engine.name == 'mysql':
# Load existing tables
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0012_user_address.py
--- a/lib/galaxy/model/migrate/versions/0012_user_address.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/model/migrate/versions/0012_user_address.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,13 +1,20 @@
+"""
+This script adds a new user_address table that is currently only used with sample requests, where
+a user can select from a list of his addresses to associate with the request. This script also
+drops the request.submitted column which was boolean and replaces it with a request.state column
+which is a string, allowing for more flexibility with request states.
+"""
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exceptions import *
from migrate import *
from migrate.changeset import *
-
import datetime
now = datetime.datetime.utcnow
+import sys, logging
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
-import sys, logging
log = logging.getLogger( __name__ )
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler( sys.stdout )
@@ -16,12 +23,16 @@
handler.setFormatter( formatter )
log.addHandler( handler )
-# Need our custom types, but don't import anything else from model
-from galaxy.model.custom_types import *
-
metadata = MetaData( migrate_engine )
db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+def display_migration_details():
+ print "========================================"
+ print "This script adds a new user_address table that is currently only used with sample requests, where"
+ print "a user can select from a list of his addresses to associate with the request. This script also"
+ print "drops the request.submitted column which was boolean and replaces it with a request.state column"
+ print "which is a string, allowing for more flexibility with request states."
+ print "========================================"
UserAddress_table = Table( "user_address", metadata,
Column( "id", Integer, primary_key=True),
@@ -40,27 +51,15 @@
Column( "deleted", Boolean, index=True, default=False ),
Column( "purged", Boolean, index=True, default=False ) )
-#RequestState_table = Table('request_state', metadata,
-# Column( "id", Integer, primary_key=True),
-# Column( "create_time", DateTime, default=now ),
-# Column( "update_time", DateTime, default=now, onupdate=now ),
-# Column( "name", TrimmedString( 255 ), nullable=False ),
-# Column( "desc", TEXT ))
-
def upgrade():
+ display_migration_details()
# Load existing tables
metadata.reflect()
-
# Add all of the new tables above
try:
UserAddress_table.create()
except Exception, e:
log.debug( "Creating user_address table failed: %s" % str( e ) )
-# try:
-# RequestState_table.create()
-# except Exception, e:
-# log.debug( "Creating request_state table failed: %s" % str( e ) )
-
# Add 1 column to the request_type table
try:
RequestType_table = Table( "request_type", metadata, autoload=True )
@@ -74,7 +73,6 @@
assert col is RequestType_table.c.deleted
except Exception, e:
log.debug( "Adding column 'deleted' to request_type table failed: %s" % ( str( e ) ) )
-
# Delete the submitted column
try:
Request_table = Table( "request", metadata, autoload=True )
@@ -92,27 +90,6 @@
assert col is Request_table.c.state
except Exception, e:
log.debug( "Adding column 'state' to request table failed: %s" % ( str( e ) ) )
-#
-# # new column which points to the current state in the request_state table
-# try:
-# col = Column( "request_state_id", Integer, index=True )
-# col.create( Request_table )
-# assert col is Request_table.c.request_state_id
-# except Exception, e:
-# log.debug( "Adding column 'request_state_id' to request table failed: %s" % ( str( e ) ) )
-# # Add 1 foreign key constraint to the form_definition_current table
-# if RequestState_table and Request_table:
-# try:
-# cons = ForeignKeyConstraint( [Request_table.c.request_state_id],
-# [RequestState_table.c.id],
-# name='request_request_state_id_fk' )
-# # Create the constraint
-# cons.create()
-# except Exception, e:
-# log.debug( "Adding foreign key constraint 'request_request_state_id_fk' to table 'request' failed: %s" % ( str( e ) ) )
-
def downgrade():
pass
-
-
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/model/migrate/versions/0013_change_lib_item_templates_to_forms.py Thu Aug 13 13:44:04 2009 -0400
@@ -0,0 +1,255 @@
+"""
+This migration script eliminates all of the tables that were used for the 1st version of the
+library templates where template fields and contents were each stored as a separate table row
+in various library item tables. All of these tables are dropped in this script, eliminating all
+existing template data. A total of 14 existing tables are dropped.
+
+We're now basing library templates on forms, so field contents are
+stored as a jsonified list in the form_values table. This script introduces the following 3
+new association tables:
+1) library_info_association
+2) library_folder_info_association
+3) library_dataset_dataset_info_association
+
+If using mysql, this script will throw an (OperationalError) exception due to a long index name on
+the library_dataset_dataset_info_association table, which is OK because the script creates an index
+with a shortened name.
+"""
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exceptions import *
+from migrate import *
+from migrate.changeset import *
+import sys, logging
+
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+
+def display_migration_details():
+ print "========================================"
+ print "This migration script eliminates all of the tables that were used for the 1st version of the"
+ print "library templates where template fields and contents were each stored as a separate table row"
+ print "in various library item tables. All of these tables are dropped in this script, eliminating all"
+ print "existing template data. A total of 14 existing tables are dropped."
+ print ""
+ print "We're now basing library templates on Galaxy forms, so field contents are stored as a jsonified"
+ print "list in the form_values table. This script introduces the following 3 new association tables:"
+ print "1) library_info_association"
+ print "2) library_folder_info_association"
+ print "3) library_dataset_dataset_info_association"
+ print ""
+ print "If using mysql, this script will throw an (OperationalError) exception due to a long index name"
+ print "on the library_dataset_dataset_info_association table, which is OK because the script creates"
+ print "an index with a shortened name."
+ print "========================================"
+
+if migrate_engine.name == 'postgres':
+ # http://blog.pythonisito.com/2008/01/cascading-drop-table-with-sqlalchemy.ht…
+ from sqlalchemy.databases import postgres
+ class PGCascadeSchemaDropper(postgres.PGSchemaDropper):
+ def visit_table(self, table):
+ for column in table.columns:
+ if column.default is not None:
+ self.traverse_single(column.default)
+ self.append("\nDROP TABLE " +
+ self.preparer.format_table(table) +
+ " CASCADE")
+ self.execute()
+ postgres.dialect.schemadropper = PGCascadeSchemaDropper
+
+LibraryInfoAssociation_table = Table( 'library_info_association', metadata,
+ Column( "id", Integer, primary_key=True),
+ Column( "library_id", Integer, ForeignKey( "library.id" ), index=True ),
+ Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
+LibraryFolderInfoAssociation_table = Table( 'library_folder_info_association', metadata,
+ Column( "id", Integer, primary_key=True),
+ Column( "library_folder_id", Integer, ForeignKey( "library_folder.id" ), nullable=True, index=True ),
+ Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
+LibraryDatasetDatasetInfoAssociation_table = Table( 'library_dataset_dataset_info_association', metadata,
+ Column( "id", Integer, primary_key=True),
+ Column( "library_dataset_dataset_association_id", Integer, ForeignKey( "library_dataset_dataset_association.id" ), nullable=True, index=True ),
+ Column( "form_definition_id", Integer, ForeignKey( "form_definition.id" ), index=True ),
+ Column( "form_values_id", Integer, ForeignKey( "form_values.id" ), index=True ) )
+
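All three new tables share one shape: a foreign key to the library item, one to form_definition (the template), and one to form_values (the saved field contents). A hypothetical populated pairing, with ids and values invented purely for illustration:

    # library_folder_info_association row (hypothetical):
    #   id=1, library_folder_id=7, form_definition_id=3, form_values_id=12
    # form_values row id=12 then holds the jsonified field list, e.g.:
    #   content = [ "NHGRI", "Illumina", "36bp single end" ]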
+def upgrade():
+ display_migration_details()
+ # Load existing tables
+ metadata.reflect()
+ # Drop all of the original library_item_info tables
+    # NOTE: all existing library item info template data is eliminated here via table drops
+ try:
+ LibraryItemInfoPermissions_table = Table( "library_item_info_permissions", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryItemInfoPermissions_table = None
+ log.debug( "Failed loading table library_item_info_permissions" )
+ try:
+ LibraryItemInfoPermissions_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_item_info_permissions table failed: %s" % str( e ) )
+
+ try:
+ LibraryItemInfoTemplatePermissions_table = Table( "library_item_info_template_permissions", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryItemInfoTemplatePermissions_table = None
+ log.debug( "Failed loading table library_item_info_template_permissions" )
+ try:
+ LibraryItemInfoTemplatePermissions_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_item_info_template_permissions table failed: %s" % str( e ) )
+
+ try:
+ LibraryItemInfoElement_table = Table( "library_item_info_element", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryItemInfoElement_table = None
+ log.debug( "Failed loading table library_item_info_element" )
+ try:
+ LibraryItemInfoElement_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_item_info_element table failed: %s" % str( e ) )
+
+ try:
+ LibraryItemInfoTemplateElement_table = Table( "library_item_info_template_element", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryItemInfoTemplateElement_table = None
+ log.debug( "Failed loading table library_item_info_template_element" )
+ try:
+ LibraryItemInfoTemplateElement_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_item_info_template_element table failed: %s" % str( e ) )
+
+ try:
+ LibraryInfoTemplateAssociation_table = Table( "library_info_template_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryInfoTemplateAssociation_table = None
+ log.debug( "Failed loading table library_info_template_association" )
+ try:
+ LibraryInfoTemplateAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_info_template_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryFolderInfoTemplateAssociation_table = Table( "library_folder_info_template_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryFolderInfoTemplateAssociation_table = None
+ log.debug( "Failed loading table library_folder_info_template_association" )
+ try:
+ LibraryFolderInfoTemplateAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_folder_info_template_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryDatasetInfoTemplateAssociation_table = Table( "library_dataset_info_template_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryDatasetInfoTemplateAssociation_table = None
+ log.debug( "Failed loading table library_dataset_info_template_association" )
+ try:
+ LibraryDatasetInfoTemplateAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_dataset_info_template_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryDatasetDatasetInfoTemplateAssociation_table = Table( "library_dataset_dataset_info_template_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryDatasetDatasetInfoTemplateAssociation_table = None
+ log.debug( "Failed loading table library_dataset_dataset_info_template_association" )
+ try:
+ LibraryDatasetDatasetInfoTemplateAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_dataset_dataset_info_template_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryInfoAssociation_table = Table( "library_info_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryInfoAssociation_table = None
+ log.debug( "Failed loading table library_info_association" )
+ try:
+ LibraryInfoAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_info_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryFolderInfoAssociation_table = Table( "library_folder_info_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryFolderInfoAssociation_table = None
+ log.debug( "Failed loading table library_folder_info_association" )
+ try:
+ LibraryFolderInfoAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_folder_info_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryDatasetInfoAssociation_table = Table( "library_dataset_info_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryDatasetInfoAssociation_table = None
+ log.debug( "Failed loading table library_dataset_info_association" )
+ try:
+ LibraryDatasetInfoAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_dataset_info_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryDatasetDatasetInfoAssociation_table = Table( "library_dataset_dataset_info_association", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryDatasetDatasetInfoAssociation_table = None
+ log.debug( "Failed loading table library_dataset_dataset_info_association" )
+ try:
+ LibraryDatasetDatasetInfoAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_dataset_dataset_info_association table failed: %s" % str( e ) )
+
+ try:
+ LibraryItemInfo_table = Table( "library_item_info", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryItemInfo_table = None
+ log.debug( "Failed loading table library_item_info" )
+ try:
+ LibraryItemInfo_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_item_info table failed: %s" % str( e ) )
+
+ try:
+ LibraryItemInfoTemplate_table = Table( "library_item_info_template", metadata, autoload=True )
+ except NoSuchTableError:
+ LibraryItemInfoTemplate_table = None
+ log.debug( "Failed loading table library_item_info_template" )
+ try:
+ LibraryItemInfoTemplate_table.drop()
+ except Exception, e:
+ log.debug( "Dropping library_item_info_template table failed: %s" % str( e ) )
+
+ # Create all new tables above
+ try:
+ LibraryInfoAssociation_table.create()
+ except Exception, e:
+ log.debug( "Creating library_info_association table failed: %s" % str( e ) )
+ try:
+ LibraryFolderInfoAssociation_table.create()
+ except Exception, e:
+ log.debug( "Creating library_folder_info_association table failed: %s" % str( e ) )
+ try:
+ LibraryDatasetDatasetInfoAssociation_table.create()
+ except Exception, e:
+ log.debug( "Creating library_dataset_dataset_info_association table failed: %s" % str( e ) )
+ # Fix index on LibraryDatasetDatasetInfoAssociation_table for mysql
+ if migrate_engine.name == 'mysql':
+ # Load existing tables
+ metadata.reflect()
+ i = Index( "ix_lddaia_ldda_id", LibraryDatasetDatasetInfoAssociation_table.c.library_dataset_dataset_association_id )
+ try:
+ i.create()
+ except Exception, e:
+ log.debug( "Adding index 'ix_lddaia_ldda_id' to table 'library_dataset_dataset_info_association' table failed: %s" % str( e ) )
+
+def downgrade():
+ log.debug( "Downgrade is not possible." )
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/security/__init__.py Thu Aug 13 13:44:04 2009 -0400
@@ -75,12 +75,10 @@
self.permitted_actions = permitted_actions
# List of "library_item" objects and their associated permissions and info template objects
self.library_item_assocs = (
- ( self.model.Library, self.model.LibraryPermissions, self.model.LibraryInfoAssociation ),
- ( self.model.LibraryFolder, self.model.LibraryFolderPermissions, self.model.LibraryFolderInfoAssociation ),
- ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions, self.model.LibraryDatasetInfoAssociation ),
- ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions, self.model.LibraryDatasetDatasetInfoAssociation ),
- ( self.model.LibraryItemInfo, self.model.LibraryItemInfoPermissions, None ),
- ( self.model.LibraryItemInfoTemplate, self.model.LibraryItemInfoTemplatePermissions, None ) )
+ ( self.model.Library, self.model.LibraryPermissions ),
+ ( self.model.LibraryFolder, self.model.LibraryFolderPermissions ),
+ ( self.model.LibraryDataset, self.model.LibraryDatasetPermissions ),
+ ( self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions ) )
def allow_action( self, user, action, **kwd ):
if 'dataset' in kwd:
return self.allow_dataset_action( user, action, kwd[ 'dataset' ] )
@@ -117,7 +115,7 @@
user_role_ids = [ r.id for r in user.all_roles() ]
# Check to see if user has access to any of the roles
allowed_role_assocs = []
- for item_class, permission_class, info_association_class in self.library_item_assocs:
+ for item_class, permission_class in self.library_item_assocs:
if isinstance( library_item, item_class ):
if permission_class == self.model.LibraryPermissions:
allowed_role_assocs = permission_class.filter_by( action=action.action, library_id=library_item.id ).all()
@@ -127,10 +125,6 @@
allowed_role_assocs = permission_class.filter_by( action=action.action, library_dataset_id=library_item.id ).all()
elif permission_class == self.model.LibraryDatasetDatasetAssociationPermissions:
allowed_role_assocs = permission_class.filter_by( action=action.action, library_dataset_dataset_association_id=library_item.id ).all()
- elif permission_class == self.model.LibraryItemInfoPermissions:
- allowed_role_assocs = permission_class.filter_by( action=action.action, library_item_info_id=library_item.id ).all()
- elif permission_class == self.model.LibraryItemInfoTemplatePermissions:
- allowed_role_assocs = permission_class.filter_by( action=action.action, library_item_info_template_id=library_item.id ).all()
for allowed_role_assoc in allowed_role_assocs:
if allowed_role_assoc.role_id in user_role_ids:
return True
@@ -366,7 +360,7 @@
role_assoc.delete()
role_assoc.flush()
# Add the new permissions on library_item
- for item_class, permission_class, info_association_class in self.library_item_assocs:
+ for item_class, permission_class in self.library_item_assocs:
if isinstance( library_item, item_class ):
for action, roles in permissions.items():
if isinstance( action, Action ):
@@ -396,12 +390,12 @@
permissions[role_assoc.action] = [ role_assoc.role ]
self.set_all_library_permissions( target_library_item, permissions )
if user:
- # The user passed will be the current Galaxy user. Make sure user's private role is included
item_class = None
- for item_class, permission_class, info_association_class in self.library_item_assocs:
+ for item_class, permission_class in self.library_item_assocs:
if isinstance( target_library_item, item_class ):
break
if item_class:
+ # Make sure user's private role is included
private_role = self.model.security_agent.get_private_user_role( user )
for name, action in self.permitted_actions.items():
if not permission_class.filter_by( role_id = private_role.id, action = action.action ).first():
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Aug 13 13:44:04 2009 -0400
@@ -128,7 +128,7 @@
tree = util.parse_xml( config_file )
root = tree.getroot()
# Allow specifying a different tool subclass to instantiate
- if root.find( "type" ):
+ if root.find( "type" ) is not None:
type_elem = root.find( "type" )
module = type_elem.get( 'module', 'galaxy.tools' )
cls = type_elem.get( 'class' )
@@ -1468,7 +1468,7 @@
out_data[ name ] = data
return out_data
- def exec_after_process( self, app, inp_data, out_data, param_dict ):
+ def exec_after_process( self, app, inp_data, out_data, param_dict, job = None ):
if self.tool_type == 'data_source':
name, data = out_data.items()[0]
data.set_size()
@@ -1572,6 +1572,18 @@
dataset.history.add( new_data )
new_data.flush()
return primary_datasets
+
+class SetMetadataTool( Tool ):
+ def exec_after_process( self, app, inp_data, out_data, param_dict, job = None ):
+ for name, dataset in inp_data.iteritems():
+ external_metadata = galaxy.datatypes.metadata.JobExternalOutputMetadataWrapper( job )
+ if external_metadata.external_metadata_set_successfully( dataset ):
+ dataset.metadata.from_JSON_dict( external_metadata.get_output_filenames_by_dataset( dataset ).filename_out )
+            # If setting external metadata failed, how can we inform the user?
+            # For now, we leave the default metadata and restore the dataset's original state.
+ dataset.datatype.after_edit( dataset )
+ dataset.state = param_dict.get( '__ORIGINAL_DATASET_STATE__' )
+ dataset.flush()
# ---- Utility classes to be factored out -----------------------------------
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/tools/actions/metadata.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/tools/actions/metadata.py Thu Aug 13 13:44:04 2009 -0400
@@ -0,0 +1,48 @@
+from . import ToolAction
+from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
+
+import logging
+log = logging.getLogger( __name__ )
+
+class SetMetadataToolAction( ToolAction ):
+ """Tool action used for setting external metadata on an existing dataset"""
+
+ def execute( self, tool, trans, incoming = {}, set_output_hid = False ):
+ for name, value in incoming.iteritems():
+ if isinstance( value, trans.app.model.HistoryDatasetAssociation ):
+ dataset = value
+ dataset_name = name
+ break
+ else:
+ raise Exception( 'The dataset to set metadata on could not be determined.' )
+
+ # Create the job object
+ job = trans.app.model.Job()
+ job.session_id = trans.get_galaxy_session().id
+ job.history_id = trans.history.id
+ job.tool_id = tool.id
+ try:
+ # For backward compatibility, some tools may not have versions yet.
+ job.tool_version = tool.version
+ except:
+ job.tool_version = "1.0.0"
+ job.flush() #ensure job.id is available
+
+ #add parameters to job_parameter table
+        incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state # store the original dataset state so we can restore it. A separate table might be better (no chance of losing the original state)?
+ external_metadata_wrapper = JobExternalOutputMetadataWrapper( job )
+ cmd_line = external_metadata_wrapper.setup_external_metadata( dataset, exec_dir = None, tmp_dir = trans.app.config.new_file_path, dataset_files_path = trans.app.model.Dataset.file_path, output_fnames = None, config_root = None, datatypes_config = None, kwds = { 'overwrite' : True } )
+ incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
+ for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ job.add_parameter( name, value )
+ #add the dataset to job_to_input_dataset table
+ job.add_input_dataset( dataset_name, dataset )
+ #Need a special state here to show that metadata is being set and also allow the job to run
+ # i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
+ dataset.state = dataset.states.SETTING_METADATA
+ trans.app.model.flush()
+
+ # Queue the job for execution
+ trans.app.job_queue.put( job.id, tool )
+ trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return []
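
SetMetadataToolAction.execute() above relies on Python's for/else construct: the else branch runs only when the loop finishes without hitting break, which is how the action detects that none of the incoming parameters was a HistoryDatasetAssociation. A minimal sketch of the idiom with hypothetical parameter values:

    incoming = { 'dbkey': 'hg18', 'input1': 42 }
    for name, value in incoming.items():
        if isinstance( value, int ):    # stand-in for the HistoryDatasetAssociation check
            dataset_name = name
            break
    else:
        # Reached only if the loop never executed "break"
        raise Exception( 'The dataset to set metadata on could not be determined.' )
    print( dataset_name )    # input1
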
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,4 +1,5 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+from . import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
@@ -8,7 +9,7 @@
import logging
log = logging.getLogger( __name__ )
-class UploadToolAction( object ):
+class UploadToolAction( ToolAction ):
# Action for uploading files
def __init__( self ):
self.empty = False
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Thu Aug 13 13:44:04 2009 -0400
@@ -1,9 +1,9 @@
-import shutil, StringIO, operator, urllib, gzip, tempfile, sets, string
+import shutil, StringIO, operator, urllib, gzip, tempfile, sets, string, sys
from datetime import datetime, timedelta
from galaxy import util, datatypes
from galaxy.web.base.controller import *
from galaxy.model.orm import *
-import sys
+from galaxy.web.controllers.forms import get_all_forms, get_form_widgets
import logging
log = logging.getLogger( __name__ )
@@ -710,6 +710,7 @@
library=trans.app.model.Library.get( id ),
deleted=deleted,
created_ldda_ids=created_ldda_ids,
+ forms=get_all_forms( trans, filter=dict(deleted=False) ),
msg=msg,
messagetype=messagetype,
show_deleted=show_deleted )
@@ -751,6 +752,22 @@
messagetype='done' ) )
return trans.fill_template( '/admin/library/new_library.mako', msg=msg, messagetype=messagetype )
elif action == 'information':
+ # See if we have any associated templates
+ info_association = library.get_info_association()
+ if info_association:
+ template = info_association.template
+ # See if we have any field contents
+ info = info_association.info
+ if info:
+ field_contents = {}
+ for index, value in enumerate( info.content ):
+ key = 'field_%i' % index
+ field_contents[ key ] = value
+ widgets = get_form_widgets( trans, template, field_contents )
+ else:
+ widgets = get_form_widgets( trans, template )
+ else:
+ widgets = []
if params.get( 'rename_library_button', False ):
old_name = library.name
new_name = util.restore_text( params.name )
@@ -759,8 +776,7 @@
msg = 'Enter a valid name'
return trans.fill_template( '/admin/library/library_info.mako',
library=library,
- restrict=params.get( 'restrict', False ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype='error' )
else:
@@ -780,12 +796,10 @@
messagetype='done' ) )
return trans.fill_template( '/admin/library/library_info.mako',
library=library,
- restrict=params.get( 'restrict', False ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif action == 'delete':
- # TODO: need to revamp the way we delete libraries, folders and contained LibraryDatasets.
def delete_folder( library_folder ):
library_folder.refresh()
for folder in library_folder.folders:
@@ -939,6 +953,22 @@
msg=msg,
messagetype=messagetype )
elif action == 'information':
+ # See if we have any associated templates
+ info_association = folder.get_info_association()
+ if info_association:
+ template = info_association.template
+ # See if we have any field contents
+ info = info_association.info
+ if info:
+ field_contents = {}
+ for index, value in enumerate( info.content ):
+ key = 'field_%i' % index
+ field_contents[ key ] = value
+ widgets = get_form_widgets( trans, template, field_contents )
+ else:
+ widgets = get_form_widgets( trans, template )
+ else:
+ widgets = []
if params.get( 'rename_folder_button', False ):
old_name = folder.name
new_name = util.restore_text( params.name )
@@ -948,8 +978,7 @@
return trans.fill_template( '/admin/library/folder_info.mako',
folder=folder,
library_id=library_id,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype='error' )
else:
@@ -967,12 +996,10 @@
return trans.fill_template( '/admin/library/folder_info.mako',
folder=folder,
library_id=library_id,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif action == 'delete':
- # TODO: need to revamp the way we delete folders and contained LibraryDatasets
def delete_folder( folder ):
folder.refresh()
for subfolder in folder.active_folders:
@@ -1045,8 +1072,6 @@
return trans.fill_template( '/admin/library/library_dataset_info.mako',
library_dataset=library_dataset,
library_id=library_id,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
msg=msg,
messagetype=messagetype )
elif action == 'permissions':
@@ -1101,7 +1126,7 @@
created_ldda_ids = trans.webapp.controllers[ 'library_dataset' ].upload_dataset( trans,
controller='admin',
library_id=library_id,
- folder_id=folder_id,
+ folder_id=folder_id,
replace_dataset=replace_dataset,
**kwd )
if created_ldda_ids:
@@ -1126,6 +1151,13 @@
msg=util.sanitize_text( msg ),
messagetype=messagetype ) )
elif not id or replace_dataset:
+ # See if we have any associated templates
+ info_association = folder.get_info_association()
+ if info_association:
+ template = info_association.template
+ widgets = get_form_widgets( trans, template )
+ else:
+ widgets = []
upload_option = params.get( 'upload_option', 'upload_file' )
# No dataset(s) specified, so display the upload form. Send list of data formats to the form
# so the "extension" select list can be populated dynamically
@@ -1150,6 +1182,7 @@
last_used_build=last_used_build,
roles=roles,
history=history,
+ widgets=widgets,
msg=msg,
messagetype=messagetype,
replace_dataset=replace_dataset )
@@ -1175,6 +1208,22 @@
id=library_id,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
+ # See if we have any associated templates
+ info_association = ldda.get_info_association()
+ if info_association:
+ template = info_association.template
+ # See if we have any field contents
+ info = info_association.info
+ if info:
+ field_contents = {}
+ for index, value in enumerate( info.content ):
+ key = 'field_%i' % index
+ field_contents[ key ] = value
+ widgets = get_form_widgets( trans, template, field_contents )
+ else:
+ widgets = get_form_widgets( trans, template )
+ else:
+ widgets = []
if action == 'permissions':
if params.get( 'update_roles_button', False ):
permissions = {}
@@ -1242,6 +1291,7 @@
library_id=library_id,
deleted=deleted,
show_deleted=show_deleted,
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif action == 'edit_info':
@@ -1255,8 +1305,7 @@
ldda=ldda,
library_id=library_id,
datatypes=ldatatypes,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
else:
@@ -1293,8 +1342,7 @@
ldda=ldda,
library_id=library_id,
datatypes=ldatatypes,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif params.get( 'detect', False ):
@@ -1312,8 +1360,7 @@
ldda=ldda,
library_id=library_id,
datatypes=ldatatypes,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif params.get( 'delete', False ):
@@ -1324,8 +1371,7 @@
ldda=ldda,
library_id=library_id,
datatypes=ldatatypes,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
ldda.datatype.before_edit( ldda )
@@ -1340,8 +1386,7 @@
ldda=ldda,
library_id=library_id,
datatypes=ldatatypes,
- restrict=params.get( 'restrict', True ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif ids:
@@ -1553,26 +1598,17 @@
last_used_build=last_used_build,
roles=roles,
history=history,
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
@web.expose
@web.require_admin
- def info_template( self, trans, library_id, id=None, num_fields=0, folder_id=None, ldda_id=None, library_dataset_id=None, **kwd ):
+ def info_template( self, trans, library_id, id=None, folder_id=None, ldda_id=None, **kwd ):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
- if not num_fields:
- num_fields = 0
- else:
- num_fields = int( num_fields )
- if params.get( 'new_template', False ):
- action = 'new_template'
- elif params.get( 'permissions', False ):
- action = 'permissions'
- else:
- action = 'edit_template'
if id:
- library_item = trans.app.model.LibraryItemInfoTemplate.get( int( id ) )
+ library_item = trans.app.model.FormDefinition.get( int( id ) )
library_item_desc = 'information template'
response_action = 'info_template'
response_id = id
@@ -1586,174 +1622,51 @@
library_item_desc = 'library dataset'
response_action = 'library_dataset_dataset_association'
response_id = ldda_id
- elif library_dataset_id:
- library_item = trans.app.model.LibraryDataset.get( int( library_dataset_id ) )
- library_item_desc = 'dataset'
- response_action = 'library_dataset_dataset_association'
- response_id = library_item.library_dataset_dataset_association.id
else:
library_item = trans.app.model.Library.get( int( library_id ) )
library_item_desc = 'library'
response_action = 'browse_library'
response_id = library_id
- if action == 'new_template':
- if params.get( 'create_info_template_button', False ):
- return trans.fill_template( '/admin/library/new_info_template.mako',
- library_item_name=library_item.name,
- library_item_desc=library_item_desc,
- num_fields=num_fields,
- library_id=library_id,
- folder_id=folder_id,
- ldda_id=ldda_id,
- library_dataset_id=library_dataset_id,
- msg=msg,
- messagetype=messagetype )
- elif params.get( 'new_info_template_button', False ):
- # Make sure at least 1 template field is filled in
- # TODO: Eventually we'll enhance templates to allow for required and optional fields.
- proceed = False
- for i in range( int( params.get( 'set_num_fields', 0 ) ) ):
- elem_name = params.get( 'new_element_name_%i' % i, None )
- elem_description = params.get( 'new_element_description_%i' % i, None )
- if elem_name or elem_description:
- proceed = True
- break
- if not proceed:
- msg = "At least 1 of the fields must be completed."
- return trans.fill_template( '/admin/library/new_info_template.mako',
- library_item_name=library_item.name,
- library_item_desc=library_item_desc,
- num_fields=num_fields,
- library_id=library_id,
- folder_id=folder_id,
- ldda_id=ldda_id,
- library_dataset_id=library_dataset_id,
- msg=msg,
- messagetype=messagetype )
- # Create template
- liit = trans.app.model.LibraryItemInfoTemplate()
- liit.name = util.restore_text( params.get( 'name', '' ) )
- liit.description = util.restore_text( params.get( 'description', '' ) )
- liit.flush()
- # Inherit the template's permissions from the library_item
- trans.app.security_agent.copy_library_permissions( library_item, liit )
- # Create template association
+ forms = get_all_forms( trans, filter=dict(deleted=False) )
+ if not forms:
+ msg = "There are no forms on which to base the template, so create a form and "
+ msg += "try again to add the information template to the %s." % library_item_desc
+ trans.response.send_redirect( web.url_for( controller='forms',
+ action='new',
+ new=True,
+ msg=msg,
+ messagetype='done' ) )
+ if params.get( 'add', False ):
+ if params.get( 'add_info_template_button', False ):
+ form = trans.app.model.FormDefinition.get( int( kwd[ 'form_id' ] ) )
+ #fields = list( copy.deepcopy( form.fields ) )
+ form_values = trans.app.model.FormValues( form, [] )
+ form_values.flush()
if folder_id:
- liit_assoc = trans.app.model.LibraryFolderInfoTemplateAssociation()
- liit_assoc.folder = trans.app.model.LibraryFolder.get( folder_id )
+ assoc = trans.app.model.LibraryFolderInfoAssociation( library_item, form, form_values )
elif ldda_id:
- liit_assoc = trans.app.model.LibraryDatasetDatasetInfoTemplateAssociation()
- ldda = trans.app.model.LibraryDatasetDatasetAssociation.get( ldda_id )
- liit_assoc.library_dataset_dataset_association = ldda
- # This response_action method requires a folder_id
- folder_id = ldda.library_dataset.folder.id
- elif library_dataset_id:
- liit_assoc = trans.app.model.LibraryDatasetInfoTemplateAssociation()
- library_dataset = trans.app.model.LibraryDataset.get( library_dataset_id )
- liit_assoc.library_dataset = library_dataset
- # This response_action method requires a folder_id
- folder_id = library_dataset.folder.id
+ assoc = trans.app.model.LibraryDatasetDatasetInfoAssociation( library_item, form, form_values )
else:
- # We'll always be sent a library_id
- liit_assoc = trans.app.model.LibraryInfoTemplateAssociation()
- liit_assoc.library = trans.app.model.Library.get( library_id )
- liit_assoc.library_item_info_template = liit
- liit_assoc.flush()
- # Create and add elements
- for i in range( int( params.get( 'set_num_fields', 0 ) ) ):
- elem_name = params.get( 'new_element_name_%i' % i, None )
- elem_description = params.get( 'new_element_description_%i' % i, None )
- if elem_description and not elem_name:
- # If we have a description but no name, the description will be both
- # ( a name cannot be empty, but a description can )
- elem_name = elem_description
- if elem_name:
- # Skip any elements that have a missing name
- liit.add_element( name=elem_name, description=elem_description )
- msg = "The new information template has been created."
- return trans.response.send_redirect( web.url_for( controller='admin',
- action=response_action,
- id=response_id,
- library_id=library_id,
- folder_id=folder_id,
- msg=util.sanitize_text( msg ),
- messagetype='done' ) )
- return trans.fill_template( '/admin/library/create_info_template.mako',
- library_item=library_item,
- library_id=library_id,
- msg=msg,
- messagetype=messagetype )
- elif action == 'edit_template':
- define_or_save = 'define'
- edit_info_template_button = params.get( 'edit_info_template_button', False )
- if edit_info_template_button:
- if edit_info_template_button == 'Define fields':
- define_or_save = 'save'
- else:
- define_or_save = 'define'
- # Save changes to existing attributes, only set name if nonempty/nonNone is passed, but always set description
- name = params.get( 'name', None )
- if name:
- library_item.name = name
- library_item.description = params.get( 'description', '' )
- library_item.flush()
- # Save changes to exisiting elements
- for elem_id in util.listify( params.get( 'element_ids', [] ) ):
- liit_element = trans.app.model.LibraryItemInfoTemplateElement.get( elem_id )
- name = params.get( 'element_name_%s' % elem_id, None )
- if name:
- liit_element.name = name
- liit_element.description = params.get( 'element_description_%s' % elem_id, None )
- liit_element.flush()
- # Add new elements
- for i in range( int( params.get( 'set_num_fields', 0 ) ) ):
- elem_name = params.get( 'new_element_name_%i' % i, None )
- elem_description = params.get( 'new_element_description_%i' % i, None )
- # Skip any elements that have a missing name and description
- if not elem_name:
- # If we have a description but no name, the description will be both
- # ( a name cannot be empty, but a description can )
- elem_name = elem_description
- if elem_name:
- library_item.add_element( name=elem_name, description=elem_description )
- library_item.refresh()
- msg = "Information template '%s' has been updated" % library_item.name
- return trans.fill_template( "/admin/library/edit_info_template.mako",
- liit=library_item,
- num_fields=num_fields,
- library_id=library_id,
- library_dataset_id=library_dataset_id,
- ldda_id=ldda_id,
- folder_id=folder_id,
+ assoc = trans.app.model.LibraryInfoAssociation( library_item, form, form_values )
+ assoc.flush()
+ msg = 'An information template based on the form "%s" has been added to this %s.' % ( form.name, library_item_desc )
+ trans.response.send_redirect( web.url_for( controller='admin',
+ action=response_action,
+ id=response_id,
+ msg=msg,
messagetype='done' ) )
+ return trans.fill_template( '/admin/library/select_info_template.mako',
library_item_name=library_item.name,
library_item_desc=library_item_desc,
- define_or_save=define_or_save,
- msg=msg,
- messagetype=messagetype )
- elif action == 'permissions':
- if params.get( 'update_roles_button', False ):
- permissions = {}
- for k, v in trans.app.model.Library.permitted_actions.items():
- in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ]
- permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
- trans.app.security_agent.set_all_library_permissions( library_item, permissions )
- library_item.refresh()
- msg = "Permissions updated for information template '%s'" % library_item.name
- return trans.response.send_redirect( web.url_for( controller='admin',
- action='info_template',
- library_id=library_id,
- id=id,
- permissions=True,
- msg=util.sanitize_text( msg ),
- messagetype='done' ) )
- return trans.fill_template( '/admin/library/info_template_permissions.mako',
- liit=library_item,
library_id=library_id,
+ folder_id=folder_id,
+ ldda_id=ldda_id,
+ forms=forms,
msg=msg,
messagetype=messagetype )
@web.expose
@web.require_admin
- def library_item_info( self, trans, library_id, id=None, library_item_id=None, library_item_type=None, **kwd ):
+ def edit_template_info( self, trans, library_id, num_widgets, library_item_id=None, library_item_type=None, **kwd ):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
@@ -1768,8 +1681,6 @@
library_item = trans.app.model.LibraryDatasetDatasetAssociation.get( library_item_id )
# This response_action method requires a folder_id
folder_id = library_item.library_dataset.folder.id
- elif library_item_type == 'library_item_info_elememt':
- library_item = trans.app.model.LibraryItemInfoElement.get( library_item_id )
else:
msg = "Invalid library item type ( %s ) specified, id ( %s )" % ( str( library_item_type ), str( library_item_id ) )
return trans.response.send_redirect( web.url_for( controller='admin',
@@ -1777,107 +1688,47 @@
id=library_id,
msg=util.sanitize_text( msg ),
messagetype='error' ) )
- if params.get( 'new_info', False ):
- if library_item:
- if params.get( 'create_new_info_button', False ):
- library_item_info_template_id = params.get( 'library_item_info_template_id', None )
- library_item_info_template = trans.app.model.LibraryItemInfoTemplate.get( int( library_item_info_template_id ) )
- # Make sure at least 1 template field is filled in
- # TODO: Eventually we'll enhance templates to allow for required and optional fields.
- proceed = False
- for template_element in library_item_info_template.elements:
- if params.get( "info_element_%s_%s" % ( library_item_info_template.id, template_element.id ), None ):
- proceed = True
- break
- if not proceed:
- msg = "At least 1 of the fields must be completed."
- return trans.response.send_redirect( web.url_for( controller='admin',
- action=library_item_type,
- id=library_item.id,
- library_id=library_id,
- folder_id=folder_id,
- edit_info=True,
- msg=util.sanitize_text( msg ),
- messagetype='error' ) )
- user = trans.get_user()
- library_item_info = trans.app.model.LibraryItemInfo( user=user )
- library_item_info.library_item_info_template = library_item_info_template
- library_item_info.flush()
- trans.app.security_agent.copy_library_permissions( library_item_info_template, library_item_info )
- for template_element in library_item_info_template.elements:
- info_element_value = params.get( "info_element_%s_%s" % ( library_item_info_template.id, template_element.id ), None )
- info_element = trans.app.model.LibraryItemInfoElement()
- info_element.contents = info_element_value
- info_element.library_item_info_template_element = template_element
- info_element.library_item_info = library_item_info
- info_element.flush()
- info_association_class = None
- for item_class, permission_class, info_association_class in trans.app.security_agent.library_item_assocs:
- if isinstance( library_item, item_class ):
- break
- if info_association_class:
- library_item_info_association = info_association_class( user=user )
- library_item_info_association.set_library_item( library_item )
- library_item_info_association.library_item_info = library_item_info
- library_item_info_association.flush()
- else:
- raise 'Invalid class (%s) specified for library_item (%s)' % ( library_item.__class__, library_item.__class__.__name__ )
- msg = 'The information has been saved'
- return trans.response.send_redirect( web.url_for( controller='admin',
- action=library_item_type,
- id=library_item.id,
- library_id=library_id,
- folder_id=folder_id,
- edit_info=True,
- msg=util.sanitize_text( msg ),
- messagetype='done' ) )
- return trans.fill_template( "/admin/library/new_info.mako",
- library_id=library_id,
- library_item=library_item,
- library_item_type=library_item_type,
- msg=msg,
- messagetype=messagetype )
- elif params.get( 'edit_info', False ):
- if params.get( 'edit_info_button', False ):
- ids = util.listify( id )
- for id in ids:
- library_item_info_element = trans.app.model.LibraryItemInfoElement.get( int( id ) )
- new_contents = util.restore_text( params.get( ( 'info_element_%s' % id ), '' ) )
- library_item_info_element.contents = new_contents
- library_item_info_element.flush()
- msg = 'The information has been updated.'
- return trans.response.send_redirect( web.url_for( controller='admin',
- action=library_item_type,
- id=library_item.id,
- library_id=library_id,
- folder_id=folder_id,
- edit_info=True,
- msg=util.sanitize_text( msg ),
- messagetype='done' ) )
- elif params.get( 'permissions', False ):
- if params.get( 'update_roles_button', False ):
- permissions = {}
- for k, v in trans.app.model.Library.permitted_actions.items():
- in_roles = [ trans.app.model.Role.get( x ) for x in util.listify( kwd.get( k + '_in', [] ) ) ]
- permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
- trans.app.security_agent.set_all_library_permissions( library_item.library_item_info, permissions )
- library_item.library_item_info.refresh()
- library_item.refresh()
- msg = "Permissions updated for field '%s'" % library_item.library_item_info_template_element.name
- return trans.response.send_redirect( web.url_for( controller='admin',
- action='library_item_info',
- library_id=library_id,
- id=id,
- library_item_id=library_item_id,
- library_item_type=library_item_type,
- permissions=True,
- msg=util.sanitize_text( msg ),
- messagetype='done' ) )
- return trans.fill_template( '/admin/library/info_permissions.mako',
- library_item_info_element=library_item,
- library_id=library_id,
- msg=msg,
- messagetype=messagetype )
+ # Save updated template field contents
+ field_contents = []
+ for index in range( int( num_widgets ) ):
+ field_contents.append( util.restore_text( params.get( 'field_%i' % ( index ), '' ) ) )
+ if field_contents:
+ # Since information templates are inherited, the template fields can be displayed on the information
+ # page for a folder or library dataset when it has no info_association object. If the user has added
+ # field contents on an inherited template via a parent's info_association, we'll need to create a new
+ # form_values and info_association for the current object.
+ info_association = library_item.get_info_association( restrict=True )
+ if info_association:
+ template = info_association.template
+ info = info_association.info
+ form_values = trans.app.model.FormValues.get( info.id )
+ # Update existing content only if it has changed
+ if form_values.content != field_contents:
+ form_values.content = field_contents
+ form_values.flush()
+ else:
+ # Inherit the next available info_association so we can get the template
+ info_association = library_item.get_info_association()
+ template = info_association.template
+ # Create a new FormValues object
+ form_values = trans.app.model.FormValues( template, field_contents )
+ form_values.flush()
+ # Create a new info_association between the current library item and form_values
+ if library_item_type == 'folder':
+ info_association = trans.app.model.LibraryFolderInfoAssociation( library_item, template, form_values )
+ info_association.flush()
+ elif library_item_type == 'library_dataset_dataset_association':
+ info_association = trans.app.model.LibraryDatasetDatasetInfoAssociation( library_item, template, form_values )
+ info_association.flush()
+ msg = 'The information has been updated.'
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action=library_item_type,
+ id=library_item.id,
+ library_id=library_id,
+ folder_id=folder_id,
+ edit_info=True,
+ msg=util.sanitize_text( msg ),
+ messagetype='done' ) )
@web.expose
@web.require_admin
def download_dataset_from_folder(self, trans, id, library_id=None, **kwd):
@@ -2101,19 +1952,6 @@
else:
last_updated[job.id] = '%s minutes' % int( delta.seconds / 60 )
return trans.fill_template( '/admin/jobs.mako', jobs = jobs, last_updated = last_updated, cutoff = cutoff, msg = msg, messagetype = messagetype )
-
- def _get_all_forms(self, trans, all_versions=False):
- '''
- This method returns all the latest forms from the
- form_definition_current table if all_versions is set to True. Otherwise
- this method return all the versions of all the forms from form_definition
- table
- '''
- if all_versions:
- return trans.app.model.FormDefinition.query().all()
- else:
- fdc_list = trans.app.model.FormDefinitionCurrent.query().all()
- return [fdc.latest_form for fdc in fdc_list]
@web.expose
@web.require_admin
def manage_request_types( self, trans, **kwd ):
@@ -2121,7 +1959,7 @@
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
show_filter = util.restore_text( params.get( 'show_filter', 'Active' ) )
- forms = self._get_all_forms(trans, all_versions=True)
+ forms = get_all_forms(trans, all_versions=True)
request_types_list = trans.app.model.RequestType.query().all()
if show_filter == 'All':
request_types = request_types_list
@@ -2141,37 +1979,38 @@
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
- if params.get('create', False) == 'True':
+ if params.get( 'create', False ):
return trans.fill_template( '/admin/requests/create_request_type.mako',
- forms=self._get_all_forms(trans, all_versions=False),
+ forms=get_all_forms( trans,
+ filter=dict(deleted=False) ),
msg=msg,
messagetype=messagetype)
- elif params.get('add_states', False) == 'True':
+ elif params.get( 'define_states_button', False ):
return trans.fill_template( '/admin/requests/add_states.mako',
- sample_type_name=util.restore_text( params.name ),
+ request_type_name=util.restore_text( params.name ),
desc=util.restore_text( params.description ),
num_states=int(util.restore_text( params.num_states )),
request_form_id=int(util.restore_text( params.request_form_id )),
sample_form_id=int(util.restore_text( params.sample_form_id )),
msg=msg,
messagetype=messagetype)
- elif params.get('save_new', False) == 'True':
+ elif params.get( 'save_request_type', False ):
st, msg = self._save_request_type(trans, **kwd)
if not st:
return trans.fill_template( '/admin/requests/create_request_type.mako',
- forms=self._get_all_forms(trans, all_versions=False),
+ forms=get_all_forms( trans ),
msg=msg,
messagetype='error')
return trans.response.send_redirect( web.url_for( controller='admin',
action='manage_request_types',
msg='Request type <b>%s</b> has been created' % st.name,
messagetype='done') )
- elif params.get('edit', False) == 'True':
+ elif params.get('view', False):
rt = trans.app.model.RequestType.get(int(util.restore_text( params.id )))
ss_list = trans.app.model.SampleState.filter(trans.app.model.SampleState.table.c.request_type_id == rt.id).all()
return trans.fill_template( '/admin/requests/view_request_type.mako',
request_type=rt,
- forms=self._get_all_forms(trans, all_versions=False),
+ forms=get_all_forms( trans ),
states_list=ss_list,
deleted=False,
show_deleted=False,
@@ -2200,9 +2039,6 @@
for ss in ss_list:
ss.delete()
ss.flush()
- # unsubmitted state
- #ss = trans.app.model.SampleState('Unsubmitted', 'Sample not yet submitted', rt)
- ##ss.flush()
for i in range( num_states ):
name = util.restore_text( params.get( 'new_element_name_%i' % i, None ))
desc = util.restore_text( params.get( 'new_element_description_%i' % i, None ))
@@ -2236,5 +2072,5 @@
rt.flush()
return trans.response.send_redirect( web.url_for( controller='admin',
action='manage_request_types',
- msg='Request type <b>%s</b> has been deleted' % rt.name,
+ msg='Request type <b>%s</b> has been undeleted' % rt.name,
messagetype='done') )
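
A pattern that recurs throughout the admin controller changes above: before calling get_form_widgets(), the stored FormValues content (a plain list) is keyed by position into a 'field_<index>' dictionary, matching the widget names the form templates render. The mapping in isolation, with hypothetical stored values:

    info_content = [ 'ACME sequencing', '24 hours' ]    # hypothetical FormValues.content
    field_contents = {}
    for index, value in enumerate( info_content ):
        field_contents[ 'field_%i' % index ] = value
    print( field_contents )    # {'field_0': 'ACME sequencing', 'field_1': '24 hours'}
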
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/web/controllers/forms.py Thu Aug 13 13:44:04 2009 -0400
@@ -10,7 +10,6 @@
import copy
log = logging.getLogger( __name__ )
-
class Forms( BaseController ):
@web.expose
@@ -44,45 +43,30 @@
show_filter=show_filter,
msg=msg,
messagetype=messagetype )
- def _get_all_forms(self, trans, all_versions=False):
- '''
- This method returns all the latest forms from the
- form_definition_current table if all_versions is set to True. Otherwise
- this method return all the versions of all the forms from form_definition
- table
- '''
- if all_versions:
- return trans.app.model.FormDefinition.query().all()
- else:
- fdc_list = trans.app.model.FormDefinitionCurrent.query().all()
- return [fdc.latest_form for fdc in fdc_list]
@web.expose
@web.require_admin
def new( self, trans, **kwd ):
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
- messagetype = params.get( 'messagetype', 'done' )
- if params.get('new', False) and not params.get('create_form', False):
- self.current_form = {}
- self.current_form['name'] = 'New Form'
- self.current_form['desc'] = ''
- self.current_form['fields'] = []
- inputs = [ ( 'Name', TextField('name', 40,self.current_form['name'] ) ),
- ( 'Description', TextField('description', 40, self.current_form['desc']) ),
- ( 'Import from csv file (Optional)', TextField('csv_file', 40, '') ) ]
- return trans.fill_template( '/admin/forms/create_form.mako',
- inputs=inputs,
- msg=msg,
- messagetype=messagetype )
- elif params.get('create_form', False) == 'True':
- if 'submitted' in params.new:
- self.num_add_fields = 0
- fd, msg = self.__save_form(trans, fdc_id=None, **kwd)
- self.__get_saved_form(fd)
- return trans.response.send_redirect( web.url_for( controller='forms',
- action='edit',
- form_id=fd.id,
- show_form=True) )
+ messagetype = params.get( 'messagetype', 'done' )
+ if params.get( 'create_form_button', False ):
+ fd, msg = self.__save_form( trans, fdc_id=None, **kwd )
+ self.__get_saved_form( fd )
+ return trans.response.send_redirect( web.url_for( controller='forms',
+ action='edit',
+ form_id=fd.id,
+ show_form=True ) )
+ self.current_form = {}
+ self.current_form[ 'name' ] = 'New Form'
+ self.current_form[ 'desc' ] = ''
+ self.current_form[ 'fields' ] = []
+ inputs = [ ( 'Name', TextField( 'name', 40, self.current_form[ 'name' ] ) ),
+ ( 'Description', TextField( 'description', 40, self.current_form[ 'desc' ] ) ),
+ ( 'Import from csv file (Optional)', FileField( 'file_data', 40, '' ) ) ]
+ return trans.fill_template( '/admin/forms/create_form.mako',
+ inputs=inputs,
+ msg=msg,
+ messagetype=messagetype )
@web.expose
@web.require_admin
def delete( self, trans, **kwd ):
@@ -111,73 +95,75 @@
@web.require_admin
def edit( self, trans, **kwd ):
'''
- This callback method is for handling all the editing functions like:
- remaning fields, adding/deleting fields, changing fields attributes
+ This callback method is for handling all the editing functions like
+ renaming fields, adding/deleting fields, changing fields attributes.
'''
params = util.Params( kwd )
msg = util.restore_text( params.get( 'msg', '' ) )
messagetype = params.get( 'messagetype', 'done' )
- fd = trans.app.model.FormDefinition.get(int(util.restore_text( params.form_id )))
- # SHOW THE FORM FOR EDITING.
- if params.get('show_form', False) == 'True':
- self.__get_saved_form(fd)
- # the following two dicts store the unsaved select box options
+ form_id = params.get( 'form_id', None )
+ if not form_id:
+ msg = 'Invalid form id %s' % str( form_id )
+ trans.response.send_redirect( web.url_for( controller='forms',
+ action='manage',
+ msg=msg,
+ messagetype='error' ) )
+ fd = trans.app.model.FormDefinition.get( int( params.form_id ) )
+ # Show the form for editing
+ if params.get( 'show_form', False ):
+ self.__get_saved_form( fd )
+ # The following two dicts store the unsaved select box options
self.del_options = {}
self.add_options = {}
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
- # DELETE FIELD
- elif params.get('remove_button', False):
- self.__update_current_form(**kwd)
- index = int(params.get('remove_button', None).split(' ')[2])-1
- self.__remove_field(index)
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
- # SAVE CHANGES
- elif params.get('save_changes_button', False) == 'Save':
- self.__update_current_form(**kwd)
- fd_new, msg = self.__save_form(trans, fd.form_definition_current.id, **kwd)
+ if fd.fields:
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
+ else:
+ # If the form is empty, we'll simulate a click on the add_field_button so the
+ # form will be displayed with the field choice, saving a mouse click.
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, empty_form=True, **kwd )
+ # Delete a field
+ elif params.get( 'remove_button', False ):
+ self.__update_current_form( trans, **kwd )
+ index = int( kwd[ 'remove_button' ].split( ' ' )[2] ) - 1
+ self.__remove_field( index )
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
+ # Save changes
+ elif params.get( 'save_changes_button', False ):
+ self.__update_current_form( trans, **kwd )
+ fd_new, msg = self.__save_form( trans, fd.form_definition_current.id, **kwd )
if not fd_new:
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype='error', **kwd)
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype='error', **kwd )
else:
fd = fd_new
msg = "The form '%s' has been updated with the changes." % fd.name
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
- #ADD A FIELD
- elif params.get('add_field_button', False) == 'Add field':
- self.__update_current_form(**kwd)
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
+ #Add a field
+ elif params.get( 'add_field_button', False ):
+ self.__update_current_form( trans, **kwd )
self.__add_field()
# show the form again with one empty field
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
- # SHOW FORM READ ONLY
- elif params.get('read_only', False):
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
+ # Show form read-only
+ elif params.get( 'read_only', False ):
return trans.fill_template( '/admin/forms/show_form_read_only.mako',
form=fd,
msg=msg,
messagetype=messagetype )
- # REFRESH PAGE, SelectField is selected/deselected as the type of a field
- elif params.get('refresh', False) == 'true':
- self.__update_current_form(**kwd)
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
- # REMOVE SelectField OPTION
- elif params.get('select_box_options', False) == 'remove':
- #self.__update_current_form(**kwd)
- index = int(params.get( 'field_index', None ))
- option = int(params.get( 'option_index', None ))
- del self.current_form['fields'][index]['selectlist'][option]
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
- # ADD SelectField OPTION
- elif params.get('select_box_options', False) == 'add':
- #self.__update_current_form(**kwd)
- index = int(params.get( 'field_index', None ))
- self.current_form['fields'][index]['selectlist'].append('')
- return self.__show(trans=trans, form=fd, msg=msg,
- messagetype=messagetype, **kwd)
+ # Refresh page, SelectField is selected/deselected as the type of a field
+ elif params.get( 'refresh', False ):
+ self.__update_current_form( trans, **kwd )
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
+ # Remove SelectField option
+ elif params.get( 'select_box_options', False ) == 'remove':
+ index = int( kwd[ 'field_index' ] )
+ option = int( kwd[ 'option_index' ] )
+ del self.current_form[ 'fields' ][ index ][ 'selectlist' ][ option ]
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
+ # Add SelectField option
+ elif params.get( 'select_box_options', False ) == 'add':
+ index = int( kwd[ 'field_index' ] )
+ self.current_form[ 'fields' ][ index ][ 'selectlist' ].append( '' )
+ return self.__show( trans=trans, form=fd, msg=msg, messagetype=messagetype, **kwd )
def __remove_field(self, index):
del self.current_form['fields'][index]
def __add_field(self):
@@ -193,6 +179,10 @@
self.current_form['fields'].append(empty_field)
def __get_field(self, index, **kwd):
params = util.Params( kwd )
+ #TODO: RC this needs to be handled so that it does not throw an exception.
+ # To reproduce, create a new form, click the "add field" button, click the
+ # browser back arrow, then click the "add field" button again.
+ # You should never attempt to "restore_text()" on a None object...
name = util.restore_text( params.get( 'field_name_%i' % index, None ) )
helptext = util.restore_text( params.get( 'field_helptext_%i' % index, None ) )
required = params.get( 'field_required_%i' % index, False )
@@ -245,29 +235,28 @@
if not util.restore_text(params.get( 'field_name_%i' % i, None )):
return None, "All the field label(s) must be completed."
return True, ''
- def __get_form(self, **kwd):
+ def __get_form(self, trans, **kwd):
params = util.Params( kwd )
name = util.restore_text( params.name )
desc = util.restore_text( params.description ) or ""
- if params.get( 'csv_file', None ):
- csv_file = util.restore_text( params.get( 'csv_file', '' ) )
- if csv_file:
- fields = self.__import_fields(csv_file)
- else:
+ csv_file = params.get( 'file_data', '' )
+ if csv_file == '':
# set form fields
fields = []
for i in range( len(self.current_form['fields']) ):
fields.append(self.__get_field(i, **kwd))
fields = fields
+ else:
+ fields = self.__import_fields(trans, csv_file)
return name, desc, fields
- def __update_current_form(self, **kwd):
- name, desc, fields = self.__get_form(**kwd)
+ def __update_current_form(self, trans, **kwd):
+ name, desc, fields = self.__get_form(trans, **kwd)
self.current_form = {}
self.current_form['name'] = name
self.current_form['desc'] = desc
self.current_form['fields'] = fields
- def __import_fields(self, csv_file):
+ def __import_fields(self, trans, csv_file):
'''
"company","name of the company", "True", "required", "TextField",,
"due date","turnaround time", "True", "optional", "SelectField","24 hours, 1 week, 1 month"
@@ -275,7 +264,7 @@
import csv
fields = []
try:
- reader = csv.reader(open(csv_file))
+ reader = csv.reader(csv_file.file)
for row in reader:
options = row[5].split(',')
fields.append({'label': row[0],
@@ -301,7 +290,7 @@
if not flag:
return None, msg
fd = trans.app.model.FormDefinition()
- fd.name, fd.desc, fd.fields = self.__get_form(**kwd)
+ fd.name, fd.desc, fd.fields = self.__get_form(trans, **kwd)
if fdc_id: # save changes to the existing form
# change the pointer in the form_definition_current table to point
# to this new record
@@ -314,9 +303,6 @@
trans.sa_session.save_or_update( fdc )
trans.sa_session.flush()
msg = "The new form named '%s' has been created. " % (fd.name)
- request_types = trans.app.model.RequestType.query().all()
- if not request_types:
- msg = msg + "Now you can create requests to associate with this form."
return fd, msg
class FieldUI(object):
@@ -381,18 +367,31 @@
def label(self):
return str(self.index)+'.'+self.label
- def __show(self, trans, form, msg=None, messagetype='done', **kwd):
+ def __show( self, trans, form, msg='', messagetype='done', empty_form=False, **kwd ):
'''
- This method displays the form and any of the changes made to it
+ This method displays the form and any of the changes made to it.
+ The empty_form param allows this method to simulate clicking
+ the "add_field_button" on the edit_form.mako page so that the page
+ is displayed with the first field to be added, saving a mouse click.
'''
+ if empty_form:
+ # Send params that will simulate a button click on the add_field_button
+ # button on edit_form.mako.
+ param_dict = { 'form_id' : str( form.id ),
+ 'num_fields' : '0',
+ 'refresh' : 'true',
+ 'name' : form.name,
+ 'description' : form.desc,
+ 'add_field_button' : 'Add field' }
+ self.edit( trans, **param_dict )
params = util.Params( kwd )
# name & description
- form_details = [ ( 'Name', TextField('name', 40, self.current_form['name']) ),
- ( 'Description', TextField('description', 40, self.current_form['desc']) ) ]
+ form_details = [ ( 'Name', TextField( 'name', 40, self.current_form[ 'name' ] ) ),
+ ( 'Description', TextField( 'description', 40, self.current_form[ 'desc' ] ) ) ]
# fields
field_details = []
- for index, field in enumerate(self.current_form['fields']):
- field_ui = self.FieldUI(index, field)
+ for index, field in enumerate( self.current_form[ 'fields' ] ):
+ field_ui = self.FieldUI( index, field )
field_details.append( field_ui.get() )
return trans.fill_template( '/admin/forms/edit_form.mako',
form_details=form_details,
@@ -401,3 +400,61 @@
field_types=BaseField.form_field_types(),
msg=msg,
messagetype=messagetype )
+
+# Common methods for all components that use forms
+def get_all_forms( trans, all_versions=False, filter=None ):
+ '''
+ Return all versions of all forms from the form_definition table if
+ all_versions is set to True. Otherwise return only the latest version
+ of each form, taken from the form_definition_current table.
+
+ TODO: when we add the concept of a form_definition_type ( e.g.,
+ 'request_header', 'request_sample', 'library_template' ), filter
+ the query if received filter is not None.
+ '''
+ if all_versions:
+ return trans.app.model.FormDefinition.query().all()
+ if filter:
+ fdc_list = trans.app.model.FormDefinitionCurrent.query().filter_by(**filter)
+ else:
+ fdc_list = trans.app.model.FormDefinitionCurrent.query().all()
+ return [ fdc.latest_form for fdc in fdc_list ]
+def get_form_widgets( trans, form, contents={} ):
+ '''
+ Return the list of widgets that comprise a form definition,
+ including field contents if any.
+ '''
+ widgets = []
+ for index, field in enumerate( form.fields ):
+ field_name = 'field_%i' % index
+ if field_name in contents:
+ value = contents[ field_name ]
+ elif field[ 'type' ] == 'CheckboxField':
+ # Since we do not have contents, set checkbox value to False
+ value = False
+ else:
+ # Set other field types to empty string
+ value = ''
+ field_widget = eval( field[ 'type' ] )( field_name )
+ if field[ 'type' ] == 'TextField':
+ field_widget.set_size( 40 )
+ field_widget.value = value
+ elif field[ 'type' ] == 'TextArea':
+ field_widget.set_size( 3, 40 )
+ field_widget.value = value
+ elif field[ 'type' ] == 'SelectField':
+ for option in field[ 'selectlist' ]:
+ if option == value:
+ field_widget.add_option( option, option, selected=True )
+ else:
+ field_widget.add_option( option, option )
+ elif field[ 'type' ] == 'CheckboxField':
+ field_widget.checked = value
+ if field[ 'required' ] == 'required':
+ req = 'Required'
+ else:
+ req = 'Optional'
+ widgets.append( dict( label=field[ 'label' ],
+ widget=field_widget,
+ helptext='%s (%s)' % ( field[ 'helptext' ], req ) ) )
+ return widgets
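
Note that get_form_widgets() above builds each widget with eval( field[ 'type' ] )( field_name ), i.e. the stored type string ('TextField', 'TextArea', 'SelectField' or 'CheckboxField') is evaluated to the form-builder class of the same name. A sketch of the same name-to-class dispatch done with an explicit lookup table instead of eval, using a hypothetical minimal widget class (the real classes live in Galaxy's form builder module):

    class TextField( object ):
        def __init__( self, name ):
            self.name = name
            self.value = ''
        def set_size( self, size ):
            self.size = size

    # One entry per supported field type; only TextField is sketched here.
    widget_classes = { 'TextField': TextField }
    field = { 'type': 'TextField', 'label': 'Company' }    # hypothetical stored field
    field_widget = widget_classes[ field[ 'type' ] ]( 'field_0' )
    field_widget.set_size( 40 )
    print( field_widget.name )    # field_0
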
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Thu Aug 13 13:44:04 2009 -0400
@@ -12,7 +12,6 @@
# States for passing messages
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
-
class HistoryListGrid( grids.Grid ):
# Custom column types
@@ -70,8 +69,43 @@
def apply_default_filter( self, trans, query ):
return query.filter_by( user=trans.user, purged=False )
+class SharedHistoryListGrid( grids.Grid ):
+ # Custom column types
+ class DatasetsByStateColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, history ):
+ rval = []
+ for state in ( 'ok', 'running', 'queued', 'error' ):
+ total = sum( 1 for d in history.active_datasets if d.state == state )
+ if total:
+ rval.append( '<div class="count-box state-color-%s">%s</div>' % ( state, total ) )
+ else:
+ rval.append( '' )
+ return rval
+ class SharedByColumn( grids.GridColumn ):
+ def get_value( self, trans, grid, history ):
+ return history.user.email
+ # Grid definition
+ title = "Histories shared with you by others"
+ model_class = model.History
+ default_sort_key = "-update_time"
+ columns = [
+ grids.GridColumn( "Name", key="name" ),
+ DatasetsByStateColumn( "Datasets (by state)", ncells=4 ),
+ grids.GridColumn( "Created", key="create_time", format=time_ago ),
+ grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
+ SharedByColumn( "Shared by", key="user_id" )
+ ]
+ operations = [
+ grids.GridOperation( "Clone" ),
+ grids.GridOperation( "Unshare" )
+ ]
+ standard_filters = []
+ def build_initial_query( self, session ):
+ return session.query( self.model_class ).join( 'users_shared_with' )
+ def apply_default_filter( self, trans, query ):
+ return query.filter( model.HistoryUserShareAssociation.user == trans.user )
+
class HistoryController( BaseController ):
-
@web.expose
def index( self, trans ):
return ""
@@ -80,7 +114,8 @@
"""XML history list for functional tests"""
return trans.fill_template( "/history/list_as_xml.mako" )
- list_grid = HistoryListGrid()
+ stored_list_grid = HistoryListGrid()
+ shared_list_grid = SharedHistoryListGrid()
@web.expose
@web.require_login( "work with multiple histories" )
@@ -91,7 +126,6 @@
if 'operation' in kwargs:
history_ids = util.listify( kwargs.get( 'id', [] ) )
histories = []
- shared_by_others = []
operation = kwargs['operation'].lower()
if operation == "share":
return self.share( trans, **kwargs )
@@ -127,7 +161,7 @@
status, message = self._list_undelete( trans, histories )
trans.sa_session.flush()
# Render the list view
- return self.list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
+ return self.stored_list_grid( trans, status=status, message=message, template='/history/stored_grid.mako', **kwargs )
def _list_delete( self, trans, histories ):
"""Delete histories"""
n_deleted = 0
@@ -195,18 +229,38 @@
# No message
return None, None
@web.expose
- def list_shared( self, trans, **kwd ):
+ def list_shared( self, trans, **kwargs ):
"""List histories shared with current user by others"""
- params = util.Params( kwd )
- msg = util.restore_text( params.get( 'msg', '' ) )
- shared_by_others = trans.sa_session \
- .query( model.HistoryUserShareAssociation ) \
- .filter_by( user=trans.user ) \
- .join( 'history' ) \
- .filter( model.History.deleted == False ) \
- .order_by( desc( model.History.update_time ) ) \
- .all()
- return trans.fill_template( "/history/list_shared.mako", shared_by_others=shared_by_others, msg=msg, messagetype='done' )
+ msg = util.restore_text( kwargs.get( 'msg', '' ) )
+ status = message = None
+ if 'operation' in kwargs:
+ id = kwargs.get( 'id', None )
+ operation = kwargs['operation'].lower()
+ if operation == "clone":
+ if not id:
+ message = "Select a history to clone"
+ return self.shared_list_grid( trans, status='error', message=message, template='/history/shared_grid.mako', **kwargs )
+ # When cloning shared histories, only copy active datasets
+ new_kwargs = { 'clone_choice' : 'active' }
+ return self.clone( trans, id, **new_kwargs )
+ elif operation == 'unshare':
+ if not id:
+ message = "Select a history to unshare"
+ return self.shared_list_grid( trans, status='error', message=message, template='/history/shared_grid.mako', **kwargs )
+ ids = util.listify( id )
+ histories = []
+ for history_id in ids:
+ history = get_history( trans, history_id, check_ownership=False )
+ histories.append( history )
+ for history in histories:
+ # Current user is the user with which the histories were shared
+ association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one()
+ association.delete()
+ association.flush()
+ message = "Unshared %d shared histories" % len( ids )
+ status = 'done'
+ # Render the list view
+ return self.shared_list_grid( trans, status=status, message=message, template='/history/shared_grid.mako', **kwargs )
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -323,6 +377,9 @@
can_change, cannot_change, no_change_needed, unique_no_change_needed, send_to_err = \
self._populate_restricted( trans, user, histories, send_to_users, None, send_to_err, unique=True )
send_to_err += err_msg
+ if cannot_change and not no_change_needed and not can_change:
+ send_to_err = "The histories you are sharing do not contain any datasets that can be accessed by the users with which you are sharing."
+ return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
if can_change or cannot_change:
return trans.fill_template( "/history/share.mako",
histories=histories,
@@ -350,8 +407,6 @@
email=email,
err_msg=err_msg,
share_button=True ) )
- if action == "no_share":
- trans.response.send_redirect( url_for( controller='root', action='history_options' ) )
user = trans.get_user()
histories, send_to_users, send_to_err = self._get_histories_and_users( trans, user, id, email )
send_to_err = ''
@@ -629,29 +684,38 @@
@web.expose
@web.require_login( "clone shared Galaxy history" )
def clone( self, trans, id, **kwd ):
- history = get_history( trans, id, check_ownership=False )
+ """Clone a list of histories"""
params = util.Params( kwd )
+ ids = util.listify( id )
+ histories = []
+ for history_id in ids:
+ history = get_history( trans, history_id, check_ownership=False )
+ histories.append( history )
clone_choice = params.get( 'clone_choice', None )
if not clone_choice:
return trans.fill_template( "/history/clone.mako", history=history )
user = trans.get_user()
- if history.user == user:
- owner = True
+ for history in histories:
+ if history.user == user:
+ owner = True
+ else:
+ if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
+ .filter_by( user=user, history=history ).count() == 0:
+ return trans.show_error_message( "The history you are attempting to clone is not owned by you or shared with you. " )
+ owner = False
+ name = "Clone of '%s'" % history.name
+ if not owner:
+ name += " shared by '%s'" % history.user.email
+ if clone_choice == 'activatable':
+ new_history = history.copy( name=name, target_user=user, activatable=True )
+ elif clone_choice == 'active':
+ name += " (active items only)"
+ new_history = history.copy( name=name, target_user=user )
+ if len( histories ) == 1:
+ msg = 'Clone with name "%s" is now included in your previously stored histories.' % new_history.name
else:
- if trans.sa_session.query( trans.app.model.HistoryUserShareAssociation ) \
- .filter_by( user=user, history=history ).count() == 0:
- return trans.show_error_message( "The history you are attempting to clone is not owned by you or shared with you. " )
- owner = False
- name = "Clone of '%s'" % history.name
- if not owner:
- name += " shared by '%s'" % history.user.email
- if clone_choice == 'activatable':
- new_history = history.copy( name=name, target_user=user, activatable=True )
- elif clone_choice == 'active':
- name += " (active items only)"
- new_history = history.copy( name=name, target_user=user )
- # Render the list view
- return trans.show_ok_message( 'Clone with name "%s" is now included in your list of stored histories.' % new_history.name )
+ msg = '%d cloned histories are now included in your previously stored histories.' % len( histories )
+ return trans.show_ok_message( msg )
## ---- Utility methods -------------------------------------------------------
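
The new DatasetsByStateColumn above tallies a history's active datasets per state with a generator expression inside sum(). The counting idiom in isolation, with stand-in dataset objects:

    class FakeDataset( object ):
        def __init__( self, state ):
            self.state = state

    active_datasets = [ FakeDataset( 'ok' ), FakeDataset( 'ok' ), FakeDataset( 'error' ) ]
    for state in ( 'ok', 'running', 'queued', 'error' ):
        total = sum( 1 for d in active_datasets if d.state == state )
        print( '%s: %d' % ( state, total ) )    # ok: 2, running: 0, queued: 0, error: 1
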
diff -r f1f2d1de5f3e -r 5b6146705e80 lib/galaxy/web/controllers/library.py
--- a/lib/galaxy/web/controllers/library.py Thu Aug 13 13:43:25 2009 -0400
+++ b/lib/galaxy/web/controllers/library.py Thu Aug 13 13:44:04 2009 -0400
@@ -2,6 +2,7 @@
from galaxy.model.orm import *
from galaxy.datatypes import sniff
from galaxy import util
+from galaxy.web.controllers.forms import get_all_forms, get_form_widgets
from galaxy.util.streamball import StreamBall
import logging, tempfile, zipfile, tarfile, os, sys
@@ -130,6 +131,21 @@
msg=util.sanitize_text( msg ),
messagetype='error' ) )
if action == 'information':
+ # See if we have any associated templates
+ if library.info_association:
+ template = library.info_association[0].template
+ # See if we have any field contents
+ info = library.info_association[0].info
+ if info:
+ field_contents = {}
+ for index, value in enumerate( info.content ):
+ key = 'field_%i' % index
+ field_contents[ key ] = value
+ widgets = get_form_widgets( trans, template, field_contents )
+ else:
+ widgets = get_form_widgets( trans, template )
+ else:
+ widgets = []
if params.get( 'rename_library_button', False ):
old_name = library.name
new_name = util.restore_text( params.name )
@@ -138,8 +154,7 @@
msg = 'Enter a valid name'
return trans.fill_template( '/library/library_info.mako',
library=library,
- restrict=params.get( 'restrict', False ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype='error' )
else:
@@ -159,8 +174,7 @@
messagetype='done' ) )
return trans.fill_template( '/library/library_info.mako',
library=library,
- restrict=params.get( 'restrict', False ),
- render_templates=params.get( 'render_templates', False ),
+ widgets=widgets,
msg=msg,
messagetype=messagetype )
elif action == 'permissions':
@@ -377,8 +391,6 @@
return trans.fill_template( '/library/library_dataset_info.mako',
library_dataset=library_dataset,
library_id=library_id,
- restrict=params.get( 'restrict', True ),
details: http://www.bx.psu.edu/hg/galaxy/rev/c5968ee9399b
changeset: 2579:c5968ee9399b
user: Kanwei Li <kanwei(a)gmail.com>
date: Wed Aug 19 11:33:58 2009 -0400
description:
Typo
1 file(s) affected in this change:
tools/new_operations/cluster.xml
diffs (12 lines):
diff -r e9d70dca2ff5 -r c5968ee9399b tools/new_operations/cluster.xml
--- a/tools/new_operations/cluster.xml Wed Aug 19 11:33:48 2009 -0400
+++ b/tools/new_operations/cluster.xml Wed Aug 19 11:33:58 2009 -0400
@@ -74,7 +74,7 @@
**Syntax**
- **Maximum distance** is the greatest distance in base pairs allowed between intervals that will be considered "clustered". **Negative** values for distance are allowed, and are useful for clustering intervals that overlap.
-- **Minimum intervals per cluster** allow a threshold to be set on the minimum number of intervals to be considered a cluster. Any area with less than this minimum will not be included in the ouput.
+- **Minimum intervals per cluster** allows a threshold to be set on the minimum number of intervals to be considered a cluster. Any area with fewer than this minimum will not be included in the output.
- **Merge clusters into single intervals** outputs intervals that span the entire cluster.
- **Find cluster intervals; preserve comments and order** filters out non-cluster intervals while maintaining the original ordering and comments in the file.
- **Find cluster intervals; output grouped by clusters** filters out non-cluster intervals, but outputs the cluster intervals so that they are grouped together. Comments and original ordering in the file are lost.
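
As a quick worked example of the two numeric settings (coordinates are hypothetical): with a maximum distance of 20 and a minimum of 2 intervals per cluster, the intervals chr1:100-200, chr1:210-260 and chr1:500-550 yield a single cluster containing the first two intervals, since their 10 bp gap is under the threshold; the third interval has no neighbor within 20 bp and is excluded from the output.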