galaxy-dev
details: http://www.bx.psu.edu/hg/galaxy/rev/542471b183d7
changeset: 2603:542471b183d7
user: Kanwei Li <kanwei@gmail.com>
date: Thu Aug 20 18:44:35 2009 -0400
description:
Merge trunk
0 file(s) affected in this change:
diffs (1505 lines):
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/jobs/__init__.py Thu Aug 20 18:44:35 2009 -0400
@@ -6,6 +6,8 @@
from galaxy.datatypes.tabular import *
from galaxy.datatypes.interval import *
from galaxy.datatypes import metadata
+from galaxy.util.json import from_json_string
+from galaxy.util.expressions import ExpressionContext
import pkg_resources
pkg_resources.require( "PasteDeploy" )
@@ -18,6 +20,12 @@
# States for running a job. These are NOT the same as data states
JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_OK, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ok', 'ready', 'deleted', 'admin_deleted'
+
+# This file, if created in the job's working directory, will be used for
+# setting advanced metadata properties on the job and its associated outputs.
+# This interface is currently experimental, is only used by the upload tool,
+# and should eventually become API'd
+TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
class JobManager( object ):
"""
@@ -320,6 +328,7 @@
self.working_directory = \
os.path.join( self.app.config.job_working_directory, str( self.job_id ) )
self.output_paths = None
+ self.tool_provided_job_metadata = None
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job ) #wrapper holding the info required to restore and clean up from files used for setting metadata externally
def get_param_dict( self ):
@@ -422,6 +431,8 @@
dataset.blurb = 'tool error'
dataset.info = message
dataset.set_size()
+ if dataset.ext == 'auto':
+ dataset.extension = 'data'
dataset.flush()
job.state = model.Job.states.ERROR
job.command_line = self.command_line
@@ -486,16 +497,28 @@
except ( IOError, OSError ):
self.fail( "Job %s's output dataset(s) could not be read" % job.id )
return
+ job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
for dataset_assoc in job.output_datasets:
+ context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
#should this also be checking library associations? - can a library item be added from a history before the job has ended? - lets not allow this to occur
for dataset in dataset_assoc.dataset.dataset.history_associations: #need to update all associated output hdas, i.e. history was shared with job running
+ if context.get( 'path', None ):
+ # The tool can set an alternate output path for the dataset.
+ try:
+ shutil.move( context['path'], dataset.file_name )
+ except ( IOError, OSError ):
+ if not context['stderr']:
+ context['stderr'] = 'This dataset could not be processed'
dataset.blurb = 'done'
dataset.peek = 'no peek'
- dataset.info = stdout + stderr
+ dataset.info = context['stdout'] + context['stderr']
dataset.set_size()
- if stderr:
+ if context['stderr']:
dataset.blurb = "error"
elif dataset.has_data():
+ # If the tool was expected to set the extension, attempt to retrieve it
+ if dataset.ext == 'auto':
+ dataset.extension = context.get( 'ext', 'data' )
#if a dataset was copied, it won't appear in our dictionary:
#either use the metadata from originating output dataset, or call set_meta on the copies
#it would be quicker to just copy the metadata from the originating output dataset,
@@ -510,18 +533,39 @@
#the metadata that was stored to disk for use via the external process,
#and the changes made by the user will be lost, without warning or notice
dataset.metadata.from_JSON_dict( self.external_output_metadata.get_output_filenames_by_dataset( dataset ).filename_out )
- if self.tool.is_multi_byte:
- dataset.set_multi_byte_peek()
- else:
- dataset.set_peek()
+ try:
+ assert context.get( 'line_count', None ) is not None
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek( line_count=context['line_count'] )
+ else:
+ dataset.set_peek( line_count=context['line_count'] )
+ except:
+ if self.tool.is_multi_byte:
+ dataset.set_multi_byte_peek()
+ else:
+ dataset.set_peek()
+ try:
+ # set the name if provided by the tool
+ dataset.name = context['name']
+ except:
+ pass
else:
dataset.blurb = "empty"
+ if dataset.ext == 'auto':
+ dataset.extension = 'txt'
dataset.flush()
- if stderr:
+ if context['stderr']:
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
- dataset_assoc.dataset.dataset.flush()
+ # If any of the rest of the finish method below raises an
+ # exception, the fail method will run and set the datasets to
+ # ERROR. The user will never see that the datasets are in error if
+ # they were flushed as OK here, since upon doing so, the history
+ # panel stops checking for updates. So allow the
+ # mapping.context.current.flush() at the bottom of this method set
+ # the state instead.
+ #dataset_assoc.dataset.dataset.flush()
# Save stdout and stderr
if len( stdout ) > 32768:
@@ -591,7 +635,8 @@
return self.output_paths
class DatasetPath( object ):
- def __init__( self, real_path, false_path = None ):
+ def __init__( self, dataset_id, real_path, false_path = None ):
+ self.dataset_id = dataset_id
self.real_path = real_path
self.false_path = false_path
def __str__( self ):
@@ -605,10 +650,55 @@
self.output_paths = []
for name, data in [ ( da.name, da.dataset.dataset ) for da in job.output_datasets ]:
false_path = os.path.abspath( os.path.join( self.working_directory, "galaxy_dataset_%d.dat" % data.id ) )
- self.output_paths.append( DatasetPath( data.file_name, false_path ) )
+ self.output_paths.append( DatasetPath( data.id, data.file_name, false_path ) )
else:
- self.output_paths = [ DatasetPath( da.dataset.file_name ) for da in job.output_datasets ]
+ self.output_paths = [ DatasetPath( da.dataset.dataset.id, da.dataset.file_name ) for da in job.output_datasets ]
return self.output_paths
+
+ def get_output_file_id( self, file ):
+ if self.output_paths is None:
+ self.get_output_fnames()
+ for dp in self.output_paths:
+ if self.app.config.outputs_to_working_directory and os.path.basename( dp.false_path ) == file:
+ return dp.dataset_id
+ elif os.path.basename( dp.real_path ) == file:
+ return dp.dataset_id
+ return None
+
+ def get_tool_provided_job_metadata( self ):
+ if self.tool_provided_job_metadata is not None:
+ return self.tool_provided_job_metadata
+
+ # Look for JSONified job metadata
+ self.tool_provided_job_metadata = []
+ meta_file = os.path.join( self.working_directory, TOOL_PROVIDED_JOB_METADATA_FILE )
+ if os.path.exists( meta_file ):
+ for line in open( meta_file, 'r' ):
+ try:
+ line = from_json_string( line )
+ assert 'type' in line
+ except:
+ log.exception( '(%s) Got JSON data from tool, but data is improperly formatted or no "type" key in data' % self.job_id )
+ log.debug( 'Offending data was: %s' % line )
+ continue
+ # Set the dataset id if it's a dataset entry and isn't set.
+ # This isn't insecure. We loop the job's output datasets in
+ # the finish method, so if a tool writes out metadata for a
+ # dataset id that it doesn't own, it'll just be ignored.
+ if line['type'] == 'dataset' and 'dataset_id' not in line:
+ try:
+ line['dataset_id'] = self.get_output_file_id( line['dataset'] )
+ except KeyError:
+ log.warning( '(%s) Tool provided job dataset-specific metadata without specifying a dataset' % self.job_id )
+ continue
+ self.tool_provided_job_metadata.append( line )
+ return self.tool_provided_job_metadata
+
+ def get_dataset_finish_context( self, job_context, dataset ):
+ for meta in self.get_tool_provided_job_metadata():
+ if meta['type'] == 'dataset' and meta['dataset_id'] == dataset.id:
+ return ExpressionContext( meta, job_context )
+ return job_context
def check_output_sizes( self ):
sizes = []
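
The galaxy.json interface introduced above is line-oriented: each line of the file is an independent JSON object carrying a 'type' key, and 'dataset' entries are matched to the job's outputs by dataset_id (or by filename via a 'dataset' key). A minimal sketch of a tool emitting one entry, using the stdlib json module for brevity (the changeset itself uses galaxy.util.json); the helper name is hypothetical:

import json, os

def write_dataset_entry( working_directory, dataset_id, ext, name, line_count ):
    # Append one 'dataset' entry to galaxy.json in the job working directory.
    # Keys mirror those consumed in finish() above: 'ext' is used when the
    # output extension is 'auto', 'name' renames the dataset, 'line_count'
    # feeds set_peek(), and 'stdout'/'stderr' end up in dataset.info.
    entry = dict( type = 'dataset',
                  dataset_id = dataset_id,
                  ext = ext,
                  name = name,
                  line_count = line_count,
                  stdout = 'uploaded %s file' % ext )
    open( os.path.join( working_directory, 'galaxy.json' ), 'a' ).write( json.dumps( entry ) + '\n' )
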
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Thu Aug 20 18:44:35 2009 -0400
@@ -5,7 +5,7 @@
pkg_resources.require( "simplejson" )
-import logging, os, string, sys, tempfile, glob, shutil
+import logging, os, string, sys, tempfile, glob, shutil, types
import simplejson
import binascii
from UserDict import DictMixin
@@ -415,6 +415,7 @@
output.metadata_source = data_elem.get("metadata_source", "")
output.parent = data_elem.get("parent", None)
output.label = util.xml_text( data_elem, "label" )
+ output.count = int( data_elem.get("count", 1) )
output.filters = data_elem.findall( 'filter' )
self.outputs[ output.name ] = output
# Any extra generated config files for the tool
@@ -816,7 +817,11 @@
# If we've completed the last page we can execute the tool
elif state.page == self.last_page:
out_data = self.execute( trans, incoming=params )
- return 'tool_executed.mako', dict( out_data=out_data )
+ try:
+ assert type( out_data ) is types.DictType
+ return 'tool_executed.mako', dict( out_data=out_data )
+ except:
+ return 'message.mako', dict( message_type='error', message=out_data, refresh_frames=[] )
# Otherwise move on to the next page
else:
state.page += 1
@@ -824,15 +829,26 @@
self.fill_in_new_state( trans, self.inputs_by_page[ state.page ], state.inputs )
return 'tool_form.mako', dict( errors=errors, tool_state=state )
else:
- if filter( lambda x: isinstance( x, FieldStorage ) and x.file, state.inputs.values() ):
+ try:
+ self.find_fieldstorage( state.inputs )
+ except InterruptedUpload:
# If inputs contain a file it won't persist. Most likely this
# is an interrupted upload. We should probably find a more
# standard method of determining an incomplete POST.
return self.handle_interrupted( trans, state.inputs )
- else:
- # Just a refresh, render the form with updated state and errors.
- return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ except:
+ pass
+ # Just a refresh, render the form with updated state and errors.
+ return 'tool_form.mako', dict( errors=errors, tool_state=state )
+ def find_fieldstorage( self, x ):
+ if isinstance( x, FieldStorage ):
+ raise InterruptedUpload( None )
+ elif type( x ) is types.DictType:
+ [ self.find_fieldstorage( y ) for y in x.values() ]
+ elif type( x ) is types.ListType:
+ [ self.find_fieldstorage( y ) for y in x ]
+
def handle_interrupted( self, trans, inputs ):
"""
Upon handling inputs, if it appears that we have received an incomplete
@@ -1704,3 +1720,6 @@
return value
else:
return incoming.get( key, default )
+
+class InterruptedUpload( Exception ):
+ pass
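
The new find_fieldstorage()/InterruptedUpload pair replaces the old filter() check: it walks arbitrarily nested dicts and lists of tool state and raises as soon as a live FieldStorage is found. A hedged usage sketch, assuming a loaded Tool instance and the InterruptedUpload class added at the bottom of this file:

from galaxy.tools import InterruptedUpload

def has_live_upload( tool, state_inputs ):
    # True when any cgi.FieldStorage instance is buried somewhere in the
    # persisted state -- i.e. the POST was most likely an interrupted upload.
    try:
        tool.find_fieldstorage( state_inputs )
    except InterruptedUpload:
        return True
    return False
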
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 20 18:44:35 2009 -0400
@@ -1,8 +1,10 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
+from cgi import FieldStorage
from __init__ import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
+from galaxy.util.json import to_json_string
import sys, traceback
@@ -11,14 +13,28 @@
class UploadToolAction( ToolAction ):
# Action for uploading files
- def __init__( self ):
- self.empty = False
- self.line_count = None
- def remove_tempfile( self, filename ):
- try:
- os.unlink( filename )
- except:
- log.exception( 'failure removing temporary file: %s' % filename )
+ def persist_uploads( self, incoming ):
+ if 'files' in incoming:
+ new_files = []
+ temp_files = []
+ for upload_dataset in incoming['files']:
+ f = upload_dataset['file_data']
+ if isinstance( f, FieldStorage ):
+ # very small files can be StringIOs
+ if 'name' in dir( f.file ) and f.file.name != '<fdopen>':
+ local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
+ f.file.close()
+ else:
+ local_filename = datatypes.sniff.stream_to_file( f.file, prefix="strio_upload_file_" )[0]
+ upload_dataset['file_data'] = dict( filename = f.filename,
+ local_filename = local_filename )
+ if upload_dataset['url_paste'].strip() != '':
+ upload_dataset['url_paste'] = datatypes.sniff.stream_to_file( StringIO.StringIO( upload_dataset['url_paste'] ), prefix="strio_url_paste_" )[0]
+ else:
+ upload_dataset['url_paste'] = None
+ new_files.append( upload_dataset )
+ incoming['files'] = new_files
+ return incoming
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
dataset_upload_inputs = []
for input_name, input in tool.inputs.iteritems():
@@ -42,330 +58,100 @@
log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
else:
self.precreated_datasets.append( data )
+
data_list = []
+
+ incoming = self.persist_uploads( incoming )
+
+ json_file = tempfile.mkstemp()
+ json_file_path = json_file[1]
+ json_file = os.fdopen( json_file[0], 'w' )
for dataset_upload_input in dataset_upload_inputs:
uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming )
for uploaded_dataset in uploaded_datasets:
- precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name )
- dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset, metadata = uploaded_dataset.metadata, uploaded_dataset = uploaded_dataset )
- #dataset state is now set, we should not do anything else to this dataset
- data_list.append( dataset )
- #clean up extra temp names
- uploaded_dataset.clean_up_temp_files()
-
+ data = self.get_precreated_dataset( uploaded_dataset.name )
+ if not data:
+ data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
+ data.name = uploaded_dataset.name
+ data.state = data.states.QUEUED
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.add_dataset( data, genome_build = uploaded_dataset.dbkey )
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
+ else:
+ data.extension = uploaded_dataset.file_type
+ data.dbkey = uploaded_dataset.dbkey
+ data.flush()
+ trans.history.genome_build = uploaded_dataset.dbkey
+ if uploaded_dataset.type == 'composite':
+ # we need to init metadata before the job is dispatched
+ data.init_meta()
+ for meta_name, meta_value in uploaded_dataset.metadata.iteritems():
+ setattr( data.metadata, meta_name, meta_value )
+ data.flush()
+ json = dict( file_type = uploaded_dataset.file_type,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ metadata = uploaded_dataset.metadata,
+ primary_file = uploaded_dataset.primary_file,
+ extra_files_path = data.extra_files_path,
+ composite_file_paths = uploaded_dataset.composite_files,
+ composite_files = dict( [ ( k, v.__dict__ ) for k, v in data.datatype.get_composite_files( data ).items() ] ) )
+ else:
+ try:
+ is_binary = uploaded_dataset.datatype.is_binary
+ except:
+ is_binary = None
+ json = dict( file_type = uploaded_dataset.file_type,
+ name = uploaded_dataset.name,
+ dataset_id = data.dataset.id,
+ dbkey = uploaded_dataset.dbkey,
+ type = uploaded_dataset.type,
+ is_binary = is_binary,
+ space_to_tab = uploaded_dataset.space_to_tab,
+ path = uploaded_dataset.path )
+ json_file.write( to_json_string( json ) + '\n' )
+ data_list.append( data )
+ json_file.close()
+
#cleanup unclaimed precreated datasets:
for data in self.precreated_datasets:
log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
data.state = data.states.ERROR
data.info = 'No file contents were available.'
- if data_list:
- trans.app.model.flush()
+ if not data_list:
+ try:
+ os.remove( json_file_path )
+ except:
+ pass
+ return 'No data was entered in the upload form, please go back and choose data to upload.'
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
job.history_id = trans.history.id
job.tool_id = tool.id
- try:
- # For backward compatibility, some tools may not have versions yet.
- job.tool_version = tool.version
- except:
- job.tool_version = "1.0.1"
+ job.tool_version = tool.version
job.state = trans.app.model.Job.states.UPLOAD
job.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
+
+ for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ job.add_parameter( name, value )
+ job.add_parameter( 'paramfile', to_json_string( json_file_path ) )
+ for i, dataset in enumerate( data_list ):
+ job.add_output_dataset( i, dataset )
+ trans.app.model.flush()
- #if we could make a 'real' job here, then metadata could be set before job.finish() is called
- hda = data_list[0] #only our first hda is being added as output for the job, why?
- job.state = trans.app.model.Job.states.OK
- file_size_str = datatypes.data.nice_size( hda.dataset.file_size )
- job.info = "%s, size: %s" % ( hda.info, file_size_str )
- job.add_output_dataset( hda.name, hda )
- job.flush()
- log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) )
- trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id )
- return dict( output=hda )
-
- def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None):
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( create_dataset=True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
- data.name = err_code
- data.extension = "txt"
- data.dbkey = "?"
- data.info = err_msg
- data.file_size = 0
- data.state = data.states.EMPTY
- data.flush()
- if precreated_dataset is None:
- trans.history.add_dataset( data )
- trans.app.model.flush()
- # Indicate job failure by setting state and info
- job.state = trans.app.model.Job.states.ERROR
- job.info = err_msg
- job.add_output_dataset( data.name, data )
- job.flush()
- log.info( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ) )
- trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id )
- return dict( output=data )
-
- def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None, metadata = {}, uploaded_dataset = None ):
- def dataset_no_data_error( data, message = 'there was an error uploading your file' ):
- data.info = "No data: %s." % message
- data.state = data.states.ERROR
- if data.extension is None:
- data.extension = 'data'
- return data
- data_type = None
-
- if precreated_dataset is not None:
- data = precreated_dataset
- else:
- data = trans.app.model.HistoryDatasetAssociation( history = trans.history, create_dataset = True )
- trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
-
- # See if we have an empty file
- if not os.path.getsize( temp_name ) > 0:
- return dataset_no_data_error( data, message = 'you attempted to upload an empty file' )
- #raise BadFileException( "you attempted to upload an empty file." )
- if is_multi_byte:
- ext = sniff.guess_ext( temp_name, is_multi_byte=True )
- else:
- if not data_type: #at this point data_type is always None (just initialized above), so this is always True...lots of cleanup needed here
- # See if we have a gzipped file, which, if it passes our restrictions,
- # we'll decompress on the fly.
- is_gzipped, is_valid = self.check_gzip( temp_name )
- if is_gzipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_gzipped and is_valid:
- # We need to uncompress the temp_name file
- CHUNK_SIZE = 2**20 # 1Mb
- fd, uncompressed = tempfile.mkstemp()
- gzipped_file = gzip.GzipFile( temp_name )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- return dataset_no_data_error( data, message = 'problem decompressing gzipped data' )
- #raise BadFileException( 'problem decompressing gzipped data.' )
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the decompressed file
- shutil.move( uncompressed, temp_name )
- file_name = file_name.rstrip( '.gz' )
- data_type = 'gzip'
- ext = ''
- if not data_type:
- # See if we have a zip archive
- is_zipped, is_valid, test_ext = self.check_zip( temp_name )
- if is_zipped and not is_valid:
- return dataset_no_data_error( data, message = 'you attempted to upload an inappropriate file' )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- elif is_zipped and is_valid:
- # Currently, we force specific tools to handle this case. We also require the user
- # to manually set the incoming file_type
- if ( test_ext == 'ab1' or test_ext == 'scf' ) and file_type != 'binseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'." )
- elif test_ext == 'txt' and file_type != 'txtseq.zip':
- return dataset_no_data_error( data, message = "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'" )
- #raise BadFileException( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'." )
- if not ( file_type == 'binseq.zip' or file_type == 'txtseq.zip' ):
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files" )
- #raise BadFileException( "you must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files." )
- data_type = 'zip'
- ext = file_type
- if not data_type:
- if self.check_binary( temp_name ):
- if uploaded_dataset and uploaded_dataset.datatype and uploaded_dataset.datatype.is_binary:
- #we need a more generalized way of checking if a binary upload is of the right format for a datatype...magic number, etc
- data_type = 'binary'
- ext = uploaded_dataset.file_type
- else:
- parts = file_name.split( "." )
- if len( parts ) > 1:
- ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- if ext == 'ab1' and file_type != 'ab1':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
- elif ext == 'scf' and file_type != 'scf':
- return dataset_no_data_error( data, message = "you must manually set the 'File Format' to 'Scf' when uploading scf files" )
- #raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
- data_type = 'binary'
- if not data_type:
- # We must have a text file
- if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
- return dataset_no_data_error( data, message = "you attempted to upload an inappropriate file" )
- #raise BadFileException( "you attempted to upload an inappropriate file." )
- #if data_type != 'binary' and data_type != 'zip' and not trans.app.datatypes_registry.get_datatype_by_extension( ext ).is_binary:
- if data_type != 'binary' and data_type != 'zip':
- if space_to_tab:
- self.line_count = sniff.convert_newlines_sep2tabs( temp_name )
- else:
- self.line_count = sniff.convert_newlines( temp_name )
- if file_type == 'auto':
- ext = sniff.guess_ext( temp_name, sniff_order=trans.app.datatypes_registry.sniff_order )
- else:
- ext = file_type
- data_type = ext
- if info is None:
- info = 'uploaded %s file' %data_type
- data.extension = ext
- data.name = file_name
- data.dbkey = dbkey
- data.info = info
- data.flush()
- shutil.move( temp_name, data.file_name )
- dataset_state = data.states.OK #don't set actual state here, only set to OK when finished setting attributes of the dataset
- data.set_size()
- data.init_meta()
- #need to set metadata, has to be done after extention is set
- for meta_name, meta_value in metadata.iteritems():
- setattr( data.metadata, meta_name, meta_value )
- if self.line_count is not None:
- try:
- if is_multi_byte:
- data.set_multi_byte_peek( line_count=self.line_count )
- else:
- data.set_peek( line_count=self.line_count )
- except:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
- else:
- if is_multi_byte:
- data.set_multi_byte_peek()
- else:
- data.set_peek()
-
- # validate incomming data
- # Commented by greg on 3/14/07
- # for error in data.datatype.validate( data ):
- # data.add_validation_error(
- # model.ValidationError( message=str( error ), err_type=error.__class__.__name__, attributes=util.object_to_string( error.__dict__ ) ) )
- if data.missing_meta():
- data.datatype.set_meta( data )
- dbkey_to_store = dbkey
- if type( dbkey_to_store ) == type( [] ):
- dbkey_to_store = dbkey[0]
- if precreated_dataset is not None:
- trans.history.genome_build = dbkey_to_store
- else:
- trans.history.add_dataset( data, genome_build=dbkey_to_store )
- #set up composite files
- if uploaded_dataset is not None:
- composite_files = data.datatype.get_composite_files( data )
- if composite_files:
- os.mkdir( data.extra_files_path ) #make extra files path
- for name, value in composite_files.iteritems():
- if uploaded_dataset.composite_files[ value.name ] is None and not value.optional:
- data.info = "A required composite data file was not provided (%s)" % name
- dataset_state = data.states.ERROR
- break
- elif uploaded_dataset.composite_files[ value.name] is not None:
- if not value.is_binary:
- if uploaded_dataset.composite_files[ value.name ].space_to_tab:
- sniff.convert_newlines_sep2tabs( uploaded_dataset.composite_files[ value.name ].filename )
- else:
- sniff.convert_newlines( uploaded_dataset.composite_files[ value.name ].filename )
- shutil.move( uploaded_dataset.composite_files[ value.name ].filename, os.path.join( data.extra_files_path, name ) )
- if data.datatype.composite_type == 'auto_primary_file':
- #now that metadata was set above, we should create the primary file as required
- open( data.file_name, 'wb+' ).write( data.datatype.generate_primary_file( dataset = data ) )
- data.state = dataset_state #Always set dataset state LAST
- trans.app.model.flush()
- trans.log_event( "Added dataset %d to history %d" %( data.id, trans.history.id ), tool_id="upload" )
- return data
-
- def check_gzip( self, temp_name ):
- temp = open( temp_name, "U" )
- magic_check = temp.read( 2 )
- temp.close()
- if magic_check != util.gzip_magic:
- return ( False, False )
- CHUNK_SIZE = 2**15 # 32Kb
- gzipped_file = gzip.GzipFile( temp_name )
- chunk = gzipped_file.read( CHUNK_SIZE )
- gzipped_file.close()
- if self.check_html( temp_name, chunk=chunk ) or self.check_binary( temp_name, chunk=chunk ):
- return( True, False )
- return ( True, True )
-
- def check_zip( self, temp_name ):
- if not zipfile.is_zipfile( temp_name ):
- return ( False, False, None )
- zip_file = zipfile.ZipFile( temp_name, "r" )
- # Make sure the archive consists of valid files. The current rules are:
- # 1. Archives can only include .ab1, .scf or .txt files
- # 2. All file extensions within an archive must be the same
- name = zip_file.namelist()[0]
- test_ext = name.split( "." )[1].strip().lower()
- if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
- return ( True, False, test_ext )
- for name in zip_file.namelist():
- ext = name.split( "." )[1].strip().lower()
- if ext != test_ext:
- return ( True, False, test_ext )
- return ( True, True, test_ext )
-
- def check_html( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open(temp_name, "U")
- else:
- temp = chunk
- regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
- regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
- regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
- regexp4 = re.compile( "<META[^>]*>", re.I )
- lineno = 0
- for line in temp:
- lineno += 1
- matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
- if matches:
- if chunk is None:
- temp.close()
- return True
- if lineno > 100:
- break
- if chunk is None:
- temp.close()
- return False
- def check_binary( self, temp_name, chunk=None ):
- if chunk is None:
- temp = open( temp_name, "U" )
- else:
- temp = chunk
- lineno = 0
- for line in temp:
- lineno += 1
- line = line.strip()
- if line:
- if util.is_multi_byte( line ):
- return False
- for char in line:
- if ord( char ) > 128:
- if chunk is None:
- temp.close()
- return True
- if lineno > 10:
- break
- if chunk is None:
- temp.close()
- return False
+ # Queue the job for execution
+ trans.app.job_queue.put( job.id, tool )
+ trans.log_event( "Added job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return dict( [ ( i, v ) for i, v in enumerate( data_list ) ] )
def get_precreated_dataset( self, name ):
"""
@@ -378,7 +164,3 @@
return self.precreated_datasets.pop( names.index( name ) )
else:
return None
-
-class BadFileException( Exception ):
- pass
-
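
Each non-composite upload now serializes to one line of the temporary paramfile that the external upload tool consumes. For reference, the dict passed to to_json_string() for a single plain file looks roughly like this (all values illustrative):

json = dict( file_type = 'auto',
             name = 'reads.txt',
             dataset_id = 42,
             dbkey = '?',
             type = 'file',
             is_binary = None,
             space_to_tab = False,
             path = '/tmp/upload_file_data_Ab3xZ9' )
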
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Thu Aug 20 18:44:35 2009 -0400
@@ -304,21 +304,22 @@
def get_html_field( self, trans=None, value=None, other_values={} ):
return form_builder.FileField( self.name, ajax = self.ajax, value = value )
def from_html( self, value, trans=None, other_values={} ):
+ # TODO: Fix nginx upload module support
# Middleware or proxies may encode files in special ways (TODO: this
# should be pluggable)
- if type( value ) == dict:
- upload_location = self.tool.app.config.nginx_upload_location
- assert upload_location, \
- "Request appears to have been processed by nginx_upload_module \
- but Galaxy is not configured to recgonize it"
- # Check that the file is in the right location
- local_filename = os.path.abspath( value['path'] )
- assert local_filename.startswith( upload_location ), \
- "Filename provided by nginx is not in correct directory"
- value = Bunch(
- filename = value["name"],
- local_filename = local_filename
- )
+ #if type( value ) == dict:
+ # upload_location = self.tool.app.config.nginx_upload_location
+ # assert upload_location, \
+ # "Request appears to have been processed by nginx_upload_module \
+ # but Galaxy is not configured to recgonize it"
+ # # Check that the file is in the right location
+ # local_filename = os.path.abspath( value['path'] )
+ # assert local_filename.startswith( upload_location ), \
+ # "Filename provided by nginx is not in correct directory"
+ # value = Bunch(
+ # filename = value["name"],
+ # local_filename = local_filename
+ # )
return value
def get_required_enctype( self ):
"""
@@ -330,10 +331,18 @@
return None
elif isinstance( value, unicode ) or isinstance( value, str ):
return value
+ elif isinstance( value, dict ):
+ # or should we jsonify?
+ try:
+ return value['local_filename']
+ except:
+ return None
raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
if value is None:
return None
+ elif isinstance( value, unicode ) or isinstance( value, str ):
+ return value
else:
raise Exception( "FileToolParameter cannot be persisted" )
def get_initial_value( self, trans, context ):
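
With this change FileToolParameter values can round-trip through stored tool state as plain path strings. A hedged sketch of the two conversions, where param is a FileToolParameter and app is the Galaxy application object:

# A dict from the (now commented-out) nginx upload path collapses to its
# local filename; a stored path string now passes through to_python().
param.to_string( dict( local_filename = '/tmp/upload_XYZ', name = 'f.txt' ), app )
# -> '/tmp/upload_XYZ'
param.to_python( u'/tmp/upload_XYZ', app )
# -> u'/tmp/upload_XYZ'
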
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py Thu Aug 20 18:44:35 2009 -0400
@@ -12,6 +12,7 @@
from galaxy.datatypes import sniff
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
+from galaxy.util import json
class Group( object ):
def __init__( self ):
@@ -167,33 +168,30 @@
rval.append( rval_dict )
return rval
def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
- def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ):
+ def get_data_file_filename( data_file, override_name = None, override_info = None ):
dataset_name = override_name
dataset_info = override_info
def get_file_name( file_name ):
file_name = file_name.split( '\\' )[-1]
file_name = file_name.split( '/' )[-1]
return file_name
- if 'local_filename' in dir( data_file ):
+ try:
# Use the existing file
- return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte
- elif 'filename' in dir( data_file ):
- #create a new tempfile
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
- precreated_name = get_file_name( data_file.filename )
- if not dataset_name:
- dataset_name = precreated_name
- if not dataset_info:
- dataset_info = 'uploaded file'
- return temp_name, get_file_name( data_file.filename ), is_multi_byte, dataset_name, dataset_info
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
- self.remove_temp_file( temp_name )
- return None, None, is_multi_byte, None, None
- def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ):
+ if not dataset_name and 'filename' in data_file:
+ dataset_name = get_file_name( data_file['filename'] )
+ if not dataset_info:
+ dataset_info = 'uploaded file'
+ return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ #return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
+ except:
+ # The uploaded file should've been persisted by the upload tool action
+ return Bunch( type=None, path=None, name=None )
+ #return None, None, None, None, None
+ def get_url_paste_urls_or_filename( group_incoming, override_name = None, override_info = None ):
filenames = []
- if url_paste not in [ None, "" ]:
+ url_paste_file = group_incoming.get( 'url_paste', None )
+ if url_paste_file is not None:
+ url_paste = open( url_paste_file, 'r' ).read( 1024 )
if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
url_paste = url_paste.replace( '\r', '' ).split( '\n' )
for line in url_paste:
@@ -208,114 +206,54 @@
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
- except Exception, e:
- temp_name = None
- precreated_name = str( e )
- log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
+ yield Bunch( type='url', path=line, name=precreated_name )
+ #yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
if override_name:
dataset_name = override_name
if override_info:
dataset_info = override_info
- is_valid = False
- for line in url_paste: #Trim off empty lines from begining
- line = line.rstrip( '\r\n' )
- if line:
- is_valid = True
- break
- if is_valid:
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
- temp_name = None
- precreated_name = str( e )
- try:
- self.remove_temp_file( temp_name )
- except:
- pass
- yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
- #yield ( None, str( e ), False, dataset_name, dataset_info )
-
+ yield Bunch( type='file', path=url_paste_file, name=precreated_name )
+ #yield ( 'file', url_paste_file, precreated_name, dataset_name, dataset_info )
def get_one_filename( context ):
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
if url_paste.strip():
warnings.append( "All file contents specified in the paste box were ignored." )
else: #we need to use url_paste
- #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names:
- if temp_name:
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
break
- ###this check will cause an additional file to be retrieved and created...so lets not do that
- #try: #check to see if additional paste contents were available
- # file_names.next()
- # warnings.append( "Additional file contents were specified in the paste box, but ignored." )
- #except StopIteration:
- # pass
- return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings
-
+ return file_bunch, warnings
def get_filenames( context ):
rval = []
data_file = context['file_data']
url_paste = context['url_paste']
name = context.get( 'NAME', None )
info = context.get( 'INFO', None )
- warnings = []
- is_multi_byte = False
space_to_tab = False
if context.get( 'space_to_tab', None ) not in ["None", None]:
space_to_tab = True
- temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
- for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):
- if temp_name:
- rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ warnings = []
+ file_bunch = get_data_file_filename( data_file, override_name = name, override_info = info )
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
+ #rval.append( ( type, temp_name, precreated_name, space_to_tab, dataset_name, dataset_info ) )
+ for file_bunch in get_url_paste_urls_or_filename( context, override_name = name, override_info = info ):
+ if file_bunch.path:
+ file_bunch.space_to_tab = space_to_tab
+ rval.append( file_bunch )
return rval
- class UploadedDataset( Bunch ):
- def __init__( self, **kwd ):
- Bunch.__init__( self, **kwd )
- self.primary_file = None
- self.composite_files = odict()
- self.dbkey = None
- self.warnings = []
- self.metadata = {}
-
- self._temp_filenames = [] #store all created filenames here, delete on cleanup
- def register_temp_file( self, filename ):
- if isinstance( filename, list ):
- self._temp_filenames.extend( filename )
- else:
- self._temp_filenames.append( filename )
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- pass
- #log.warning( str( e ) )
- def clean_up_temp_files( self ):
- for filename in self._temp_filenames:
- self.remove_temp_file( filename )
-
file_type = self.get_file_type( context )
d_type = self.get_datatype( trans, context )
dbkey = context.get( 'dbkey', None )
@@ -325,51 +263,50 @@
for group_incoming in context.get( self.name, [] ):
i = int( group_incoming['__index__'] )
groups_incoming[ i ] = group_incoming
-
if d_type.composite_type is not None:
#handle uploading of composite datatypes
#Only one Dataset can be created
+ '''
dataset = UploadedDataset()
+ dataset.datatype = d_type
+ '''
+ dataset = Bunch()
+ dataset.type = 'composite'
dataset.file_type = file_type
+ dataset.dbkey = dbkey
dataset.datatype = d_type
- dataset.dbkey = dbkey
+ dataset.warnings = []
+ dataset.metadata = {}
+ dataset.composite_files = {}
#load metadata
files_metadata = context.get( self.metadata_ref, {} )
- for meta_name, meta_spec in d_type.metadata_spec.iteritems():
+ for meta_name, meta_spec in d_type.metadata_spec.iteritems():
if meta_spec.set_in_upload:
if meta_name in files_metadata:
dataset.metadata[ meta_name ] = files_metadata[ meta_name ]
-
- temp_name = None
- precreated_name = None
- is_multi_byte = False
- space_to_tab = False
- warnings = []
dataset_name = None
dataset_info = None
if dataset.datatype.composite_type == 'auto_primary_file':
#replace sniff here with just creating an empty file
temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
- precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ dataset.primary_file = temp_name
+ dataset.space_to_tab = False
+ dataset.precreated_name = dataset.name = 'Uploaded Composite Dataset (%s)' % ( file_type )
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] )
+ file_bunch, warnings = get_one_filename( groups_incoming[ 0 ] )
if dataset.datatype.composite_type:
precreated_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
writable_files_offset = 1
- if temp_name is None:#remove this before finish, this should create an empty dataset
+ dataset.primary_file = file_bunch.path
+ dataset.space_to_tab = file_bunch.space_to_tab
+ dataset.precreated_name = file_bunch.precreated_name
+ dataset.name = file_bunch.precreated_name
+ dataset.warnings.extend( file_bunch.warnings )
+ if dataset.primary_file is None:#remove this before finish, this should create an empty dataset
raise Exception( 'No primary dataset file was available for composite upload' )
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.precreated_name = precreated_name
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.warnings.extend( warnings )
- dataset.register_temp_file( temp_name )
-
keys = [ value.name for value in writable_files.values() ]
for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
key = keys[ i + writable_files_offset ]
@@ -377,37 +314,22 @@
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
dataset.composite_files[ key ] = None
else:
- temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming )
- if temp_name:
- dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name )
- dataset.register_temp_file( temp_name )
+ file_bunch, warnings = get_one_filename( group_incoming )
+ if file_bunch.path:
+ dataset.composite_files[ key ] = file_bunch.__dict__
else:
dataset.composite_files[ key ] = None
if not writable_files[ writable_files.keys()[ keys.index( key ) ] ].optional:
dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
return [ dataset ]
else:
+ datasets = get_filenames( context[ self.name ][0] )
rval = []
- for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ):
- dataset = UploadedDataset()
+ for dataset in datasets:
dataset.file_type = file_type
- dataset.datatype = d_type
dataset.dbkey = dbkey
- dataset.primary_file = temp_name
- dataset.is_multi_byte = is_multi_byte
- dataset.space_to_tab = space_to_tab
- dataset.name = dataset_name
- dataset.info = dataset_info
- dataset.precreated_name = precreated_name
- dataset.register_temp_file( temp_name )
rval.append( dataset )
- return rval
- def remove_temp_file( self, filename ):
- try:
- os.unlink( filename )
- except Exception, e:
- log.warning( str( e ) )
-
+ return rval
class Conditional( Group ):
type = "conditional"
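
get_uploaded_datasets() now traffics in lightweight Bunch objects rather than UploadedDataset instances. The per-file contract, pieced together from the hunks above (paths and names illustrative):

from galaxy.util.bunch import Bunch

file_bunch = Bunch( type = 'file',     # or 'url'; type=None/path=None means nothing usable
                    path = '/tmp/strio_upload_file_q7xY2',
                    name = 'reads.txt' )
file_bunch.space_to_tab = False        # set in get_filenames()
file_bunch.file_type = 'auto'          # set by the caller before returning
file_bunch.dbkey = '?'
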
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/util/__init__.py Thu Aug 20 18:44:35 2009 -0400
@@ -3,7 +3,7 @@
"""
import logging
-import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys
+import threading, random, string, re, binascii, pickle, time, datetime, math, re, os, sys, tempfile
# Older py compatibility
try:
@@ -454,6 +454,26 @@
out_dict[ str( key ) ] = value
return out_dict
+def mkstemp_ln( src, prefix='mkstemp_ln_' ):
+ """
+ From tempfile._mkstemp_inner, generate a hard link in the same dir with a
+ random name. Created so we can persist the underlying file of a
+ NamedTemporaryFile upon its closure.
+ """
+ dir = os.path.dirname(src)
+ names = tempfile._get_candidate_names()
+ for seq in xrange(tempfile.TMP_MAX):
+ name = names.next()
+ file = os.path.join(dir, prefix + name)
+ try:
+ linked_path = os.link( src, file )
+ return (os.path.abspath(file))
+ except OSError, e:
+ if e.errno == errno.EEXIST:
+ continue # try again
+ raise
+ raise IOError, (errno.EEXIST, "No usable temporary file name found")
+
galaxy_root_path = os.path.join(__path__[0], "..","..","..")
dbnames = read_dbnames( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "builds.txt" ) ) #this list is used in edit attributes and the upload tool
ucsc_build_sites = read_build_sites( os.path.join( galaxy_root_path, "tool-data", "shared", "ucsc", "ucsc_build_sites.txt" ) ) #this list is used in history.tmpl
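
mkstemp_ln() is what lets persist_uploads() in the upload action keep the contents of Paste's NamedTemporaryFile after the request ends; note the new code references errno, which the hunk assumes is already importable in galaxy.util. Usage as in persist_uploads() above, where f is a cgi.FieldStorage:

from galaxy import util

# Hard-link the named tempfile's inode under a random name in the same
# directory; the data then survives the tempfile's unlink-on-close.
local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
f.file.close()
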
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Thu Aug 20 18:44:35 2009 -0400
@@ -136,6 +136,7 @@
"""
Precreate datasets for asynchronous uploading.
"""
+ permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
def create_dataset( name, history ):
data = trans.app.model.HistoryDatasetAssociation( create_dataset = True )
data.name = name
@@ -143,6 +144,7 @@
data.history = history
data.flush()
history.add_dataset( data )
+ trans.app.security_agent.set_all_dataset_permissions( data.dataset, permissions )
return data
tool = self.get_toolbox().tools_by_id.get( tool_id, None )
if not tool:
diff -r 62e24f51b518 -r 542471b183d7 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py Thu Aug 20 12:51:39 2009 -0400
+++ b/lib/galaxy/web/framework/base.py Thu Aug 20 18:44:35 2009 -0400
@@ -212,6 +212,17 @@
else:
return None
+# For request.params, override cgi.FieldStorage.make_file to create persistent
+# tempfiles. Necessary for externalizing the upload tool. It's a little hacky
+# but for performance reasons it's way better to use Paste's tempfile than to
+# create a new one and copy.
+import cgi
+class FieldStorage( cgi.FieldStorage ):
+ def make_file(self, binary=None):
+ import tempfile
+ return tempfile.NamedTemporaryFile()
+cgi.FieldStorage = FieldStorage
+
class Request( webob.Request ):
"""
Encapsulates an HTTP request.
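
The make_file() override matters because cgi.FieldStorage's default make_file() returns an anonymous (already unlinked) TemporaryFile, while a NamedTemporaryFile keeps a real path on disk -- exactly what util.mkstemp_ln() above needs to hard-link. A quick illustration:

import tempfile

named = tempfile.NamedTemporaryFile()
print named.name    # e.g. /tmp/tmpWq3F9x -- a linkable path until close()
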
diff -r 62e24f51b518 -r 542471b183d7 templates/base_panels.mako
--- a/templates/base_panels.mako Thu Aug 20 12:51:39 2009 -0400
+++ b/templates/base_panels.mako Thu Aug 20 18:44:35 2009 -0400
@@ -72,9 +72,6 @@
<script type="text/javascript">
jQuery( function() {
$("iframe#galaxy_main").load( function() {
- ##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() {
- ##$("iframe")[0].contentDocument.body.innerHTML = "HELLO"
- ##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() {
$(this).contents().find("form").each( function() {
if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
$(this).submit( function() {
diff -r 62e24f51b518 -r 542471b183d7 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Thu Aug 20 12:51:39 2009 -0400
+++ b/test/base/twilltestcase.py Thu Aug 20 18:44:35 2009 -0400
@@ -93,6 +93,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when uploading file %s" % ( hid, filename )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
def upload_url_paste( self, url_paste, ftype='auto', dbkey='unspecified (?)' ):
"""Pasted data in the upload utility"""
self.visit_page( "tool_runner/index?tool_id=upload1" )
@@ -112,6 +114,8 @@
valid_hid = int( hid )
except:
raise AssertionError, "Invalid hid (%s) created when pasting %s" % ( hid, url_paste )
+ # Wait for upload processing to finish (TODO: this should be done in each test case instead)
+ self.wait()
# Functions associated with histories
def check_history_for_errors( self ):
diff -r 62e24f51b518 -r 542471b183d7 tools/data_source/upload.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/data_source/upload.py Thu Aug 20 18:44:35 2009 -0400
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+#Processes uploads from the user.
+
+# WARNING: Changes in this tool (particularly as related to parsing) may need
+# to be reflected in galaxy.web.controllers.tool_runner and galaxy.tools
+
+import urllib, sys, os, gzip, tempfile, shutil, re, gzip, zipfile
+from galaxy import eggs
+# need to import model before sniff to resolve a circular import dependency
+import galaxy.model
+from galaxy.datatypes import sniff
+from galaxy import util
+from galaxy.util.json import *
+
+assert sys.version_info[:2] >= ( 2, 4 )
+
+def stop_err( msg, ret=1 ):
+ sys.stderr.write( msg )
+ sys.exit( ret )
+
+def file_err( msg, dataset, json_file ):
+ json_file.write( to_json_string( dict( type = 'dataset',
+ ext = 'data',
+ dataset_id = dataset.dataset_id,
+ stderr = msg ) ) + "\n" )
+ try:
+ os.remove( dataset.path )
+ except:
+ pass
+
+def safe_dict(d):
+ """
+ Recursively clone json structure with UTF-8 dictionary keys
+ http://mellowmachines.com/blog/2009/06/exploding-dictionary-with-unicode-ke…
+ """
+ if isinstance(d, dict):
+ return dict([(k.encode('utf-8'), safe_dict(v)) for k,v in d.iteritems()])
+ elif isinstance(d, list):
+ return [safe_dict(x) for x in d]
+ else:
+ return d
+
+def check_html( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open(temp_name, "U")
+ else:
+ temp = chunk
+ regexp1 = re.compile( "<A\s+[^>]*HREF[^>]+>", re.I )
+ regexp2 = re.compile( "<IFRAME[^>]*>", re.I )
+ regexp3 = re.compile( "<FRAMESET[^>]*>", re.I )
+ regexp4 = re.compile( "<META[^>]*>", re.I )
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ matches = regexp1.search( line ) or regexp2.search( line ) or regexp3.search( line ) or regexp4.search( line )
+ if matches:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 100:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_binary( temp_name, chunk=None ):
+ if chunk is None:
+ temp = open( temp_name, "U" )
+ else:
+ temp = chunk
+ lineno = 0
+ for line in temp:
+ lineno += 1
+ line = line.strip()
+ if line:
+ for char in line:
+ if ord( char ) > 128:
+ if chunk is None:
+ temp.close()
+ return True
+ if lineno > 10:
+ break
+ if chunk is None:
+ temp.close()
+ return False
+
+def check_gzip( temp_name ):
+ temp = open( temp_name, "U" )
+ magic_check = temp.read( 2 )
+ temp.close()
+ if magic_check != util.gzip_magic:
+ return ( False, False )
+ CHUNK_SIZE = 2**15 # 32Kb
+ gzipped_file = gzip.GzipFile( temp_name )
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ gzipped_file.close()
+ if check_html( temp_name, chunk=chunk ) or check_binary( temp_name, chunk=chunk ):
+ return( True, False )
+ return ( True, True )
+
+def check_zip( temp_name ):
+ if not zipfile.is_zipfile( temp_name ):
+ return ( False, False, None )
+ zip_file = zipfile.ZipFile( temp_name, "r" )
+ # Make sure the archive consists of valid files. The current rules are:
+ # 1. Archives can only include .ab1, .scf or .txt files
+ # 2. All file extensions within an archive must be the same
+ name = zip_file.namelist()[0]
+ test_ext = name.split( "." )[1].strip().lower()
+ if not ( test_ext == 'scf' or test_ext == 'ab1' or test_ext == 'txt' ):
+ return ( True, False, test_ext )
+ for name in zip_file.namelist():
+ ext = name.split( "." )[1].strip().lower()
+ if ext != test_ext:
+ return ( True, False, test_ext )
+ return ( True, True, test_ext )
+
+def add_file( dataset, json_file ):
+ data_type = None
+ line_count = None
+
+ if dataset.type == 'url':
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( dataset.path ), prefix='url_paste' )
+ except Exception, e:
+ file_err( 'Unable to fetch %s\n%s' % ( dataset.path, str( e ) ), dataset, json_file )
+ return
+ dataset.path = temp_name
+ dataset.is_multi_byte = is_multi_byte
+
+ # See if we have an empty file
+ if not os.path.exists( dataset.path ):
+ file_err( 'Uploaded temporary file (%s) does not exist. Please' % dataset.path, dataset, json_file )
+ return
+ if not os.path.getsize( dataset.path ) > 0:
+ file_err( 'The uploaded file is empty', dataset, json_file )
+ return
+ if 'is_multi_byte' not in dir( dataset ):
+ dataset.is_multi_byte = util.is_multi_byte( open( dataset.path, 'r' ).read( 1024 )[:100] )
+ if dataset.is_multi_byte:
+ ext = sniff.guess_ext( dataset.path, is_multi_byte=True )
+ data_type = ext
+ else:
+ # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
+ is_gzipped, is_valid = check_gzip( dataset.path )
+ if is_gzipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_gzipped and is_valid:
+ # We need to uncompress the temp_name file
+ CHUNK_SIZE = 2**20 # 1Mb
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( dataset.path ) )
+ gzipped_file = gzip.GzipFile( dataset.path )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ file_err( 'Problem decompressing gzipped data', dataset, json_file )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ # Replace the gzipped file with the decompressed file
+ shutil.move( uncompressed, dataset.path )
+            # rstrip() strips a character set, not a suffix, so trim '.gz' explicitly
+            if dataset.name.endswith( '.gz' ):
+                dataset.name = dataset.name[ :-3 ]
+ data_type = 'gzip'
+ if not data_type:
+ # See if we have a zip archive
+ is_zipped, is_valid, test_ext = check_zip( dataset.path )
+ if is_zipped and not is_valid:
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_zipped and is_valid:
+ # Currently, we force specific tools to handle this case. We also require the user
+ # to manually set the incoming file_type
+ if ( test_ext == 'ab1' or test_ext == 'scf' ) and dataset.file_type != 'binseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of binary files - use 'Binseq.zip'", dataset, json_file )
+ return
+ elif test_ext == 'txt' and dataset.file_type != 'txtseq.zip':
+ file_err( "Invalid 'File Format' for archive consisting of text files - use 'Txtseq.zip'", dataset, json_file )
+ return
+ if not ( dataset.file_type == 'binseq.zip' or dataset.file_type == 'txtseq.zip' ):
+ file_err( "You must manually set the 'File Format' to either 'Binseq.zip' or 'Txtseq.zip' when uploading zip files", dataset, json_file )
+ return
+ data_type = 'zip'
+ ext = dataset.file_type
+ if not data_type:
+ if check_binary( dataset.path ):
+ if dataset.is_binary is not None:
+ data_type = 'binary'
+ ext = dataset.file_type
+ else:
+ parts = dataset.name.split( "." )
+ if len( parts ) > 1:
+                    ext = parts[-1].strip().lower()
+                    if not ( ext == 'ab1' or ext == 'scf' ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if ext == 'ab1' and dataset.file_type != 'ab1':
+ file_err( "You must manually set the 'File Format' to 'Ab1' when uploading ab1 files.", dataset, json_file )
+ return
+ elif ext == 'scf' and dataset.file_type != 'scf':
+ file_err( "You must manually set the 'File Format' to 'Scf' when uploading scf files.", dataset, json_file )
+ return
+ data_type = 'binary'
+ if not data_type:
+ # We must have a text file
+ if check_html( dataset.path ):
+ file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
+ return
+ if data_type != 'binary' and data_type != 'zip':
+ if dataset.space_to_tab:
+ line_count = sniff.convert_newlines_sep2tabs( dataset.path )
+ else:
+ line_count = sniff.convert_newlines( dataset.path )
+ if dataset.file_type == 'auto':
+ ext = sniff.guess_ext( dataset.path )
+ else:
+ ext = dataset.file_type
+ data_type = ext
+ # Save job info for the framework
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.path,
+ ext = ext,
+ stdout = 'uploaded %s file' % data_type,
+ name = dataset.name,
+ line_count = line_count )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def add_composite_file( dataset, json_file ):
+ if dataset.composite_files:
+ os.mkdir( dataset.extra_files_path )
+ for name, value in dataset.composite_files.iteritems():
+ value = util.bunch.Bunch( **value )
+ if dataset.composite_file_paths[ value.name ] is None and not value.optional:
+ file_err( 'A required composite data file was not provided (%s)' % name, dataset, json_file )
+ break
+ elif dataset.composite_file_paths[value.name] is not None:
+ if not value.is_binary:
+                if value.space_to_tab: # value is the Bunch built from composite_files[ name ] above
+ sniff.convert_newlines_sep2tabs( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ else:
+ sniff.convert_newlines( dataset.composite_file_paths[ value.name ][ 'path' ] )
+ shutil.move( dataset.composite_file_paths[ value.name ][ 'path' ], os.path.join( dataset.extra_files_path, name ) )
+ info = dict( type = 'dataset',
+ dataset_id = dataset.dataset_id,
+ path = dataset.primary_file,
+ stdout = 'uploaded %s file' % dataset.file_type )
+ json_file.write( to_json_string( info ) + "\n" )
+
+def __main__():
+
+ if len( sys.argv ) != 2:
+ print >>sys.stderr, 'usage: upload.py <json paramfile>'
+ sys.exit( 1 )
+
+ json_file = open( 'galaxy.json', 'w' )
+
+ for line in open( sys.argv[1], 'r' ):
+ dataset = from_json_string( line )
+ dataset = util.bunch.Bunch( **safe_dict( dataset ) )
+
+ if dataset.type == 'composite':
+ add_composite_file( dataset, json_file )
+ else:
+ add_file( dataset, json_file )
+
+ # clean up paramfile
+ try:
+ os.remove( sys.argv[1] )
+ except:
+ pass
+
+if __name__ == '__main__':
+ __main__()
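+# For orientation: upload.py reads one JSON object per line from the paramfile
+# and writes one per dataset to galaxy.json. A hypothetical round trip for a
+# plain text upload (field names taken from add_file() above, values purely
+# illustrative):
+#
+# one line of the incoming <json paramfile>:
+#   {"type": "file", "dataset_id": 42, "path": "/tmp/upload_abc", "file_type": "auto", "space_to_tab": false, "is_binary": null, "name": "reads.txt"}
+# matching line written to galaxy.json:
+#   {"type": "dataset", "dataset_id": 42, "path": "/tmp/upload_abc", "ext": "tabular", "stdout": "uploaded tabular file", "name": "reads.txt", "line_count": 100}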
diff -r 62e24f51b518 -r 542471b183d7 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Thu Aug 20 12:51:39 2009 -0400
+++ b/tools/data_source/upload.xml Thu Aug 20 18:44:35 2009 -0400
@@ -1,10 +1,13 @@
<?xml version="1.0"?>
-<tool name="Upload File" id="upload1" version="1.0.2">
+<tool name="Upload File" id="upload1" version="1.0.3">
<description>
from your computer
</description>
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
+ <command interpreter="python">
+ upload.py $paramfile
+ </command>
<inputs>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
<options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ "%s%s%s" % ( line, self.separator, line ) for line in obj ]">
details: http://www.bx.psu.edu/hg/galaxy/rev/97170896bb91
changeset: 2600:97170896bb91
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 11:04:45 2009 -0400
description:
Remove redundant auto-save js
2 file(s) affected in this change:
templates/workflow/editor_generic_form.mako
templates/workflow/editor_tool_form.mako
diffs (23 lines):
diff -r bedfce38e652 -r 97170896bb91 templates/workflow/editor_generic_form.mako
--- a/templates/workflow/editor_generic_form.mako Thu Aug 20 10:59:13 2009 -0400
+++ b/templates/workflow/editor_generic_form.mako Thu Aug 20 11:04:45 2009 -0400
@@ -40,7 +40,3 @@
</form>
</div>
</div>
-
-<script type="text/javascript">
- workflow.enable_auto_save();
-</script>
diff -r bedfce38e652 -r 97170896bb91 templates/workflow/editor_tool_form.mako
--- a/templates/workflow/editor_tool_form.mako Thu Aug 20 10:59:13 2009 -0400
+++ b/templates/workflow/editor_tool_form.mako Thu Aug 20 11:04:45 2009 -0400
@@ -108,8 +108,3 @@
</form>
</div>
</div>
-
-<script type="text/javascript">
- workflow.enable_auto_save();
-</script>
-
details: http://www.bx.psu.edu/hg/galaxy/rev/0517fd342fc8
changeset: 2601:0517fd342fc8
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 12:41:37 2009 -0400
description:
Use JSON2.js for all JSON operations, and remove unused javascript files
7 file(s) affected in this change:
static/scripts/cookie_set.js
static/scripts/jquery.cookie.js
static/scripts/jquery.json.js
static/scripts/packed/cookie_set.js
static/scripts/packed/jquery.cookie.js
static/scripts/packed/jquery.json.js
templates/workflow/editor.mako
diffs (270 lines):
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/cookie_set.js
--- a/static/scripts/cookie_set.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,30 +0,0 @@
-function CookieSet( cookie_name ) {
- this.cookie_name = cookie_name;
- this.store = store = {};
- jQuery.each( ( jQuery.cookie( cookie_name) || "" ).split( "|" ), function( k, v ) {
- store[ v ] = true;
- });
-};
-CookieSet.prototype.add = function( value ) {
- this.store[value] = true;
- return this;
-};
-CookieSet.prototype.remove = function( value ) {
- delete this.store[value];
- return this;
-};
-CookieSet.prototype.removeAll = function( value ) {
- this.store = {};
- return this;
-};
-CookieSet.prototype.contains = function( value ) {
- return ( value in this.store );
-};
-CookieSet.prototype.save = function() {
- t = [];
- for ( key in this.store ) {
- if ( key != "" ) { t.push( key ) }
- }
- jQuery.cookie( this.cookie_name, t.join( "|" ) );
- return this;
-};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/jquery.cookie.js
--- a/static/scripts/jquery.cookie.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,92 +0,0 @@
-/**
- * Cookie plugin
- *
- * Copyright (c) 2006 Klaus Hartl (stilbuero.de)
- * Dual licensed under the MIT and GPL licenses:
- * http://www.opensource.org/licenses/mit-license.php
- * http://www.gnu.org/licenses/gpl.html
- *
- */
-
-/**
- * Create a cookie with the given name and value and other optional parameters.
- *
- * @example $.cookie('the_cookie', 'the_value');
- * @desc Set the value of a cookie.
- * @example $.cookie('the_cookie', 'the_value', {expires: 7, path: '/', domain: 'jquery.com', secure: true});
- * @desc Create a cookie with all available options.
- * @example $.cookie('the_cookie', 'the_value');
- * @desc Create a session cookie.
- * @example $.cookie('the_cookie', null);
- * @desc Delete a cookie by passing null as value.
- *
- * @param String name The name of the cookie.
- * @param String value The value of the cookie.
- * @param Object options An object literal containing key/value pairs to provide optional cookie attributes.
- * @option Number|Date expires Either an integer specifying the expiration date from now on in days or a Date object.
- * If a negative value is specified (e.g. a date in the past), the cookie will be deleted.
- * If set to null or omitted, the cookie will be a session cookie and will not be retained
- * when the the browser exits.
- * @option String path The value of the path atribute of the cookie (default: path of page that created the cookie).
- * @option String domain The value of the domain attribute of the cookie (default: domain of page that created the cookie).
- * @option Boolean secure If true, the secure attribute of the cookie will be set and the cookie transmission will
- * require a secure protocol (like HTTPS).
- * @type undefined
- *
- * @name $.cookie
- * @cat Plugins/Cookie
- * @author Klaus Hartl/klaus.hartl(a)stilbuero.de
- */
-
-/**
- * Get the value of a cookie with the given name.
- *
- * @example $.cookie('the_cookie');
- * @desc Get the value of a cookie.
- *
- * @param String name The name of the cookie.
- * @return The value of the cookie.
- * @type String
- *
- * @name $.cookie
- * @cat Plugins/Cookie
- * @author Klaus Hartl/klaus.hartl(a)stilbuero.de
- */
-jQuery.cookie = function(name, value, options) {
- if (typeof value != 'undefined') { // name and value given, set cookie
- options = options || {};
- if (value === null) {
- value = '';
- options.expires = -1;
- }
- var expires = '';
- if (options.expires && (typeof options.expires == 'number' || options.expires.toUTCString)) {
- var date;
- if (typeof options.expires == 'number') {
- date = new Date();
- date.setTime(date.getTime() + (options.expires * 24 * 60 * 60 * 1000));
- } else {
- date = options.expires;
- }
- expires = '; expires=' + date.toUTCString(); // use expires attribute, max-age is not supported by IE
- }
- var path = options.path ? '; path=' + options.path : '';
- var domain = options.domain ? '; domain=' + options.domain : '';
- var secure = options.secure ? '; secure' : '';
- document.cookie = [name, '=', encodeURIComponent(value), expires, path, domain, secure].join('');
- } else { // only name given, get cookie
- var cookieValue = null;
- if (document.cookie && document.cookie != '') {
- var cookies = document.cookie.split(';');
- for (var i = 0; i < cookies.length; i++) {
- var cookie = jQuery.trim(cookies[i]);
- // Does this cookie string begin with the name we want?
- if (cookie.substring(0, name.length + 1) == (name + '=')) {
- cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
- break;
- }
- }
- }
- return cookieValue;
- }
-};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/jquery.json.js
--- a/static/scripts/jquery.json.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,96 +0,0 @@
-(function ($) {
- var m = {
- '\b': '\\b',
- '\t': '\\t',
- '\n': '\\n',
- '\f': '\\f',
- '\r': '\\r',
- '"' : '\\"',
- '\\': '\\\\'
- },
- s = {
- 'array': function (x) {
- var a = ['['], b, f, i, l = x.length, v;
- for (i = 0; i < l; i += 1) {
- v = x[i];
- f = s[typeof v];
- if (f) {
- v = f(v);
- if (typeof v == 'string') {
- if (b) {
- a[a.length] = ',';
- }
- a[a.length] = v;
- b = true;
- }
- }
- }
- a[a.length] = ']';
- return a.join('');
- },
- 'boolean': function (x) {
- return String(x);
- },
- 'null': function (x) {
- return "null";
- },
- 'number': function (x) {
- return isFinite(x) ? String(x) : 'null';
- },
- 'object': function (x) {
- if (x) {
- if (x instanceof Array) {
- return s.array(x);
- }
- var a = ['{'], b, f, i, v;
- for (i in x) {
- v = x[i];
- f = s[typeof v];
- if (f) {
- v = f(v);
- if (typeof v == 'string') {
- if (b) {
- a[a.length] = ',';
- }
- a.push(s.string(i), ':', v);
- b = true;
- }
- }
- }
- a[a.length] = '}';
- return a.join('');
- }
- return 'null';
- },
- 'string': function (x) {
- if (/["\\\x00-\x1f]/.test(x)) {
- x = x.replace(/([\x00-\x1f\\"])/g, function(a, b) {
- var c = m[b];
- if (c) {
- return c;
- }
- c = b.charCodeAt();
- return '\\u00' +
- Math.floor(c / 16).toString(16) +
- (c % 16).toString(16);
- });
- }
- return '"' + x + '"';
- }
- };
-
- $.toJSON = function(v) {
- var f = isNaN(v) ? s[typeof v] : s['number'];
- if (f) return f(v);
- };
-
- $.parseJSON = function(v, safe) {
- if (safe === undefined) safe = $.parseJSON.safe;
- if (safe && !/^("(\\.|[^"\\\n\r])*?"|[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t])+?$/.test(v))
- return undefined;
- return eval('('+v+')');
- };
-
- $.parseJSON.safe = false;
-
-})(jQuery);
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/packed/cookie_set.js
--- a/static/scripts/packed/cookie_set.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-function CookieSet(a){this.cookie_name=a;this.store=store={};jQuery.each((jQuery.cookie(a)||"").split("|"),function(c,b){store[b]=true})}CookieSet.prototype.add=function(a){this.store[a]=true;return this};CookieSet.prototype.remove=function(a){delete this.store[a];return this};CookieSet.prototype.removeAll=function(a){this.store={};return this};CookieSet.prototype.contains=function(a){return(a in this.store)};CookieSet.prototype.save=function(){t=[];for(key in this.store){if(key!=""){t.push(key)}}jQuery.cookie(this.cookie_name,t.join("|"));return this};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/packed/jquery.cookie.js
--- a/static/scripts/packed/jquery.cookie.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-jQuery.cookie=function(b,j,m){if(typeof j!="undefined"){m=m||{};if(j===null){j="";m.expires=-1}var e="";if(m.expires&&(typeof m.expires=="number"||m.expires.toUTCString)){var f;if(typeof m.expires=="number"){f=new Date();f.setTime(f.getTime()+(m.expires*24*60*60*1000))}else{f=m.expires}e="; expires="+f.toUTCString()}var l=m.path?"; path="+m.path:"";var g=m.domain?"; domain="+m.domain:"";var a=m.secure?"; secure":"";document.cookie=[b,"=",encodeURIComponent(j),e,l,g,a].join("")}else{var d=null;if(document.cookie&&document.cookie!=""){var k=document.cookie.split(";");for(var h=0;h<k.length;h++){var c=jQuery.trim(k[h]);if(c.substring(0,b.length+1)==(b+"=")){d=decodeURIComponent(c.substring(b.length+1));break}}}return d}};
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 static/scripts/packed/jquery.json.js
--- a/static/scripts/packed/jquery.json.js Thu Aug 20 11:04:45 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-(function($){var m={"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r":"\\r",'"':'\\"',"\\":"\\\\"},s={array:function(x){var a=["["],b,f,i,l=x.length,v;for(i=0;i<l;i+=1){v=x[i];f=s[typeof v];if(f){v=f(v);if(typeof v=="string"){if(b){a[a.length]=","}a[a.length]=v;b=true}}}a[a.length]="]";return a.join("")},"boolean":function(x){return String(x)},"null":function(x){return"null"},number:function(x){return isFinite(x)?String(x):"null"},object:function(x){if(x){if(x instanceof Array){return s.array(x)}var a=["{"],b,f,i,v;for(i in x){v=x[i];f=s[typeof v];if(f){v=f(v);if(typeof v=="string"){if(b){a[a.length]=","}a.push(s.string(i),":",v);b=true}}}a[a.length]="}";return a.join("")}return"null"},string:function(x){if(/["\\\x00-\x1f]/.test(x)){x=x.replace(/([\x00-\x1f\\"])/g,function(a,b){var c=m[b];if(c){return c}c=b.charCodeAt();return"\\u00"+Math.floor(c/16).toString(16)+(c%16).toString(16)})}return'"'+x+'"'}};$.toJSON=function(v){var f=isNaN(v)?s[typeof v]:s.number;if(f){return f(v)}};$.parseJSON=function(v,safe){if(safe===undefined){safe=$.parseJSON.safe}if(safe&&!/^("(\\.|[^"\\\n\r])*?"|[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t])+?$/.test(v)){return undefined}return eval("("+v+")")};$.parseJSON.safe=false})(jQuery);
\ No newline at end of file
diff -r 97170896bb91 -r 0517fd342fc8 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Thu Aug 20 11:04:45 2009 -0400
+++ b/templates/workflow/editor.mako Thu Aug 20 12:41:37 2009 -0400
@@ -30,7 +30,6 @@
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.drop.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.event.hover.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.form.js')}"> </script>
- <script type='text/javascript' src="${h.url_for('/static/scripts/jquery.json.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/jquery.jstore-all.js')}"> </script>
<script type='text/javascript' src="${h.url_for('/static/scripts/galaxy.base.js')}"> </script>
@@ -350,7 +349,7 @@
type: "POST",
data: {
id: "${trans.security.encode_id( workflow_id )}",
- workflow_data: function() { return $.toJSON( workflow.to_simple() ) },
+ workflow_data: function() { return JSON.stringify( workflow.to_simple() ) },
"_": "true"
},
dataType: 'json',
details: http://www.bx.psu.edu/hg/galaxy/rev/b77721ef035d
changeset: 2596:b77721ef035d
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Aug 21 17:03:42 2009 -0400
description:
Fix collect_primary_datasets() and add collected primary/child datasets to job.
Add new filter to dynamic_options: remove_value.
Fix error in exception when rerunning a dataset with a JobToOutputDatasetAssociation but with no job.
3 file(s) affected in this change:
lib/galaxy/tools/__init__.py
lib/galaxy/tools/parameters/dynamic_options.py
lib/galaxy/web/controllers/tool_runner.py
diffs (150 lines):
diff -r 108533bf35b8 -r b77721ef035d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Fri Aug 21 15:18:17 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Fri Aug 21 17:03:42 2009 -0400
@@ -1534,10 +1534,19 @@
child_dataset.flush()
child_dataset.set_size()
child_dataset.name = "Secondary Dataset (%s)" % ( designation )
- child_dataset.state = child_dataset.states.OK
child_dataset.init_meta()
child_dataset.set_meta()
child_dataset.set_peek()
+ # Associate new dataset with job
+ job = None
+ for assoc in outdata.creating_job_associations:
+ job = assoc.job
+ break
+ if job:
+ assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s__' % designation, child_dataset )
+ assoc.job = job
+ assoc.flush()
+ child_dataset.state = outdata.state
child_dataset.flush()
# Add child to return dict
children[name][designation] = child_dataset
@@ -1550,7 +1559,7 @@
def collect_primary_datasets( self, output):
primary_datasets = {}
- #Loop through output file names, looking for generated primary datasets in form of 'primary_associatedWithDatasetID_designation_visibility_extension'
+ #Loop through output file names, looking for generated primary datasets in form of 'primary_associatedWithDatasetID_designation_visibility_extension(_DBKEY)'
for name, outdata in output.items():
for filename in glob.glob(os.path.join(self.app.config.new_file_path,"primary_%i_*" % outdata.id) ):
if not name in primary_datasets:
@@ -1563,19 +1572,32 @@
if visible == "visible": visible = True
else: visible = False
ext = fields.pop(0).lower()
+ dbkey = outdata.dbkey
+ if fields:
+ dbkey = fields[ 0 ]
# Create new primary dataset
- primary_data = self.app.model.HistoryDatasetAssociation( extension=ext, designation=designation, visible=visible, dbkey=outdata.dbkey, create_dataset=True )
+ primary_data = self.app.model.HistoryDatasetAssociation( extension=ext, designation=designation, visible=visible, dbkey=dbkey, create_dataset=True )
self.app.security_agent.copy_dataset_permissions( outdata.dataset, primary_data.dataset )
primary_data.flush()
# Move data from temp location to dataset location
shutil.move( filename, primary_data.file_name )
primary_data.set_size()
- primary_data.name = dataset.name
- primary_data.info = dataset.info
- primary_data.state = primary_data.states.OK
- primary_data.init_meta( copy_from=dataset )
+ primary_data.name = outdata.name
+ primary_data.info = outdata.info
+ primary_data.init_meta( copy_from=outdata )
+ primary_data.dbkey = dbkey
primary_data.set_meta()
primary_data.set_peek()
+ # Associate new dataset with job
+ job = None
+ for assoc in outdata.creating_job_associations:
+ job = assoc.job
+ break
+ if job:
+ assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s__' % designation, primary_data )
+ assoc.job = job
+ assoc.flush()
+ primary_data.state = outdata.state
primary_data.flush()
outdata.history.add_dataset( primary_data )
# Add dataset to return dict
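+# The filename convention parsed above is easiest to see on a concrete,
+# hypothetical example; assuming a designation without underscores, the
+# parsing proceeds roughly as:
+#
+#     filename = 'primary_42_output2_visible_bed_hg18'   # hypothetical tool output
+#     fields = filename.split( '_' )[ 2: ]       # skip 'primary' and the dataset id
+#     designation = fields.pop( 0 )              # 'output2'
+#     visible = fields.pop( 0 ) == 'visible'     # True
+#     ext = fields.pop( 0 ).lower()              # 'bed'
+#     dbkey = fields and fields[ 0 ] or None     # 'hg18'; when absent, outdata.dbkey is kept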
diff -r 108533bf35b8 -r b77721ef035d lib/galaxy/tools/parameters/dynamic_options.py
--- a/lib/galaxy/tools/parameters/dynamic_options.py Fri Aug 21 15:18:17 2009 -0400
+++ b/lib/galaxy/tools/parameters/dynamic_options.py Fri Aug 21 17:03:42 2009 -0400
@@ -242,6 +242,55 @@
rval.append( add_value )
return rval
+class RemoveValueFilter( Filter ):
+ """
+ Removes a value from an options list.
+
+ Type: remove_value
+
+ Required Attributes:
+ value: value to remove from select list
+ or
+ ref: param to refer to
+ or
+ meta_ref: dataset to refer to
+ key: metadata key to compare to
+ """
+ def __init__( self, d_option, elem ):
+ Filter.__init__( self, d_option, elem )
+ self.value = elem.get( "value", None )
+ self.ref_name = elem.get( "ref", None )
+ self.meta_ref = elem.get( "meta_ref", None )
+ self.metadata_key = elem.get( "key", None )
+        assert self.value is not None or ( ( self.ref_name is not None or self.meta_ref is not None ) and self.metadata_key is not None ), ValueError( "Required 'value' or 'ref' and 'key' attributes missing from filter" )
+ self.multiple = string_as_bool( elem.get( "multiple", "False" ) )
+ self.separator = elem.get( "separator", "," )
+ def filter_options( self, options, trans, other_values ):
+ if trans is not None and trans.workflow_building_mode: return options
+ assert self.value is not None or ( self.ref_name is not None and self.ref_name in other_values ) or (self.meta_ref is not None and self.meta_ref in other_values ) or ( trans is not None and trans.workflow_building_mode), Exception( "Required dependency '%s' or '%s' not found in incoming values" % ( self.ref_name, self.meta_ref ) )
+ def compare_value( option_value, filter_value ):
+ if isinstance( filter_value, list ):
+ if self.multiple:
+ option_value = option_value.split( self.separator )
+ for value in filter_value:
+                    if value not in option_value:
+ return False
+ return True
+ return option_value in filter_value
+ if self.multiple:
+ return filter_value in option_value.split( self.separator )
+ return option_value == filter_value
+ value = self.value
+ if value is None:
+ if self.ref_name is not None:
+ value = other_values.get( self.ref_name )
+ else:
+ data_ref = other_values.get( self.meta_ref )
+ if not isinstance( data_ref, self.dynamic_option.tool_param.tool.app.model.HistoryDatasetAssociation ):
+ return options #cannot modify options
+ value = data_ref.metadata.get( self.metadata_key, None )
+ return [ ( disp_name, optval, selected ) for disp_name, optval, selected in options if not compare_value( optval, value ) ]
+
class SortByColumnFilter( Filter ):
"""
Sorts an options list by a column
@@ -274,6 +323,7 @@
unique_value = UniqueValueFilter,
multiple_splitter = MultipleSplitterFilter,
add_value = AdditionalValueFilter,
+ remove_value = RemoveValueFilter,
sort_by = SortByColumnFilter )
class DynamicOptions( object ):
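+# Concretely, RemoveValueFilter prunes matching entries from a list of
+# ( display name, value, selected ) option tuples; for the simple value= case
+# the effect reduces to the following sketch (the ref/meta_ref indirection is
+# omitted):
+#
+#     options = [ ( 'hg18', 'hg18', False ), ( 'hg19', 'hg19', False ), ( '?', '?', True ) ]
+#     remove = '?'
+#     print [ ( n, v, s ) for n, v, s in options if v != remove ]
+#     # [('hg18', 'hg18', False), ('hg19', 'hg19', False)]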
diff -r 108533bf35b8 -r b77721ef035d lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Fri Aug 21 15:18:17 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Fri Aug 21 17:03:42 2009 -0400
@@ -82,7 +82,7 @@
job = assoc.job
break
if not job:
- raise Exception("Failed to get job information for dataset hid %d" % hid)
+ raise Exception("Failed to get job information for dataset hid %d" % data.hid)
# Get the tool object
tool_id = job.tool_id
try:
details: http://www.bx.psu.edu/hg/galaxy/rev/bedfce38e652
changeset: 2599:bedfce38e652
user: Kanwei Li <kanwei(a)gmail.com>
date: Thu Aug 20 10:59:13 2009 -0400
description:
Merge trunk
0 file(s) affected in this change:
diffs (438 lines):
diff -r 3049432643f4 -r bedfce38e652 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Wed Aug 19 17:55:28 2009 -0400
+++ b/datatypes_conf.xml.sample Thu Aug 20 10:59:13 2009 -0400
@@ -3,6 +3,7 @@
<registration converters_path="lib/galaxy/datatypes/converters">
<datatype extension="ab1" type="galaxy.datatypes.images:Ab1" mimetype="application/octet-stream" display_in_upload="true"/>
<datatype extension="axt" type="galaxy.datatypes.sequence:Axt" display_in_upload="true"/>
+ <datatype extension="bam" type="galaxy.datatypes.images:Bam" mimetype="application/octet-stream"/>
<datatype extension="bed" type="galaxy.datatypes.interval:Bed" display_in_upload="true">
<converter file="bed_to_gff_converter.xml" target_datatype="gff"/>
<converter file="interval_to_coverage.xml" target_datatype="coverage"/>
@@ -49,6 +50,7 @@
<datatype extension="qualsolexa" type="galaxy.datatypes.qualityscore:QualityScoreSolexa" display_in_upload="true"/>
<datatype extension="qualsolid" type="galaxy.datatypes.qualityscore:QualityScoreSOLiD" display_in_upload="true"/>
<datatype extension="qual454" type="galaxy.datatypes.qualityscore:QualityScore454" display_in_upload="true"/>
+ <datatype extension="sam" type="galaxy.datatypes.tabular:Sam" display_in_upload="true"/>
<datatype extension="scf" type="galaxy.datatypes.images:Scf" mimetype="application/octet-stream" display_in_upload="true"/>
<datatype extension="taxonomy" type="galaxy.datatypes.tabular:Taxonomy" display_in_upload="true"/>
<datatype extension="tabular" type="galaxy.datatypes.tabular:Tabular" display_in_upload="true"/>
@@ -205,5 +207,6 @@
<sniffer type="galaxy.datatypes.interval:Gff"/>
<sniffer type="galaxy.datatypes.interval:Gff3"/>
<sniffer type="galaxy.datatypes.interval:Interval"/>
+ <sniffer type="galaxy.datatypes.tabular:Sam"/>
</sniffers>
</datatypes>
diff -r 3049432643f4 -r bedfce38e652 lib/galaxy/datatypes/images.py
--- a/lib/galaxy/datatypes/images.py Wed Aug 19 17:55:28 2009 -0400
+++ b/lib/galaxy/datatypes/images.py Thu Aug 20 10:59:13 2009 -0400
@@ -4,6 +4,8 @@
import data
import logging
+from galaxy.datatypes.metadata import MetadataElement
+from galaxy.datatypes import metadata
from galaxy.datatypes.sniff import *
from urllib import urlencode, quote_plus
import zipfile
@@ -187,7 +189,7 @@
return 'text/html'
def sniff( self, filename ):
"""
- Determines wether the file is in html format
+ Determines whether the file is in html format
>>> fname = get_test_fname( 'complete.bed' )
>>> Html().sniff( fname )
@@ -233,3 +235,25 @@
return dataset.peek
except:
return "peek unavailable"
+
+class Bam( data.Binary ):
+ """Class describing a BAM binary file"""
+ file_ext = "bam"
+ MetadataElement( name="bam_index", desc="BAM Index File", param=metadata.FileParameter, readonly=True, no_value=None, visible=False, optional=True )
+ def set_peek( self, dataset ):
+ if not dataset.dataset.purged:
+ export_url = "/history_add_to?" + urlencode({'history_id':dataset.history_id,'ext':'bam','name':'bam alignments','info':'Alignments file','dbkey':dataset.dbkey})
+ dataset.peek = "Binary bam alignments file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+ def display_peek(self, dataset):
+ try:
+ return dataset.peek
+ except:
+ return "Binary bam alignments file (%s)" % ( data.nice_size( dataset.get_size() ) )
+ def get_mime(self):
+ """Returns the mime type of the datatype"""
+ return 'application/octet-stream'
+
\ No newline at end of file
diff -r 3049432643f4 -r bedfce38e652 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Wed Aug 19 17:55:28 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Thu Aug 20 10:59:13 2009 -0400
@@ -111,6 +111,7 @@
self.datatypes_by_extension = {
'ab1' : images.Ab1(),
'axt' : sequence.Axt(),
+ 'bam' : images.Bam(),
'bed' : interval.Bed(),
'binseq.zip' : images.Binseq(),
'blastxml' : xml.BlastXml(),
@@ -130,6 +131,7 @@
'qualsolid' : qualityscore.QualityScoreSOLiD(),
'qualsolexa' : qualityscore.QualityScoreSolexa(),
'qual454' : qualityscore.QualityScore454(),
+ 'sam' : tabular.Sam(),
'scf' : images.Scf(),
'tabular' : tabular.Tabular(),
'taxonomy' : tabular.Taxonomy(),
@@ -140,6 +142,7 @@
self.mimetypes_by_extension = {
'ab1' : 'application/octet-stream',
'axt' : 'text/plain',
+ 'bam' : 'application/octet-stream',
'bed' : 'text/plain',
'binseq.zip' : 'application/zip',
'blastxml' : 'text/plain',
@@ -157,6 +160,7 @@
'qualsolid' : 'text/plain',
'qualsolexa' : 'text/plain',
'qual454' : 'text/plain',
+ 'sam' : 'text/plain',
'scf' : 'application/octet-stream',
'tabular' : 'text/plain',
'taxonomy' : 'text/plain',
@@ -184,7 +188,8 @@
interval.CustomTrack(),
interval.Gff(),
interval.Gff3(),
- interval.Interval()
+ interval.Interval(),
+ tabular.Sam()
]
def append_to_sniff_order():
# Just in case any supported data types are not included in the config's sniff_order section.
diff -r 3049432643f4 -r bedfce38e652 lib/galaxy/datatypes/tabular.py
--- a/lib/galaxy/datatypes/tabular.py Wed Aug 19 17:55:28 2009 -0400
+++ b/lib/galaxy/datatypes/tabular.py Thu Aug 20 10:59:13 2009 -0400
@@ -11,6 +11,7 @@
from cgi import escape
from galaxy.datatypes import metadata
from galaxy.datatypes.metadata import MetadataElement
+from sniff import *
log = logging.getLogger(__name__)
@@ -236,3 +237,84 @@
out = "Can't create peek %s" % exc
return out
+class Sam( Tabular ):
+ file_ext = 'sam'
+ def __init__(self, **kwd):
+ """Initialize taxonomy datatype"""
+ Tabular.__init__( self, **kwd )
+ self.column_names = ['QNAME', 'FLAG', 'RNAME', 'POS', 'MAPQ', 'CIGAR',
+ 'MRNM', 'MPOS', 'ISIZE', 'SEQ', 'QUAL', 'OPT'
+ ]
+ def make_html_table( self, dataset, skipchars=[] ):
+ """Create HTML table, used for displaying peek"""
+ out = ['<table cellspacing="0" cellpadding="3">']
+ try:
+ # Generate column header
+ out.append( '<tr>' )
+ for i, name in enumerate( self.column_names ):
+ out.append( '<th>%s.%s</th>' % ( str( i+1 ), name ) )
+ # This data type requires at least 11 columns in the data
+ if dataset.metadata.columns - len( self.column_names ) > 0:
+ for i in range( len( self.column_names ), dataset.metadata.columns ):
+ out.append( '<th>%s</th>' % str( i+1 ) )
+ out.append( '</tr>' )
+ out.append( self.make_html_peek_rows( dataset, skipchars=skipchars ) )
+ out.append( '</table>' )
+ out = "".join( out )
+ except Exception, exc:
+ out = "Can't create peek %s" % exc
+ return out
+ def sniff( self, filename ):
+ """
+ Determines whether the file is in SAM format
+
+ A file in SAM format consists of lines of tab-separated data.
+ The following header line may be the first line:
+ @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL
+ or
+ @QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+ Data in the OPT column is optional and can consist of tab-separated data
+
+ For complete details see http://samtools.sourceforge.net/SAM1.pdf
+
+ Rules for sniffing as True:
+ There must be 11 or more columns of data on each line
+        Columns 2 (FLAG), 4 (POS), 5 (MAPQ), 8 (MPOS), and 9 (ISIZE) must be numbers (9 can be negative)
+ We will only check that up to the first 5 alignments are correctly formatted.
+
+ >>> fname = get_test_fname( 'sequence.maf' )
+ >>> Sam().sniff( fname )
+ False
+ >>> fname = get_test_fname( '1.sam' )
+ >>> Sam().sniff( fname )
+ True
+ """
+ try:
+ fh = open( filename )
+ count = 0
+ while True:
+ line = fh.readline()
+ line = line.strip()
+ if not line:
+ break #EOF
+ if line:
+ if line[0] != '@':
+ linePieces = line.split('\t')
+ if len(linePieces) < 11:
+ return False
+ try:
+ check = int(linePieces[1])
+ check = int(linePieces[3])
+ check = int(linePieces[4])
+ check = int(linePieces[7])
+ check = int(linePieces[8])
+ except ValueError:
+ return False
+ count += 1
+ if count == 5:
+ return True
+ if count < 5 and count > 0:
+ return True
+ except:
+ pass
+ return False
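+# The sniffing rules above boil down to a per-line predicate on non-header
+# lines; a compact restatement of just the column test, for reference:
+#
+#     def looks_like_sam_line( line ):
+#         pieces = line.rstrip( '\n' ).split( '\t' )
+#         if len( pieces ) < 11:
+#             return False
+#         try:
+#             for i in ( 1, 3, 4, 7, 8 ):   # FLAG, POS, MAPQ, MPOS, ISIZE
+#                 int( pieces[ i ] )
+#         except ValueError:
+#             return False
+#         return True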
diff -r 3049432643f4 -r bedfce38e652 lib/galaxy/datatypes/test/1.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/datatypes/test/1.sam Thu Aug 20 10:59:13 2009 -0400
@@ -0,0 +1,97 @@
+@QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+1378_11_329 69 * 0 0 * * 0 0 AGACCGGGCGGGGTGGCGTTCGGT %##+'#######%###$#$##$(#
+1378_11_329 133 * 0 0 * * 0 0 GTTCGTGGCCGGTGGGTGTTTGGG ###$$#$#$&#####$'$#$###$
+1378_17_1788 69 * 0 0 * * 0 0 TGCCGTGTCTTGCTAACGCCGATT #'#$$#$###%%##$$$$######
+1378_17_1788 133 * 0 0 * * 0 0 TGGGTGGATGTGTTGTCGTTCATG #$#$###$#$#######$#$####
+1378_25_2035 69 * 0 0 * * 0 0 CTGCGTGTTGGTGTCTACTGGGGT #%#'##$#$##&%#%$$$%#%#'#
+1378_25_2035 133 * 0 0 * * 0 0 GTGCGTCGGGGAGGGTGCTGTCGG ######%#$%#$$###($###&&%
+1378_28_770 89 chr11.nib:1-134452384 72131356 37 17M1I5M = 72131356 0 CACACTGTGACAGACAGCGCAGC 00/02!!0//1200210!!44/1 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_28_770 181 chr11.nib:1-134452384 72131356 0 24M = 72131356 0 TTGGTGCGCGCGGTTGAGGGTTGG $$(#%%#$%#%####$%%##$###
+1378_33_1945 113 chr2.nib:1-242951149 181247988 0 23M chr12.nib:1-132349534 41710908 0 GAGAGAGAGAGAGAGAGAGAGAG PQRVUMNXYRPUXYXWXSOSZ]M XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:163148 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_33_1945 177 chr12.nib:1-132349534 41710908 0 23M chr2.nib:1-242951149 181247988 0 AGAGAGAGAGAGAGAGAGAGAGA SQQWZYURVYWX]]YXTSY]]ZM XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:163148 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_34_789 69 * 0 0 * * 0 0 ATGGTGGCTGACGCGTTTGACTGT #$##%#$##$&$#%##$##$###$
+1378_34_789 133 * 0 0 * * 0 0 GGGCTTGCGTTAGTGAGAGGTTGT ###%$%$%%###$####$###$#&
+1378_35_263 115 chr16.nib:1-88827254 19671878 0 23M = 19671877 -1 AGAGAGAGAGAGAGAGAGAGTCT 77543:<55#"4!&=964518A> XT:A:R CM:i:2 SM:i:0 AM:i:0 X0:i:4 X1:i:137 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_35_263 179 chr16.nib:1-88827254 19671877 0 23M = 19671878 1 GAGAGAGAGAGAGAGAGAGAGTC LE7402DD34FL:27AKE>;432 XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:265 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_43_186 69 * 0 0 * * 0 0 ATACTAGTTGGGACGCGTTGTGCT #$(4%$########$#$###$$$#
+1378_43_186 133 * 0 0 * * 0 0 GCTAGGGTTTGGGTTTGCGGTGGG $%#$########%##%#$###'#'
+1378_51_1671 117 chr2.nib:1-242951149 190342418 0 24M = 190342418 0 CTGGCGTTCTCGGCGTGGATGGGT #####$$##$#%#%%###%$#$##
+1378_51_1671 153 chr2.nib:1-242951149 190342418 37 16M1I6M = 190342418 0 TCTAACTTAGCCTCATAATAGCT /<<!"0///////00/!!0121/ XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_56_324 117 chr2.nib:1-242951149 80324999 0 24M = 80324999 0 TCCAGTCGCGTTGTTAGGTTCGGA #$#$$$#####%##%%###**#+/
+1378_56_324 153 chr2.nib:1-242951149 80324999 37 8M1I14M = 80324999 0 TTTAGCCCGAAATGCCTAGAGCA 4;6//11!"11100110////00 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_56_773 69 * 0 0 * * 0 0 TGTCGTGAGGTCACTTATCCCCAT &%#%##%%#####&#$%##$%##$
+1378_56_773 133 * 0 0 * * 0 0 TCTGGTCGGTTTCGGGGAGTGGAA ##%%#&$###$#$##%$####%%$
+1378_62_2027 69 * 0 0 * * 0 0 CTTCCACGATCTGCTCGCTGTGGT (#&&$##$$#$%#%$$$#$###'#
+1378_62_2027 133 * 0 0 * * 0 0 GTTGGCCTGGCCTGCCGTGCTGCG *##),/%##$)#%##1$#'%.#&#
+1378_62_2029 69 * 0 0 * * 0 0 TCTGGGCTGTCTTCGGGTCGGTGT $%$$####$##$$#)##%%#$###
+1378_62_2029 133 * 0 0 * * 0 0 GGCGGTGTGTGGTGCGGCTGTGCG /$$$=(####%####)$$%$-&%#
+1378_67_1795 81 chr16.nib:1-88827254 26739130 0 23M chrY.nib:1-57772954 57401793 0 TGGCATTCCTGTAGGCAGAGAGG AZWWZS]!"QNXZ]VQ]]]/2]] XT:A:R CM:i:2 SM:i:0 AM:i:0 X0:i:3 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_67_1795 161 chrY.nib:1-57772954 57401793 37 23M chr16.nib:1-88827254 26739130 0 GATCACCCAGGTGATGTAACTCC ]WV]]]]WW]]]]]]]]]]PU]] XT:A:U CM:i:0 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_68_466 69 * 0 0 * * 0 0 GTGATCGTCGGTGCCAGTCCCTGT #(%)+##$#$#%#+$%##$#####
+1378_68_466 133 * 0 0 * * 0 0 GTGTCATCTGAGGTAAAGCATTGT /##$09#$#.=$#$76+$%1'###
+1378_68_1692 117 chr13.nib:1-114142980 36365609 0 24M = 36365609 0 TTGAACCGGGCACGGGTCTTCTGG #$#######%###$##%&'%)###
+1378_68_1692 153 chr13.nib:1-114142980 36365609 37 10M1D13M = 36365609 0 CTGCACATACAGAATATTCATAG 0010/!"0/!!021/132231// XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:10^T13
+1378_80_664 69 * 0 0 * * 0 0 CTGCTTTGATCCCCGGTGGAGCAC 7#%###$$6#######$##$$$##
+1378_80_664 133 * 0 0 * * 0 0 TGTCTGCGTTGTATCTCTGGTGTA %##%,%$$#&$$###$#$%##'%#
+1378_85_1786 69 * 0 0 * * 0 0 ATACTATGTCGATCTGTAAAAAAA )&.)#3%@$&%-,2#&+.-%0&./
+1378_85_1786 133 * 0 0 * * 0 0 CCCTAGGAGCGTATACCGGACGAG ,'&/%/@,&1,&'/)&,6&&1)((
+1378_86_1011 69 * 0 0 * * 0 0 CTACGTTATTGCTCTGTTTGTCCT ######$%##$$$%###%#$####
+1378_86_1011 133 * 0 0 * * 0 0 AGGCGATGGGATATTATTTTACTT :$###)%##$9$###1$$#$2###
+1378_86_1789 89 chr12.nib:1-132349534 39007065 37 23M = 39007065 0 GCTTTCCATAGATGTGTAATTTC J2K]]Z5!GN?@U]]]VX]UYYP XT:A:U CM:i:1 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:0 XG:i:0 MD:Z:23
+1378_86_1789 181 chr12.nib:1-132349534 39007065 0 24M = 39007065 0 ACAACTTAAATAATCATGGACCGG 02,5$$0&6#%?*,$'#%&/15.1
+1378_91_1596 69 * 0 0 * * 0 0 TTAGCGGTTGACTATCTGCTGACA *&+'#9'(%*'#//,&<),/)'*#
+1378_91_1596 133 * 0 0 * * 0 0 GCTTTTTCATTCGGTGCCTTTGGA '>%/3%=()8'#.%?50$&5>%)%
+1378_94_1595 69 chr7.nib:1-158821424 127518258 0 24M = 127518258 0 CGTGCGACAGCCCATGTTTTCAGA -=..5,3826&*+.+#+#%%6;%#
+1378_94_1595 137 chr7.nib:1-158821424 127518258 37 23M = 127518258 0 TGAGATAAACACCTAACATGCTC M]]FN]]\V]]]Q>T]KIG:LVN XT:A:U CM:i:0 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_95_1039 69 * 0 0 * * 0 0 CGGCGTCCATCTTCGCCTTGAGAT $##.#$##$$#%$#$%%$###$)$
+1378_95_1039 133 * 0 0 * * 0 0 GTTCTGTGCCAGGTGAGGTACGGA &##,./#$&)6##+,'#$$0(##$
+1378_95_1767 65 chr11.nib:1-134452384 65333552 25 23M chr3.nib:1-199501827 123725482 0 CAACTGGTGGCATCTGGACAAAC W[[TZYY]]RO<BI7!!:!!>@2 XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_95_1767 129 chr3.nib:1-199501827 123725482 37 6M1I16M chr11.nib:1-134452384 65333552 0 ATTTATCTGTCTCATTCATTATT <AGB8B"!V]]UO/&JB4DE88E XT:A:U CM:i:2 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_96_1037 69 * 0 0 * * 0 0 ATCCCCCAAGATGCCTGTTGATTG $#$'##$$$#%$$#%###+##$#$
+1378_96_1037 133 * 0 0 * * 0 0 CTGCTGGGCCATTTGACTTACTCA '$#+#(##-%5##+*&###-.$$$
+1378_96_1764 81 chr15.nib:1-100338915 89251272 25 23M chr7.nib:1-158821424 19412615 0 AGAAATGGTCGCACCCTCTGGTT E*2ZEHX\SN]O>SYRL):LIOL XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_96_1764 161 chr7.nib:1-158821424 19412615 37 23M chr15.nib:1-100338915 89251272 0 GTATAGCCCACAACGCCTAATAT ZMBS]UW]UYR\]QPZ[SMYL7C XT:A:U CM:i:0 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_98_1574 69 * 0 0 * * 0 0 GTTCTGCCGGTGTCTGTGGCGGGC $$#+&$$####%$$$###$%#%%#
+1378_98_1574 133 * 0 0 * * 0 0 AGGCGAGTGTGGGGGTTGTTTGAG +%%$#)##%##$####%###$%$#
+1378_107_1647 69 * 0 0 * * 0 0 AGGCCTACTACGCGTCATTGATAG &#$$#$(.#%#$$####&$%##($
+1378_107_1647 133 * 0 0 * * 0 0 GGTCTGGTTCTATGTTGGTCGACT ###'$$#$$$(#%###(#$##$%#
+1378_111_829 69 chr9.nib:1-140273252 82506894 0 24M = 82506894 0 TGCGGCACTTGCTTCTTCGTATTT %#%##%#$%#$#%###$$##&#$$
+1378_111_829 137 chr9.nib:1-140273252 82506894 37 4M1I18M = 82506894 0 GATGCGTAATCTAGTAAAATAAG 0/362//00/5516500210451 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_111_1900 69 * 0 0 * * 0 0 TCCCCTCGCTCGGCTCTGTGCTGT $&%*$#(#)##$#'##%(##$#$%
+1378_111_1900 133 * 0 0 * * 0 0 GCACGCCTTTGGGCTAAGCCGTAA )$)'#%$########$'#&%$#(#
+1378_112_1483 69 * 0 0 * * 0 0 TGTCCAGCTATGCGGCTTCCTCCT %#$+#%#&#$#####%####%$##
+1378_112_1483 133 * 0 0 * * 0 0 TGGAGTGGTGTGTTTGCTGAGCCA #$#)#############$#%#%'%
+1378_125_1287 69 * 0 0 * * 0 0 TGTCTCTGGGGGGCCTGGTTAGGT $##13$'%#$###$$###$$$#&#
+1378_125_1287 133 * 0 0 * * 0 0 TGACGTGGGTTGTCCCGTGAGATT ##$%%#$###$##$$#&%##$(%%
+1378_126_468 117 chr11.nib:1-134452384 72541052 0 24M = 72541052 0 TGCCTCTATACAGATTAGTCCTCT )7,7..?97594@8=,=?813@>7
+1378_126_468 153 chr11.nib:1-134452384 72541052 0 23M = 72541052 0 AGGCAAGACTCTGTCTCAAAAAA PK5G]]PDT\]SEXY[]]]]]]] XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:4 X1:i:15713 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_127_664 69 * 0 0 * * 0 0 AGAGGTTGGTGTCTTGTCGCAGCT ##'#$######$$%######$$$#
+1378_127_664 133 * 0 0 * * 0 0 TCGCTTTGCCTATGTTTGTTCGGA #%$%#&##$%#%%###$$###)-'
+1378_129_463 97 chr8.nib:1-146274826 29931771 37 23M chr19.nib:1-63811651 5702213 0 GTAGCTCTGTTTCACATTAGGGG J>AQ[G>C?NM:GD=)*PLORIF XT:A:U CM:i:1 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:0 XG:i:0 MD:Z:23
+1378_129_463 145 chr19.nib:1-63811651 5702213 0 23M chr8.nib:1-146274826 29931771 0 AAAAAAAAAAAAAAAAAAAAAAA JOI:AHGD==@KQB78HF>KA8> XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:583698 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_129_875 69 * 0 0 * * 0 0 TTTCTATGGCTTACGCTGTCTGCC #$($##%####%$#$#####$###
+1378_129_875 133 * 0 0 * * 0 0 GACCTTTACGTATTGGGGGTTGGC ###)###+###$##$#&%##$,#$
+1378_140_1251 69 * 0 0 * * 0 0 ATCCTAGCGCGGTGTCTTGGGGAC #$%1#$$$##$##$#$#$##$%$$
+1378_140_1251 133 * 0 0 * * 0 0 TTTCCTTCGTGTGCGTGCGGAGTG #%#%$##$$$######.$$$%#%(
+1378_141_809 69 * 0 0 * * 0 0 TGTCCTCCAGTGTCTGTTGGGTGT %&,-##$$#(%###$#$$'###'#
+1378_141_809 133 * 0 0 * * 0 0 TCTCGTGGTTTCTTTTTTATGTGT ##%)##$$#####%$#$#%%#'##
+1378_144_983 69 * 0 0 * * 0 0 AGCGCCCGGTTGGTGCGGCTCGTC -$(&%*$#*#))#$$$#%%$#$##
+1378_144_983 133 * 0 0 * * 0 0 GTTCGTTCGTGGTGTACGAGGGTG #(#%#####($#%##$$#%##%#)
+1378_153_270 69 * 0 0 * * 0 0 AGTCCTTGTCCCCTGGGTTTTCCC +''$#&%$%#$##&$$($#&#$$#
+1378_153_270 133 * 0 0 * * 0 0 GGCCGTGTGCGGGTGTAGATTGGA %$##($######&##$&$$$$%##
+1378_155_1689 65 chrX.nib:1-154913754 106941539 37 23M = 106940385 -1154 ATCTCCTCTTCCTTCCATTCCAC \]]]Y]]]]]UV]]]ZYZZ]]RV XT:A:U CM:i:0 SM:i:37 AM:i:37 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_155_1689 129 chrX.nib:1-154913754 106940385 37 23M = 106941539 1154 GACTATGAGGTTTTCATTCAACA ]]]]\\]]]YW]]]WRZ]]WIOK XT:A:U CM:i:0 SM:i:37 AM:i:37 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_157_1580 69 * 0 0 * * 0 0 TGGGCCTCGGTGCCCTTGGTCTGT #%)$##'#$$$&#####%#$#$##
+1378_157_1580 133 * 0 0 * * 0 0 GGGATTGAAGGGATGTATGCTAGG #%$&%#$$'%$%#$##*#%$$$$#
+1378_161_317 69 * 0 0 * * 0 0 TTGGCCGGCAACCCCGGTACCTAA 7<,<'@)@>.)2@/')'&(?/-<(
+1378_161_317 133 * 0 0 * * 0 0 AATCCATACCCACAAAAGCAGGCC .&%','(@''?7//+&)+2.+)0)
+1378_177_735 113 chr2.nib:1-242951149 222173182 25 23M = 222173882 700 TTGTTCAGCGCCGATTGTCAATC KPNICFMS]]]Z]]]]Y]]]]]] XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:1G21
+1378_177_735 177 chr2.nib:1-242951149 222173882 37 23M = 222173182 -700 AGAATTCCTAACAAAATGTGAAG ES6-]]]]]]]]]]]]]]]]]]] XT:A:U CM:i:1 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:1 XO:i:0 XG:i:0 MD:Z:23
+1378_181_1684 69 * 0 0 * * 0 0 CGACTCCCGCATTCACGGTCAAGT &*#,##$#&$*$$#$#$$$#%$##
+1378_181_1684 133 * 0 0 * * 0 0 TTTCTGTTGTGGTTTTGTTGGGGT $##'$%'##%##$%$#$$####$*
+1378_187_1407 69 * 0 0 * * 0 0 TGGCGTCCACTCGTGGGTCTATCG $#$'%#$%$%&$%#####$#$#%#
+1378_187_1407 133 * 0 0 * * 0 0 TTGGGTGAAATCTTGTCGAGTGGA ####&##$$###$#####%##%%)
+1378_203_721 97 chr1.nib:1-247249719 245680524 25 23M chr2.nib:1-242951149 213173999 0 GTAAAATTTGTGGAGATTTAAGT ]VEFFEZ]XPW]TOVINQ,;T!! XT:A:U CM:i:2 SM:i:25 AM:i:25 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_203_721 145 chr2.nib:1-242951149 213173999 37 4M1I18M chr1.nib:1-247249719 245680524 0 ACCTAACAAAATTGTTCAATATG F>8AWT<AV]Q9B"+]O@IF=K] XT:A:U CM:i:2 SM:i:37 AM:i:25 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_206_2039 113 chr4.nib:1-191273063 103793427 0 23M chr18.nib:1-76117153 57165542 0 ACACACACACACACACACACACA NKWZVWZ]]XV[]]]]]]]]]]] XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:1292040 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_206_2039 177 chr18.nib:1-76117153 57165542 0 23M chr4.nib:1-191273063 103793427 0 CACACACACACACACACACACAC NAJ[SPT[]]]W[]]]]]]]]]] XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:1292040 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
diff -r 3049432643f4 -r bedfce38e652 lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py Wed Aug 19 17:55:28 2009 -0400
+++ b/lib/galaxy/tools/actions/metadata.py Thu Aug 20 10:59:13 2009 -0400
@@ -1,4 +1,4 @@
-from . import ToolAction
+from __init__ import ToolAction
from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
import logging
diff -r 3049432643f4 -r bedfce38e652 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Wed Aug 19 17:55:28 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Thu Aug 20 10:59:13 2009 -0400
@@ -1,5 +1,5 @@
import os, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile
-from . import ToolAction
+from __init__ import ToolAction
from galaxy import datatypes, jobs
from galaxy.datatypes import sniff
from galaxy import model, util
diff -r 3049432643f4 -r bedfce38e652 static/scripts/galaxy.workflow_editor.canvas.js
--- a/static/scripts/galaxy.workflow_editor.canvas.js Wed Aug 19 17:55:28 2009 -0400
+++ b/static/scripts/galaxy.workflow_editor.canvas.js Thu Aug 20 10:59:13 2009 -0400
@@ -439,25 +439,11 @@
});
});
},
- enable_auto_save : function() {
- // Implements auto-saving based on whether the inputs change. We consider
- // "changed" to be when a field is accessed and not necessarily modified
- // because of an issue where "onchange" is not triggered when activating
- // another node, or saving the workflow.
- outer_this = this;
- $(".toolFormBody").find("input,textarea,select").each( function() {
- $(this).focus( function() {
- outer_this.active_form_has_changes = true;
- });
- });
- },
check_changes_in_active_form : function() {
// If active form has changed, save it
if (this.active_form_has_changes) {
this.has_changes = true;
- $(".toolFormBody").find("form").each( function() {
- $(this).submit();
- });
+ $("#right-content").find("form").submit();
this.active_form_has_changes = false;
}
},
diff -r 3049432643f4 -r bedfce38e652 templates/workflow/editor.mako
--- a/templates/workflow/editor.mako Wed Aug 19 17:55:28 2009 -0400
+++ b/templates/workflow/editor.mako Thu Aug 20 10:59:13 2009 -0400
@@ -303,6 +303,15 @@
$(this).remove();
make_popupmenu( b, options );
});
+ // Implements auto-saving based on whether the inputs change. We consider
+ // "changed" to be when a field is accessed and not necessarily modified
+ // because of an issue where "onchange" is not triggered when activating
+ // another node, or saving the workflow.
+ $(this).find("input,textarea,select").each( function() {
+ $(this).focus( function() {
+ workflow.active_form_has_changes = true;
+ });
+ });
});
}
diff -r 3049432643f4 -r bedfce38e652 test-data/1.sam
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test-data/1.sam Thu Aug 20 10:59:13 2009 -0400
@@ -0,0 +1,29 @@
+@QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+1378_11_329 69 * 0 0 * * 0 0 AGACCGGGCGGGGTGGCGTTCGGT %##+'#######%###$#$##$(#
+1378_11_329 133 * 0 0 * * 0 0 GTTCGTGGCCGGTGGGTGTTTGGG ###$$#$#$&#####$'$#$###$
+1378_17_1788 69 * 0 0 * * 0 0 TGCCGTGTCTTGCTAACGCCGATT #'#$$#$###%%##$$$$######
+1378_17_1788 133 * 0 0 * * 0 0 TGGGTGGATGTGTTGTCGTTCATG #$#$###$#$#######$#$####
+1378_25_2035 69 * 0 0 * * 0 0 CTGCGTGTTGGTGTCTACTGGGGT #%#'##$#$##&%#%$$$%#%#'#
+1378_25_2035 133 * 0 0 * * 0 0 GTGCGTCGGGGAGGGTGCTGTCGG ######%#$%#$$###($###&&%
+1378_28_770 89 chr11.nib:1-134452384 72131356 37 17M1I5M = 72131356 0 CACACTGTGACAGACAGCGCAGC 00/02!!0//1200210!!44/1 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_28_770 181 chr11.nib:1-134452384 72131356 0 24M = 72131356 0 TTGGTGCGCGCGGTTGAGGGTTGG $$(#%%#$%#%####$%%##$###
+1378_33_1945 113 chr2.nib:1-242951149 181247988 0 23M chr12.nib:1-132349534 41710908 0 GAGAGAGAGAGAGAGAGAGAGAG PQRVUMNXYRPUXYXWXSOSZ]M XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:163148 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_33_1945 177 chr12.nib:1-132349534 41710908 0 23M chr2.nib:1-242951149 181247988 0 AGAGAGAGAGAGAGAGAGAGAGA SQQWZYURVYWX]]YXTSY]]ZM XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:163148 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_34_789 69 * 0 0 * * 0 0 ATGGTGGCTGACGCGTTTGACTGT #$##%#$##$&$#%##$##$###$
+1378_34_789 133 * 0 0 * * 0 0 GGGCTTGCGTTAGTGAGAGGTTGT ###%$%$%%###$####$###$#&
+1378_35_263 115 chr16.nib:1-88827254 19671878 0 23M = 19671877 -1 AGAGAGAGAGAGAGAGAGAGTCT 77543:<55#"4!&=964518A> XT:A:R CM:i:2 SM:i:0 AM:i:0 X0:i:4 X1:i:137 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_35_263 179 chr16.nib:1-88827254 19671877 0 23M = 19671878 1 GAGAGAGAGAGAGAGAGAGAGTC LE7402DD34FL:27AKE>;432 XT:A:R CM:i:0 SM:i:0 AM:i:0 X0:i:265 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
+1378_43_186 69 * 0 0 * * 0 0 ATACTAGTTGGGACGCGTTGTGCT #$(4%$########$#$###$$$#
+1378_43_186 133 * 0 0 * * 0 0 GCTAGGGTTTGGGTTTGCGGTGGG $%#$########%##%#$###'#'
+1378_51_1671 117 chr2.nib:1-242951149 190342418 0 24M = 190342418 0 CTGGCGTTCTCGGCGTGGATGGGT #####$$##$#%#%%###%$#$##
+1378_51_1671 153 chr2.nib:1-242951149 190342418 37 16M1I6M = 190342418 0 TCTAACTTAGCCTCATAATAGCT /<<!"0///////00/!!0121/ XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_56_324 117 chr2.nib:1-242951149 80324999 0 24M = 80324999 0 TCCAGTCGCGTTGTTAGGTTCGGA #$#$$$#####%##%%###**#+/
+1378_56_324 153 chr2.nib:1-242951149 80324999 37 8M1I14M = 80324999 0 TTTAGCCCGAAATGCCTAGAGCA 4;6//11!"11100110////00 XT:A:U CM:i:2 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:22
+1378_56_773 69 * 0 0 * * 0 0 TGTCGTGAGGTCACTTATCCCCAT &%#%##%%#####&#$%##$%##$
+1378_56_773 133 * 0 0 * * 0 0 TCTGGTCGGTTTCGGGGAGTGGAA ##%%#&$###$#$##%$####%%$
+1378_62_2027 69 * 0 0 * * 0 0 CTTCCACGATCTGCTCGCTGTGGT (#&&$##$$#$%#%$$$#$###'#
+1378_62_2027 133 * 0 0 * * 0 0 GTTGGCCTGGCCTGCCGTGCTGCG *##),/%##$)#%##1$#'%.#&#
+1378_62_2029 69 * 0 0 * * 0 0 TCTGGGCTGTCTTCGGGTCGGTGT $%$$####$##$$#)##%%#$###
+1378_62_2029 133 * 0 0 * * 0 0 GGCGGTGTGTGGTGCGGCTGTGCG /$$$=(####%####)$$%$-&%#
+1378_67_1795 81 chr16.nib:1-88827254 26739130 0 23M chrY.nib:1-57772954 57401793 0 TGGCATTCCTGTAGGCAGAGAGG AZWWZS]!"QNXZ]VQ]]]/2]] XT:A:R CM:i:2 SM:i:0 AM:i:0 X0:i:3 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:23
+1378_67_1795 161 chrY.nib:1-57772954 57401793 37 23M chr16.nib:1-88827254 26739130 0 GATCACCCAGGTGATGTAACTCC ]WV]]]]WW]]]]]]]]]]PU]] XT:A:U CM:i:0 SM:i:37 AM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:23
\ No newline at end of file
diff -r 3049432643f4 -r bedfce38e652 test/functional/test_sniffing_and_metadata_settings.py
--- a/test/functional/test_sniffing_and_metadata_settings.py Wed Aug 19 17:55:28 2009 -0400
+++ b/test/functional/test_sniffing_and_metadata_settings.py Thu Aug 20 10:59:13 2009 -0400
@@ -226,6 +226,16 @@
assert latest_hda is not None, "Problem retrieving fastqsanger hda from the database"
if not latest_hda.name == '1.fastqsanger' and not latest_hda.extension == 'fastqsanger':
raise AssertionError, "fastqsanger data type was not correctly sniffed."
+ def test_090_sam_datatype( self ):
+ """Testing correctly sniffing sam format upon upload"""
+ self.upload_file( '1.sam' )
+ self.verify_dataset_correctness( '1.sam' )
+ self.check_history_for_string( '1.sam format: <span class="sam">sam</span>, database: \? Info: uploaded sam file' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ assert latest_hda is not None, "Problem retrieving sam hda from the database"
+ if not latest_hda.name == '1.sam' and not latest_hda.extension == 'sam':
+ raise AssertionError, "sam data type was not correctly sniffed."
def test_9999_clean_up( self ):
self.delete_history( id=self.security.encode_id( history1.id ) )
self.logout()
details: http://www.bx.psu.edu/hg/galaxy/rev/3049432643f4
changeset: 2598:3049432643f4
user: Kanwei Li <kanwei(a)gmail.com>
date: Wed Aug 19 17:55:28 2009 -0400
description:
Merge trunk
2 file(s) affected in this change:
templates/history/shared_grid.mako
templates/history/stored_grid.mako
diffs (1946 lines):
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/datatypes/coverage.py
--- a/lib/galaxy/datatypes/coverage.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/datatypes/coverage.py Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
import pkg_resources
pkg_resources.require( "bx-python" )
-import logging, os, sys, time, sets, tempfile, shutil
+import logging, os, sys, time, tempfile, shutil
import data
from galaxy import util
from galaxy.datatypes.sniff import *
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,4 +1,4 @@
-import logging, os, sys, time, sets, tempfile
+import logging, os, sys, time, tempfile
from galaxy import util
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Wed Aug 19 17:55:28 2009 -0400
@@ -12,7 +12,7 @@
august 20 2007
"""
-import logging, os, sys, time, sets, tempfile, shutil
+import logging, os, sys, time, tempfile, shutil
import data
from galaxy import util
from cgi import escape
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/datatypes/interval.py
--- a/lib/galaxy/datatypes/interval.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/datatypes/interval.py Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
import pkg_resources
pkg_resources.require( "bx-python" )
-import logging, os, sys, time, sets, tempfile, shutil
+import logging, os, sys, time, tempfile, shutil
import data
from galaxy import util
from galaxy.datatypes.sniff import *
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/model/__init__.py Wed Aug 19 17:55:28 2009 -0400
@@ -5,8 +5,7 @@
the relationship cardinalities are obvious (e.g. prefer Dataset to Data)
"""
-import os.path, os, errno
-import sha
+import os.path, os, errno, sys
import galaxy.datatypes
from galaxy.util.bunch import Bunch
from galaxy import util
@@ -14,8 +13,7 @@
import galaxy.datatypes.registry
from galaxy.datatypes.metadata import MetadataCollection
from galaxy.security import RBACAgent, get_permitted_actions
-
-
+from galaxy.util.hash_util import *
import logging
log = logging.getLogger( __name__ )
@@ -40,10 +38,10 @@
def set_password_cleartext( self, cleartext ):
"""Set 'self.password' to the digest of 'cleartext'."""
- self.password = sha.new( cleartext ).hexdigest()
+ self.password = new_secure_hash( text_type=cleartext )
def check_password( self, cleartext ):
"""Check if 'cleartext' matches 'self.password' when hashed."""
- return self.password == sha.new( cleartext ).hexdigest()
+ return self.password == new_secure_hash( text_type=cleartext )
def all_roles( self ):
roles = [ ura.role for ura in self.roles ]
for group in [ uga.group for uga in self.groups ]:
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,15 +1,13 @@
"""
Classes encapsulating galaxy tools and tool configuration.
"""
-
import pkg_resources;
pkg_resources.require( "simplejson" )
import logging, os, string, sys, tempfile, glob, shutil
import simplejson
-import sha, hmac, binascii
-
+import binascii
from UserDict import DictMixin
from galaxy.util.odict import odict
from galaxy.util.bunch import Bunch
@@ -26,6 +24,7 @@
from galaxy.util.none_like import NoneDataset
from galaxy.datatypes import sniff
from cgi import FieldStorage
+from galaxy.util.hash_util import *
log = logging.getLogger( __name__ )
@@ -211,7 +210,7 @@
value["__page__"] = self.page
value = simplejson.dumps( value )
# Make it secure
- a = hmac.new( app.config.tool_secret, value, sha ).hexdigest()
+ a = hmac_new( app.config.tool_secret, value )
b = binascii.hexlify( value )
return "%s:%s" % ( a, b )
def decode( self, value, tool, app ):
@@ -221,7 +220,7 @@
# Extract and verify hash
a, b = value.split( ":" )
value = binascii.unhexlify( b )
- test = hmac.new( app.config.tool_secret, value, sha ).hexdigest()
+ test = hmac_new( app.config.tool_secret, value )
assert a == test
# Restore from string
values = json_fix( simplejson.loads( value ) )
@@ -453,7 +452,6 @@
self.tests = None
# Determine if this tool can be used in workflows
self.is_workflow_compatible = self.check_workflow_compatible()
-
def parse_inputs( self, root ):
"""
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/util/hash_util.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/lib/galaxy/util/hash_util.py Wed Aug 19 17:55:28 2009 -0400
@@ -0,0 +1,28 @@
+import sys, logging
+using_24 = sys.version_info[:2] < ( 2, 5 )
+if using_24:
+ import sha
+else:
+ import hashlib
+import hmac
+
+log = logging.getLogger( __name__ )
+
+"""
+Utility functions for bi-directional Python version compatibility. Python 2.5
+introduced hashlib which replaced sha in Python 2.4 and previous versions.
+"""
+def new_secure_hash( text_type=None ):
+ if using_24:
+ if text_type:
+ return sha.new( text_type ).hexdigest()
+ return sha.new()
+ else:
+ if text_type:
+ return hashlib.sha1( text_type ).hexdigest()
+ return hashlib.sha1()
+def hmac_new( key, value ):
+ if using_24:
+ return hmac.new( key, value, sha ).hexdigest()
+ else:
+ return hmac.new( key, value, hashlib.sha1 ).hexdigest()
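The new module centralizes every SHA-1 use in one place. A quick usage sketch covering the three call patterns that appear in this changeset (assumes lib is on sys.path; argument values illustrative):

    from galaxy.util.hash_util import new_secure_hash, hmac_new

    # One-shot form returns a hex digest directly (used for passwords).
    assert len( new_secure_hash( text_type='cleartext' ) ) == 40

    # No-argument form returns a hash object for incremental updates
    # (used by the genetrack controller below).
    h = new_secure_hash()
    h.update( 'some' )
    h.update( 'parts' )
    digest = h.hexdigest()

    # Keyed form computes an HMAC-SHA1 hex digest (tool state, async keys).
    key = hmac_new( 'tool_secret', '5:9' )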
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,9 +1,14 @@
-import shutil, StringIO, operator, urllib, gzip, tempfile, sets, string, sys
+import shutil, StringIO, operator, urllib, gzip, tempfile, string, sys
from datetime import datetime, timedelta
from galaxy import util, datatypes
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy.web.controllers.forms import get_all_forms, get_form_widgets
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
import logging
log = logging.getLogger( __name__ )
@@ -1236,16 +1241,16 @@
if v == trans.app.security_agent.permitted_actions.DATASET_ACCESS:
if len( in_roles ) > 1:
# Get the set of all users that are being associated with the dataset
- in_roles_set = sets.Set()
+ in_roles_set = set()
for role in in_roles:
in_roles_set.add( role )
- users_set = sets.Set()
+ users_set = set()
for role in in_roles:
for ura in role.users:
users_set.add( ura.user )
# Make sure that at least 1 user has every role being associated with the dataset
for user in users_set:
- user_roles_set = sets.Set()
+ user_roles_set = set()
for ura in user.roles:
user_roles_set.add( ura.role )
if in_roles_set.issubset( user_roles_set ):
@@ -1421,16 +1426,16 @@
if v == trans.app.security_agent.permitted_actions.DATASET_ACCESS:
if len( in_roles ) > 1:
# Get the set of all users that are being associated with the dataset
- in_roles_set = sets.Set()
+ in_roles_set = set()
for role in in_roles:
in_roles_set.add( role )
- users_set = sets.Set()
+ users_set = set()
for role in in_roles:
for ura in role.users:
users_set.add( ura.user )
# Make sure that at least 1 user has every role being associated with the dataset
for user in users_set:
- user_roles_set = sets.Set()
+ user_roles_set = set()
for ura in user.roles:
user_roles_set.add( ura.role )
if in_roles_set.issubset( user_roles_set ):
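The deprecated sets module gives way to the built-in set type, with a small shim for pre-2.4 Pythons. The role check above in miniature (role names illustrative):

    # Older py compatibility: same shim as in the controller above.
    try:
        set()
    except NameError:
        from sets import Set as set

    # At least one user must hold every role being associated with the dataset.
    in_roles_set = set( [ 'role_a', 'role_b' ] )
    user_roles_set = set( [ 'role_a', 'role_b', 'role_c' ] )
    assert in_roles_set.issubset( user_roles_set )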
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/async.py
--- a/lib/galaxy/web/controllers/async.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/async.py Wed Aug 19 17:55:28 2009 -0400
@@ -6,8 +6,8 @@
from galaxy import jobs, util, datatypes, web
-import logging, urllib
-import sha, hmac
+import logging, urllib, sys
+from galaxy.util.hash_util import *
log = logging.getLogger( __name__ )
@@ -58,7 +58,7 @@
return "Data %s does not exist or has already been deleted" % data_id
if STATUS == 'OK':
- key = hmac.new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id), sha ).hexdigest()
+ key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
if key != data_secret:
return "You do not have permission to alter data %s." % data_id
# push the job into the queue
@@ -116,7 +116,7 @@
trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )
try:
- key = hmac.new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id), sha ).hexdigest()
+ key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
galaxy_url = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
params.update( { 'GALAXY_URL' :galaxy_url } )
params.update( { 'data_id' :data.id } )
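The same signing helper protects the async callback: the key issued when the dataset is created must match the one recomputed when the callback arrives. A sketch with illustrative values:

    from galaxy.util.hash_util import hmac_new

    tool_secret = 'app-config-tool-secret'   # stands in for app.config.tool_secret
    data_id, history_id = 5, 9

    issued_key = hmac_new( tool_secret, "%d:%d" % ( data_id, history_id ) )
    # On the callback, a mismatch means the caller may not alter the dataset.
    assert issued_key == hmac_new( tool_secret, "%d:%d" % ( data_id, history_id ) )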
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/dataset.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,4 +1,4 @@
-import logging, os, sets, string, shutil, re, socket, mimetypes, smtplib, urllib
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib
from galaxy.web.base.controller import *
from galaxy import util, datatypes, jobs, web, model
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/genetrack.py
--- a/lib/galaxy/web/controllers/genetrack.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/genetrack.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,11 +1,10 @@
-import time, glob, os
+import time, glob, os, sys
from itertools import cycle
-import sha
-
from mako import exceptions
from mako.template import Template
from mako.lookup import TemplateLookup
from galaxy.web.base.controller import *
+from galaxy.util.hash_util import *
try:
import pkg_resources
@@ -265,7 +264,7 @@
tmpl_name, track_maker = conf.PLOT_MAPPER[param.plot]
# check against a hash, display an image that already exists if it was previously created.
- hash = sha.new()
+ hash = new_secure_hash()
hash.update(str(dataset_id))
for key in sorted(kwds.keys()):
hash.update(str(kwds[key]))
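The genetrack change uses the no-argument form of new_secure_hash to build the plot cache key incrementally. In miniature (parameter values illustrative):

    from galaxy.util.hash_util import new_secure_hash

    dataset_id = 42
    kwds = { 'plot': 'fit', 'zoom': '10' }

    # Hash the dataset id plus parameter values in sorted key order,
    # mirroring the cache-key logic in the controller above.
    h = new_secure_hash()
    h.update( str( dataset_id ) )
    for key in sorted( kwds.keys() ):
        h.update( str( kwds[ key ] ) )
    cache_name = h.hexdigest() + '.png'   # reuse the image if it already exists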
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Wed Aug 19 17:55:28 2009 -0400
@@ -161,7 +161,7 @@
status, message = self._list_undelete( trans, histories )
trans.sa_session.flush()
# Render the list view
- return self.stored_list_grid( trans, status=status, message=message, template='/history/stored_grid.mako', **kwargs )
+ return self.stored_list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
def _list_delete( self, trans, histories ):
"""Delete histories"""
n_deleted = 0
@@ -239,14 +239,14 @@
if operation == "clone":
if not id:
message = "Select a history to clone"
- return self.shared_list_grid( trans, status='error', message=message, template='/history/shared_grid.mako', **kwargs )
+ return self.shared_list_grid( trans, status='error', message=message, template='/history/grid.mako', **kwargs )
# When cloning shared histories, only copy active datasets
new_kwargs = { 'clone_choice' : 'active' }
return self.clone( trans, id, **new_kwargs )
elif operation == 'unshare':
if not id:
message = "Select a history to unshare"
- return self.shared_list_grid( trans, status='error', message=message, template='/history/shared_grid.mako', **kwargs )
+ return self.shared_list_grid( trans, status='error', message=message, template='/history/grid.mako', **kwargs )
ids = util.listify( id )
histories = []
for history_id in ids:
@@ -260,7 +260,7 @@
message = "Unshared %d shared histories" % len( ids )
status = 'done'
# Render the list view
- return self.shared_list_grid( trans, status=status, message=message, template='/history/shared_grid.mako', **kwargs )
+ return self.shared_list_grid( trans, status=status, message=message, template='/history/grid.mako', **kwargs )
@web.expose
def delete_current( self, trans ):
"""Delete just the active history -- this does not require a logged in user."""
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/requests.py
--- a/lib/galaxy/web/controllers/requests.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/requests.py Wed Aug 19 17:55:28 2009 -0400
@@ -152,16 +152,12 @@
helptext=''))
# library associated
if request.library:
- request_details.append(dict(label='Library',
- value=request.library.name,
- helptext='Associated library where the resultant \
- dataset will be stored'))
+ value = request.library.name
else:
- request_details.append(dict(label='Library',
- value=None,
- helptext='Associated library where the resultant \
- dataset will be stored'))
-
+ value = None
+ request_details.append( dict( label='Data library',
+ value=value,
+ helptext='Data library where the resultant dataset will be stored' ) )
# form fields
for index, field in enumerate(request.type.request_form.fields):
if field['required']:
@@ -492,18 +488,17 @@
else:
lib_list.add_option(lib.name, lib.id)
if lib_id == 'new':
- lib_list.add_option('Create a new library', 'new', selected=True)
+ lib_list.add_option('Create a new data library', 'new', selected=True)
else:
- lib_list.add_option('Create a new library', 'new')
- widget = dict(label='Library',
+ lib_list.add_option('Create a new data library', 'new')
+ widget = dict(label='Data library',
widget=lib_list,
- helptext='Associated library where the resultant \
- dataset will be stored.')
+ helptext='Data library where the resultant dataset will be stored.')
if lib_id == 'new':
- new_lib = dict(label='Create a new Library',
+ new_lib = dict(label='Create a new data library',
widget=TextField('new_library_name', 40,
util.restore_text( params.get( 'new_library_name', '' ) )),
- helptext='Enter a library name here to request a new library')
+ helptext='Enter a name here to request a new data library')
return [widget, new_lib]
else:
return [widget]
@@ -558,7 +553,7 @@
'''
empty_fields = []
if not request.library:
- empty_fields.append('Library')
+ empty_fields.append('Data library')
# check rest of the fields of the form
for index, field in enumerate(request.type.request_form.fields):
if field['required'] == 'required' and request.values.content[index] in ['', None]:
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Wed Aug 19 17:55:28 2009 -0400
@@ -144,10 +144,9 @@
value=str(request.user.email),
helptext=''))
# library associated
- request_details.append(dict(label='Library',
+ request_details.append(dict(label='Data library',
value=trans.app.model.Library.get(request.library_id).name,
- helptext='Associated library where the resultant \
- dataset will be stored'))
+ helptext='Data library where the resultant dataset will be stored'))
# form fields
for index, field in enumerate(request.type.request_form.fields):
if field['required']:
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,7 +1,7 @@
"""
Contains the main interface in the Universe class
"""
-import logging, os, sets, string, shutil, urllib, re, socket
+import logging, os, string, shutil, urllib, re, socket
from cgi import escape, FieldStorage
from galaxy import util, datatypes, jobs, web
from galaxy.web.base.controller import *
@@ -60,7 +60,6 @@
trans.response.set_content_type('text/xml')
return trans.fill_template_mako( "root/history_as_xml.mako", history=history, show_deleted=util.string_as_bool( show_deleted ) )
else:
- template = "root/history.mako"
show_deleted = util.string_as_bool( show_deleted )
query = trans.sa_session.query( model.HistoryDatasetAssociation ) \
.filter( model.HistoryDatasetAssociation.history == history ) \
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Wed Aug 19 17:55:28 2009 -0400
@@ -117,7 +117,6 @@
tool_state_string = util.object_to_string(state.encode(tool, trans.app))
# Setup context for template
history = trans.get_history()
- template = "tool_form.mako"
vars = dict( tool_state=state, errors = {} )
        # Is the "add frame" stuff necessary here?
add_frame = AddFrameData()
@@ -125,17 +124,13 @@
if from_noframe is not None:
add_frame.wiki_url = trans.app.config.wiki_url
add_frame.from_noframe = True
- return trans.fill_template( template, history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
-
-
+ return trans.fill_template( "tool_form.mako", history=history, toolbox=toolbox, tool=tool, util=util, add_frame=add_frame, **vars )
@web.expose
def redirect( self, trans, redirect_url=None, **kwd ):
if not redirect_url:
return trans.show_error_message( "Required URL for redirection missing" )
trans.log_event( "Redirecting to: %s" % redirect_url )
return trans.fill_template( 'root/redirect.mako', redirect_url=redirect_url )
-
-
@web.json
def upload_async_create( self, trans, tool_id=None, **kwd ):
"""
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Wed Aug 19 17:55:28 2009 -0400
@@ -93,8 +93,8 @@
"""
Exception to make throwing errors from deep in controllers easier
"""
- def __init__( self, message, type="info" ):
- self.message = message
+ def __init__( self, err_msg, type="info" ):
+ self.err_msg = err_msg
self.type = type
def error( message ):
@@ -117,7 +117,7 @@
self.security = galaxy_app.security
def handle_controller_exception( self, e, trans, **kwargs ):
if isinstance( e, MessageException ):
- return trans.show_message( e.message, e.type )
+ return trans.show_message( e.err_msg, e.type )
def make_body_iterable( self, trans, body ):
if isinstance( body, FormBuilder ):
body = trans.show_form( body )
diff -r ea6708c96cd1 -r 3049432643f4 lib/galaxy/webapps/reports/controllers/root.py
--- a/lib/galaxy/webapps/reports/controllers/root.py Wed Aug 19 17:27:00 2009 -0400
+++ b/lib/galaxy/webapps/reports/controllers/root.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,8 +1,8 @@
-import sys, os, operator, sets, string, shutil, re, socket, urllib
+import sys, os, operator, string, shutil, re, socket, urllib, time
from galaxy import web
from cgi import escape, FieldStorage
from galaxy.webapps.reports.base.controller import *
-import logging, sets, time
+import logging
log = logging.getLogger( __name__ )
class Report( BaseController ):
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/center.mako
--- a/templates/admin/center.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/center.mako Wed Aug 19 17:55:28 2009 -0400
@@ -17,7 +17,7 @@
<li>
<strong>Manage groups</strong> - provides a view of all groups along with the members of the group and the roles associated with
each group (both private and non-private roles). Non-private roles include a link to a page that allows you to manage the users
- and groups that are associated with the role. The page also includes a view of the library datasets that are associated with the
+ and groups that are associated with the role. The page also includes a view of the data library datasets that are associated with the
role and the permissions applied to each dataset.
</li>
<p/>
@@ -32,9 +32,9 @@
<p/>
<ul>
<li>
- <strong>Manage libraries</strong> - Dataset libraries enable a Galaxy administrator to upload datasets into a library. Currently,
- only administrators can create dataset libraries, but permission to perform the following functions on the library can be granted to
- users (a library item is one of: a library, a library folder, a library dataset).
+ <strong>Manage data libraries</strong> - Data libraries enable a Galaxy administrator to upload datasets into a data library. Currently,
+ only administrators can create data libraries, but permission to perform the following functions on the data library can be granted to
+ users (a library item is one of: a data library, a library folder, a library dataset).
<p/>
<ul>
<li><strong>add library item</strong> - Role members can add library items to this library or folder</li>
@@ -42,12 +42,12 @@
<li><strong>manage library permissions</strong> - Role members can manage permissions applied to this library item</li>
</ul>
<p/>
- The default behavior is for no permissions to be applied to a library item, but applied permissions are inherited downward, so it is
- important to set desired permissions on a new library when it is created. When this is done, new folders and datasets added to the
- library will automatically inherit those permissions. In the same way, permissions can be applied to a folder, which will be
+ The default behavior is for no permissions to be applied to a data library item, but applied permissions are inherited downward, so it is
+ important to set desired permissions on a new data library when it is created. When this is done, new folders and datasets added to the
+ data library will automatically inherit those permissions. In the same way, permissions can be applied to a folder, which will be
automatically inherited by all contained datasets and sub-folders.
<p/>
- The "Libraries" menu item allows users to access the datasets in a library as long as they are not restricted from accessing them.
+ The "Data Libraries" menu item allows users to access the datasets in a data library as long as they are not restricted from accessing them.
Importing a library dataset into a history will not make a copy of the dataset, but will be a "pointer" to the dataset on disk. This
approach allows for multiple users to use a single (possibly very large) dataset file.
</li>
@@ -72,7 +72,7 @@
</ul>
</li>
</ul>
-<p><strong>Data Security and Dataset Libraries</strong></p>
+<p><strong>Data Security and Data Libraries</strong></p>
<p/>
<strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
@@ -121,8 +121,8 @@
<strong>access</strong> - users associated with the role can import this dataset into their history for analysis.
<p>
If no roles with the "access" permission are associated with a dataset, the dataset is "public" and may be accessed by
- anyone. Public library datasets will be accessible to all users (as well as anyone not logged in during a Galaxy session)
- from the list of libraries displayed when the "Libraries" menu item is selected.
+ anyone. Public data library datasets will be accessible to all users (as well as anyone not logged in during a Galaxy session)
+ from the list of data libraries displayed when the "Data Libraries" menu item is selected.
</p>
<p>
Associating a dataset with a role that includes the "access" permission restricts the set of users that can access it.
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/dataset_security/role.mako
--- a/templates/admin/dataset_security/role.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/dataset_security/role.mako Wed Aug 19 17:55:28 2009 -0400
@@ -84,7 +84,7 @@
<br clear="left"/>
<br/>
%if len( library_dataset_actions ) > 0:
- <h3>Library datasets associated with role '${role.name}'</h3>
+ <h3>Data library datasets associated with role '${role.name}'</h3>
<table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%">
<tr>
<td>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/index.mako
--- a/templates/admin/index.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/index.mako Wed Aug 19 17:55:28 2009 -0400
@@ -89,7 +89,7 @@
</div>
<div class="toolSectionBody">
<div class="toolSectionBg">
- <div class="toolTitle"><a href="${h.url_for( controller='admin', action='browse_libraries' )}" target="galaxy_main">Manage libraries</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='admin', action='browse_libraries' )}" target="galaxy_main">Manage data libraries</a></div>
</div>
</div>
<div class="toolSectionPad"></div>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/browse_libraries.mako
--- a/templates/admin/library/browse_libraries.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/browse_libraries.mako Wed Aug 19 17:55:28 2009 -0400
@@ -1,19 +1,19 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
-<%def name="title()">Browse Libraries</%def>
+<%def name="title()">Browse Data Libraries</%def>
<h2>
%if deleted:
Deleted
%endif
- Libraries
+ Data Libraries
</h2>
<ul class="manage-table-actions">
%if not deleted:
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='library', new=True )}"><span>Create a new library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='library', new=True )}"><span>Create a new data library</span></a>
</li>
<li>
<a class="action-button" href="${h.url_for( controller='admin', action='deleted_libraries' )}"><span>Manage deleted libraries</span></a>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/browse_library.mako
--- a/templates/admin/library/browse_library.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/browse_library.mako Wed Aug 19 17:55:28 2009 -0400
@@ -162,16 +162,16 @@
%if deleted:
Deleted
%endif
- Library '${library.name}'
+    Data Library “${library.name}”
</h2>
<ul class="manage-table-actions">
%if not deleted:
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library.root_folder.id )}"><span>Add datasets to this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library.id, folder_id=library.root_folder.id )}"><span>Add datasets to this data library</span></a>
</li>
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='folder', new=True, id=library.root_folder.id, library_id=library.id )}">Add a folder to this library</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='folder', new=True, id=library.root_folder.id, library_id=library.id )}">Add a folder to this data library</a>
</li>
%endif
</ul>
@@ -200,24 +200,24 @@
library_item_ids = {}
library_item_ids[ 'library' ] = library.id
%>
- <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this library's information</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this data library's information</a>
## Editing templates disabled until we determine optimal approach to re-linking library item to new version of form definition
##%if library.info_association:
## <% form_id = library.info_association[0].template.id %>
- ## <a class="action-button" href="${h.url_for( controller='forms', action='edit', form_id=form_id, show_form=True )}">Edit this library's information template</a>
+ ## <a class="action-button" href="${h.url_for( controller='forms', action='edit', form_id=form_id, show_form=True )}">Edit this data library's information template</a>
##%else:
%if not library.info_association:
- <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, add=True )}">Add an information template to this library</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, add=True )}">Add an information template to this data library</a>
%endif
- <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this library's permissions</a>
- <a class="action-button" confirm="Click OK to delete the library named '${library.name}'." href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Delete this library and its contents</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this data library's permissions</a>
+ <a class="action-button" confirm="Click OK to delete the library named '${library.name}'." href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Delete this data library and its contents</a>
%if show_deleted:
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted library items</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted data library items</a>
%else:
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted library items</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted data library items</a>
%endif
%elif not library.purged:
- <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Undelete this library</a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Undelete this data library</a>
%endif
</div>
</th>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/folder_info.mako
--- a/templates/admin/library/folder_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/folder_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/folder_permissions.mako
--- a/templates/admin/library/folder_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/folder_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/ldda_edit_info.mako
--- a/templates/admin/library/ldda_edit_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/ldda_edit_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -12,7 +12,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/ldda_info.mako
--- a/templates/admin/library/ldda_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/ldda_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -20,7 +20,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id, deleted=library.deleted, show_deleted=show_deleted )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id, deleted=library.deleted, show_deleted=show_deleted )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/ldda_permissions.mako
--- a/templates/admin/library/ldda_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/ldda_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -15,7 +15,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/library_dataset_info.mako
--- a/templates/admin/library/library_dataset_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/library_dataset_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -11,7 +11,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/library_dataset_permissions.mako
--- a/templates/admin/library/library_dataset_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/library_dataset_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -11,7 +11,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/library_info.mako
--- a/templates/admin/library/library_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/library_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/library_permissions.mako
--- a/templates/admin/library/library_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/library_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/new_dataset.mako
--- a/templates/admin/library/new_dataset.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/new_dataset.mako Wed Aug 19 17:55:28 2009 -0400
@@ -4,7 +4,7 @@
<% import os, os.path %>
-<b>Create new library datasets</b>
+<b>Create new data library datasets</b>
<a id="upload-librarydataset--popup" class="popup-arrow" style="display: none;">&#9660;</a>
<div popupmenu="upload-librarydataset--popup">
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder_id, replace_id=replace_id, upload_option='upload_file' )}">Upload files</a>
@@ -16,7 +16,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
@@ -142,7 +142,7 @@
<textarea name="message" rows="3" cols="35"></textarea>
</div>
<div class="toolParamHelp" style="clear: both;">
- This information will be displayed in the "Information" column for this dataset in the library browser
+ This information will be displayed in the "Information" column for this dataset in the data library browser
</div>
<div style="clear: both"></div>
</div>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/new_folder.mako
--- a/templates/admin/library/new_folder.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/new_folder.mako Wed Aug 19 17:55:28 2009 -0400
@@ -4,7 +4,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/new_library.mako
--- a/templates/admin/library/new_library.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/new_library.mako Wed Aug 19 17:55:28 2009 -0400
@@ -6,13 +6,13 @@
%endif
<div class="toolForm">
- <div class="toolFormTitle">Create a new library</div>
+ <div class="toolFormTitle">Create a new data library</div>
<div class="toolFormBody">
<form name="library" action="${h.url_for( controller='admin', action='library' )}" method="post" >
<div class="form-row">
<label>Name:</label>
<div style="float: left; width: 250px; margin-right: 10px;">
- <input type="text" name="name" value="New Library" size="40"/>
+ <input type="text" name="name" value="New data library" size="40"/>
</div>
<div style="clear: both"></div>
</div>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/library/select_info_template.mako
--- a/templates/admin/library/select_info_template.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/library/select_info_template.mako Wed Aug 19 17:55:28 2009 -0400
@@ -4,7 +4,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/admin/requests/show_request.mako
--- a/templates/admin/requests/show_request.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/admin/requests/show_request.mako Wed Aug 19 17:55:28 2009 -0400
@@ -48,7 +48,7 @@
%if not rd['value']:
<i>None</i>
%else:
- %if rd['label'] == 'Library':
+ %if rd['label'] == 'Data library':
<a href="${h.url_for( controller='admin', action='browse_library', id=request.library.id )}">${rd['value']}</a>
%else:
${rd['value']}
diff -r ea6708c96cd1 -r 3049432643f4 templates/base_panels.mako
--- a/templates/base_panels.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/base_panels.mako Wed Aug 19 17:55:28 2009 -0400
@@ -139,14 +139,14 @@
${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
- ${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
+ ${tab( "libraries", "Data Libraries", h.url_for( controller='library', action='index' ))}
%if trans.request_types():
<td class="tab">
<a>Lab</a>
<div class="submenu">
<ul>
- <li><a target="requests" href="${h.url_for( controller='requests', action='index' )}">Sequencing Requests</a></li>
+ <li><a href="${h.url_for( controller='requests', action='index' )}">Sequencing Requests</a></li>
</ul>
</div>
</td>
diff -r ea6708c96cd1 -r 3049432643f4 templates/history/grid.mako
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/templates/history/grid.mako Wed Aug 19 17:55:28 2009 -0400
@@ -0,0 +1,196 @@
+<%inherit file="/base.mako"/>
+<%def name="title()">${grid.title}</%def>
+
+%if message:
+ <p>
+ <div class="${message_type}message transient-message">${message}</div>
+ <div style="clear: both"></div>
+ </p>
+%endif
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ <script type="text/javascript">
+ ## TODO: generalize and move into galaxy.base.js
+ $(document).ready(function() {
+ $(".grid").each( function() {
+ var grid = this;
+ var checkboxes = $(this).find("input.grid-row-select-checkbox");
+ var update = $(this).find( "span.grid-selected-count" );
+ $(checkboxes).each( function() {
+ $(this).change( function() {
+ var n = $(checkboxes).filter("[checked]").size();
+ update.text( n );
+ });
+ })
+ });
+ });
+ ## Can this be moved into base.mako?
+ %if refresh_frames:
+ %if 'masthead' in refresh_frames:
+ ## Refresh masthead == user changes (backward compatibility)
+ if ( parent.user_changed ) {
+ %if trans.user:
+ parent.user_changed( "${trans.user.email}", ${int( app.config.is_admin_user( trans.user ) )} );
+ %else:
+ parent.user_changed( null, false );
+ %endif
+ }
+ %endif
+ %if 'history' in refresh_frames:
+ if ( parent.frames && parent.frames.galaxy_history ) {
+ parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history')}";
+ if ( parent.force_right_panel ) {
+ parent.force_right_panel( 'show' );
+ }
+ }
+ %endif
+ %if 'tools' in refresh_frames:
+ if ( parent.frames && parent.frames.galaxy_tools ) {
+ parent.frames.galaxy_tools.location.href="${h.url_for( controller='root', action='tool_menu')}";
+ if ( parent.force_left_panel ) {
+ parent.force_left_panel( 'show' );
+ }
+ }
+ %endif
+ %endif
+ </script>
+</%def>
+
+<%def name="stylesheets()">
+ <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
+ <style>
+ ## Not generic to all grids -- move to base?
+ .count-box {
+ min-width: 1.1em;
+ padding: 5px;
+ border-width: 1px;
+ border-style: solid;
+ text-align: center;
+ display: inline-block;
+ }
+ </style>
+</%def>
+
+%if grid.standard_filters:
+ <div class="grid-header">
+ <h2>${grid.title}</h2>
+ <span class="title">Filter:</span>
+ %for i, filter in enumerate( grid.standard_filters ):
+ %if i > 0:
+ <span>|</span>
+ %endif
+ <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
+ %endfor
+ </div>
+%endif
+
+<form name="history_actions" action="${url()}" method="post" >
+ <table class="grid">
+ <thead>
+ <tr>
+ <th></th>
+ %for column in grid.columns:
+ %if column.visible:
+ <%
+ href = ""
+ extra = ""
+ if column.sortable:
+ if sort_key == column.key:
+ if sort_order == "asc":
+ href = url( sort=( "-" + column.key ) )
+                                        extra = "&darr;"
+ else:
+ href = url( sort=( column.key ) )
+                                        extra = "&uarr;"
+ else:
+ href = url( sort=column.key )
+ %>
+ <th\
+ %if column.ncells > 1:
+ colspan="${column.ncells}"
+ %endif
+ >
+ %if href:
+ <a href="${href}">${column.label}</a>
+ %else:
+ ${column.label}
+ %endif
+ <span>${extra}</span>
+ </th>
+ %endif
+ %endfor
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
+ %for i, item in enumerate( query ):
+ <tr \
+ %if current_item == item:
+ class="current" \
+ %endif
+ >
+ ## Item selection column
+ <td style="width: 1.5em;">
+ <input type="checkbox" name="id" value=${trans.security.encode_id( item.id )} class="grid-row-select-checkbox" />
+ </td>
+ ## Data columns
+ %for column in grid.columns:
+ %if column.visible:
+ <%
+ # Link
+ link = column.get_link( trans, grid, item )
+ if link:
+ href = url( **link )
+ else:
+ href = None
+ # Value (coerced to list so we can loop)
+ value = column.get_value( trans, grid, item )
+ if column.ncells == 1:
+ value = [ value ]
+ %>
+ %for cellnum, v in enumerate( value ):
+ <%
+ # Attach popup menu?
+ if column.attach_popup and cellnum == 0:
+                                extra = '<a id="grid-%d-popup" class="popup-arrow" style="display: none;">&#9660;</a>' % i
+ else:
+ extra = ""
+ %>
+ %if href:
+ <td><a href="${href}">${v}</a> ${extra}</td>
+ %else:
+ <td >${v}${extra}</td>
+ %endif
+ </td>
+ %endfor
+ %endif
+ %endfor
+ ## Actions column
+ <td>
+ <div popupmenu="grid-${i}-popup">
+ %for operation in grid.operations:
+ %if operation.allowed( item ):
+ <a class="action-button" href="${url( operation=operation.label, id=item.id )}">${operation.label}</a>
+ %endif
+ %endfor
+ </div>
+ </td>
+ </tr>
+ %endfor
+ </tbody>
+ <tfoot>
+ <tr>
+ <td></td>
+ <td colspan="100">
+ For <span class="grid-selected-count"></span> selected histories:
+ %for operation in grid.operations:
+ %if operation.allow_multiple:
+ <input type="submit" name="operation" value="${operation.label}" class="action-button">
+ %endif
+ %endfor
+ </td>
+ </tr>
+ </tfoot>
+ </table>
+</form>
diff -r ea6708c96cd1 -r 3049432643f4 templates/history/shared_grid.mako
--- a/templates/history/shared_grid.mako Wed Aug 19 17:27:00 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,197 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-
-<%def name="title()">${grid.title}</%def>
-
-<%def name="javascripts()">
- ${parent.javascripts()}
- <script type="text/javascript">
- ## TODO: generalize and move into galaxy.base.js
- $(document).ready(function() {
- $(".grid").each( function() {
- var grid = this;
- var checkboxes = $(this).find("input.grid-row-select-checkbox");
- var update = $(this).find( "span.grid-selected-count" );
- $(checkboxes).each( function() {
- $(this).change( function() {
- var n = $(checkboxes).filter("[checked]").size();
- update.text( n );
- });
- })
- });
- });
- ## Can this be moved into base.mako?
- %if refresh_frames:
- %if 'masthead' in refresh_frames:
- ## Refresh masthead == user changes (backward compatibility)
- if ( parent.user_changed ) {
- %if trans.user:
- parent.user_changed( "${trans.user.email}", ${int( app.config.is_admin_user( trans.user ) )} );
- %else:
- parent.user_changed( null, false );
- %endif
- }
- %endif
- %if 'history' in refresh_frames:
- if ( parent.frames && parent.frames.galaxy_history ) {
- parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history')}";
- if ( parent.force_right_panel ) {
- parent.force_right_panel( 'show' );
- }
- }
- %endif
- %if 'tools' in refresh_frames:
- if ( parent.frames && parent.frames.galaxy_tools ) {
- parent.frames.galaxy_tools.location.href="${h.url_for( controller='root', action='tool_menu')}";
- if ( parent.force_left_panel ) {
- parent.force_left_panel( 'show' );
- }
- }
- %endif
- %endif
- </script>
-</%def>
-
-<%def name="stylesheets()">
- <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
- <style>
- ## Not generic to all grids -- move to base?
- .count-box {
- min-width: 1.1em;
- padding: 5px;
- border-width: 1px;
- border-style: solid;
- text-align: center;
- display: inline-block;
- }
- </style>
-</%def>
-
-%if grid.standard_filters:
- <div class="grid-header">
- <h2>${grid.title}</h2>
- <span class="title">Filter:</span>
- %for i, filter in enumerate( grid.standard_filters ):
- %if i > 0:
- <span>|</span>
- %endif
- <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
- %endfor
- </div>
-%endif
-
-%if message:
- <p>
- <div class="${message_type}message transient-message">${message}</div>
- <div style="clear: both"></div>
- </p>
-%endif
-%if msg:
- ${render_msg( msg, messagetype )}
-%endif
-
-<form name="history_shared_by_others" action="${url()}" method="post" >
- <table class="grid">
- <thead>
- <tr>
- <th></th>
- %for column in grid.columns:
- %if column.visible:
- <%
- href = ""
- extra = ""
- if column.sortable:
- if sort_key == column.key:
- if sort_order == "asc":
- href = url( sort=( "-" + column.key ) )
-                                        extra = "&darr;"
- else:
- href = url( sort=( column.key ) )
-                                        extra = "&uarr;"
- else:
- href = url( sort=column.key )
- %>
- <th\
- %if column.ncells > 1:
- colspan="${column.ncells}"
- %endif
- >
- %if href:
- <a href="${href}">${column.label}</a>
- %else:
- ${column.label}
- %endif
- <span>${extra}</span>
- </th>
- %endif
- %endfor
- <th></th>
- </tr>
- </thead>
- <tbody>
- %for i, history in enumerate( query ):
- <tr>
- ## Item selection column
- <td style="width: 1.5em;">
- <input type="checkbox" name="id" value=${trans.security.encode_id( history.id )} class="grid-row-select-checkbox" />
- </td>
- ## Data columns
- %for column in grid.columns:
- %if column.visible:
- <%
- # Link
- link = column.get_link( trans, grid, history )
- if link:
- href = url( **link )
- else:
- href = None
- # Value (coerced to list so we can loop)
- value = column.get_value( trans, grid, history )
- if column.ncells == 1:
- value = [ value ]
- %>
- %for cellnum, v in enumerate( value ):
- <%
- # Attach popup menu?
- if column.attach_popup and cellnum == 0:
-                                extra = '<a id="grid-%d-popup" class="popup-arrow" style="display: none;">&#9660;</a>' % i
- else:
- extra = ""
- %>
- %if href:
- <td><a href="${href}">${v}</a> ${extra}</td>
- %else:
- <td >${v}${extra}</td>
- %endif
- </td>
- %endfor
- %endif
- %endfor
- ## Actions column
- <td>
- <div popupmenu="grid-${i}-popup">
- %for operation in grid.operations:
- %if operation.allowed( history ):
- <a class="action-button" href="${url( operation=operation.label, id=history.id )}">${operation.label}</a>
- %endif
- %endfor
- </div>
- </td>
- </tr>
- %endfor
- </tbody>
- <tfoot>
- <tr>
- <td></td>
- <td colspan="100">
- For <span class="grid-selected-count"></span> selected histories:
- %for operation in grid.operations:
- %if operation.allow_multiple:
- <input type="submit" name="operation" value="${operation.label}" class="action-button">
- %endif
- %endfor
- </td>
- </tr>
- </tfoot>
- </table>
-</form>
diff -r ea6708c96cd1 -r 3049432643f4 templates/history/stored_grid.mako
--- a/templates/history/stored_grid.mako Wed Aug 19 17:27:00 2009 -0400
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,196 +0,0 @@
-<%inherit file="/base.mako"/>
-<%def name="title()">${grid.title}</%def>
-
-%if message:
- <p>
- <div class="${message_type}message transient-message">${message}</div>
- <div style="clear: both"></div>
- </p>
-%endif
-
-<%def name="javascripts()">
- ${parent.javascripts()}
- <script type="text/javascript">
- ## TODO: generalize and move into galaxy.base.js
- $(document).ready(function() {
- $(".grid").each( function() {
- var grid = this;
- var checkboxes = $(this).find("input.grid-row-select-checkbox");
- var update = $(this).find( "span.grid-selected-count" );
- $(checkboxes).each( function() {
- $(this).change( function() {
- var n = $(checkboxes).filter("[checked]").size();
- update.text( n );
- });
- })
- });
- });
- ## Can this be moved into base.mako?
- %if refresh_frames:
- %if 'masthead' in refresh_frames:
- ## Refresh masthead == user changes (backward compatibility)
- if ( parent.user_changed ) {
- %if trans.user:
- parent.user_changed( "${trans.user.email}", ${int( app.config.is_admin_user( trans.user ) )} );
- %else:
- parent.user_changed( null, false );
- %endif
- }
- %endif
- %if 'history' in refresh_frames:
- if ( parent.frames && parent.frames.galaxy_history ) {
- parent.frames.galaxy_history.location.href="${h.url_for( controller='root', action='history')}";
- if ( parent.force_right_panel ) {
- parent.force_right_panel( 'show' );
- }
- }
- %endif
- %if 'tools' in refresh_frames:
- if ( parent.frames && parent.frames.galaxy_tools ) {
- parent.frames.galaxy_tools.location.href="${h.url_for( controller='root', action='tool_menu')}";
- if ( parent.force_left_panel ) {
- parent.force_left_panel( 'show' );
- }
- }
- %endif
- %endif
- </script>
-</%def>
-
-<%def name="stylesheets()">
- <link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
- <style>
- ## Not generic to all grids -- move to base?
- .count-box {
- min-width: 1.1em;
- padding: 5px;
- border-width: 1px;
- border-style: solid;
- text-align: center;
- display: inline-block;
- }
- </style>
-</%def>
-
-%if grid.standard_filters:
- <div class="grid-header">
- <h2>${grid.title}</h2>
- <span class="title">Filter:</span>
- %for i, filter in enumerate( grid.standard_filters ):
- %if i > 0:
- <span>|</span>
- %endif
- <span class="filter"><a href="${url( filter.get_url_args() )}">${filter.label}</a></span>
- %endfor
- </div>
-%endif
-
-<form name="history_actions" action="${url()}" method="post" >
- <table class="grid">
- <thead>
- <tr>
- <th></th>
- %for column in grid.columns:
- %if column.visible:
- <%
- href = ""
- extra = ""
- if column.sortable:
- if sort_key == column.key:
- if sort_order == "asc":
- href = url( sort=( "-" + column.key ) )
-                                        extra = "&darr;"
- else:
- href = url( sort=( column.key ) )
-                                        extra = "&uarr;"
- else:
- href = url( sort=column.key )
- %>
- <th\
- %if column.ncells > 1:
- colspan="${column.ncells}"
- %endif
- >
- %if href:
- <a href="${href}">${column.label}</a>
- %else:
- ${column.label}
- %endif
- <span>${extra}</span>
- </th>
- %endif
- %endfor
- <th></th>
- </tr>
- </thead>
- <tbody>
- %for i, item in enumerate( query ):
- <tr \
- %if current_item == item:
- class="current" \
- %endif
- >
- ## Item selection column
- <td style="width: 1.5em;">
- <input type="checkbox" name="id" value=${trans.security.encode_id( item.id )} class="grid-row-select-checkbox" />
- </td>
- ## Data columns
- %for column in grid.columns:
- %if column.visible:
- <%
- # Link
- link = column.get_link( trans, grid, item )
- if link:
- href = url( **link )
- else:
- href = None
- # Value (coerced to list so we can loop)
- value = column.get_value( trans, grid, item )
- if column.ncells == 1:
- value = [ value ]
- %>
- %for cellnum, v in enumerate( value ):
- <%
- # Attach popup menu?
- if column.attach_popup and cellnum == 0:
-                                extra = '<a id="grid-%d-popup" class="popup-arrow" style="display: none;">&#9660;</a>' % i
- else:
- extra = ""
- %>
- %if href:
- <td><a href="${href}">${v}</a> ${extra}</td>
- %else:
- <td >${v}${extra}</td>
- %endif
- </td>
- %endfor
- %endif
- %endfor
- ## Actions column
- <td>
- <div popupmenu="grid-${i}-popup">
- %for operation in grid.operations:
- %if operation.allowed( item ):
- <a class="action-button" href="${url( operation=operation.label, id=item.id )}">${operation.label}</a>
- %endif
- %endfor
- </div>
- </td>
- </tr>
- %endfor
- </tbody>
- <tfoot>
- <tr>
- <td></td>
- <td colspan="100">
- For <span class="grid-selected-count"></span> selected histories:
- %for operation in grid.operations:
- %if operation.allow_multiple:
- <input type="submit" name="operation" value="${operation.label}" class="action-button">
- %endif
- %endfor
- </td>
- </tr>
- </tfoot>
- </table>
-</form>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/browse_libraries.mako
--- a/templates/library/browse_libraries.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/browse_libraries.mako Wed Aug 19 17:55:28 2009 -0400
@@ -1,9 +1,9 @@
<%inherit file="/base.mako"/>
<%namespace file="/message.mako" import="render_msg" />
-<%def name="title()">Browse Libraries</%def>
+<%def name="title()">Browse Data Libraries</%def>
-<h2>Libraries</h2>
+<h2>Data Libraries</h2>
%if msg:
${render_msg( msg, messagetype )}
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/browse_library.mako
--- a/templates/library/browse_library.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/browse_library.mako Wed Aug 19 17:55:28 2009 -0400
@@ -2,7 +2,7 @@
<%namespace file="/message.mako" import="render_msg" />
<% from galaxy import util %>
-<%def name="title()">Browse Library</%def>
+<%def name="title()">Browse data library</%def>
<%def name="stylesheets()">
<link href="${h.url_for('/static/style/base.css')}" rel="stylesheet" type="text/css" />
<link href="${h.url_for('/static/style/library.css')}" rel="stylesheet" type="text/css" />
@@ -38,14 +38,21 @@
descendents = descendents.add( child_descendents );
});
// Set up expand / hide link
+ // HACK: assume descendents are invisible. The caller actually
+ // ensures this for the root node. However, if we start
+ // remembering folder states, we'll need something
+ // more sophisticated here.
+ var visible = false;
$(q).find( "span.expandLink").click( function() {
- if ( children.is( ":visible" ) ) {
+ if ( visible ) {
descendents.hide();
descendents.removeClass( "expanded" );
q.removeClass( "expanded" );
+ visible = false;
} else {
children.show();
q.addClass( "expanded" );
+ visible = true;
}
});
// Check/uncheck boxes in subfolders.
@@ -211,7 +218,7 @@
%endfor
</%def>
-<h2>Library “${library.name}”</h2>
+<h2>Data Library “${library.name}”</h2>
<ul class="manage-table-actions">
%if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ):
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/folder_info.mako
--- a/templates/library/folder_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/folder_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/folder_permissions.mako
--- a/templates/library/folder_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/folder_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/ldda_edit_info.mako
--- a/templates/library/ldda_edit_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/ldda_edit_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -12,7 +12,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/ldda_info.mako
--- a/templates/library/ldda_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/ldda_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -19,7 +19,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/ldda_permissions.mako
--- a/templates/library/ldda_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/ldda_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -15,7 +15,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/library_dataset_info.mako
--- a/templates/library/library_dataset_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/library_dataset_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -11,7 +11,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/library_dataset_permissions.mako
--- a/templates/library/library_dataset_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/library_dataset_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -11,7 +11,7 @@
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/library_info.mako
--- a/templates/library/library_info.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/library_info.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library.id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library.id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/library_permissions.mako
--- a/templates/library/library_permissions.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/library_permissions.mako Wed Aug 19 17:55:28 2009 -0400
@@ -5,7 +5,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library.id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library.id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/new_dataset.mako
--- a/templates/library/new_dataset.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/new_dataset.mako Wed Aug 19 17:55:28 2009 -0400
@@ -16,7 +16,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
@@ -68,7 +68,7 @@
%elif upload_option == 'upload_directory':
<div class="form-row">
<%
- # Directories of files from the Libraries view are restricted to a
+ # Directories of files from the Data Libraries view are restricted to a
# sub-directory named the same as the current user's email address
# contained within the configured setting for user_library_import_dir
user_library_import_dir = os.path.join( trans.app.config.user_library_import_dir, trans.user.email )
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/new_folder.mako
--- a/templates/library/new_folder.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/new_folder.mako Wed Aug 19 17:55:28 2009 -0400
@@ -4,7 +4,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/library/select_info_template.mako
--- a/templates/library/select_info_template.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/library/select_info_template.mako Wed Aug 19 17:55:28 2009 -0400
@@ -4,7 +4,7 @@
<br/><br/>
<ul class="manage-table-actions">
<li>
- <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this library</span></a>
+ <a class="action-button" href="${h.url_for( controller='library', action='browse_library', id=library_id )}"><span>Browse this data library</span></a>
</li>
</ul>
diff -r ea6708c96cd1 -r 3049432643f4 templates/requests/edit_request.mako
--- a/templates/requests/edit_request.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/requests/edit_request.mako Wed Aug 19 17:55:28 2009 -0400
@@ -58,7 +58,7 @@
<div class="form-row">
<label>${field['label']}</label>
${field['widget'].get_html()}
- %if field['label'] == 'Library' and new_library:
+ %if field['label'] == 'Data library' and new_library:
${new_library.get_html()}
%endif
<div class="toolParamHelp" style="clear: both;">
diff -r ea6708c96cd1 -r 3049432643f4 templates/requests/new_request.mako
--- a/templates/requests/new_request.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/requests/new_request.mako Wed Aug 19 17:55:28 2009 -0400
@@ -58,7 +58,7 @@
<div class="form-row">
<label>${field['label']}</label>
${field['widget'].get_html()}
- %if field['label'] == 'Library' and new_library:
+ %if field['label'] == 'Data library' and new_library:
${new_library.get_html()}
%endif
<div class="toolParamHelp" style="clear: both;">
diff -r ea6708c96cd1 -r 3049432643f4 templates/requests/show_request.mako
--- a/templates/requests/show_request.mako Wed Aug 19 17:27:00 2009 -0400
+++ b/templates/requests/show_request.mako Wed Aug 19 17:55:28 2009 -0400
@@ -79,7 +79,7 @@
%if not rd['value']:
<i>None</i>
%else:
- %if rd['label'] == 'Library':
+ %if rd['label'] == 'Data library':
%if rd['value']:
<a href="${h.url_for( controller='library', action='browse_library', id=request.library.id )}">${rd['value']}</a>
%else:
diff -r ea6708c96cd1 -r 3049432643f4 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Wed Aug 19 17:27:00 2009 -0400
+++ b/test/base/twilltestcase.py Wed Aug 19 17:55:28 2009 -0400
@@ -1003,7 +1003,7 @@
"""Create a new library"""
self.home()
self.visit_url( "%s/admin/library?new=True" % self.url )
- self.check_page_for_string( 'Create a new library' )
+ self.check_page_for_string( 'Create a new data library' )
tc.fv( "1", "1", name ) # form field 1 is the field named name...
tc.fv( "1", "2", description ) # form field 1 is the field named name...
tc.submit( "create_library_button" )
diff -r ea6708c96cd1 -r 3049432643f4 test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Wed Aug 19 17:27:00 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Wed Aug 19 17:55:28 2009 -0400
@@ -1451,7 +1451,7 @@
check_edit_page2( latest_3_lddas )
self.home()
def test_195_upload_directory_of_files_from_libraries_view( self ):
- """Testing uploading a directory of files to a root folder from the Libraries view"""
+ """Testing uploading a directory of files to a root folder from the Data Libraries view"""
# admin_user will not have the option to upload a directory of files from the
# Libraries view since a sub-directory named the same as their email is not contained
# in the configured user_library_import_dir. However, since members of role_one have
diff -r ea6708c96cd1 -r 3049432643f4 tools/data_source/genbank.py
--- a/tools/data_source/genbank.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/data_source/genbank.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,6 +1,6 @@
#!/usr/bin/env python
from Bio import GenBank
-import sys, os, sets, textwrap
+import sys, os, textwrap
assert sys.version_info[:2] >= ( 2, 4 )
diff -r ea6708c96cd1 -r 3049432643f4 tools/data_source/ucsc_proxy.py
--- a/tools/data_source/ucsc_proxy.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/data_source/ucsc_proxy.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,6 +1,6 @@
#!/usr/bin/env python
import urllib
-import sys, os, sets
+import sys, os
assert sys.version_info[:2] >= ( 2, 4 )
diff -r ea6708c96cd1 -r 3049432643f4 tools/new_operations/get_flanks.py
--- a/tools/new_operations/get_flanks.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/new_operations/get_flanks.py Wed Aug 19 17:55:28 2009 -0400
@@ -9,7 +9,7 @@
-o, --off=N: Offset
"""
-import sys, sets, re, os
+import sys, re, os
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
from bx.cookbook import doc_optparse
diff -r ea6708c96cd1 -r 3049432643f4 tools/new_operations/operation_filter.py
--- a/tools/new_operations/operation_filter.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/new_operations/operation_filter.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,8 +1,13 @@
# runs after the job (and after the default post-filter)
-import sets, os
+import os
from galaxy import eggs
from galaxy import jobs
from galaxy.tools.parameters import DataToolParameter
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
#def exec_before_process(app, inp_data, out_data, param_dict, tool=None):
# """Sets the name of the data"""
@@ -11,8 +16,8 @@
# raise Exception, '<p><font color="yellow">Both Queries must be from the same genome build</font></p>'
def validate_input( trans, error_map, param_values, page_param_map ):
- dbkeys = sets.Set()
- data_param_names = sets.Set()
+ dbkeys = set()
+ data_param_names = set()
data_params = 0
for name, param in page_param_map.iteritems():
if isinstance( param, DataToolParameter ):
diff -r ea6708c96cd1 -r 3049432643f4 tools/new_operations/subtract_query.py
--- a/tools/new_operations/subtract_query.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/new_operations/subtract_query.py Wed Aug 19 17:55:28 2009 -0400
@@ -5,12 +5,16 @@
Subtract an entire query from another query
usage: %prog in_file_1 in_file_2 begin_col end_col output
"""
-
-import sys, sets, re
-
+import sys, re
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
from bx.cookbook import doc_optparse
+
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
assert sys.version_info[:2] >= ( 2, 4 )
diff -r ea6708c96cd1 -r 3049432643f4 tools/regVariation/windowSplitter.py
--- a/tools/regVariation/windowSplitter.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/regVariation/windowSplitter.py Wed Aug 19 17:55:28 2009 -0400
@@ -7,7 +7,7 @@
-l, --cols=N,N,N,N: Columns for chrom, start, end, strand in file
"""
-import sys, sets, re, os
+import sys, re, os
from galaxy import eggs
import pkg_resources; pkg_resources.require( "bx-python" )
diff -r ea6708c96cd1 -r 3049432643f4 tools/stats/column_maker.py
--- a/tools/stats/column_maker.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/stats/column_maker.py Wed Aug 19 17:55:28 2009 -0400
@@ -2,7 +2,7 @@
# This tool takes a tab-delimited textfile as input and creates another column in the file which is the result of
# a computation performed on every row in the original file. The tool will skip over invalid lines within the file,
# informing the user about the number of lines skipped.
-import sys, sets, re, os.path
+import sys, re, os.path
from galaxy import eggs
from galaxy.tools import validation
from galaxy.datatypes import metadata
diff -r ea6708c96cd1 -r 3049432643f4 tools/stats/filtering.py
--- a/tools/stats/filtering.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/stats/filtering.py Wed Aug 19 17:55:28 2009 -0400
@@ -2,8 +2,13 @@
# This tool takes a tab-delimited text file as input and creates filters on columns based on certain properties.
# The tool will skip over invalid lines within the file, informing the user about the number of lines skipped.
-import sys, sets, re, os.path
+import sys, re, os.path
from galaxy import eggs
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
assert sys.version_info[:2] >= ( 2, 4 )
@@ -13,7 +18,7 @@
for item in items_to_strip:
if filter_condition.find( item ) >= 0:
filter_condition = filter_condition.replace( item, ' ' )
- operands = sets.Set( filter_condition.split( ' ' ) )
+ operands = set( filter_condition.split( ' ' ) )
return operands
def stop_err( msg ):
diff -r ea6708c96cd1 -r 3049432643f4 tools/stats/grouping.py
--- a/tools/stats/grouping.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/stats/grouping.py Wed Aug 19 17:55:28 2009 -0400
@@ -3,7 +3,7 @@
"""
This tool provides the SQL "group by" functionality.
"""
-import sys, string, re, commands, tempfile, random, sets
+import sys, string, re, commands, tempfile, random
from rpy import *
def stop_err(msg):
diff -r ea6708c96cd1 -r 3049432643f4 tools/stats/gsummary.py
--- a/tools/stats/gsummary.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/stats/gsummary.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,7 +1,12 @@
#!/usr/bin/python
-import sys, sets, re, tempfile
+import sys, re, tempfile
from rpy import *
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
assert sys.version_info[:2] >= ( 2, 4 )
@@ -33,7 +38,7 @@
for word in re.compile( '[a-zA-Z]+' ).findall( expression ):
if word and not word in math_allowed:
stop_err( "Invalid expression '%s': term '%s' is not recognized or allowed" %( expression, word ) )
- symbols = sets.Set()
+ symbols = set()
for symbol in re.compile( '[^a-z0-9\s]+' ).findall( expression ):
if symbol and not symbol in ops_allowed:
stop_err( "Invalid expression '%s': operator '%s' is not recognized or allowed" % ( expression, symbol ) )
diff -r ea6708c96cd1 -r 3049432643f4 tools/visualization/build_ucsc_custom_track_code.py
--- a/tools/visualization/build_ucsc_custom_track_code.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/visualization/build_ucsc_custom_track_code.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,6 +1,10 @@
# runs after the job (and after the default post-filter)
-from sets import Set as set
+# Older py compatibility
+try:
+ set()
+except:
+ from sets import Set as set
def validate_input( trans, error_map, param_values, page_param_map ):
dbkeys = set()
diff -r ea6708c96cd1 -r 3049432643f4 tools/visualization/genetrack_code.py
--- a/tools/visualization/genetrack_code.py Wed Aug 19 17:27:00 2009 -0400
+++ b/tools/visualization/genetrack_code.py Wed Aug 19 17:55:28 2009 -0400
@@ -1,4 +1,4 @@
-import sets, os
+import os
from galaxy import eggs
from galaxy import jobs
from galaxy.tools.parameters import DataToolParameter
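The compatibility shim added across these tools is worth restating on its own: Python 2.3 has no builtin set, so the deprecated sets module is pulled in under the same name only when the builtin is missing. A minimal sketch of the idiom (shown with except NameError rather than the patch's bare except, which would also swallow unrelated errors):

# Older py compatibility: prefer the builtin set() (Python 2.4+), fall
# back to the deprecated sets module on Python 2.3.
try:
    set()
except NameError:
    from sets import Set as set

# Call sites then migrate from sets.Set() to plain set():
dbkeys = set()
dbkeys.add( 'hg18' )
dbkeys.add( 'hg18' )
assert len( dbkeys ) == 1  # duplicates collapse, exactly as with sets.Set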
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/ab5f39f838c0
changeset: 2597:ab5f39f838c0
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Fri Aug 21 17:14:35 2009 -0400
description:
Better naming for collected datasets in JobToOutputDatasetAssociation
1 file(s) affected in this change:
lib/galaxy/tools/__init__.py
diffs (21 lines):
diff -r b77721ef035d -r ab5f39f838c0 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Fri Aug 21 17:03:42 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Fri Aug 21 17:14:35 2009 -0400
@@ -1543,7 +1543,7 @@
job = assoc.job
break
if job:
- assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s__' % designation, child_dataset )
+ assoc = self.app.model.JobToOutputDatasetAssociation( '__new_child_file_%s|%s__' % ( name, designation ), child_dataset )
assoc.job = job
assoc.flush()
child_dataset.state = outdata.state
@@ -1594,7 +1594,7 @@
job = assoc.job
break
if job:
- assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s__' % designation, primary_data )
+ assoc = self.app.model.JobToOutputDatasetAssociation( '__new_primary_file_%s|%s__' % ( name, designation ), primary_data )
assoc.job = job
assoc.flush()
primary_data.state = outdata.state
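The renamed association keys are easiest to see with concrete values; a small sketch (the output name 'out_file1' and designation 'chr21' are hypothetical):

# Encoding the output name next to the designation keeps collected
# children from different outputs distinct.
name, designation = 'out_file1', 'chr21'
old_key = '__new_child_file_%s__' % designation               # '__new_child_file_chr21__'
new_key = '__new_child_file_%s|%s__' % ( name, designation )  # '__new_child_file_out_file1|chr21__'
# The same designation under a second output no longer collides:
assert new_key != '__new_child_file_%s|%s__' % ( 'out_file2', designation )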
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/2c1916d89194
changeset: 2592:2c1916d89194
user: guru
date: Fri Aug 21 11:40:51 2009 -0400
description:
Change dbkey of unmapped liftover dataset to the input dataset's dbkey.
1 file(s) affected in this change:
tools/extract/liftOver_wrapper_code.py
diffs (11 lines):
diff -r 0fb05cc2b05f -r 2c1916d89194 tools/extract/liftOver_wrapper_code.py
--- a/tools/extract/liftOver_wrapper_code.py Fri Aug 21 09:56:35 2009 -0400
+++ b/tools/extract/liftOver_wrapper_code.py Fri Aug 21 11:40:51 2009 -0400
@@ -3,7 +3,6 @@
to_dbkey = param_dict['to_dbkey'].split('.')[0].split('To')[1]
to_dbkey = to_dbkey[0].lower()+to_dbkey[1:]
out_data['out_file1'].set_dbkey(to_dbkey)
- out_data['out_file2'].set_dbkey(to_dbkey)
out_data['out_file1'].name = out_data['out_file1'].name + " [ MAPPED COORDINATES ]"
out_data['out_file2'].name = out_data['out_file2'].name + " [ UNMAPPED COORDINATES ]"
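For context, the to_dbkey parsing kept by this change derives the target build from the liftOver chain file name; a sketch with a hypothetical chain name:

# 'hg18ToHg19.over.chain' names both builds; the target follows 'To'
# and gets its first letter lower-cased.
chain = 'hg18ToHg19.over.chain'
to_dbkey = chain.split( '.' )[0].split( 'To' )[1]  # 'Hg19'
to_dbkey = to_dbkey[0].lower() + to_dbkey[1:]      # 'hg19'
assert to_dbkey == 'hg19'
# After the removal above, only out_file1 (mapped coordinates) receives
# the target dbkey; out_file2 (unmapped) keeps the input dataset's dbkey.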
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/108533bf35b8
changeset: 2595:108533bf35b8
user: rc
date: Fri Aug 21 15:18:17 2009 -0400
description:
Merge with 2c1916d89194f6802102cab13e67bebcf68d1d75
0 file(s) affected in this change:
diffs (131 lines):
diff -r 55b0c25aa164 -r 108533bf35b8 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Fri Aug 21 15:17:26 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Fri Aug 21 15:18:17 2009 -0400
@@ -20,12 +20,10 @@
for upload_dataset in incoming['files']:
f = upload_dataset['file_data']
if isinstance( f, FieldStorage ):
- # very small files can be StringIOs
- if 'name' in dir( f.file ) and f.file.name != '<fdopen>':
- local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
- f.file.close()
- else:
- local_filename = datatypes.sniff.stream_to_file( f.file, prefix="strio_upload_file_" )[0]
+ assert not isinstance( f.file, StringIO.StringIO )
+ assert f.file.name != '<fdopen>'
+ local_filename = util.mkstemp_ln( f.file.name, 'upload_file_data_' )
+ f.file.close()
upload_dataset['file_data'] = dict( filename = f.filename,
local_filename = local_filename )
if upload_dataset['url_paste'].strip() != '':
diff -r 55b0c25aa164 -r 108533bf35b8 lib/galaxy/web/framework/base.py
--- a/lib/galaxy/web/framework/base.py Fri Aug 21 15:17:26 2009 -0400
+++ b/lib/galaxy/web/framework/base.py Fri Aug 21 15:18:17 2009 -0400
@@ -216,11 +216,18 @@
# tempfiles. Necessary for externalizing the upload tool. It's a little hacky
# but for performance reasons it's way better to use Paste's tempfile than to
# create a new one and copy.
-import cgi
+import cgi, tempfile
class FieldStorage( cgi.FieldStorage ):
def make_file(self, binary=None):
- import tempfile
return tempfile.NamedTemporaryFile()
+ def read_lines(self):
+ # Always make a new file
+ self.file = self.make_file()
+ self.__file = None
+ if self.outerboundary:
+ self.read_lines_to_outerboundary()
+ else:
+ self.read_lines_to_eof()
cgi.FieldStorage = FieldStorage
class Request( webob.Request ):
diff -r 55b0c25aa164 -r 108533bf35b8 static/welcome.html
--- a/static/welcome.html Fri Aug 21 15:17:26 2009 -0400
+++ b/static/welcome.html Fri Aug 21 15:18:17 2009 -0400
@@ -71,7 +71,7 @@
<table border="0" cellpadding="0" cellspacing="0" width="100%">
<tr>
<td>
- <a href="javascript:parent.show_in_overlay({url:'http://screencast.g2.bx.psu.edu/galaxy/quickie1_TabSeq/flow.html',width:640,height:500,scroll:'no'})">
+ <a href="javascript:parent.show_in_overlay({url:'http://screencast.g2.bx.psu.edu/galaxy/quickie1_TabSeq/quickie1_TabSeq.flv',width:640,height:500,scroll:'no'})">
<div class="quickie">
<img src="images/qk/quickie1_small.png" border="0">
</div>
diff -r 55b0c25aa164 -r 108533bf35b8 tools/extract/liftOver_wrapper_code.py
--- a/tools/extract/liftOver_wrapper_code.py Fri Aug 21 15:17:26 2009 -0400
+++ b/tools/extract/liftOver_wrapper_code.py Fri Aug 21 15:18:17 2009 -0400
@@ -3,7 +3,6 @@
to_dbkey = param_dict['to_dbkey'].split('.')[0].split('To')[1]
to_dbkey = to_dbkey[0].lower()+to_dbkey[1:]
out_data['out_file1'].set_dbkey(to_dbkey)
- out_data['out_file2'].set_dbkey(to_dbkey)
out_data['out_file1'].name = out_data['out_file1'].name + " [ MAPPED COORDINATES ]"
out_data['out_file2'].name = out_data['out_file2'].name + " [ UNMAPPED COORDINATES ]"
diff -r 55b0c25aa164 -r 108533bf35b8 tools/metag_tools/split_paired_reads.py
--- a/tools/metag_tools/split_paired_reads.py Fri Aug 21 15:17:26 2009 -0400
+++ b/tools/metag_tools/split_paired_reads.py Fri Aug 21 15:18:17 2009 -0400
@@ -1,7 +1,7 @@
#! /usr/bin/python
"""
-Split Solexa paired end reads
+Split fixed length paired end reads
"""
import os, sys
@@ -12,9 +12,13 @@
outfile_end1 = open(sys.argv[2], 'w')
outfile_end2 = open(sys.argv[3], 'w')
- for i, line in enumerate(file(infile)):
+ i = 0
+
+ for line in file( infile ):
line = line.rstrip()
- if not line or line.startswith('#'): continue
+
+ if not line:
+ continue
end1 = ''
end2 = ''
@@ -42,5 +46,9 @@
outfile_end1.write('%s\n' %(end1))
outfile_end2.write('%s\n' %(end2))
+ i += 1
+
+ if i % 4 != 0 :
+ sys.stderr.write("WARNING: Number of lines in the input file was not divisible by 4.\nCheck consistency of the input fastq file.\n")
outfile_end1.close()
outfile_end2.close()
\ No newline at end of file
diff -r 55b0c25aa164 -r 108533bf35b8 tools/solid_tools/maq_cs_wrapper.py
--- a/tools/solid_tools/maq_cs_wrapper.py Fri Aug 21 15:17:26 2009 -0400
+++ b/tools/solid_tools/maq_cs_wrapper.py Fri Aug 21 15:18:17 2009 -0400
@@ -48,9 +48,9 @@
cmd1 = "solid2fastq_modified.pl 'yes' %s %s %s %s %s %s %s 2>&1" %(tmpf.name,tmpr.name,tmps.name,f3_read_fname,f3_qual_fname,r3_read_fname,r3_qual_fname)
try:
os.system(cmd1)
- os.system('zcat -f %s >> %s' %(tmpf.name,tmpffastq.name))
- os.system('zcat -f %s >> %s' %(tmpr.name,tmprfastq.name))
- os.system('zcat -f %s >> %s' %(tmps.name,tmpsfastq.name))
+ os.system('gunzip -c %s >> %s' %(tmpf.name,tmpffastq.name))
+ os.system('gunzip -c %s >> %s' %(tmpr.name,tmprfastq.name))
+ os.system('gunzip -c %s >> %s' %(tmps.name,tmpsfastq.name))
except Exception, eq:
stop_err("Error converting data to fastq format." + str(eq))
@@ -135,7 +135,7 @@
cmd1 = "solid2fastq_modified.pl 'no' %s %s %s %s %s %s %s 2>&1" %(tmpf.name,None,None,f3_read_fname,f3_qual_fname,None,None)
try:
os.system(cmd1)
- os.system('zcat -f %s >> %s' %(tmpf.name,tmpfastq.name))
+ os.system('gunzip -c %s >> %s' %(tmpf.name,tmpfastq.name))
tmpf.close()
except:
stop_err("Error converting data to fastq format.")
24 Aug '09
details: http://www.bx.psu.edu/hg/galaxy/rev/fc4c0c7e5da9
changeset: 2593:fc4c0c7e5da9
user: rc
date: Thu Aug 20 16:54:44 2009 -0400
description:
Fixed a bug in form rendering wrt AddressField
Fixed other bugs in the admin's request UI
6 file(s) affected in this change:
lib/galaxy/web/controllers/forms.py
lib/galaxy/web/controllers/requests_admin.py
lib/galaxy/web/framework/__init__.py
templates/admin/requests/grid.mako
test/base/twilltestcase.py
test/functional/test_forms_and_requests.py
diffs (290 lines):
diff -r a7f9325bb319 -r fc4c0c7e5da9 lib/galaxy/web/controllers/forms.py
--- a/lib/galaxy/web/controllers/forms.py Thu Aug 20 11:43:28 2009 -0400
+++ b/lib/galaxy/web/controllers/forms.py Thu Aug 20 16:54:44 2009 -0400
@@ -421,12 +421,14 @@
return [ fdc.latest_form for fdc in fdc_list ]
-def get_form_widgets( trans, form, contents=[], **kwd ):
+def get_form_widgets( trans, form, contents=[], user=None, **kwd ):
'''
Return the list of widgets that comprise a form definition,
including field contents if any.
'''
params = util.Params( kwd )
+ if not user:
+ user = trans.user
widgets = []
for index, field in enumerate( form.fields ):
field_name = 'field_%i' % index
@@ -458,7 +460,7 @@
field_widget.set_size( 3, 40 )
field_widget.value = value
elif field['type'] == 'AddressField':
- field_widget.user = trans.user
+ field_widget.user = user
field_widget.value = value
field_widget.params = params
elif field[ 'type' ] == 'SelectField':
diff -r a7f9325bb319 -r fc4c0c7e5da9 lib/galaxy/web/controllers/requests_admin.py
--- a/lib/galaxy/web/controllers/requests_admin.py Thu Aug 20 11:43:28 2009 -0400
+++ b/lib/galaxy/web/controllers/requests_admin.py Thu Aug 20 16:54:44 2009 -0400
@@ -177,10 +177,9 @@
widget=TextField('desc', 40, desc),
helptext='(Optional)'))
# libraries selectbox
- libraries = get_authorized_libs(trans, trans.user)
- libui = self.__library_ui(libraries, request, **kwd)
+ libui = self.__library_ui(trans, trans.user, request, **kwd)
widgets = widgets + libui
- widgets = widgets + get_form_widgets(trans, request.type.request_form, request.values.content, **kwd)
+ widgets = widgets + get_form_widgets(trans, request.type.request_form, request.values.content, request.user, **kwd)
return trans.fill_template( '/admin/requests/edit_request.mako',
select_request_type=select_request_type,
request_type=request.type,
@@ -673,13 +672,9 @@
util.restore_text( params.get( 'desc', '' ) )),
helptext='(Optional)'))
# libraries selectbox
- if not user:
- libraries = []
- else:
- libraries = get_authorized_libs(trans, user)
- libui = self.__library_ui(libraries, **kwd)
+ libui = self.__library_ui(trans, user, **kwd)
widgets = widgets + libui
- widgets = widgets + get_form_widgets(trans, request_type.request_form, contents=[], **kwd)
+ widgets = widgets + get_form_widgets(trans, request_type.request_form, contents=[], user=user, **kwd)
return trans.fill_template( '/admin/requests/new_request.mako',
select_request_type=select_request_type,
request_type=request_type,
@@ -706,9 +701,13 @@
select_user.add_option(user.email, user.id)
return select_user
- def __library_ui(self, libraries, request=None, **kwd):
+ def __library_ui(self, trans, user, request=None, **kwd):
params = util.Params( kwd )
lib_id = params.get( 'library_id', 'none' )
+ if not user:
+ libraries = trans.app.model.Library.filter(trans.app.model.Library.table.c.deleted == False).order_by(trans.app.model.Library.name).all()
+ else:
+ libraries = get_authorized_libs(trans, user)
lib_list = SelectField('library_id', refresh_on_change=True,
refresh_on_change_values=['new'])
if request and lib_id == 'none':
@@ -724,13 +723,12 @@
else:
lib_list.add_option(lib.name, lib.id)
if lib_id == 'new':
- lib_list.add_option('Create a new library', 'new', selected=True)
+ lib_list.add_option('Create a new data library', 'new', selected=True)
else:
- lib_list.add_option('Create a new library', 'new')
- widget = dict(label='Library',
+ lib_list.add_option('Create a new data library', 'new')
+ widget = dict(label='Data library',
widget=lib_list,
- helptext='Associated library where the resultant \
- dataset will be stored.')
+ helptext='Data library where the resultant dataset will be stored.')
if lib_id == 'new':
new_lib = dict(label='Create a new Library',
widget=TextField('new_library_name', 40,
diff -r a7f9325bb319 -r fc4c0c7e5da9 lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Thu Aug 20 11:43:28 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Thu Aug 20 16:54:44 2009 -0400
@@ -641,7 +641,6 @@
self.name = name
self.action = action
self.submit_text = submit_text
- #self.submit_name = submit_text+"_button"
self.inputs = []
def add_input( self, type, name, label, value=None, error=None, help=None, use_label=True ):
self.inputs.append( FormInput( type, label, name, value, error, help, use_label ) )
diff -r a7f9325bb319 -r fc4c0c7e5da9 templates/admin/requests/grid.mako
--- a/templates/admin/requests/grid.mako Thu Aug 20 11:43:28 2009 -0400
+++ b/templates/admin/requests/grid.mako Thu Aug 20 16:54:44 2009 -0400
@@ -76,7 +76,7 @@
<div class="grid-header">
<h2>${grid.title}</h2>
- %if len(trans.user.requests):
+ %if len(trans.app.model.Request.query().all()):
##<span class="title">Filter:</span>
%for i, filter in enumerate( grid.standard_filters ):
%if i > 0:
diff -r a7f9325bb319 -r fc4c0c7e5da9 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Thu Aug 20 11:43:28 2009 -0400
+++ b/test/base/twilltestcase.py Thu Aug 20 16:54:44 2009 -0400
@@ -1077,6 +1077,17 @@
for index, field_value in enumerate(fields):
tc.fv( "1", "field_%i" % index, field_value )
tc.submit( "create_request_button" )
+ def create_request_admin( self, request_type_id, user_id, name, desc, library_id, fields ):
+ self.home()
+ self.visit_url( "%s/requests_admin/new?create=True&select_request_type=%i" % (self.url, request_type_id) )
+ self.check_page_for_string( 'Add a new request' )
+ tc.fv( "1", "select_user", str(user_id) )
+ tc.fv( "1", "name", name )
+ tc.fv( "1", "desc", desc )
+ tc.fv( "1", "library_id", str(library_id) )
+ for index, field_value in enumerate(fields):
+ tc.fv( "1", "field_%i" % index, field_value )
+ tc.submit( "create_request_button" )
def add_samples( self, request_id, request_name, samples ):
self.home()
self.visit_url( "%s/requests/list?sort=-create_time&operation=show_request&id=%s" % ( self.url, self.security.encode_id( request_id ) ))
diff -r a7f9325bb319 -r fc4c0c7e5da9 test/functional/test_forms_and_requests.py
--- a/test/functional/test_forms_and_requests.py Thu Aug 20 11:43:28 2009 -0400
+++ b/test/functional/test_forms_and_requests.py Thu Aug 20 16:54:44 2009 -0400
@@ -100,28 +100,13 @@
global request_type
request_type = galaxy.model.RequestType.filter( and_( galaxy.model.RequestType.table.c.name==request_type_name ) ).all()[-1]
assert request_type is not None, 'Problem retrieving request type named "%s" from the database' % request_type_name
- def test_025_create_address( self ):
- """Testing address creation"""
- #self.create_address( user_address1 )
- #self.check_page_for_string( 'Address <b>%s</b> has been added' % user_address1[ 'short_desc' ] )
- ## TODO: FIX HACK
- ## the user address creation should be done as a test.
- global user_address
- user_address = galaxy.model.UserAddress()
- user_address.user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ).first()
- user_address.desc = address1[ 'short_desc' ]
- user_address.name = address1[ 'name' ]
- user_address.institution = address1[ 'institution' ]
- user_address.address = address1[ 'address1' ]+' '+address1[ 'address2' ]
- user_address.city = address1[ 'city' ]
- user_address.state = address1[ 'state' ]
- user_address.postal_code = address1[ 'postal_code' ]
- user_address.country = address1[ 'country' ]
- user_address.phone = address1[ 'phone' ]
- user_address.flush()
- user_address.user.refresh()
- def test_030_create_request( self ):
- """Testing creating and submitting a request"""
+ def test_025_create_address_and_library( self ):
+ """Testing address & library creation"""
+ # first create a regular user
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
# first create a library for the request so that it can be submitted later
lib_name = 'TestLib001'
self.create_library( lib_name, '' )
@@ -144,12 +129,48 @@
break
if not admin_user_private_role:
raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
+ global regular_user1
+ regular_user1 = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ).first()
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ # Get the regular user's private role for later use
+ global regular_user1_private_role
+ regular_user1_private_role = None
+ for role in regular_user1.all_roles():
+ if role.name == regular_user1.email and role.description == 'Private Role for %s' % regular_user1.email:
+ regular_user1_private_role = role
+ break
+ if not regular_user1_private_role:
+ raise AssertionError( "Private role not found for user '%s'" % regular_user1.email )
# Set permissions on the library, sort for later testing
permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
permissions_out = []
- # Role one members are: admin_user, regular_user1, regular_user3. Each of these users will be permitted to
+ # Role one members are: admin_user, regular_user1. Each of these users will be permitted to
# LIBRARY_ADD, LIBRARY_MODIFY, LIBRARY_MANAGE for library items.
- self.set_library_permissions( str( library_one.id ), library_one.name, str( admin_user_private_role.id ), permissions_in, permissions_out )
+ self.set_library_permissions( str( library_one.id ), library_one.name, str( regular_user1_private_role.id ), permissions_in, permissions_out )
+ # create address
+ #self.create_address( user_address1 )
+ #self.check_page_for_string( 'Address <b>%s</b> has been added' % user_address1[ 'short_desc' ] )
+ ## TODO: FIX HACK
+ ## the user address creation should be done as a test.
+ global user_address
+ user_address = galaxy.model.UserAddress()
+ user_address.user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ).first()
+ user_address.desc = address1[ 'short_desc' ]
+ user_address.name = address1[ 'name' ]
+ user_address.institution = address1[ 'institution' ]
+ user_address.address = address1[ 'address1' ]+' '+address1[ 'address2' ]
+ user_address.city = address1[ 'city' ]
+ user_address.state = address1[ 'state' ]
+ user_address.postal_code = address1[ 'postal_code' ]
+ user_address.country = address1[ 'country' ]
+ user_address.phone = address1[ 'phone' ]
+ user_address.flush()
+ user_address.user.refresh()
+ def test_030_create_request( self ):
+ """Testing creating and submitting a request as a regular user"""
+ # login as a regular user
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
# set field values
fields = ['field one value', 'field two value', str(user_address.id)]
# create the request
@@ -162,7 +183,6 @@
galaxy.model.Request.table.c.deleted==False ) ).first()
# check if the request's state is now set to 'unsubmitted'
assert request_one.state is not request_one.states.UNSUBMITTED, "The state of the request '%s' should be set to '%s'" % ( request_one.name, request_one.states.UNSUBMITTED )
-
# sample fields
samples = [ ( 'Sample One', [ 'S1 Field 0 Value' ] ),
( 'Sample Two', [ 'S2 Field 0 Value' ] ) ]
@@ -181,6 +201,8 @@
def test_035_request_lifecycle( self ):
"""Testing request lifecycle as it goes through all the states"""
# goto admin manage requests page
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
self.home()
self.visit_page( 'requests_admin/list' )
self.check_page_for_string( request_one.name )
@@ -203,10 +225,43 @@
request_one.refresh()
# check if the request's state is now set to 'complete'
assert request_one.state is not request_one.states.COMPLETE, "The state of the request '%s' should be set to '%s'" % ( request_one.name, request_one.states.COMPLETE )
+# def test_40_admin_create_request_on_behalf_of_regular_user( self ):
+# """Testing creating and submitting a request as an admin on behalf of a regular user"""
+# self.logout()
+# self.login( email='test(a)bx.psu.edu' )
+## permissions_in = [ k for k, v in galaxy.model.Library.permitted_actions.items() ]
+## permissions_out = []
+## self.set_library_permissions( str( library_one.id ), library_one.name, str( admin_user_private_role.id ), permissions_in, permissions_out )
+# # set field values
+# fields = ['field one value', 'field two value', str(user_address.id)]
+# # create the request
+# request_name, request_desc = 'Request Two', 'Request Two Description'
+# self.create_request_admin(request_type.id, regular_user1.id, request_name, request_desc, library_one.id, fields)
+# self.check_page_for_string( request_name )
+# self.check_page_for_string( request_desc )
+# global request_two
+# request_one = galaxy.model.Request.filter( and_( galaxy.model.Request.table.c.name==request_name,
+# galaxy.model.Request.table.c.deleted==False ) ).first()
+# # check if the request's state is now set to 'unsubmitted'
+# assert request_two.state is not request_two.states.UNSUBMITTED, "The state of the request '%s' should be set to '%s'" % ( request_two.name, request_two.states.UNSUBMITTED )
+# # sample fields
+# samples = [ ( 'Sample One', [ 'S1 Field 0 Value' ] ),
+# ( 'Sample Two', [ 'S2 Field 0 Value' ] ) ]
+# # add samples to this request
+# self.add_samples( request_two.id, request_two.name, samples )
+# for sample_name, fields in samples:
+# self.check_page_for_string( sample_name )
+# self.check_page_for_string( 'Unsubmitted' )
+# for field_value in fields:
+# self.check_page_for_string( field_value )
+# # submit the request
+# self.submit_request( request_two.id, request_two.name )
+# request_two.refresh()
+# # check if the request's state is now set to 'submitted'
+# assert request_two.state is not request_two.states.SUBMITTED, "The state of the request '%s' should be set to '%s'" % ( request_two.name, request_two.states.SUBMITTED )
-
\ No newline at end of file
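The get_form_widgets() change above is the crux of the AddressField fix: an explicit user may now be passed, so an admin building a request form on behalf of another user gets that user's saved addresses rather than their own. A runnable sketch with stand-ins for the real Galaxy objects (AddressWidget and Trans here are illustrative, not Galaxy classes):

class AddressWidget( object ):
    def __init__( self ):
        self.user = None

class Trans( object ):
    # Minimal stand-in for the request context
    def __init__( self, user ):
        self.user = user

def get_form_widgets( trans, field_types, user=None ):
    if not user:
        user = trans.user          # previous behavior, kept as the default
    widgets = []
    for field_type in field_types:
        widget = AddressWidget()
        if field_type == 'AddressField':
            widget.user = user     # was unconditionally trans.user
        widgets.append( widget )
    return widgets

# An admin renders the form on behalf of the request's owner:
trans = Trans( 'test(a)bx.psu.edu' )
widgets = get_form_widgets( trans, [ 'AddressField' ], user='test1(a)bx.psu.edu' )
assert widgets[0].user == 'test1(a)bx.psu.edu'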