galaxy-dev
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/73a8b43f1d97
changeset: 2433:73a8b43f1d97
user: Dan Blankenberg <dan@bx.psu.edu>
date: Mon Jun 08 12:49:26 2009 -0400
description:
Allow the uploading of composite datatypes. A new grouping parameter, UploadDataset, is used to contain and process the file_data/url_paste/space_to_tab inputs used to upload a file; multiple sets are displayed when uploading a composite datatype (similar to a repeat). Composite files can now be declared in the datatypes registry (required for proper uploading), but they are stored in the same manner as before (under extra_files_path) and should be backward compatible. When uploading a composite datatype, only one dataset can be uploaded at a time. The ability to upload multiple datasets (url_paste contents or URLs plus file_data) for non-composite datatypes remains unchanged.
A more structured way of storing these files (rather than dumping them into a directory) is worth considering.
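For reference, below is a minimal sketch of how a datatype could declare its component files with the API added to lib/galaxy/datatypes/data.py in this changeset. The class name and file extensions are hypothetical, modeled on the Lped/Pbed declarations in genetics.py; the assumption (per the registry check in this diff) is that any non-None composite_type marks the type as composite, while 'auto_primary_file' additionally generates an HTML index via generate_primary_file(). Composite files can also be declared per datatype in the registry config through <composite_file name="..." optional="..." mimetype="..."/> child elements, per the registry.py hunk below.

# Hypothetical example, not part of this changeset.
from galaxy.datatypes.data import Data
from galaxy.datatypes.metadata import MetadataElement

class ExampleComposite( Data ):
    """A composite datatype: one primary file plus named extra files."""
    MetadataElement( name="base_name", desc="base name for component files", default="galaxy", readonly=True )
    file_ext = "example"
    composite_type = 'basic' # assumption: any non-None value marks the type composite; only 'auto_primary_file' appears in this changeset

    def __init__( self, **kwd ):
        Data.__init__( self, **kwd )
        # '%s' is substituted with the 'base_name' metadata value by
        # get_composite_files()/writable_files when the dataset is uploaded.
        self.add_composite_file( '%s.dat', description = 'Data File', substitute_name_with_metadata = 'base_name' )
        self.add_composite_file( '%s.idx', description = 'Index File', substitute_name_with_metadata = 'base_name', optional = True )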
15 file(s) affected in this change:
lib/galaxy/datatypes/data.py
lib/galaxy/datatypes/genetics.py
lib/galaxy/datatypes/registry.py
lib/galaxy/tools/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/tools/parameters/__init__.py
lib/galaxy/tools/parameters/basic.py
lib/galaxy/tools/parameters/grouping.py
lib/galaxy/tools/parameters/validation.py
lib/galaxy/util/__init__.py
lib/galaxy/web/controllers/tool_runner.py
lib/galaxy/web/form_builder.py
templates/base_panels.mako
templates/tool_form.mako
tools/data_source/upload.xml
diffs (1641 lines):
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/data.py Mon Jun 08 12:49:26 2009 -0400
@@ -1,5 +1,7 @@
import logging, os, sys, time, sets, tempfile
from galaxy import util
+from galaxy.util.odict import odict
+from galaxy.util.bunch import Bunch
from cgi import escape
import metadata
from metadata import MetadataElement #import directly to maintain ease of use in Datatype class definitions
@@ -48,10 +50,16 @@
"""If False, the peek is regenerated whenever a dataset of this type is copied"""
copy_safe_peek = True
+ #Composite datatypes
+ composite_type = None
+ composite_files = odict()
+ primary_file_name = 'index'
+
def __init__(self, **kwd):
"""Initialize the datatype"""
object.__init__(self, **kwd)
self.supported_display_apps = self.supported_display_apps.copy()
+ self.composite_files = self.composite_files.copy()
def write_from_stream(self, dataset, stream):
"""Writes data from a stream"""
fd = open(dataset.file_name, 'wb')
@@ -242,7 +250,49 @@
def after_edit( self, dataset ):
"""This function is called on the dataset after metadata is edited."""
dataset.clear_associated_files( metadata_safe = True )
-
+ def __new_composite_file( self, optional = False, mimetype = None, description = None, substitute_name_with_metadata = None, **kwds ):
+ kwds[ 'optional' ] = optional
+ kwds[ 'mimetype' ] = mimetype
+ kwds[ 'description' ] = description
+ kwds[ 'substitute_name_with_metadata' ] = substitute_name_with_metadata
+ return Bunch( **kwds )
+ def add_composite_file( self, name, **kwds ):
+ #self.composite_files = self.composite_files.copy()
+ self.composite_files[ name ] = self.__new_composite_file( **kwds )
+
+
+ def __substitute_composite_key( self, key, composite_file, dataset = None ):
+ if composite_file.substitute_name_with_metadata:
+ if dataset:
+ meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+ else:
+ meta_value = self.spec[composite_file.substitute_name_with_metadata].default
+ return key % meta_value
+ return key
+ @property
+ def writable_files( self, dataset = None ):
+ files = odict()
+ if self.composite_type != 'auto_primary_file':
+ files[ self.primary_file_name ] = self.__new_composite_file()
+ for key, value in self.get_composite_files( dataset = dataset ).iteritems():
+ files[ key ] = value
+ return files
+ def get_composite_files( self, dataset = None ):
+ def substitute_composite_key( key, composite_file ):
+ if composite_file.substitute_name_with_metadata:
+ if dataset:
+ meta_value = str( dataset.metadata.get( composite_file.substitute_name_with_metadata ) )
+ else:
+ meta_value = self.metadata_spec[ composite_file.substitute_name_with_metadata ].default
+ return key % meta_value
+ return key
+ files = odict()
+ for key, value in self.composite_files.iteritems():
+ files[ substitute_composite_key( key, value ) ] = value
+ return files
+ def generate_auto_primary_file( self, dataset = None ):
+ raise Exception( "generate_auto_primary_file is not implemented for this datatype." )
+
@property
def has_resolution(self):
return False
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/genetics.py
--- a/lib/galaxy/datatypes/genetics.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/genetics.py Mon Jun 08 12:49:26 2009 -0400
@@ -117,15 +117,26 @@
"""class to use for rgenetics"""
"""Add metadata elements"""
- MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", readonly=True)
+ MetadataElement( name="base_name", desc="base name for all transformed versions of this genetic dataset", default="galaxy", readonly=True)
file_ext="html"
-
+ composite_type = 'auto_primary_file'
+
def missing_meta( self, dataset ):
"""Checks for empty meta values"""
for key, value in dataset.metadata.items():
if not value:
return True
return False
+
+ def generate_primary_file( self, dataset = None ):
+ rval = ['<html><head><title>Files for Composite Dataset (%s)</title></head><p/>This composite dataset is composed of the following files:<p/><ul>' % ( self.file_ext ) ]
+ for composite_name, composite_file in self.get_composite_files( dataset = dataset ).iteritems():
+ opt_text = ''
+ if composite_file.optional:
+ opt_text = ' (optional)'
+ rval.append( '<li><a href="%s">%s</a>%s' % ( composite_name, composite_name, opt_text ) )
+ rval.append( '</ul></html>' )
+ return "\n".join( rval )
class SNPMatrix(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
@@ -148,6 +159,12 @@
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="lped"
+
+ def __init__( self, **kwd ):
+ Rgenetics.__init__( self, **kwd )
+ self.add_composite_file( '%s.ped', description = 'Pedigree File', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.map', description = 'Map File', substitute_name_with_metadata = 'base_name' )
+
class Pphe(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
@@ -180,11 +197,33 @@
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="pbed"
+
+ def __init__( self, **kwd ):
+ Rgenetics.__init__( self, **kwd )
+ self.add_composite_file( '%s.bim', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.bed', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.fam', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name' )
+
class Eigenstratgeno(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
"""
file_ext="eigenstratgeno"
+
+ def __init__( self, **kwd ):
+ Rgenetics.__init__( self, **kwd )
+ self.add_composite_file( '%s.eigenstratgeno', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.ind', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s.map', substitute_name_with_metadata = 'base_name' )
+ self.add_composite_file( '%s_fo.eigenstratgeno', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_fo.ind', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_fo.map', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_oo.eigenstratgeno', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_oo.ind', substitute_name_with_metadata = 'base_name', optional = 'True' )
+ self.add_composite_file( '%s_oo.map', substitute_name_with_metadata = 'base_name', optional = 'True' )
+
+
class Eigenstratpca(Rgenetics):
"""fake class to distinguish different species of Rgenetics data collections
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/datatypes/registry.py Mon Jun 08 12:49:26 2009 -0400
@@ -67,6 +67,15 @@
indexer_config = indexer.get( 'file', None )
if indexer_config:
self.indexers.append( (indexer_config, extension) )
+ for composite_file in elem.findall( 'composite_file' ):
+ # add composite files
+ name = composite_file.get( 'name', None )
+ if name is None:
+ log.warning( "You must provide a name for your composite_file (%s)." % composite_file )
+ optional = composite_file.get( 'optional', False )
+ mimetype = composite_file.get( 'mimetype', None )
+ self.datatypes_by_extension[extension].add_composite_file( name, optional=optional, mimetype=mimetype )
+
except Exception, e:
self.log.warning( 'Error loading datatype "%s", problem: %s' % ( extension, str( e ) ) )
# Load datatype sniffers from the config
@@ -294,3 +303,7 @@
ret_data = None
return ( convert_ext, ret_data )
return ( None, None )
+
+ def get_composite_extensions( self ):
+ return [ ext for ( ext, d_type ) in self.datatypes_by_extension.iteritems() if d_type.composite_type is not None ]
+
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -591,7 +591,7 @@
group = Repeat()
group.name = elem.get( "name" )
group.title = elem.get( "title" )
- group.inputs = self.parse_input_elem( elem, enctypes, context )
+ group.inputs = self.parse_input_elem( elem, enctypes, context )
rval[group.name] = group
elif elem.tag == "conditional":
group = Conditional()
@@ -609,6 +609,16 @@
case.inputs = self.parse_input_elem( case_elem, enctypes, context )
group.cases.append( case )
rval[group.name] = group
+ elif elem.tag == "upload_dataset":
+ group = UploadDataset()
+ group.name = elem.get( "name" )
+ group.title = elem.get( "title" )
+ group.file_type_name = elem.get( 'file_type_name', group.file_type_name )
+ group.default_file_type = elem.get( 'default_file_type', group.default_file_type )
+ rval[ group.file_type_name ].refresh_on_change = True
+ rval[ group.file_type_name ].refresh_on_change_values = self.app.datatypes_registry.get_composite_extensions()
+ group.inputs = self.parse_input_elem( elem, enctypes, context )
+ rval[ group.name ] = group
elif elem.tag == "param":
param = self.parse_param_elem( elem, enctypes, context )
rval[param.name] = param
@@ -951,6 +961,56 @@
group_state['__current_case__'] = current_case
# Store the value of the test element
group_state[ input.test_param.name ] = value
+ elif isinstance( input, UploadDataset ):
+ group_state = state[input.name]
+ group_errors = []
+ group_old_errors = old_errors.get( input.name, None )
+ any_group_errors = False
+ d_type = input.get_datatype( trans, context )
+ writable_files = d_type.writable_files
+ #remove extra files
+ while len( group_state ) > len( writable_files ):
+ del group_state[-1]
+ if group_old_errors:
+ del group_old_errors[-1]
+ # Update state
+ max_index = -1
+ for i, rep_state in enumerate( group_state ):
+ rep_index = rep_state['__index__']
+ max_index = max( max_index, rep_index )
+ rep_prefix = "%s_%d|" % ( key, rep_index )
+ if group_old_errors:
+ rep_old_errors = group_old_errors[i]
+ else:
+ rep_old_errors = {}
+ rep_errors = self.update_state( trans,
+ input.inputs,
+ rep_state,
+ incoming,
+ prefix=rep_prefix,
+ context=context,
+ update_only=update_only,
+ old_errors=rep_old_errors,
+ changed_dependencies=changed_dependencies,
+ item_callback=item_callback )
+ if rep_errors:
+ any_group_errors = True
+ group_errors.append( rep_errors )
+ else:
+ group_errors.append( {} )
+ #add new fileupload as needed
+ offset = 1
+ while len( writable_files ) > len( group_state ):
+ new_state = {}
+ new_state['__index__'] = max_index + offset
+ offset += 1
+ self.fill_in_new_state( trans, input.inputs, new_state, context )
+ group_state.append( new_state )
+ if any_group_errors:
+ group_errors.append( {} )
+ # Were there *any* errors for any repetition?
+ if any_group_errors:
+ errors[input.name] = group_errors
else:
if key not in incoming \
and "__force_update__" + key not in incoming \
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Mon Jun 08 12:49:26 2009 -0400
@@ -19,19 +19,15 @@
except:
log.exception( 'failure removing temporary file: %s' % filename )
def execute( self, tool, trans, incoming={}, set_output_hid = True ):
- data_file = incoming['file_data']
- file_type = incoming['file_type']
- dbkey = incoming['dbkey']
- url_paste = incoming['url_paste']
- is_multi_byte = False
- space_to_tab = False
- if 'space_to_tab' in incoming:
- if incoming['space_to_tab'] not in ["None", None]:
- space_to_tab = True
+ dataset_upload_inputs = []
+ for input_name, input in tool.inputs.iteritems():
+ if input.type == "upload_dataset":
+ dataset_upload_inputs.append( input )
+ assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
# Get any precreated datasets (when using asynchronous uploads)
async_datasets = []
self.precreated_datasets = []
- if incoming['async_datasets'] not in ["None", "", None]:
+ if incoming.get( 'async_datasets', None ) not in ["None", "", None]:
async_datasets = incoming['async_datasets'].split(',')
for id in async_datasets:
try:
@@ -45,8 +41,39 @@
log.error( 'Got a precreated dataset (%s) but it does not belong to current user (%s)' % ( data.id, trans.user.id ) )
else:
self.precreated_datasets.append( data )
- temp_name = ""
data_list = []
+ for dataset_upload_input in dataset_upload_inputs:
+ uploaded_datasets = dataset_upload_input.get_uploaded_datasets( trans, incoming )
+ for uploaded_dataset in uploaded_datasets:
+ precreated_dataset = self.get_precreated_dataset( uploaded_dataset.precreated_name )
+ dataset = self.add_file( trans, uploaded_dataset.primary_file, uploaded_dataset.name, uploaded_dataset.file_type, uploaded_dataset.is_multi_byte, uploaded_dataset.dbkey, space_to_tab = uploaded_dataset.space_to_tab, info = uploaded_dataset.info, precreated_dataset = precreated_dataset )
+ if uploaded_dataset.composite_files:
+ os.mkdir( dataset.extra_files_path ) #make extra files path
+ for name, value in uploaded_dataset.composite_files.iteritems():
+ #what about binary files here, need to skip converting newlines
+ if value is None and not dataset.datatype.writable_files[ name ].optional:
+ dataset.info = "A required composite data file was not provided (%s)" % name
+ dataset.state = dataset.states.ERROR
+ break
+ elif value is not None:
+ if value.space_to_tab:
+ sniff.convert_newlines_sep2tabs( value.filename )
+ else:
+ sniff.convert_newlines( value.filename )
+ shutil.move( value.filename, os.path.join( dataset.extra_files_path, name ) )
+ data_list.append( dataset )
+ #clean up extra temp names
+ uploaded_dataset.clean_up_temp_files()
+
+ #cleanup unclaimed precreated datasets:
+ for data in self.precreated_datasets:
+ log.info( 'Cleaned up unclaimed precreated dataset (%s).' % ( data.id ) )
+ data.state = data.states.ERROR
+ data.info = 'No file contents were available.'
+
+ if data_list:
+ trans.app.model.flush()
+
# Create the job object
job = trans.app.model.Job()
job.session_id = trans.get_galaxy_session().id
@@ -56,104 +83,14 @@
# For backward compatibility, some tools may not have versions yet.
job.tool_version = tool.version
except:
- job.tool_version = "1.0.0"
+ job.tool_version = "1.0.1"
job.state = trans.app.model.Job.states.UPLOAD
job.flush()
log.info( 'tool %s created job id %d' % ( tool.id, job.id ) )
trans.log_event( 'created job id %d' % job.id, tool_id=tool.id )
- if 'local_filename' in dir( data_file ):
- # Use the existing file
- file_name = data_file.filename
- file_name = file_name.split( '\\' )[-1]
- file_name = file_name.split( '/' )[-1]
- precreated_dataset = self.get_precreated_dataset( file_name )
- try:
- data_list.append( self.add_file( trans, data_file.local_filename, file_name, file_type, is_multi_byte, dbkey, space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using datafile.local_filename %s: %s' % ( data_file.local_filename, str( e ) ) )
- self.remove_tempfile( data_file.local_filename )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- elif 'filename' in dir( data_file ):
- file_name = data_file.filename
- file_name = file_name.split( '\\' )[-1]
- file_name = file_name.split( '/' )[-1]
- precreated_dataset = self.get_precreated_dataset( file_name )
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- try:
- data_list.append( self.add_file( trans, temp_name, file_name, file_type, is_multi_byte, dbkey, space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using file temp_name %s: %s' % ( str( temp_name ), str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- if url_paste not in [ None, "" ]:
- if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
- # If we were sent a DATA_URL from an external application in a post, NAME and INFO
- # values should be in the request
- if 'NAME' in incoming and incoming[ 'NAME' ] not in [ "None", None ]:
- NAME = incoming[ 'NAME' ]
- else:
- NAME = ''
- if 'INFO' in incoming and incoming[ 'INFO' ] not in [ "None", None ]:
- INFO = incoming[ 'INFO' ]
- else:
- INFO = "uploaded url"
- url_paste = url_paste.replace( '\r', '' ).split( '\n' )
- name_set_from_line = False #if we are setting the name from the line, it needs to be the line that creates that dataset
- for line in url_paste:
- line = line.strip()
- if line:
- if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ):
- continue # non-url line, ignore
- if not NAME or name_set_from_line:
- NAME = line
- name_set_from_line = True
- precreated_dataset = self.get_precreated_dataset( NAME )
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- try:
- data_list.append( self.add_file( trans, temp_name, NAME, file_type, is_multi_byte, dbkey, info="uploaded url", space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using url_paste temp_name %s: %s' % ( str( temp_name ), str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- else:
- precreated_dataset = self.get_precreated_dataset( 'Pasted Entry' )
- is_valid = False
- for line in url_paste:
- line = line.rstrip( '\r\n' )
- if line:
- is_valid = True
- break
- if is_valid:
- try:
- temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
- except Exception, e:
- log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- try:
- data_list.append( self.add_file( trans, temp_name, 'Pasted Entry', file_type, is_multi_byte, dbkey, info="pasted entry", space_to_tab=space_to_tab, precreated_dataset=precreated_dataset ) )
- except Exception, e:
- log.exception( 'exception in add_file using StringIO.StringIO( url_paste ) temp_name %s: %s' % ( str( temp_name ), str( e ) ) )
- self.remove_tempfile( temp_name )
- return self.upload_empty( trans, job, "Error:", str( e ), precreated_dataset=precreated_dataset )
- else:
- return self.upload_empty( trans, job, "No data error:", "you pasted no data.", precreated_dataset=precreated_dataset )
- if self.empty:
- return self.upload_empty( trans, job, "Empty file error:", "you attempted to upload an empty file." )
- elif len( data_list ) < 1:
- return self.upload_empty( trans, job, "No data error:", "either you pasted no data, the url you specified is invalid, or you have not specified a file." )
+
#if we could make a 'real' job here, then metadata could be set before job.finish() is called
- hda = data_list[0] #only our first hda is being added as input for the job, why?
+ hda = data_list[0] #only our first hda is being added as output for the job, why?
job.state = trans.app.model.Job.states.OK
file_size_str = datatypes.data.nice_size( hda.dataset.file_size )
job.info = "%s, size: %s" % ( hda.info, file_size_str )
@@ -162,7 +99,7 @@
log.info( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ) )
trans.log_event( 'job id %d ended ok, file size: %s' % ( job.id, file_size_str ), tool_id=tool.id )
return dict( output=hda )
-
+
def upload_empty(self, trans, job, err_code, err_msg, precreated_dataset = None):
if precreated_dataset is not None:
data = precreated_dataset
@@ -188,7 +125,7 @@
trans.log_event( 'job id %d ended with errors, err_msg: %s' % ( job.id, err_msg ), tool_id=job.tool_id )
return dict( output=data )
- def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ):
+ def add_file( self, trans, temp_name, file_name, file_type, is_multi_byte, dbkey, info=None, space_to_tab=False, precreated_dataset=None ):
data_type = None
# See if we have an empty file
if not os.path.getsize( temp_name ) > 0:
@@ -254,7 +191,7 @@
data_type = 'binary'
if not data_type:
# We must have a text file
- if self.check_html( temp_name ):
+ if trans.app.datatypes_registry.get_datatype_by_extension( file_type ).composite_type != 'auto_primary_file' and self.check_html( temp_name ):
raise BadFileException( "you attempted to upload an inappropriate file." )
if data_type != 'binary' and data_type != 'zip':
if space_to_tab:
@@ -404,7 +341,7 @@
return self.precreated_datasets.pop( names.index( name ) )
else:
return None
-
+
class BadFileException( Exception ):
pass
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/__init__.py
--- a/lib/galaxy/tools/parameters/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -20,7 +20,7 @@
be nice to unify all the places that recursively visit inputs.
"""
for input in inputs.itervalues():
- if isinstance( input, Repeat ):
+ if isinstance( input, Repeat ) or isinstance( input, UploadDataset ):
for i, d in enumerate( input_values[ input.name ] ):
index = d['__index__']
new_name_prefix = name_prefix + "%s_%d|" % ( input.name, index )
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:49:26 2009 -0400
@@ -23,6 +23,7 @@
def __init__( self, tool, param, context=None ):
self.tool = tool
self.refresh_on_change = False
+ self.refresh_on_change_values = []
self.name = param.get("name")
self.type = param.get("type")
self.label = util.xml_text(param, "label")
@@ -301,7 +302,7 @@
self.name = elem.get( 'name' )
self.ajax = str_bool( elem.get( 'ajax-upload' ) )
def get_html_field( self, trans=None, value=None, other_values={} ):
- return form_builder.FileField( self.name, self.ajax )
+ return form_builder.FileField( self.name, ajax = self.ajax, value = value )
def from_html( self, value, trans=None, other_values={} ):
# Middleware or proxies may encode files in special ways (TODO: this
# should be pluggable)
@@ -325,10 +326,11 @@
"""
return "multipart/form-data"
def to_string( self, value, app ):
- if value is None:
+ if value in [ None, '' ]:
return None
- else:
- raise Exception( "FileToolParameter cannot be persisted" )
+ elif isinstance( value, unicode ) or isinstance( value, str ):
+ return value
+ raise Exception( "FileToolParameter cannot be persisted" )
def to_python( self, value, app ):
if value is None:
return None
@@ -401,13 +403,13 @@
>>> print p.name
blah
>>> print p.get_html()
- <select name="blah">
+ <select name="blah" last_selected_value="y">
<option value="x">I am X</option>
<option value="y" selected>I am Y</option>
<option value="z">I am Z</option>
</select>
>>> print p.get_html( value="z" )
- <select name="blah">
+ <select name="blah" last_selected_value="z">
<option value="x">I am X</option>
<option value="y">I am Y</option>
<option value="z" selected>I am Z</option>
@@ -426,13 +428,13 @@
>>> print p.name
blah
>>> print p.get_html()
- <select name="blah" multiple>
+ <select name="blah" multiple last_selected_value="z">
<option value="x">I am X</option>
<option value="y" selected>I am Y</option>
<option value="z" selected>I am Z</option>
</select>
>>> print p.get_html( value=["x","y"])
- <select name="blah" multiple>
+ <select name="blah" multiple last_selected_value="y">
<option value="x" selected>I am X</option>
<option value="y" selected>I am Y</option>
<option value="z">I am Z</option>
@@ -520,7 +522,7 @@
return form_builder.TextField( self.name, value=(value or "") )
if value is not None:
if not isinstance( value, list ): value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change )
+ field = form_builder.SelectField( self.name, self.multiple, self.display, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
options = self.get_options( trans, context )
for text, optval, selected in options:
if isinstance( optval, UnvalidatedValue ):
@@ -676,7 +678,7 @@
>>> # hg17 should be selected by default
>>> print p.get_html( trans ) # doctest: +ELLIPSIS
- <select name="blah">
+ <select name="blah" last_selected_value="hg17">
<option value="?">unspecified (?)</option>
...
<option value="hg18">Human Mar. 2006 (hg18)</option>
@@ -687,7 +689,7 @@
>>> # If the user selected something else already, that should be used
>>> # instead
>>> print p.get_html( trans, value='hg18' ) # doctest: +ELLIPSIS
- <select name="blah">
+ <select name="blah" last_selected_value="hg18">
<option value="?">unspecified (?)</option>
...
<option value="hg18" selected>Human Mar. 2006 (hg18)</option>
@@ -942,7 +944,7 @@
return form_builder.TextArea( self.name, value=value )
else:
return form_builder.TextField( self.name, value=(value or "") )
- return form_builder.DrillDownField( self.name, self.multiple, self.display, self.refresh_on_change, self.get_options( trans, value, other_values ), value )
+ return form_builder.DrillDownField( self.name, self.multiple, self.display, self.refresh_on_change, self.get_options( trans, value, other_values ), value, refresh_on_change_values = self.refresh_on_change_values )
def from_html( self, value, trans=None, other_values={} ):
if self.need_late_validation( trans, other_values ):
@@ -1108,7 +1110,7 @@
if value is not None:
if type( value ) != list:
value = [ value ]
- field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change )
+ field = form_builder.SelectField( self.name, self.multiple, None, self.refresh_on_change, refresh_on_change_values = self.refresh_on_change_values )
# CRUCIAL: the dataset_collector function needs to be local to DataToolParameter.get_html_field()
def dataset_collector( hdas, parent_hid ):
for i, hda in enumerate( hdas ):
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/grouping.py Mon Jun 08 12:49:26 2009 -0400
@@ -4,6 +4,14 @@
from basic import ToolParameter
from galaxy.util.expressions import ExpressionContext
+
+import logging
+log = logging.getLogger( __name__ )
+
+import StringIO, os, urllib
+from galaxy.datatypes import sniff
+from galaxy.util.bunch import Bunch
+from galaxy.util.odict import odict
class Group( object ):
def __init__( self ):
@@ -74,7 +82,322 @@
input.visit_inputs( new_prefix, d[input.name], callback )
def get_initial_value( self, trans, context ):
return []
+
+class UploadDataset( Group ):
+ type = "upload_dataset"
+ def __init__( self ):
+ Group.__init__( self )
+ self.title = None
+ self.inputs = None
+ self.file_type_name = 'file_type'
+ self.default_file_type = 'txt'
+ self.file_type_to_ext = { 'auto':self.default_file_type }
+ def get_file_type( self, context ):
+ return context.get( self.file_type_name, self.default_file_type )
+ def get_datatype_ext( self, trans, context ):
+ ext = self.get_file_type( context )
+ if ext in self.file_type_to_ext:
+ ext = self.file_type_to_ext[ext] #when using autodetect, we will use composite info from 'text', i.e. only the main file
+ return ext
+ def get_datatype( self, trans, context ):
+ ext = self.get_datatype_ext( trans, context )
+ return trans.app.datatypes_registry.get_datatype_by_extension( ext )
+ @property
+ def title_plural( self ):
+ if self.title.endswith( "s" ):
+ return self.title
+ else:
+ return self.title + "s"
+ def group_title( self, context ):
+ return "%s (%s)" % ( self.title, context.get( self.file_type_name, self.default_file_type ) )
+ def title_by_index( self, trans, index, context ):
+ d_type = self.get_datatype( trans, context )
+ for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
+ if i == index:
+ rval = composite_name
+ if composite_file.description:
+ rval = "%s (%s)" % ( rval, composite_file.description )
+ if composite_file.optional:
+ rval = "%s [optional]" % rval
+ return rval
+ return None
+ def value_to_basic( self, value, app ):
+ rval = []
+ for d in value:
+ rval_dict = {}
+ # Propogate __index__
+ if '__index__' in d:
+ rval_dict['__index__'] = d['__index__']
+ for input in self.inputs.itervalues():
+ rval_dict[ input.name ] = input.value_to_basic( d[input.name], app )
+ rval.append( rval_dict )
+ return rval
+ def value_from_basic( self, value, app, ignore_errors=False ):
+ rval = []
+ for i, d in enumerate( value ):
+ rval_dict = {}
+ # If the special __index__ key is not set, create it (for backward
+ # compatibility)
+ rval_dict['__index__'] = d.get( '__index__', i )
+ # Restore child inputs
+ for input in self.inputs.itervalues():
+ if ignore_errors and input.name not in d: #this wasn't tested
+ rval_dict[ input.name ] = input.get_initial_value( None, d )
+ else:
+ rval_dict[ input.name ] = input.value_from_basic( d[input.name], app, ignore_errors )
+ rval.append( rval_dict )
+ return rval
+ def visit_inputs( self, prefix, value, callback ):
+ for i, d in enumerate( value ):
+ for input in self.inputs.itervalues():
+ new_prefix = prefix + "%s_%d|" % ( self.name, i )
+ if isinstance( input, ToolParameter ):
+ callback( new_prefix, input, d[input.name], parent = d )
+ else:
+ input.visit_inputs( new_prefix, d[input.name], callback )
+ def get_initial_value( self, trans, context ):
+ d_type = self.get_datatype( trans, context )
+ rval = []
+ for i, ( composite_name, composite_file ) in enumerate( d_type.writable_files.iteritems() ):
+ rval_dict = {}
+ rval_dict['__index__'] = i # create __index__
+ for input in self.inputs.itervalues():
+ rval_dict[ input.name ] = input.get_initial_value( trans, context ) #input.value_to_basic( d[input.name], app )
+ rval.append( rval_dict )
+ return rval
+ def get_uploaded_datasets( self, trans, context, override_name = None, override_info = None ):
+ def get_data_file_filename( data_file, is_multi_byte = False, override_name = None, override_info = None ):
+ dataset_name = override_name
+ dataset_info = override_info
+ def get_file_name( file_name ):
+ file_name = file_name.split( '\\' )[-1]
+ file_name = file_name.split( '/' )[-1]
+ return file_name
+ if 'local_filename' in dir( data_file ):
+ # Use the existing file
+ return data_file.local_filename, get_file_name( data_file.filename ), is_multi_byte
+ elif 'filename' in dir( data_file ):
+ #create a new tempfile
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( data_file.file, prefix='upload' )
+ precreated_name = get_file_name( data_file.filename )
+ if not dataset_name:
+ dataset_name = precreated_name
+ if not dataset_info:
+ dataset_info = 'uploaded file'
+ return temp_name, get_file_name( data_file.filename ), is_multi_byte, dataset_name, dataset_info
+ except Exception, e:
+ log.exception( 'exception in sniff.stream_to_file using file %s: %s' % ( data_file.filename, str( e ) ) )
+ self.remove_temp_file( temp_name )
+ return None, None, is_multi_byte, None, None
+ def filenames_from_url_paste( url_paste, group_incoming, override_name = None, override_info = None ):
+ filenames = []
+ if url_paste not in [ None, "" ]:
+ if url_paste.lstrip().lower().startswith( 'http://' ) or url_paste.lstrip().lower().startswith( 'ftp://' ):
+ url_paste = url_paste.replace( '\r', '' ).split( '\n' )
+ for line in url_paste:
+ line = line.strip()
+ if line:
+ if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ):
+ continue # non-url line, ignore
+ precreated_name = line
+ dataset_name = override_name
+ if not dataset_name:
+ dataset_name = line
+ dataset_info = override_info
+ if not dataset_info:
+ dataset_info = 'uploaded url'
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( urllib.urlopen( line ), prefix='url_paste' )
+ except Exception, e:
+ temp_name = None
+ precreated_name = str( e )
+ log.exception( 'exception in sniff.stream_to_file using url_paste %s: %s' % ( url_paste, str( e ) ) )
+ try:
+ self.remove_temp_file( temp_name )
+ except:
+ pass
+ yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
+ #yield ( None, str( e ), False, dataset_name, dataset_info )
+ else:
+ dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
+ if override_name:
+ dataset_name = override_name
+ if override_info:
+ dataset_info = override_info
+ is_valid = False
+ for line in url_paste: #Trim off empty lines from begining
+ line = line.rstrip( '\r\n' )
+ if line:
+ is_valid = True
+ break
+ if is_valid:
+ try:
+ temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( url_paste ), prefix='strio_url_paste' )
+ except Exception, e:
+ log.exception( 'exception in sniff.stream_to_file using StringIO.StringIO( url_paste ) %s: %s' % ( url_paste, str( e ) ) )
+ temp_name = None
+ precreated_name = str( e )
+ try:
+ self.remove_temp_file( temp_name )
+ except:
+ pass
+ yield ( temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info )
+ #yield ( None, str( e ), False, dataset_name, dataset_info )
+ def get_one_filename( context ):
+ data_file = context['file_data']
+ url_paste = context['url_paste']
+ name = context.get( 'NAME', None )
+ info = context.get( 'INFO', None )
+ warnings = []
+ is_multi_byte = False
+ space_to_tab = False
+ if context.get( 'space_to_tab', None ) not in ["None", None]:
+ space_to_tab = True
+ temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
+ if temp_name:
+ if url_paste.strip():
+ warnings.append( "All file contents specified in the paste box were ignored." )
+ else: #we need to use url_paste
+ #file_names = filenames_from_url_paste( url_paste, context, override_name = name, override_info = info )
+ for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):#file_names:
+ if temp_name:
+ break
+ ###this check will cause an additional file to be retrieved and created...so lets not do that
+ #try: #check to see if additional paste contents were available
+ # file_names.next()
+ # warnings.append( "Additional file contents were specified in the paste box, but ignored." )
+ #except StopIteration:
+ # pass
+ return temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings
+
+ def get_filenames( context ):
+ rval = []
+ data_file = context['file_data']
+ url_paste = context['url_paste']
+ name = context.get( 'NAME', None )
+ info = context.get( 'INFO', None )
+ warnings = []
+ is_multi_byte = False
+ space_to_tab = False
+ if context.get( 'space_to_tab', None ) not in ["None", None]:
+ space_to_tab = True
+ temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info = get_data_file_filename( data_file, is_multi_byte = is_multi_byte, override_name = name, override_info = info )
+ if temp_name:
+ rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ for temp_name, precreated_name, is_multi_byte, dataset_name, dataset_info in filenames_from_url_paste( url_paste, context, override_name = name, override_info = info ):
+ if temp_name:
+ rval.append( ( temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info ) )
+ return rval
+ class UploadedDataset( Bunch ):
+ def __init__( self, **kwd ):
+ Bunch.__init__( self, **kwd )
+ self.primary_file = None
+ self.composite_files = odict()
+ self.dbkey = None
+ self.warnings = []
+
+ self._temp_filenames = [] #store all created filenames here, delete on cleanup
+ def register_temp_file( self, filename ):
+ if isinstance( filename, list ):
+ self._temp_filenames.extend( filename )
+ else:
+ self._temp_filenames.append( filename )
+ def remove_temp_file( self, filename ):
+ try:
+ os.unlink( filename )
+ except Exception, e:
+ pass
+ #log.warning( str( e ) )
+ def clean_up_temp_files( self ):
+ for filename in self._temp_filenames:
+ self.remove_temp_file( filename )
+
+ file_type = self.get_file_type( context )
+ d_type = self.get_datatype( trans, context )
+ dbkey = context.get( 'dbkey', None )
+ writable_files = d_type.writable_files
+ writable_files_offset = 0
+ groups_incoming = [ None for filename in writable_files ]
+ for group_incoming in context.get( self.name, [] ):
+ i = int( group_incoming['__index__'] )
+ groups_incoming[ i ] = group_incoming
+
+ if d_type.composite_type is not None:
+ #handle uploading of composite datatypes
+ #Only one Dataset can be created
+
+ dataset = UploadedDataset()
+ dataset.file_type = file_type
+ dataset.datatype = d_type
+ dataset.dbkey = dbkey
+
+ temp_name = None
+ precreated_name = None
+ is_multi_byte = False
+ space_to_tab = False
+ warnings = []
+
+ dataset_name = None
+ dataset_info = None
+ if dataset.datatype.composite_type == 'auto_primary_file':
+ #replace sniff here with just creating an empty file
+ temp_name, is_multi_byte = sniff.stream_to_file( StringIO.StringIO( d_type.generate_primary_file() ), prefix='upload_auto_primary_file' )
+ precreated_name = dataset_name = 'Uploaded Composite Dataset (%s)' % ( file_type )
+ else:
+ temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( groups_incoming[ 0 ] )
+ writable_files_offset = 1
+ if temp_name is None:#remove this before finish, this should create an empty dataset
+ raise Exception( 'No primary dataset file was available for composite upload' )
+ dataset.primary_file = temp_name
+ dataset.is_multi_byte = is_multi_byte
+ dataset.space_to_tab = space_to_tab
+ dataset.precreated_name = precreated_name
+ dataset.name = dataset_name
+ dataset.info = dataset_info
+ dataset.warnings.extend( warnings )
+ dataset.register_temp_file( temp_name )
+
+ keys = writable_files.keys()
+ for i, group_incoming in enumerate( groups_incoming[ writable_files_offset : ] ):
+ key = keys[ i + writable_files_offset ]
+ if group_incoming is None and not writable_files[ key ].optional:
+ dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
+ dataset.composite_files[ key ] = None
+ else:
+ temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, warnings = get_one_filename( group_incoming )
+ if temp_name:
+ dataset.composite_files[ key ] = Bunch( filename = temp_name, precreated_name = precreated_name, is_multi_byte = is_multi_byte, space_to_tab = space_to_tab, warnings = warnings, info = dataset_info, name = dataset_name )
+ dataset.register_temp_file( temp_name )
+ else:
+ dataset.composite_files[ key ] = None
+ if not writable_files[ key ].optional:
+ dataset.warnings.append( "A required composite file (%s) was not specified." % ( key ) )
+ return [ dataset ]
+ else:
+ rval = []
+ for temp_name, precreated_name, is_multi_byte, space_to_tab, dataset_name, dataset_info, in get_filenames( context[ self.name ][0] ):
+ dataset = UploadedDataset()
+ dataset.file_type = file_type
+ dataset.datatype = d_type
+ dataset.dbkey = dbkey
+ dataset.primary_file = temp_name
+ dataset.is_multi_byte = is_multi_byte
+ dataset.space_to_tab = space_to_tab
+ dataset.name = dataset_name
+ dataset.info = dataset_info
+ dataset.precreated_name = precreated_name
+ dataset.register_temp_file( temp_name )
+ rval.append( dataset )
+ return rval
+ def remove_temp_file( self, filename ):
+ try:
+ os.unlink( filename )
+ except Exception, e:
+ log.warning( str( e ) )
+
+
class Conditional( Group ):
type = "conditional"
def __init__( self ):
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/tools/parameters/validation.py
--- a/lib/galaxy/tools/parameters/validation.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/tools/parameters/validation.py Mon Jun 08 12:49:26 2009 -0400
@@ -72,14 +72,18 @@
"""
@classmethod
def from_element( cls, param, elem ):
- return cls( elem.get( 'message' ), elem.text )
- def __init__( self, message, expression ):
+ return cls( elem.get( 'message' ), elem.text, elem.get( 'substitute_value_in_message' ) )
+ def __init__( self, message, expression, substitute_value_in_message ):
self.message = message
+ self.substitute_value_in_message = substitute_value_in_message
# Save compiled expression, code objects are thread safe (right?)
- self.expression = compile( expression, '<string>', 'eval' )
+ self.expression = compile( expression, '<string>', 'eval' )
def validate( self, value, history=None ):
if not( eval( self.expression, dict( value=value ) ) ):
- raise ValueError( self.message )
+ message = self.message
+ if self.substitute_value_in_message:
+ message = message % value
+ raise ValueError( message )
class InRangeValidator( Validator ):
"""
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/util/__init__.py
--- a/lib/galaxy/util/__init__.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/util/__init__.py Mon Jun 08 12:49:26 2009 -0400
@@ -146,6 +146,7 @@
elif isinstance( value, list ):
return map(sanitize_text, value)
else:
+ print value
raise Exception, 'Unknown parameter type (%s)' % ( type( value ) )
class Params:
@@ -222,7 +223,7 @@
pass
if not value and not new_value:
new_value = tool.param_trans_dict[ key ][1]
- if key not in self.NEVER_SANITIZE and sanitize:
+ if sanitize and not ( key in self.NEVER_SANITIZE or True in [ key.endswith( "|%s" % nonsanitize_parameter ) for nonsanitize_parameter in self.NEVER_SANITIZE ] ): #sanitize check both ungrouped and grouped parameters by name
self.__dict__[ new_key ] = sanitize_param( new_value )
else:
self.__dict__[ new_key ] = new_value
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/web/controllers/tool_runner.py
--- a/lib/galaxy/web/controllers/tool_runner.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/web/controllers/tool_runner.py Mon Jun 08 12:49:26 2009 -0400
@@ -3,6 +3,8 @@
"""
from galaxy.web.base.controller import *
+from galaxy.util.bunch import Bunch
+from galaxy.tools import DefaultToolState
import logging
log = logging.getLogger( __name__ )
@@ -75,32 +77,51 @@
tool = self.get_toolbox().tools_by_id.get( tool_id, None )
if not tool:
return False # bad tool_id
- params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+ #params = util.Params( kwd, sanitize=tool.options.sanitize, tool=tool )
+ if "tool_state" in kwd:
+ encoded_state = util.string_to_object( kwd["tool_state"] )
+ tool_state = DefaultToolState()
+ tool_state.decode( encoded_state, tool, trans.app )
+ else:
+ tool_state = tool.new_state( trans )
+ errors = tool.update_state( trans, tool.inputs, tool_state.inputs, kwd, update_only = True )
datasets = []
- if params.file_data not in [ None, "" ]:
- name = params.file_data
- if name.count('/'):
- name = name.rsplit('/',1)[1]
- if name.count('\\'):
- name = name.rsplit('\\',1)[1]
- datasets.append( create_dataset( name, trans.history ) )
- if params.url_paste not in [ None, "" ]:
- url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
- url = False
- for line in url_paste:
- line = line.rstrip( '\r\n' ).strip()
- if not line:
- continue
- elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
- url = True
- datasets.append( create_dataset( line, trans.history ) )
- else:
- if url:
- continue # non-url when we've already processed some urls
- else:
- # pasted data
- datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
- break
+ dataset_upload_inputs = []
+ for input_name, input in tool.inputs.iteritems():
+ if input.type == "upload_dataset":
+ dataset_upload_inputs.append( input )
+ assert dataset_upload_inputs, Exception( "No dataset upload groups were found." )
+ for dataset_upload_input in dataset_upload_inputs:
+ d_type = dataset_upload_input.get_datatype( trans, kwd )
+
+ if d_type.composite_type is not None:
+ datasets.append( create_dataset( 'Uploaded Composite Dataset (%s)' % dataset_upload_input.get_datatype_ext( trans, kwd ), trans.history ) )
+ else:
+ params = Bunch( ** tool_state.inputs[dataset_upload_input.name][0] )
+ if params.file_data not in [ None, "" ]:
+ name = params.file_data
+ if name.count('/'):
+ name = name.rsplit('/',1)[1]
+ if name.count('\\'):
+ name = name.rsplit('\\',1)[1]
+ datasets.append( create_dataset( name, trans.history ) )
+ if params.url_paste not in [ None, "" ]:
+ url_paste = params.url_paste.replace( '\r', '' ).split( '\n' )
+ url = False
+ for line in url_paste:
+ line = line.rstrip( '\r\n' ).strip()
+ if not line:
+ continue
+ elif line.lower().startswith( 'http://' ) or line.lower().startswith( 'ftp://' ):
+ url = True
+ datasets.append( create_dataset( line, trans.history ) )
+ else:
+ if url:
+ continue # non-url when we've already processed some urls
+ else:
+ # pasted data
+ datasets.append( create_dataset( 'Pasted Entry', trans.history ) )
+ break
if datasets:
trans.model.flush()
return [ d.id for d in datasets ]
diff -r c0c50620b89d -r 73a8b43f1d97 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py Mon Jun 08 12:35:38 2009 -0400
+++ b/lib/galaxy/web/form_builder.py Mon Jun 08 12:49:26 2009 -0400
@@ -78,17 +78,21 @@
>>> print FileField( "foo" ).get_html()
<input type="file" name="foo">
- >>> print FileField( "foo", True ).get_html()
+ >>> print FileField( "foo", ajax = True ).get_html()
<input type="file" name="foo" galaxy-ajax-upload="true">
"""
- def __init__( self, name, ajax=False ):
+ def __init__( self, name, value = None, ajax=False ):
self.name = name
self.ajax = ajax
+ self.value = value
def get_html( self, prefix="" ):
+ value_text = ""
+ if self.value:
+ value_text = ' value="%s"' % self.value
+ ajax_text = ""
if self.ajax:
- return '<input type="file" name="%s%s" galaxy-ajax-upload="true">' % ( prefix, self.name )
- else:
- return '<input type="file" name="%s%s">' % ( prefix, self.name )
+ ajax_text = ' galaxy-ajax-upload="true"'
+ return '<input type="file" name="%s%s"%s%s>' % ( prefix, self.name, ajax_text, value_text )
class HiddenField(BaseField):
"""
@@ -120,7 +124,7 @@
>>> t.add_option( "automatic", 3 )
>>> t.add_option( "bazooty", 4, selected=True )
>>> print t.get_html()
- <select name="bar">
+ <select name="bar" last_selected_value="4">
<option value="3">automatic</option>
<option value="4" selected>bazooty</option>
</select>
@@ -140,7 +144,7 @@
<div><input type="checkbox" name="bar" value="3">automatic</div>
<div><input type="checkbox" name="bar" value="4" checked>bazooty</div>
"""
- def __init__( self, name, multiple=None, display=None, refresh_on_change=False ):
+ def __init__( self, name, multiple=None, display=None, refresh_on_change = False, refresh_on_change_values = [] ):
self.name = name
self.multiple = multiple or False
self.options = list()
@@ -152,8 +156,11 @@
raise Exception, "Unknown display type: %s" % display
self.display = display
self.refresh_on_change = refresh_on_change
+ self.refresh_on_change_values = refresh_on_change_values
if self.refresh_on_change:
self.refresh_on_change_text = ' refresh_on_change="true"'
+ if self.refresh_on_change_values:
+ self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) )
else:
self.refresh_on_change_text = ''
def add_option( self, text, value, selected = False ):
@@ -195,11 +202,17 @@
def get_html_default( self, prefix="" ):
if self.multiple: multiple = " multiple"
else: multiple = ""
- rval = [ '<select name="%s%s"%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text ) ]
+ rval = []
+ last_selected_value = ""
for text, value, selected in self.options:
- if selected: selected_text = " selected"
+ if selected:
+ selected_text = " selected"
+ last_selected_value = value
else: selected_text = ""
rval.append( '<option value="%s"%s>%s</option>' % ( value, selected_text, text ) )
+ if last_selected_value:
+ last_selected_value = ' last_selected_value="%s"' % last_selected_value
+ rval.insert( 0, '<select name="%s%s"%s%s%s>' % ( prefix, self.name, multiple, self.refresh_on_change_text, last_selected_value ) )
rval.append( '</select>' )
return "\n".join( rval )
@@ -253,7 +266,7 @@
</li>
</ul></div>
"""
- def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options = [], value = [] ):
+ def __init__( self, name, multiple=None, display=None, refresh_on_change=False, options = [], value = [], refresh_on_change_values = [] ):
self.name = name
self.multiple = multiple or False
self.options = options
@@ -270,8 +283,11 @@
raise Exception, "Unknown display type: %s" % display
self.display = display
self.refresh_on_change = refresh_on_change
+ self.refresh_on_change_values = refresh_on_change_values
if self.refresh_on_change:
self.refresh_on_change_text = ' refresh_on_change="true"'
+ if self.refresh_on_change_values:
+ self.refresh_on_change_text = '%s refresh_on_change_values="%s"' % ( self.refresh_on_change_text, ",".join( self.refresh_on_change_values ) )
else:
self.refresh_on_change_text = ''
def get_html( self, prefix="" ):
@@ -308,6 +324,7 @@
rval.append( '</ul></div>' )
return '\n'.join( rval )
+
def get_suite():
"""Get unittest suite for this module"""
import doctest, sys
diff -r c0c50620b89d -r 73a8b43f1d97 templates/base_panels.mako
--- a/templates/base_panels.mako Mon Jun 08 12:35:38 2009 -0400
+++ b/templates/base_panels.mako Mon Jun 08 12:49:26 2009 -0400
@@ -59,19 +59,19 @@
<script type="text/javascript" src="${h.url_for('/static/scripts/galaxy.panels.js')}"></script>
<script type="text/javascript">
- ensure_dd_helper();
+ ensure_dd_helper();
- %if self.has_left_panel:
+ %if self.has_left_panel:
var lp = make_left_panel( $("#left"), $("#center"), $("#left-border" ) );
force_left_panel = lp.force_panel;
%endif
- %if self.has_right_panel:
+ %if self.has_right_panel:
var rp = make_right_panel( $("#right"), $("#center"), $("#right-border" ) );
handle_minwidth_hint = rp.handle_minwidth_hint;
force_right_panel = rp.force_panel;
%endif
-
+
</script>
## Handle AJAX (actually hidden iframe) upload tool
<![if !IE]>
@@ -81,34 +81,36 @@
##$(this.contentDocument).find("input[galaxy-ajax-upload]").each( function() {
##$("iframe")[0].contentDocument.body.innerHTML = "HELLO"
##$(this.contentWindow.document).find("input[galaxy-ajax-upload]").each( function() {
- $(this).contents().find("input[galaxy-ajax-upload]").each( function() {
- var error_set = false;
- $(this).parents("form").submit( function() {
- // Make a synchronous request to create the datasets first
- var async_datasets;
- $.ajax( {
- async: false,
- type: "POST",
- url: "${h.url_for(controller='tool_runner', action='upload_async_create')}",
- data: $(this).formSerialize(),
- dataType: "json",
- success: function( d, s ) { async_datasets = d.join() }
- } );
- if (async_datasets == '') {
- if (! error_set) {
- $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' );
- error_set = true;
+ $(this).contents().find("form").each( function() {
+ if ( $(this).find("input[galaxy-ajax-upload]").length > 0 ){
+ $(this).submit( function() {
+ var error_set = false;
+ // Make a synchronous request to create the datasets first
+ var async_datasets;
+ $.ajax( {
+ async: false,
+ type: "POST",
+ url: "${h.url_for(controller='tool_runner', action='upload_async_create')}",
+ data: $(this).formSerialize(),
+ dataType: "json",
+ success: function( d, s ) { async_datasets = d.join() }
+ } );
+ if (async_datasets == '') {
+ if (! error_set) {
+ $("iframe#galaxy_main").contents().find("body").prepend( '<div class="errormessage">No data was entered in the upload form. You may choose to upload a file, paste some data directly in the data box, or enter URL(s) to fetch from.</div><p/>' );
+ error_set = true;
+ }
+ return false;
+ } else {
+ $(this).find("input[name=async_datasets]").val( async_datasets );
+ $(this).append("<input type='hidden' name='ajax_upload' value='true'>");
}
+ // iframe submit is required for nginx (otherwise the encoding is wrong)
+ $(this).ajaxSubmit( { iframe: true } );
+ $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
return false;
- } else {
- $(this).find("input[name=async_datasets]").val( async_datasets );
- $(this).append("<input type='hidden' name='ajax_upload' value='true'>");
- }
- // iframe submit is required for nginx (otherwise the encoding is wrong)
- $(this).ajaxSubmit( { iframe: true } );
- $("iframe#galaxy_main").attr("src","${h.url_for(controller='tool_runner', action='upload_async_message')}");
- return false;
- });
+ });
+ }
});
});
});
@@ -120,88 +122,88 @@
<%def name="masthead()">
<div class="title" style="float: left;">
- <a target="_blank" href="${app.config.wiki_url}">
- <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
- </a>
- Galaxy
- %if app.config.brand:
- <span class='brand'>/${app.config.brand}</span>
- %endif
+ <a target="_blank" href="${app.config.wiki_url}">
+ <img border="0" src="${h.url_for('/static/images/galaxyIcon_noText.png')}" style="width: 26px; vertical-align: top;">
+ </a>
+ Galaxy
+ %if app.config.brand:
+ <span class='brand'>/${app.config.brand}</span>
+ %endif
</div>
<div style="position: absolute; left: 50%;">
<div class="tab-group" style="position: relative; left: -50%;">
-
- <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )">
- <%
- cls = "tab"
- if extra_class:
- cls += " " + extra_class
- if self.active_view == id:
- cls += " active"
- style = ""
- if not visible:
- style = "display: none;"
- %>
- <span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span>
- </%def>
- ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")}
+ <%def name="tab( id, display, href, target='_parent', visible=True, extra_class='' )">
+ <%
+ cls = "tab"
+ if extra_class:
+ cls += " " + extra_class
+ if self.active_view == id:
+ cls += " active"
+ style = ""
+ if not visible:
+ style = "display: none;"
+ %>
+ <span class="${cls}" style="${style}"><a target="${target}" href="${href}">${display}</a></span>
+ </%def>
+
+ ## ${tab( "tracks", "View Data", h.url_for( controller='tracks', action='dbkeys' ), target="galaxy_main")}
- ${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))}
+ ${tab( "analysis", "Analyze Data", h.url_for( controller='root', action='index' ))}
- ${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
+ ${tab( "workflow", "Workflow", h.url_for( controller='workflow', action='index' ))}
${tab( "libraries", "Libraries", h.url_for( controller='library', action='index' ))}
- ${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
-
- <span class="tab">
- <a>Help</a>
- <div class="submenu">
- <ul>
- <li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
- <li><a target="_blank" href="${app.config.get( "wiki_url", "http://g2.trac.bx.psu.edu/" )}">Galaxy Wiki</a></li>
- <li><a target="_blank" href="${app.config.get( "screencasts_url", "http://g2.trac.bx.psu.edu/wiki/ScreenCasts" )}">Video tutorials (screencasts)</a></li>
- </ul>
- </div>
- </span>
+ ${tab( "admin", "Admin", h.url_for( controller='admin', action='index' ), extra_class="admin-only", visible=( trans.user and app.config.is_admin_user( trans.user ) ) )}
- <span class="tab">
- <a>User</a>
- <%
- if trans.user:
- user_email = trans.user.email
- style1 = "display: none;"
- style2 = "";
- else:
- user_email = ""
- style1 = ""
- style2 = "display: none;"
- %>
- <div class="submenu">
- <ul class="loggedout-only" style="${style1}">
- <li><a target="galaxy_main" href="${h.url_for( controller='user', action='login' )}">Login</a></li>
- %if app.config.allow_user_creation:
- <li><a target="galaxy_main" href="${h.url_for( controller='user', action='create' )}">Register</a></li>
- %endif
- </ul>
- <ul class="loggedin-only" style="${style2}">
- <li>Logged in as <span id="user-email">${user_email}</span></li>
- <li><a target="galaxy_main" href="${h.url_for( controller='user', action='index' )}">Preferences</a></li>
- <%
- if app.config.require_login:
- logout_target = ""
- logout_url = h.url_for( controller='root', action='index', m_c='user', m_a='logout' )
- else:
- logout_target = "galaxy_main"
- logout_url = h.url_for( controller='user', action='logout' )
- %>
- <li><a target="${logout_target}" href="${logout_url}">Logout</a></li>
- </ul>
- </div>
- </span>
-
+ <span class="tab">
+ <a>Help</a>
+ <div class="submenu">
+ <ul>
+ <li><a href="${app.config.get( "bugs_email", "mailto:galaxy-bugs@bx.psu.edu" )}">Email comments, bug reports, or suggestions</a></li>
+ <li><a target="_blank" href="${app.config.get( "wiki_url", "http://g2.trac.bx.psu.edu/" )}">Galaxy Wiki</a></li>
+ <li><a target="_blank" href="${app.config.get( "screencasts_url", "http://g2.trac.bx.psu.edu/wiki/ScreenCasts" )}">Video tutorials (screencasts)</a></li>
+ </ul>
+ </div>
+ </span>
+
+ <span class="tab">
+ <a>User</a>
+ <%
+ if trans.user:
+ user_email = trans.user.email
+ style1 = "display: none;"
+ style2 = "";
+ else:
+ user_email = ""
+ style1 = ""
+ style2 = "display: none;"
+ %>
+ <div class="submenu">
+ <ul class="loggedout-only" style="${style1}">
+ <li><a target="galaxy_main" href="${h.url_for( controller='user', action='login' )}">Login</a></li>
+ %if app.config.allow_user_creation:
+ <li><a target="galaxy_main" href="${h.url_for( controller='user', action='create' )}">Register</a></li>
+ %endif
+ </ul>
+ <ul class="loggedin-only" style="${style2}">
+ <li>Logged in as <span id="user-email">${user_email}</span></li>
+ <li><a target="galaxy_main" href="${h.url_for( controller='user', action='index' )}">Preferences</a></li>
+ <%
+ if app.config.require_login:
+ logout_target = ""
+ logout_url = h.url_for( controller='root', action='index', m_c='user', m_a='logout' )
+ else:
+ logout_target = "galaxy_main"
+ logout_url = h.url_for( controller='user', action='logout' )
+ %>
+ <li><a target="${logout_target}" href="${logout_url}">Logout</a></li>
+ </ul>
+ </div>
+ </span>
+
</div>
</div>
@@ -213,32 +215,32 @@
<div id="overlay"
%if not self.overlay_visible:
- style="display: none;"
+ style="display: none;"
%endif
>
- ##
- <div id="overlay-background" style="position: absolute; width: 100%; height: 100%;"></div>
-
- ## Need a table here for centering in IE6
- <table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0"
- %if not self.overlay_visible:
- style="display: none;"
- %endif
- ><tr><td>
- <div class="dialog-box-wrapper">
- <div class="dialog-box">
- <div class="unified-panel-header">
- <div class="unified-panel-header-inner"><span class='title'>${title}</span></div>
- </div>
- <div class="body" style="max-height: 600px; overflow: auto;">${content}</div>
- <div>
- <div class="buttons" style="display: none; float: right;"></div>
- <div class="extra_buttons" style="display: none; padding: 5px;"></div>
- <div style="clear: both;"></div>
- </div>
- </div>
- </div>
- </td></tr></table>
+ ##
+ <div id="overlay-background" style="position: absolute; width: 100%; height: 100%;"></div>
+
+ ## Need a table here for centering in IE6
+ <table class="dialog-box-container" border="0" cellpadding="0" cellspacing="0"
+ %if not self.overlay_visible:
+ style="display: none;"
+ %endif
+ ><tr><td>
+ <div class="dialog-box-wrapper">
+ <div class="dialog-box">
+ <div class="unified-panel-header">
+ <div class="unified-panel-header-inner"><span class='title'>${title}</span></div>
+ </div>
+ <div class="body" style="max-height: 600px; overflow: auto;">${content}</div>
+ <div>
+ <div class="buttons" style="display: none; float: right;"></div>
+ <div class="extra_buttons" style="display: none; padding: 5px;"></div>
+ <div style="clear: both;"></div>
+ </div>
+ </div>
+ </div>
+ </td></tr></table>
</div>
</%def>
@@ -268,7 +270,7 @@
${self.message_box_content()}
%endif
</div>
- ${self.overlay()}
+ ${self.overlay()}
%if self.has_left_panel:
<div id="left">
${self.left_panel()}
diff -r c0c50620b89d -r 73a8b43f1d97 templates/tool_form.mako
--- a/templates/tool_form.mako Mon Jun 08 12:35:38 2009 -0400
+++ b/templates/tool_form.mako Mon Jun 08 12:49:26 2009 -0400
@@ -15,7 +15,39 @@
<script type="text/javascript">
$( function() {
$( "select[refresh_on_change='true']").change( function() {
- $( "#tool_form" ).submit();
+ var refresh = false;
+ var refresh_on_change_values = $( this )[0].attributes.getNamedItem( 'refresh_on_change_values' )
+ if ( refresh_on_change_values ) {
+ refresh_on_change_values = refresh_on_change_values.value.split( ',' );
+ var last_selected_value = $( this )[0].attributes.getNamedItem( 'last_selected_value' );
+ for( i= 0; i < refresh_on_change_values.length; i++ ) {
+ if ( $( this )[0].value == refresh_on_change_values[i] || ( last_selected_value && last_selected_value.value == refresh_on_change_values[i] ) ){
+ refresh = true;
+ break;
+ }
+ }
+ }
+ else {
+ refresh = true;
+ }
+ if ( refresh ){
+ $( ':file' ).each( function() {
+ var file_value = $( this )[0].value;
+ if ( file_value ) {
+ //disable file input, since we don't want to upload the file on refresh
+ var file_name = $( this )[0].name;
+ $( this )[0].name = 'replaced_file_input_' + file_name
+ $( this )[0].disable = true;
+ //create a new hidden field which stores the filename and has the original name of the file input
+ var new_file_input = document.createElement( 'input' );
+ new_file_input.type = 'hidden';
+ new_file_input.value = file_value;
+ new_file_input.name = file_name;
+ document.getElementById( 'tool_form' ).appendChild( new_file_input );
+ }
+ } );
+ $( "#tool_form" ).submit();
+ }
});
});
%if not add_frame.debug:
@@ -72,6 +104,38 @@
%>
${row_for_param( group_prefix, input.test_param, group_state, group_errors, other_values )}
${do_inputs( input.cases[current_case].inputs, group_state, group_errors, group_prefix, other_values )}
+ %elif input.type == "upload_dataset":
+ %if input.get_datatype( trans, other_values ).composite_type is None: #have non-composite upload appear as before
+ <%
+ if input.name in errors:
+ rep_errors = errors[input.name][0]
+ else:
+ rep_errors = dict()
+ %>
+ ${do_inputs( input.inputs, tool_state[input.name][0], rep_errors, prefix + input.name + "_" + str( 0 ) + "|", other_values )}
+ %else:
+ <div class="repeat-group">
+ <div class="form-title-row"><b>${input.group_title( other_values )}</b></div>
+ <%
+ repeat_state = tool_state[input.name]
+ %>
+ %for i in range( len( repeat_state ) ):
+ <div class="repeat-group-item">
+ <%
+ if input.name in errors:
+ rep_errors = errors[input.name][i]
+ else:
+ rep_errors = dict()
+ index = repeat_state[i]['__index__']
+ %>
+ <div class="form-title-row"><b>File Contents for ${input.title_by_index( trans, i, other_values )}</b></div>
+ ${do_inputs( input.inputs, repeat_state[i], rep_errors, prefix + input.name + "_" + str(index) + "|", other_values )}
+ ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_${index}_remove" value="Remove ${input.title} ${i+1}"></div>
+ </div>
+ %endfor
+ ##<div class="form-row"><input type="submit" name="${prefix}${input.name}_add" value="Add new ${input.title}"></div>
+ </div>
+ %endif
%else:
${row_for_param( prefix, input, tool_state, errors, other_values )}
%endif
@@ -127,7 +191,7 @@
<br/>
%endif
-<div class="toolForm" id="$tool.id">
+<div class="toolForm" id="${tool.id}">
%if tool.has_multiple_pages:
<div class="toolFormTitle">${tool.name} (step ${tool_state.page+1} of ${tool.npages})</div>
%else:
diff -r c0c50620b89d -r 73a8b43f1d97 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml Mon Jun 08 12:35:38 2009 -0400
+++ b/tools/data_source/upload.xml Mon Jun 08 12:49:26 2009 -0400
@@ -1,17 +1,11 @@
<?xml version="1.0"?>
-<tool name="Upload File" id="upload1">
+<tool name="Upload File" id="upload1" version="1.0.1">
<description>
from your computer
</description>
<action module="galaxy.tools.actions.upload" class="UploadToolAction"/>
<inputs>
- <param name="async_datasets" type="hidden" value="None"/>
- <param name="file_data" type="file" size="30" label="File" ajax-upload="true"/>
- <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/>
- <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand.">
- <option value="Yes">Yes</option>
- </param>
<param name="file_type" type="select" label="File Format" help="Which format? See help below">
<options from_parameter="tool.app.datatypes_registry.upload_file_formats" transform_lines="[ &quot;%s%s%s&quot; % ( line, self.separator, line ) for line in obj ]">
<column name="value" index="1"/>
@@ -20,6 +14,16 @@
<filter type="add_value" name="Auto-detect" value="auto" index="0"/>
</options>
</param>
+ <param name="async_datasets" type="hidden" value="None"/>
+ <upload_dataset name="files" title="Specify Files for Dataset" file_type_name="file_type">
+ <param name="file_data" type="file" size="30" label="File" ajax-upload="true">
+ <validator type="expression" message="You will need to reselect the file you specified (%s)." substitute_value_in_message="True">not ( ( isinstance( value, unicode ) or isinstance( value, str ) ) and value != "" )</validator> <!-- use validator to post message to user about needing to reselect the file, since most browsers won't accept the value attribute for file inputs -->
+ </param>
+ <param name="url_paste" type="text" area="true" size="5x35" label="URL/Text" help="Here you may specify a list of URLs (one per line) or paste the contents of a file."/>
+ <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand.">
+ <option value="Yes">Yes</option>
+ </param>
+ </upload_dataset>
<param name="dbkey" type="genomebuild" label="Genome" />
</inputs>
<help>
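
For readers skimming the diff above, the new refresh_on_change_values mechanism can be summarized with a small, self-contained sketch. This is not the Galaxy select field implementation; the RefreshingSelect class, its options, and the example values are invented for illustration only.

# Illustrative sketch only -- not the actual Galaxy select field class.
class RefreshingSelect:
    def __init__( self, name, options, refresh_on_change=False, refresh_on_change_values=None ):
        self.name = name
        self.options = options  # list of ( label, value ) pairs
        self.refresh_on_change = refresh_on_change
        self.refresh_on_change_values = refresh_on_change_values or []
        if self.refresh_on_change:
            self.refresh_on_change_text = ' refresh_on_change="true"'
            if self.refresh_on_change_values:
                self.refresh_on_change_text += ' refresh_on_change_values="%s"' % ",".join( self.refresh_on_change_values )
        else:
            self.refresh_on_change_text = ''
    def get_html( self ):
        rval = [ '<select name="%s"%s>' % ( self.name, self.refresh_on_change_text ) ]
        for label, value in self.options:
            rval.append( '<option value="%s">%s</option>' % ( value, label ) )
        rval.append( '</select>' )
        return '\n'.join( rval )

# Only a change to or from "auto" should trigger a form refresh in the client-side script.
select = RefreshingSelect( "file_type", [ ( "Auto-detect", "auto" ), ( "BED", "bed" ) ],
                           refresh_on_change=True, refresh_on_change_values=[ "auto" ] )
print( select.get_html() )

The client-side handler in tool_form.mako then resubmits the form only when the newly selected value (or the previously selected one) appears in the refresh_on_change_values attribute.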
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/c0c50620b89d
changeset: 2432:c0c50620b89d
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Mon Jun 08 12:35:38 2009 -0400
description:
Functional test fixes for purging a user.
2 file(s) affected in this change:
test/functional/test_history_functions.py
test/functional/test_security_and_libraries.py
diffs (215 lines):
diff -r 1a24a530a3ae -r c0c50620b89d test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Mon Jun 08 10:31:59 2009 -0400
+++ b/test/functional/test_history_functions.py Mon Jun 08 12:35:38 2009 -0400
@@ -114,11 +114,10 @@
self.login( email=regular_user1.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
+ # Need to delete history3_copy1
+ self.delete_history( id=str( history3_copy1.id ) )
self.logout()
self.login( email=admin_user.email )
- # Need to delete history3_copy1
- history3_copy1.deleted = True
- history3_copy1.flush()
# Test sharing a history with an invalid user
email = 'jack(a)jill.com'
check_str = '%s is not a valid Galaxy user.' % email
@@ -138,48 +137,55 @@
self.share_history( id, email, check_str )
# We need to keep track of all shared histories so they can later be deleted
history3_copy_name = "%s from %s" % ( history3.name, admin_user.email )
- history3_copies = galaxy.model.History \
+ history3_to_use_for_regular_user2 = galaxy.model.History \
.filter( and_( galaxy.model.History.table.c.name==history3_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user2.id,
galaxy.model.History.table.c.deleted==False ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .limit( 2 ) \
- .all()
- history3_copy2 = history3_copies[0]
- history3_copy3 = history3_copies[1]
- history4_copy_name = "%s from %s" % ( history4.name, admin_user.email )
- history4_copyies = galaxy.model.History \
- .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ .first()
+ assert history3_to_use_for_regular_user2 is not None, "Problem retrieving history3_to_use_for_regular_user2 from database"
+ history3_to_use_for_regular_user3 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history3_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user3.id,
galaxy.model.History.table.c.deleted==False ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .limit( 2 ) \
- .all()
- history4_copy1 = history4_copyies[0]
- history4_copy2 = history4_copyies[1]
+ .first()
+ assert history3_to_use_for_regular_user3 is not None, "Problem retrieving history3_to_use_for_regular_user3 from database"
+ history4_copy_name = "%s from %s" % ( history4.name, admin_user.email )
+ history4_to_use_for_regular_user2 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user2.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ assert history4_to_use_for_regular_user2 is not None, "Problem retrieving history4_to_use_for_regular_user2 from database"
+ history4_to_use_for_regular_user3 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user3.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ assert history4_to_use_for_regular_user3 is not None, "Problem retrieving history4_to_use_for_regular_user3 from database"
self.logout()
self.login( email=regular_user2.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
check_str = '%s from %s' % ( history4.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
+ # Need to delete the copied histories, so later test runs are valid
+ self.delete_history( id=str( history3_to_use_for_regular_user2.id ) )
+ self.delete_history( id=str( history4_to_use_for_regular_user2.id ) )
self.logout()
self.login( email=regular_user3.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
check_str = '%s from %s' % ( history4.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
+ # Need to delete the copied histories, so later test runs are valid
+ self.delete_history( id=str( history3_to_use_for_regular_user3.id ) )
+ self.delete_history( id=str( history4_to_use_for_regular_user3.id ) )
self.logout()
self.login( email=admin_user.email )
- # Need to delete the copied histories, so later test runs are valid
- history3_copy2.deleted = True
- history3_copy2.flush()
- history3_copy3.deleted = True
- history3_copy3.flush()
- history4_copy1.deleted = True
- history4_copy1.flush()
- history4_copy1.deleted = True
- history4_copy1.flush()
- history4_copy2.deleted = True
- history4_copy2.flush()
def test_030_change_permissions_on_current_history( self ):
"""Testing changing permissions on the current history"""
global history5
@@ -222,8 +228,7 @@
self.visit_url( "%s/history/list" % self.url )
self.check_page_for_string( history5_copy1.name )
# Need to delete history5_copy1 on the history list page for regular_user1
- history5_copy1.deleted = True
- history5_copy1.flush()
+ self.delete_history( id=str( history5_copy1.id ) )
self.logout()
self.login( email=admin_user.email )
def test_040_sharing_history_by_making_new_sharing_role( self ):
@@ -278,8 +283,7 @@
# Make sure 2.bed is accessible since it is associated with a sharing role
self.display_history_item( str( hda_2_bed.id ), check_str='chr1' )
# Need to delete history5_copy2 on the history list page for regular_user1
- history5_copy2.deleted = True
- history5_copy2.flush()
+ self.delete_history( id=str( history5_copy2.id ) )
def test_045_sharing_private_history_with_multiple_users_by_changing_no_permissions( self ):
"""Testing sharing a restricted history with multiple users, making no permission changes"""
self.logout()
@@ -301,47 +305,44 @@
action_check_str=action_check_str )
# We need to keep track of all shared histories so they can later be deleted
history5_copy_name = "%s from %s" % ( history5.name, admin_user.email )
- history5_copies = galaxy.model.History \
+ history5_to_use_for_regular_user1 = galaxy.model.History \
.filter( and_( galaxy.model.History.table.c.name==history5_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user1.id,
galaxy.model.History.table.c.deleted==False ) ) \
.order_by( desc( galaxy.model.History.table.c.create_time ) ) \
- .limit( 2 ) \
- .all()
- history5_copy3 = history5_copies[0]
- assert history5_copy3 is not None, "Problem retrieving history5_copy3 from database"
- history5_copy4 = history5_copies[1]
- assert history5_copy4 is not None, "Problem retrieving history5_copy4 from database"
- # Make sure test1(a)bx.psu.edu received a copy of history5 with both datasets accessible
- if history5_copy3.user_id == regular_user1.id:
- history_to_use_for_regular_user_1 = history5_copy3
- history_to_use_for_regular_user_2 = history5_copy4
- elif history5_copy4.user_id == regular_user1.id:
- history_to_use_for_regular_user_1 = history5_copy4
- history_to_use_for_regular_user_2 = history5_copy3
- else:
- raise AssertionError, "Copies of history5 were not correctly associated with users"
+ .first()
+ assert history5_to_use_for_regular_user1 is not None, "Problem retrieving history5_to_use_for_regular_user1 from database"
+ history5_to_use_for_regular_user2 = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history5_copy_name,
+ galaxy.model.History.table.c.user_id==regular_user2.id,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .first()
+ assert history5_to_use_for_regular_user2 is not None, "Problem retrieving history5_to_use_for_regular_user2 from database"
self.logout()
self.login( email=regular_user1.email )
check_str = '%s from %s' % ( history5.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
- self.switch_history( id=str( history_to_use_for_regular_user_1.id ), name=history_to_use_for_regular_user_1.name )
+ self.switch_history( id=str( history5_to_use_for_regular_user1.id ), name=history5_to_use_for_regular_user1.name )
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
+ # Need to delete the copied histories, so later test runs are valid
+ self.delete_history( id=str( history5_to_use_for_regular_user1.id ) )
self.logout()
# Make sure test2(a)bx.psu.edu received a copy of history5, with only 1.bed accessible
self.login( email=regular_user2.email )
self.view_stored_active_histories( check_str=check_str )
- self.switch_history( id=str( history_to_use_for_regular_user_2.id ), name=history_to_use_for_regular_user_2.name )
+ self.switch_history( id=str( history5_to_use_for_regular_user2.id ), name=history5_to_use_for_regular_user2.name )
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
hda_1_bed = galaxy.model.HistoryDatasetAssociation \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy4.id,
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_to_use_for_regular_user1.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
.first()
assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
hda_2_bed = galaxy.model.HistoryDatasetAssociation \
- .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy4.id,
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_to_use_for_regular_user1.id,
galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
.first()
assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
@@ -355,10 +356,7 @@
pass
self.check_history_for_string( 'You do not have permission to view this dataset' )
# Need to delete the copied histories, so later test runs are valid
- history5_copy3.deleted = True
- history5_copy3.flush()
- history5_copy4.deleted = True
- history5_copy4.flush()
+ self.delete_history( id=str( history5_to_use_for_regular_user2.id ) )
def test_050_sharing_private_history_by_choosing_to_not_share( self ):
"""Testing sharing a restricted history with multiple users by choosing not to share"""
self.logout()
diff -r 1a24a530a3ae -r c0c50620b89d test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Mon Jun 08 10:31:59 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Mon Jun 08 12:35:38 2009 -0400
@@ -1668,16 +1668,8 @@
history.refresh()
if not history.deleted:
raise AssertionError( 'User %s has active history id %d after their account was marked as purged.' % ( regular_user3.email, hda.id ) )
- # Make sure HistoryDatasetAssociation deleted
- for hda in history.datasets:
- hda.refresh()
- if not hda.deleted:
- raise AssertionError( 'HistoryDatasetAssociation id %d was not deleted.' % hda.id )
- # Make sure Dataset deleted
- d = galaxy.model.Dataset.filter( galaxy.model.Dataset.table.c.id==hda.dataset_id ).first()
- d.refresh()
- if not d.deleted:
- raise AssertionError( 'Dataset id %d was not deleted.' % d.id )
+ # NOTE: Not all hdas / datasets will be deleted at the time a history is deleted - the cleanup_datasets.py script
+ # is responsible for this.
# Make sure UserGroupAssociations deleted
if regular_user3.groups:
raise AssertionError( 'User %s has active group id %d after their account was marked as purged.' % ( regular_user3.email, uga.id ) )
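
The substance of the test changes above is that shared-history copies are now looked up per recipient ( history name plus user_id ) instead of taking the two newest copies and guessing which belongs to whom. A plain-Python sketch of that idea, using a throwaway History stand-in rather than the galaxy.model class, with invented values:

# Throwaway stand-in for galaxy.model.History, for illustration only.
class History:
    def __init__( self, name, user_id, create_time, deleted=False ):
        self.name = name
        self.user_id = user_id
        self.create_time = create_time
        self.deleted = deleted

def newest_copy_for_user( histories, copy_name, user_id ):
    # Mirrors the filter( name==..., user_id==..., deleted==False ).order_by( desc( create_time ) ).first() pattern.
    candidates = [ h for h in histories if h.name == copy_name and h.user_id == user_id and not h.deleted ]
    candidates.sort( key=lambda h: h.create_time, reverse=True )
    if candidates:
        return candidates[0]
    return None

copies = [ History( "history3 from admin", user_id=2, create_time=10 ),
           History( "history3 from admin", user_id=3, create_time=11 ) ]
copy_for_user3 = newest_copy_for_user( copies, "history3 from admin", user_id=3 )
assert copy_for_user3 is not None and copy_for_user3.user_id == 3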
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/3190f7d6b572
changeset: 2435:3190f7d6b572
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Mon Jun 08 16:52:55 2009 -0400
description:
Update the way the testing framework decides whether a refresh_on_change is required.
1 file(s) affected in this change:
test/base/twilltestcase.py
diffs (30 lines):
diff -r 73d8b2acef0a -r 3190f7d6b572 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Mon Jun 08 16:47:01 2009 -0400
+++ b/test/base/twilltestcase.py Mon Jun 08 16:52:55 2009 -0400
@@ -548,22 +548,11 @@
# Check for refresh_on_change attribute, submit a change if required
if 'refresh_on_change' in control.attrs.keys():
changed = False
- for elem in kwd[control.name]:
- # For DataToolParameter, control.value is the index of the DataToolParameter select list,
- # but elem is the filename. The following loop gets the filename of that index.
- param_text = ''
- for param in tc.show().split('<select'):
- param = ('<select' + param.split('select>')[0] + 'select>').replace('selected', 'selected="yes"')
- if param.find('on_chang') != -1 and param.find('name="%s"' % control.name) != -1:
- tree = ElementTree.fromstring(param)
- for option in tree.findall('option'):
- if option.get('value') in control.value:
- param_text = option.text.strip()
- break
- break
- if elem not in control.value and param_text.find(elem) == -1 :
+ item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ] #For DataToolParameter, control.value is the HDA id, but kwd contains the filename. This loop gets the filename/label for the selected values.
+ for value in kwd[ control.name ]:
+ if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
changed = True
- break
+ break
if changed:
# Clear Control and set to proper value
control.clear()
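
As a rough illustration of the new refresh check above, here is a simplified, runnable sketch; Item and Control are minimal stand-ins for twill's form controls, not the real twill API, and the example values are invented.

# Minimal stand-ins for twill form controls, for illustration only.
class Item:
    def __init__( self, label, selected ):
        self.attrs = { 'label': label }
        self.selected = selected

class Control:
    def __init__( self, name, value, items ):
        self.name = name
        self.value = value      # currently selected option values ( e.g. HDA ids )
        self._items = items
    def get_items( self ):
        return self._items

def needs_refresh( control, kwd ):
    # A refresh is required only if a requested value is neither already selected by
    # value nor contained in the label of a selected item ( the filename for
    # DataToolParameter selects, whose values are HDA ids ).
    item_labels = [ item.attrs[ 'label' ] for item in control.get_items() if item.selected ]
    for value in kwd[ control.name ]:
        if value not in control.value and True not in [ value in item_label for item_label in item_labels ]:
            return True
    return False

control = Control( 'input1', [ '5' ], [ Item( '1.bed uploaded file', selected=True ) ] )
print( needs_refresh( control, { 'input1': [ '1.bed' ] } ) )   # False: the label already matches
print( needs_refresh( control, { 'input1': [ '2.bed' ] } ) )   # True: a change must be submitted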
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/73d8b2acef0a
changeset: 2434:73d8b2acef0a
user: James Taylor <james(a)jamestaylor.org>
date: Mon Jun 08 16:47:01 2009 -0400
description:
Allow UnvalidatedValues to be used when generating output labels (fixes issue #59)
1 file(s) affected in this change:
lib/galaxy/tools/parameters/basic.py
diffs (38 lines):
diff -r 73a8b43f1d97 -r 73d8b2acef0a lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py Mon Jun 08 12:49:26 2009 -0400
+++ b/lib/galaxy/tools/parameters/basic.py Mon Jun 08 16:47:01 2009 -0400
@@ -535,9 +535,10 @@
def from_html( self, value, trans=None, context={} ):
if self.need_late_validation( trans, context ):
if self.multiple:
- #While it is generally allowed that a select value can be '',
- #we do not allow this to be the case in a dynamically generated multiple select list being set in workflow building mode
- #we instead treat '' as 'No option Selected' (None)
+ # While it is generally allowed that a select value can be '',
+ # we do not allow this to be the case in a dynamically
+ # generated multiple select list being set in workflow building
+ # mode we instead treat '' as 'No option Selected' (None)
if value == '':
value = None
else:
@@ -565,9 +566,9 @@
if isinstance( value, list ):
if not(self.repeat):
assert self.multiple, "Multiple values provided but parameter is not expecting multiple values"
- return self.separator.join( value )
+ return self.separator.join( map( str, value ) )
else:
- return value
+ return str(value)
def value_to_basic( self, value, app ):
if isinstance( value, UnvalidatedValue ):
return { "__class__": "UnvalidatedValue", "value": value.value }
@@ -1331,6 +1332,8 @@
"""
def __init__( self, value ):
self.value = value
+ def __str__( self ):
+ return str( self.value )
class RuntimeValue( object ):
"""
details: http://www.bx.psu.edu/hg/galaxy/rev/73847f425801
changeset: 2427:73847f425801
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Mon Jun 01 10:27:04 2009 -0400
description:
Add EuPathDb data source tool config.
1 file(s) affected in this change:
tools/data_source/eupathdb.xml
diffs (36 lines):
diff -r e50551db6e60 -r 73847f425801 tools/data_source/eupathdb.xml
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tools/data_source/eupathdb.xml Mon Jun 01 10:27:04 2009 -0400
@@ -0,0 +1,32 @@
+<!--
+ If the value of 'URL_method' is 'get', the request will consist of the value of 'URL' coming back in
+ the initial response. If value of 'URL_method' is 'post', any additional params coming back in the
+ initial response ( in addition to 'URL' ) will be encoded and appended to URL and a post will be performed.
+-->
+<tool name="EuPathDB" id="eupathdb" tool_type="data_source" url_method="post">
+ <description>server</description>
+ <command interpreter="python">data_source.py $output</command>
+ <inputs action="http://galaxy.eupathdb.org/eupathdb.galaxy/queries_tools.jsp" check_values="false" method="post">
+ <display>go to EuPathDB server $GALAXY_URL</display>
+ <param name="GALAXY_URL" type="baseurl" value="/tool_runner?tool_id=eupathdb" />
+ </inputs>
+ <request_param_translation>
+ <request_param galaxy_name="URL" remote_name="URL" missing="">
+ <add_to_url>
+ <param_from_source name="dbkey" missing="?" />
+ <param_from_source name="wdk_history_id" missing="" />
+ <param_from_source name="wdkReportFormat" missing="tabular" />
+ <param_from_source name="selectedFields" missing="" />
+ <param_from_source name="includeHeader" missing="yes" />
+ <param_from_source name="downloadType" missing="plain" />
+ </add_to_url>
+ </request_param>
+ <request_param galaxy_name="format" remote_name="wdkReportFormat" missing="tabular" />
+ <request_param galaxy_name="dbkey" remote_name="dbkey" missing="?" />
+ </request_param_translation>
+ <uihints minwidth="800"/>
+ <outputs>
+ <data name="output" format="txt" />
+ </outputs>
+ <options sanitize="False" refresh="True"/>
+</tool>
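
To make the URL_method comment concrete, here is a generic sketch of the two behaviours it describes. This is not Galaxy's data_source.py; the helper, parameter names, and URL are invented, and sending the extra parameters as a POST body is a loose paraphrase of the comment rather than a statement of the tool's exact behaviour.

try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode         # Python 2

def build_followup_request( url, extra_params, url_method ):
    """Return ( url, post_body ) for the follow-up request to the remote data source."""
    if url_method == 'get':
        # 'get': the 'URL' value returned in the initial response is fetched as-is.
        return url, None
    # 'post': additional parameters returned in the initial response are encoded
    # and sent back to 'URL' as a POST.
    return url, urlencode( extra_params )

print( build_followup_request( "http://galaxy.eupathdb.org/example", { "wdkReportFormat": "tabular" }, "post" ) )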
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/8ac9be4bd716
changeset: 2429:8ac9be4bd716
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Jun 05 11:33:19 2009 -0400
description:
Revert upload.py - didn't mean to commit it.
1 file(s) affected in this change:
lib/galaxy/tools/actions/upload.py
diffs (28 lines):
diff -r 9a71b89082fe -r 8ac9be4bd716 lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Fri Jun 05 11:15:25 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Fri Jun 05 11:33:19 2009 -0400
@@ -245,14 +245,12 @@
parts = file_name.split( "." )
if len( parts ) > 1:
ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' or ext == 'novoindex' ):
+ if not( ext == 'ab1' or ext == 'scf' ):
raise BadFileException( "you attempted to upload an inappropriate file." )
if ext == 'ab1' and file_type != 'ab1':
raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
elif ext == 'scf' and file_type != 'scf':
raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
- elif ext == 'novoindex' and file_type != 'novoindex':
- raise BadFileException( "you must manually set the 'File Format' to 'NovoIndex' when uploading novoindex files." )
data_type = 'binary'
if not data_type:
# We must have a text file
@@ -338,7 +336,7 @@
return ( False, False, None )
zip_file = zipfile.ZipFile( temp_name, "r" )
# Make sure the archive consists of valid files. The current rules are:
- # 1. Archives can only include .ab1, .scf, or .txt files
+ # 1. Archives can only include .ab1, .scf or .txt files
# 2. All file extensions within an archive must be the same
name = zip_file.namelist()[0]
test_ext = name.split( "." )[1].strip().lower()
details: http://www.bx.psu.edu/hg/galaxy/rev/373962edbe90
changeset: 2430:373962edbe90
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Jun 05 16:48:03 2009 -0400
description:
Functional test fixes.
2 file(s) affected in this change:
test/functional/test_history_functions.py
test/functional/test_security_and_libraries.py
diffs (173 lines):
diff -r 8ac9be4bd716 -r 373962edbe90 test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Fri Jun 05 11:33:19 2009 -0400
+++ b/test/functional/test_history_functions.py Fri Jun 05 16:48:03 2009 -0400
@@ -24,9 +24,9 @@
global regular_user3
regular_user3 = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test3(a)bx.psu.edu' ).first()
assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
- self.logout()
def test_005_deleting_histories( self ):
"""Testing deleting histories"""
+ self.logout()
self.login( email='test(a)bx.psu.edu' )
global admin_user
admin_user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ).first()
@@ -104,20 +104,18 @@
check_str = "You can't send histories to yourself."
self.share_history( str( history3.id ), 'test(a)bx.psu.edu', check_str )
# Share a history with 1 valid user
- name = history3.name
- email = 'test1(a)bx.psu.edu'
- check_str = 'Histories (%s) have been shared with: %s' % ( name, email )
- self.share_history( str( history3.id ), email, check_str )
+ check_str = 'Histories (%s) have been shared with: %s' % ( history3.name, regular_user1.email )
+ self.share_history( str( history3.id ), regular_user1.email, check_str )
# We need to keep track of all shared histories so they can later be deleted
global history3_copy1
history3_copy1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
assert history3_copy1 is not None, "Problem retrieving history3_copy1 from database"
self.logout()
- self.login( email='test1(a)bx.psu.edu' )
- check_str = '%s from test(a)bx.psu.edu' % history3.name
+ self.login( email=regular_user1.email )
+ check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
self.logout()
- self.login( email='test(a)bx.psu.edu' )
+ self.login( email=admin_user.email )
# Need to delete history3_copy1
history3_copy1.deleted = True
history3_copy1.flush()
@@ -135,7 +133,7 @@
self.upload_file( '2.bed', dbkey='hg18' )
id = '%s,%s' % ( str( history3.id ), str( history4.id ) )
name = '%s,%s' % ( history3.name, history4.name )
- email = 'test2@bx.psu.edu,test3@bx.psu.edu'
+ email = '%s,%s' % ( regular_user2.email, regular_user3.email )
check_str = 'Histories (%s) have been shared with: %s' % ( name, email )
self.share_history( id, email, check_str )
# We need to keep track of all shared histories so they can later be deleted
@@ -158,19 +156,19 @@
history4_copy1 = history4_copyies[0]
history4_copy2 = history4_copyies[1]
self.logout()
- self.login( email='test2(a)bx.psu.edu' )
+ self.login( email=regular_user2.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
check_str = '%s from %s' % ( history4.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
self.logout()
- self.login( email='test3(a)bx.psu.edu' )
+ self.login( email=regular_user3.email )
check_str = '%s from %s' % ( history3.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
check_str = '%s from %s' % ( history4.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
self.logout()
- self.login( email='test(a)bx.psu.edu' )
+ self.login( email=admin_user.email )
# Need to delete the copied histories, so later test runs are valid
history3_copy2.deleted = True
history3_copy2.flush()
@@ -213,6 +211,7 @@
raise AssertionError, "The 'access' permission is not set for history5_dataset1.actions"
def test_035_sharing_history_by_making_datasets_public( self ):
"""Testing sharing a restricted history by making the datasets public"""
+ # We're still logged in as admin_user.email
check_str = 'The following datasets can be shared with %s by updating their permissions' % regular_user1.email
action_check_str = 'Histories (%s) have been shared with: %s' % ( history5.name, regular_user1.email )
self.share_history( str( history5.id ), regular_user1.email, check_str, action='public', action_check_str=action_check_str )
@@ -248,7 +247,13 @@
global sharing_role
role_name = 'Sharing role for: %s, %s' % ( admin_user.email, regular_user1.email )
sharing_role = galaxy.model.Role.filter( galaxy.model.Role.table.c.name==role_name ).first()
- assert sharing_role is not None, "Problem retrieving sharing_role from the database"
+ if not sharing_role:
+ # May have created a sharing role in a previous functional test suite from the opposite direction.
+ role_name = 'Sharing role for: %s, %s' % ( regular_user1.email, admin_user.email )
+ sharing_role = galaxy.model.Role.filter( and_( galaxy.model.Role.table.c.type==role_type,
+ galaxy.model.Role.table.c.name==role_name ) ).first()
+ if not sharing_role:
+ raise AssertionError( "Privately sharing a dataset did not properly create a sharing role" )
self.logout()
self.login( email=regular_user1.email )
self.visit_url( "%s/history/list" % self.url )
@@ -275,10 +280,10 @@
# Need to delete history5_copy2 on the history list page for regular_user1
history5_copy2.deleted = True
history5_copy2.flush()
+ def test_045_sharing_private_history_with_multiple_users_by_changing_no_permissions( self ):
+ """Testing sharing a restricted history with multiple users, making no permission changes"""
self.logout()
self.login( email=admin_user.email )
- def test_045_sharing_private_history_with_multiple_users_by_changing_no_permissions( self ):
- """Testing sharing a restricted history with multiple users, making no permission changes"""
# History5 can be shared with any user, since it contains a public dataset. However, only
# regular_user1 should be able to access history5's 2.bed dataset since it is associated with a
# sharing role, and regular_user2 should be able to access history5's 1.bed, but not 2.bed even
@@ -307,17 +312,26 @@
history5_copy4 = history5_copies[1]
assert history5_copy4 is not None, "Problem retrieving history5_copy4 from database"
# Make sure test1(a)bx.psu.edu received a copy of history5 with both datasets accessible
+ if history5_copy3.user_id == regular_user1.id:
+ history_to_use_for_regular_user_1 = history5_copy3
+ history_to_use_for_regular_user_2 = history5_copy4
+ elif history5_copy4.user_id == regular_user1.id:
+ history_to_use_for_regular_user_1 = history5_copy4
+ history_to_use_for_regular_user_2 = history5_copy3
+ else:
+ raise AssertionError, "Copies of history5 were not correctly associated with users"
+ self.logout()
self.login( email=regular_user1.email )
check_str = '%s from %s' % ( history5.name, admin_user.email )
self.view_stored_active_histories( check_str=check_str )
- self.switch_history( id=str( history5_copy3.id ), name=history5_copy3.name )
+ self.switch_history( id=str( history_to_use_for_regular_user_1.id ), name=history_to_use_for_regular_user_1.name )
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
self.logout()
# Make sure test2(a)bx.psu.edu received a copy of history5, with only 1.bed accessible
self.login( email=regular_user2.email )
self.view_stored_active_histories( check_str=check_str )
- self.switch_history( id=str( history5_copy4.id ), name=history5_copy4.name )
+ self.switch_history( id=str( history_to_use_for_regular_user_2.id ), name=history_to_use_for_regular_user_2.name )
self.check_history_for_string( '1.bed' )
self.check_history_for_string( '2.bed' )
# Get both new hdas from the db that were created for the shared history
@@ -340,18 +354,15 @@
except:
pass
self.check_history_for_string( 'You do not have permission to view this dataset' )
- self.logout()
- self.login( email='test(a)bx.psu.edu' )
# Need to delete the copied histories, so later test runs are valid
history5_copy3.deleted = True
history5_copy3.flush()
history5_copy4.deleted = True
history5_copy4.flush()
-
-
-
def test_050_sharing_private_history_by_choosing_to_not_share( self ):
"""Testing sharing a restricted history with multiple users by choosing not to share"""
+ self.logout()
+ self.login( email=admin_user.email )
self.switch_history( id=str( history5.id ), name=history5.name )
email = '%s,%s' % ( regular_user1.email, regular_user2.email )
check_str = 'The following datasets can be shared with %s with no changes' % email
diff -r 8ac9be4bd716 -r 373962edbe90 test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Fri Jun 05 11:33:19 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Fri Jun 05 16:48:03 2009 -0400
@@ -170,6 +170,11 @@
global sharing_role
sharing_role = galaxy.model.Role.filter( and_( galaxy.model.Role.table.c.type==role_type,
galaxy.model.Role.table.c.name==role_name ) ).first()
+ if not sharing_role:
+ # May have created a sharing role in a previous functional test suite from the opposite direction.
+ role_name = 'Sharing role for: %s, %s' % ( admin_user.email, regular_user1.email )
+ sharing_role = galaxy.model.Role.filter( and_( galaxy.model.Role.table.c.type==role_type,
+ galaxy.model.Role.table.c.name==role_name ) ).first()
if not sharing_role:
raise AssertionError( "Privately sharing a dataset did not properly create a sharing role" )
if len( sharing_role.users ) != 2:
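
The fallback added above accounts for the sharing role's name listing the two email addresses in creation order, which can differ between test runs. A minimal sketch of that lookup, with an invented Role stand-in and example addresses:

# Invented stand-in for galaxy.model.Role, for illustration only.
class Role:
    def __init__( self, name, type='sharing' ):
        self.name = name
        self.type = type

def find_sharing_role( roles, email_a, email_b, role_type='sharing' ):
    # The role may have been created with the two emails in either order by an
    # earlier test, so try both name forms before concluding that none exists.
    for first, second in ( ( email_a, email_b ), ( email_b, email_a ) ):
        name = 'Sharing role for: %s, %s' % ( first, second )
        for role in roles:
            if role.type == role_type and role.name == name:
                return role
    return None

roles = [ Role( 'Sharing role for: user1@example.org, admin@example.org' ) ]
assert find_sharing_role( roles, 'admin@example.org', 'user1@example.org' ) is not None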
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/9a71b89082fe
changeset: 2428:9a71b89082fe
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Fri Jun 05 11:15:25 2009 -0400
description:
Add ability to share histories with multiple users, along with bug fixes and more functional test coverage for history features.
12 file(s) affected in this change:
lib/galaxy/security/__init__.py
lib/galaxy/tools/actions/upload.py
lib/galaxy/web/controllers/history.py
lib/galaxy/web/controllers/root.py
lib/galaxy/web/framework/__init__.py
scripts/cleanup_datasets/cleanup_datasets.py
templates/history/rename.mako
templates/history/share.mako
test/base/twilltestcase.py
test/functional/test_history_functions.py
test/functional/test_security_and_libraries.py
tool_conf.xml.sample
diffs (1698 lines):
diff -r 73847f425801 -r 9a71b89082fe lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py Mon Jun 01 10:27:04 2009 -0400
+++ b/lib/galaxy/security/__init__.py Fri Jun 05 11:15:25 2009 -0400
@@ -51,6 +51,8 @@
def set_dataset_permission( self, dataset, permission ):
raise "Unimplemented Method"
def set_all_library_permissions( self, dataset, permissions ):
+ raise "Unimplemented Method"
+ def dataset_is_public( self, dataset ):
raise "Unimplemented Method"
def make_dataset_public( self, dataset ):
raise "Unimplemented Method"
@@ -296,6 +298,10 @@
# Add the new specific permission on the dataset
for dp in [ self.model.DatasetPermissions( action, dataset, role ) for role in roles ]:
dp.flush()
+ def dataset_is_public( self, dataset ):
+ # A dataset is considered public if there are no "access" actions associated with it. Any
+ # other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
+ return self.permitted_actions.DATASET_ACCESS.action not in [ a.action for a in dataset.actions ]
def make_dataset_public( self, dataset ):
# A dataset is considered public if there are no "access" actions associated with it. Any
# other actions ( 'manage permissions', 'edit metadata' ) are irrelevant.
diff -r 73847f425801 -r 9a71b89082fe lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py Mon Jun 01 10:27:04 2009 -0400
+++ b/lib/galaxy/tools/actions/upload.py Fri Jun 05 11:15:25 2009 -0400
@@ -245,12 +245,14 @@
parts = file_name.split( "." )
if len( parts ) > 1:
ext = parts[1].strip().lower()
- if not( ext == 'ab1' or ext == 'scf' ):
+ if not( ext == 'ab1' or ext == 'scf' or ext == 'novoindex' ):
raise BadFileException( "you attempted to upload an inappropriate file." )
if ext == 'ab1' and file_type != 'ab1':
raise BadFileException( "you must manually set the 'File Format' to 'Ab1' when uploading ab1 files." )
elif ext == 'scf' and file_type != 'scf':
raise BadFileException( "you must manually set the 'File Format' to 'Scf' when uploading scf files." )
+ elif ext == 'novoindex' and file_type != 'novoindex':
+ raise BadFileException( "you must manually set the 'File Format' to 'NovoIndex' when uploading novoindex files." )
data_type = 'binary'
if not data_type:
# We must have a text file
@@ -336,7 +338,7 @@
return ( False, False, None )
zip_file = zipfile.ZipFile( temp_name, "r" )
# Make sure the archive consists of valid files. The current rules are:
- # 1. Archives can only include .ab1, .scf or .txt files
+ # 1. Archives can only include .ab1, .scf, or .txt files
# 2. All file extensions within an archive must be the same
name = zip_file.namelist()[0]
test_ext = name.split( "." )[1].strip().lower()
diff -r 73847f425801 -r 9a71b89082fe lib/galaxy/web/controllers/history.py
--- a/lib/galaxy/web/controllers/history.py Mon Jun 01 10:27:04 2009 -0400
+++ b/lib/galaxy/web/controllers/history.py Fri Jun 05 11:15:25 2009 -0400
@@ -1,6 +1,7 @@
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
-import webhelpers
+from galaxy import util
+import webhelpers, logging
from datetime import datetime
from cgi import escape
@@ -65,7 +66,6 @@
@web.expose
def index( self, trans ):
return ""
-
@web.expose
def list_as_xml( self, trans ):
"""
@@ -78,9 +78,7 @@
@web.expose
@web.require_login( "work with multiple histories" )
def list( self, trans, **kwargs ):
- """
- List all available histories
- """
+ """List all available histories"""
status = message = None
if 'operation' in kwargs:
operation = kwargs['operation'].lower()
@@ -92,9 +90,7 @@
status, message = None, None
refresh_history = False
# Load the histories and ensure they all belong to the current user
- history_ids = kwargs.get( 'id', [] )
- if type( history_ids ) is not list:
- history_ids = [ history_ids ]
+ history_ids = util.listify( kwargs.get( 'id', [] ) )
histories = []
for hid in history_ids:
history = model.History.get( hid )
@@ -117,17 +113,13 @@
trans.sa_session.flush()
# Render the list view
return self.list_grid( trans, status=status, message=message, **kwargs )
-
def _list_delete( self, trans, histories ):
"""Delete histories"""
n_deleted = 0
deleted_current = False
for history in histories:
if not history.deleted:
- # Delete DefaultHistoryPermissions
- for dhp in history.default_permissions:
- dhp.delete()
- dhp.flush()
+ # We'll not eliminate any DefaultHistoryPermissions in case we undelete the history later
# Mark history as deleted in db
history.deleted = True
# If deleting the current history, make a new current.
@@ -144,7 +136,6 @@
message_parts.append( "Your active history was deleted, a new empty history is now active.")
status = INFO
return ( status, " ".join( message_parts ) )
-
def _list_undelete( self, trans, histories ):
"""Undelete histories"""
n_undeleted = 0
@@ -154,6 +145,15 @@
n_already_purged += 1
if history.deleted:
history.deleted = False
+ if not history.default_permissions:
+ # For backward compatibility - for a while we were deleting all DefaultHistoryPermissions on
+ # the history when we deleted the history. We are no longer doing this.
+ # Need to add default DefaultHistoryPermissions since they were deleted when the history was deleted
+ default_action = trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS
+ private_user_role = trans.app.security_agent.get_private_user_role( history.user )
+ default_permissions = {}
+ default_permissions[ default_action ] = [ private_user_role ]
+ trans.app.security_agent.history_set_default_permissions( history, default_permissions )
n_undeleted += 1
trans.log_event( "History id %d marked as undeleted" % history.id )
status = SUCCESS
@@ -164,7 +164,6 @@
message_parts.append( "%d have already been purged and cannot be undeleted." % n_already_purged )
status = WARNING
return status, "".join( message_parts )
-
def _list_switch( self, trans, histories ):
"""Switch to a new different history"""
new_history = histories[0]
@@ -179,13 +178,9 @@
trans.log_event( "History switched to id: %s, name: '%s'" % (str(new_history.id), new_history.name ) )
# No message
return None, None
-
@web.expose
def delete_current( self, trans ):
- """
- Delete just the active history -- this does not require a logged
- in user.
- """
+ """Delete just the active history -- this does not require a logged in user."""
history = trans.get_history()
if not history.deleted:
history.deleted = True
@@ -195,7 +190,6 @@
# history active
trans.new_history()
return trans.show_ok_message( "History deleted, a new history is active" )
-
@web.expose
def rename_async( self, trans, id=None, new_name=None ):
history = model.History.get( id )
@@ -209,11 +203,9 @@
# Rename
history.name = new_name
trans.sa_session.flush()
-
- ## These have been moved from 'root' but not cleaned up
-
@web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
+ # TODO clean this up and make sure functionally correct
msg = ""
user = trans.get_user()
user_history = trans.get_history()
@@ -262,97 +254,159 @@
Warning! If you import this history, you will lose your current
history. Click <a href="%s">here</a> to confirm.
""" % web.url_for( id=id, confirm=True ) )
-
@web.expose
@web.require_login( "share histories with other users" )
def share( self, trans, id=None, email="", **kwd ):
- send_to_err = ""
- if not id:
- id = trans.get_history().id
- if not isinstance( id, list ):
- id = [ id ]
- histories = []
- history_names = []
- for hid in id:
- histories.append( trans.app.model.History.get( hid ) )
- history_names.append(histories[-1].name)
- if not email:
- return trans.fill_template("/history/share.mako", histories=histories, email=email, send_to_err=send_to_err)
- user = trans.get_user()
- send_to_user = trans.app.model.User.filter( trans.app.model.User.table.c.email==email ).first()
+ # If a history contains both datasets that can be shared and others that cannot be shared with the desired user,
+ # then the entire history is shared, and the protected datasets will be visible, but inaccessible ( greyed out )
+ # in the shared history
params = util.Params( kwd )
action = params.get( 'action', None )
if action == "no_share":
- trans.response.send_redirect( url_for( action='history_options' ) )
- if not send_to_user:
- send_to_err = "No such user"
- elif user.email == email:
- send_to_err = "You can't send histories to yourself"
- else:
- if 'history_share_btn' in kwd or action != 'share':
- # The user is attempting to share a history whose datasets cannot all be accessed by the other user. In this case,
- # the user sharing the history can chose to make the datasets public ( action == 'public' ) if he has the authority
- # to do so, or automatically create a new "sharing role" that allows the user to share his private datasets only with the
- # desired user ( action == 'private' ).
- can_change = {}
- cannot_change = {}
+ trans.response.send_redirect( url_for( controller='root', action='history_options' ) )
+ if not id:
+ id = trans.get_history().id
+ id = util.listify( id )
+ send_to_err = ""
+ histories = []
+ for hid in id:
+ histories.append( trans.app.model.History.get( hid ) )
+ if not email:
+ return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
+ user = trans.get_user()
+ send_to_users = []
+ for email_address in util.listify( email ):
+ email_address = email_address.strip()
+ if email_address:
+ if email_address == user.email:
+ send_to_err += "You can't send histories to yourself. "
+ else:
+ send_to_user = trans.app.model.User.filter( trans.app.model.User.table.c.email==email_address ).first()
+ if send_to_user:
+ send_to_users.append( send_to_user )
+ else:
+ send_to_err += "%s is not a valid Galaxy user. " % email_address
+ if not send_to_users:
+ if not send_to_err:
+ send_to_err += "%s is not a valid Galaxy user. " % email
+ return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
+ if params.get( 'share_proceed_button', False ) and action == 'share':
+ # We need to filter out all histories that cannot be shared
+ filtered_histories = {}
+ for history in histories:
+ for send_to_user in send_to_users:
+ # Only deal with datasets that have not been purged
+ for hda in history.activatable_datasets:
+ # The history can be shared if it contains at least 1 public dataset or 1 dataset that the
+ # other user can access. Inaccessible datasets contained in the history will be displayed
+ # in the shared history, but "greyed out", so they cannot be viewed or used.
+ if trans.app.security_agent.dataset_is_public( hda.dataset ) or \
+ trans.app.security_agent.allow_action( send_to_user,
+ trans.app.security_agent.permitted_actions.DATASET_ACCESS,
+ dataset=hda ):
+ if send_to_user in filtered_histories:
+ filtered_histories[ send_to_user ].append( history )
+ else:
+ filtered_histories[ send_to_user ] = [ history ]
+ break
+ return self._share_histories( trans, user, send_to_users, send_to_err, filtered_histories=filtered_histories )
+ elif params.get( 'history_share_btn', False ) or action != 'share':
+ # The user is attempting to share histories whose datasets cannot all be accessed by other users. In this case,
+ # the user sharing the histories can:
+ # 1) action=='public': chose to make the datasets public if he is permitted to do so
+ # 2) action=='private': automatically create a new "sharing role" allowing protected
+ # datasets to be accessed only by the desired users
+ # 3) action=='share': share only what can be shared when no permissions are changed - this case is handled above
+ # 4) action=='no_share': Do not share anything - this case is handled above.
+ can_change = {}
+ cannot_change = {}
+ no_change_needed = {}
+ for history in histories:
+ # Only deal with datasets that have not been purged
+ for hda in history.activatable_datasets:
+ if trans.app.security_agent.dataset_is_public( hda.dataset ):
+ if history not in no_change_needed:
+ no_change_needed[ history ] = [ hda ]
+ else:
+ no_change_needed[ history ].append( hda )
+ elif not trans.app.security_agent.allow_action( send_to_user,
+ trans.app.security_agent.permitted_actions.DATASET_ACCESS,
+ dataset=hda ):
+ # The user with which we are sharing the history does not have access permission on the current dataset
+ if trans.app.security_agent.allow_action( user,
+ trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
+ dataset=hda ) and not hda.dataset.library_associations:
+ # The current user has authority to change permissions on the current dataset because
+ # they have permission to manage permissions on the dataset and the dataset is not associated
+ # with a library.
+ if action == "private":
+ trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
+ elif action == "public":
+ trans.app.security_agent.make_dataset_public( hda.dataset )
+ elif history not in can_change:
+ # Build the set of histories / datasets on which the current user has authority
+ # to "manage permissions". This is used in /history/share.mako
+ can_change[ history ] = [ hda ]
+ else:
+ can_change[ history ].append( hda )
+ else:
+ if action in [ "private", "public" ]:
+ # Don't change stuff that the user doesn't have permission to change
+ continue
+ elif history not in cannot_change:
+ # Build the set of histories / datasets on which the current user does
+ # not have authority to "manage permissions". This is used in /history/share.mako
+ cannot_change[ history ] = [ hda ]
+ else:
+ cannot_change[ history ].append( hda )
+ if can_change or cannot_change:
+ return trans.fill_template( "/history/share.mako",
+ histories=histories,
+ email=email,
+ send_to_err=send_to_err,
+ can_change=can_change,
+ cannot_change=cannot_change,
+ no_change_needed=no_change_needed )
+ return self._share_histories( trans, user, send_to_users, send_to_err, histories=histories )
+ return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
+ def _share_histories( self, trans, user, send_to_users, send_to_err, histories=[], filtered_histories={} ):
+ msg = ""
+ if not send_to_users:
+ msg = "No users have been specified with which to share histories"
+ sent_to_emails = []
+ for sent_to_user in send_to_users:
+ sent_to_emails.append( sent_to_user.email )
+ emails = ",".join( e for e in sent_to_emails )
+ if not histories and not filtered_histories:
+ msg = "No histories can be sent to (%s) without changing dataset permissions associating a sharing role with them" % emails
+ elif histories:
+ history_names = []
+ for history in histories:
+ history_names.append( history.name )
+ for send_to_user in send_to_users:
+ new_history = history.copy( target_user=send_to_user )
+ new_history.name = history.name + " from " + user.email
+ new_history.user_id = send_to_user.id
+ self.app.model.flush()
+ msg = "Histories (%s) have been shared with: %s. " % ( ",".join( history_names ), emails )
+ elif filtered_histories:
+ # filtered_histories is a dictionary like: { user: [ history, history ], user: [ history ] }
+ for send_to_user, histories in filtered_histories.items():
+ history_names = []
for history in histories:
- for hda in history.activatable_datasets:
- # Only deal with datasets that have not been purged
- if not trans.app.security_agent.allow_action( send_to_user,
- trans.app.security_agent.permitted_actions.DATASET_ACCESS,
- dataset=hda ):
- # The user with which we are sharing the history does not have access permission on the current dataset
- if trans.app.security_agent.allow_action( user,
- trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS,
- dataset=hda ) and not hda.dataset.library_associations:
- # The current user has authority to change permissions on the current dataset because
- # they have permission to manage permissions on the dataset and the dataset is not associated
- # with a library.
- if action == "private":
- trans.app.security_agent.privately_share_dataset( hda.dataset, users=[ user, send_to_user ] )
- elif action == "public":
- trans.app.security_agent.make_dataset_public( hda.dataset )
- elif history not in can_change:
- # Build the set of histories / datasets on which the current user has authority
- # to "manage permissions". This is used in /history/share.mako
- can_change[ history ] = [ hda ]
- else:
- can_change[ history ].append( hda )
- else:
- if action in [ "private", "public" ]:
- # Don't change stuff that the user doesn't have permission to change
- continue
- elif history not in cannot_change:
- # Build the set of histories / datasets on which the current user does
- # not have authority to "manage permissions". This is used in /history/share.mako
- cannot_change[ history ] = [ hda ]
- else:
- cannot_change[ history ].append( hda )
- if can_change or cannot_change:
- return trans.fill_template( "/history/share.mako",
- histories=histories,
- email=email,
- send_to_err=send_to_err,
- can_change=can_change,
- cannot_change=cannot_change )
- for history in histories:
- new_history = history.copy( target_user=send_to_user )
- new_history.name = history.name + " from " + user.email
- new_history.user_id = send_to_user.id
- trans.log_event( "History share, id: %s, name: '%s': to new id: %s" % ( str( history.id ), history.name, str( new_history.id ) ) )
- self.app.model.flush()
- return trans.show_message( "History (%s) has been shared with: %s" % ( ",".join( history_names ),email ) )
- return trans.fill_template( "/history/share.mako", histories=histories, email=email, send_to_err=send_to_err )
-
+ history_names.append( history.name )
+ new_history = history.copy( target_user=send_to_user )
+ new_history.name = history.name + " from " + user.email
+ new_history.user_id = send_to_user.id
+ self.app.model.flush()
+ msg += "Histories (%s) have been shared with: %s. " % ( ",".join( history_names ), send_to_user.email )
+ if send_to_err:
+ msg += send_to_err
+ return trans.show_message( msg )
@web.expose
@web.require_login( "rename histories" )
def rename( self, trans, id=None, name=None, **kwd ):
- if trans.app.memory_usage:
- # Keep track of memory usage
- m0 = self.app.memory_usage.memory()
user = trans.get_user()
-
if not isinstance( id, list ):
if id != None:
id = [ id ]
@@ -387,7 +441,4 @@
change_msg = change_msg + "<p>You must specify a valid name for History: "+cur_names[i]+"</p>"
else:
change_msg = change_msg + "<p>History: "+cur_names[i]+" does not appear to belong to you.</p>"
- if self.app.memory_usage:
- m1 = trans.app.memory_usage.memory( m0, pretty=True )
- log.info( "End of root/history_rename, memory used increased by %s" % m1 )
- return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
\ No newline at end of file
+ return trans.show_message( "<p>%s" % change_msg, refresh_frames=['history'] )
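A note for readers tracing the new share() branches above: the permission triage that drives /history/share.mako can be read as a single categorization pass. The sketch below is a minimal standalone illustration of that pass, not Galaxy code; FakeDataset, its flags, and triage() are hypothetical stand-ins for trans.app.security_agent and the model objects used in the hunk. Under action=='public' or action=='private' the real controller additionally mutates permissions for the can_change bucket instead of only reporting it.

    # Minimal sketch of the dataset triage behind /history/share.mako, assuming
    # simplified stand-ins for Galaxy's model and security_agent. Public datasets
    # need no change; datasets the recipient cannot access go to can_change or
    # cannot_change depending on whether the sharer may manage their permissions
    # and whether they are library-associated; datasets the recipient can already
    # access are left untouched, mirroring the hunk above.
    from collections import defaultdict

    class FakeDataset(object):
        def __init__(self, name, public=False, recipient_can_access=False,
                     sharer_can_manage=True, in_library=False):
            self.name = name
            self.public = public
            self.recipient_can_access = recipient_can_access
            self.sharer_can_manage = sharer_can_manage
            self.in_library = in_library

    def triage(histories):
        """histories: dict of history name -> list of FakeDataset."""
        no_change_needed = defaultdict(list)
        can_change = defaultdict(list)
        cannot_change = defaultdict(list)
        for history, datasets in histories.items():
            for ds in datasets:
                if ds.public:
                    no_change_needed[history].append(ds.name)
                elif not ds.recipient_can_access:
                    if ds.sharer_can_manage and not ds.in_library:
                        can_change[history].append(ds.name)
                    else:
                        cannot_change[history].append(ds.name)
                # else: already accessible to the recipient, nothing to do
        return no_change_needed, can_change, cannot_change

    if __name__ == "__main__":
        buckets = triage({"history5": [FakeDataset("1.bed", public=True),
                                       FakeDataset("2.bed")]})
        print(buckets)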
diff -r 73847f425801 -r 9a71b89082fe lib/galaxy/web/controllers/root.py
--- a/lib/galaxy/web/controllers/root.py Mon Jun 01 10:27:04 2009 -0400
+++ b/lib/galaxy/web/controllers/root.py Fri Jun 05 11:15:25 2009 -0400
@@ -349,14 +349,12 @@
"""Displays a list of history related actions"""
return trans.fill_template( "/history/options.mako",
user = trans.get_user(), history = trans.get_history() )
-
@web.expose
def history_delete( self, trans, id ):
"""
Backward compatibility with check_galaxy script.
"""
return trans.webapp.controllers['history'].list( trans, id, operation='delete' )
-
@web.expose
def clear_history( self, trans ):
"""Clears the history for a user"""
@@ -367,7 +365,6 @@
self.app.model.flush()
trans.log_event( "History id %s cleared" % (str(history.id)) )
trans.response.send_redirect( url_for("/index" ) )
-
@web.expose
def history_import( self, trans, id=None, confirm=False, **kwd ):
msg = ""
@@ -417,13 +414,11 @@
Warning! If you import this history, you will lose your current
history. Click <a href="%s">here</a> to confirm.
""" % web.url_for( id=id, confirm=True ) )
-
@web.expose
- def history_new( self, trans ):
- trans.new_history()
+ def history_new( self, trans, name=None ):
+ trans.new_history( name=name )
trans.log_event( "Created new History, id: %s." % str(trans.get_history().id) )
return trans.show_message( "New history created", refresh_frames = ['history'] )
-
@web.expose
def history_add_to( self, trans, history_id=None, file_data=None, name="Data Added to History",info=None,ext="txt",dbkey="?",copy_access_from=None,**kwd ):
"""Adds a POSTed file to a History"""
@@ -455,13 +450,22 @@
except Exception, e:
trans.log_event( "Failed to add dataset to history: %s" % ( e ) )
return trans.show_error_message("Adding File to History has Failed")
-
@web.expose
- def history_set_default_permissions( self, trans, **kwd ):
- """Sets the user's default permissions for the current history"""
+ def history_set_default_permissions( self, trans, id=None, **kwd ):
+ """Sets the permissions on a history"""
if trans.user:
if 'update_roles_button' in kwd:
- history = trans.get_history()
+ history = None
+ if id:
+ try:
+ id = int( id )
+ except:
+ id = None
+ if id:
+ history = trans.app.model.History.get( id )
+ if not history:
+ # If we haven't retrieved a history, use the current one
+ history = trans.get_history()
p = util.Params( kwd )
permissions = {}
for k, v in trans.app.model.Dataset.permitted_actions.items():
@@ -478,7 +482,6 @@
else:
#user not logged in, history group must be only public
return trans.show_error_message( "You must be logged in to change a history's default permissions." )
-
@web.expose
def dataset_make_primary( self, trans, id=None):
"""Copies a dataset and makes primary"""
diff -r 73847f425801 -r 9a71b89082fe lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py Mon Jun 01 10:27:04 2009 -0400
+++ b/lib/galaxy/web/framework/__init__.py Fri Jun 05 11:15:25 2009 -0400
@@ -432,13 +432,15 @@
self.galaxy_session.current_history = history
self.sa_session.flush( [ self.galaxy_session ] )
history = property( get_history, set_history )
- def new_history( self ):
+ def new_history( self, name=None ):
"""
Create a new history and associate it with the current session and
its associated user (if set).
"""
# Create new history
history = self.app.model.History()
+ if name:
+ history.name = name
# Associate with session
history.add_galaxy_session( self.galaxy_session )
# Make it the session's current history
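new_history() now accepts an optional name, and the functional tests drive it through /history_new?name=... by plain string interpolation. The helper below is a hedged sketch of building that URL with query-string quoting added, which the test code above does not do; base_url stands in for TwillTestCase.url.

    # Hedged sketch: building the name-aware history_new URL the way the twill
    # tests do, but quoting the value so names with spaces or '&' survive the
    # query string. The endpoint itself is the one added in the hunks above.
    try:
        from urllib import quote_plus          # Python 2, matching this code base
    except ImportError:
        from urllib.parse import quote_plus    # Python 3

    def history_new_url(base_url, name=None):
        if name:
            return "%s/history_new?name=%s" % (base_url, quote_plus(str(name)))
        return "%s/history_new" % base_url

    if __name__ == "__main__":
        print(history_new_url("http://localhost:8080", "history 2"))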
diff -r 73847f425801 -r 9a71b89082fe scripts/cleanup_datasets/cleanup_datasets.py
--- a/scripts/cleanup_datasets/cleanup_datasets.py Mon Jun 01 10:27:04 2009 -0400
+++ b/scripts/cleanup_datasets/cleanup_datasets.py Fri Jun 05 11:15:25 2009 -0400
@@ -123,6 +123,11 @@
for dataset_assoc in history.datasets:
_purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
if not info_only:
+ # TODO: should the Delete DefaultHistoryPermissions be deleted here? This was incorrectly
+ # done in the _list_delete() method of the history controller, so copied it here. Not sure
+ # if we should ever delete info like this from the db though, so commented out for now...
+ #for dhp in history.default_permissions:
+ # dhp.delete()
history.purged = True
print "%d" % history.id
history_count += 1
@@ -226,7 +231,6 @@
print "# This Dataset (%i) is not deletable, associated Metadata Files will not be removed.\n" % ( dataset.id )
else:
# Mark all associated MetadataFiles as deleted and purged and remove them from disk
- print "The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
metadata_files = []
#lets create a list of metadata files, then perform actions on them
for hda in dataset.history_associations:
@@ -236,6 +240,7 @@
for metadata_file in app.model.MetadataFile.filter( app.model.MetadataFile.table.c.lda_id==lda.id ).all():
metadata_files.append( metadata_file )
for metadata_file in metadata_files:
+ print "# The following metadata files attached to associations of Dataset '%s' have been purged:" % dataset.id
if not info_only:
if remove_from_disk:
try:
@@ -248,7 +253,6 @@
print "%s" % metadata_file.file_name
print
dataset.deleted = True
- #dataset.flush()
app.model.flush()
def _purge_dataset( dataset, remove_from_disk, info_only = False ):
@@ -259,6 +263,7 @@
if not info_only:
# Remove files from disk and update the database
if remove_from_disk:
+ # TODO: should permissions on the dataset be deleted here?
os.unlink( dataset.file_name )
# Remove associated extra files from disk if they exist
if dataset.extra_files_path and os.path.exists( dataset.extra_files_path ):
@@ -286,6 +291,7 @@
for sub_folder in folder.folders:
_purge_folder( sub_folder, app, remove_from_disk, info_only = info_only )
if not info_only:
+ # TODO: should the folder permissions be deleted here?
folder.purged = True
folder.flush()
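The cleanup_datasets.py hunks above gather every MetadataFile attached to a dataset's history and library associations before acting on them. Below is a standalone sketch of that collect-then-act pattern; metadata_files_for() and purge() are hypothetical stand-ins for the MetadataFile queries and the unlink/flush calls in the script.

    # Sketch of the collect-then-act pattern used for metadata file cleanup above.
    def purge_dataset_metadata(dataset, metadata_files_for, purge, info_only=False):
        # First pass: gather metadata files from every HDA and LDA of the dataset.
        files = []
        for assoc in list(dataset.history_associations) + list(dataset.library_associations):
            files.extend(metadata_files_for(assoc))
        # Second pass: report, and purge unless this is a dry run (info_only).
        for metadata_file in files:
            print("# purging metadata file %s" % metadata_file.file_name)
            if not info_only:
                purge(metadata_file)
        return files

    if __name__ == "__main__":
        class _FakeDataset(object):
            history_associations = [object()]
            library_associations = []
        class _FakeMetadataFile(object):
            file_name = "/tmp/metadata_1.dat"
        purge_dataset_metadata(_FakeDataset(), lambda assoc: [_FakeMetadataFile()],
                               lambda mf: None, info_only=True)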
diff -r 73847f425801 -r 9a71b89082fe templates/history/rename.mako
--- a/templates/history/rename.mako Mon Jun 01 10:27:04 2009 -0400
+++ b/templates/history/rename.mako Fri Jun 05 11:15:25 2009 -0400
@@ -4,17 +4,38 @@
<div class="toolForm">
<div class="toolFormTitle">${_('Rename History')}</div>
- <div class="toolFormBody">
- <form action="${h.url_for( controller='history', action='rename' )}" method="post" >
- <table>
- <tr><th>${_('Current Name')}</th><th>${_('New Name')}</th></tr>
- %for history in histories:
- <tr><td>${history.name}<input type="hidden" name="id" value="${history.id}"></td><td><input type="text" name="name" value="${history.name}" size="40"></td></tr>
- %endfor
- <tr><td colspan="2"><input type="submit" name="history_rename_btn" value="${_('Rename Histories')}"></td></tr>
- </table>
- </form>
- </div>
+ <div class="toolFormBody">
+ <form action="${h.url_for( controller='history', action='rename' )}" method="post" >
+ <table class="grid">
+ %for history in histories:
+ <tr>
+ <td>
+ <div class="form-row">
+ <input type="hidden" name="id" value="${history.id}">
+ <label>${_('Current Name')}</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ ${history.name}
+ </div>
+ </div>
+ </td>
+ <td>
+ <div class="form-row">
+ <label>${_('New Name')}</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="text" name="name" value="${history.name}" size="40">
+ </div>
+ </div>
+ </td>
+ </tr>
+ %endfor
+ <tr>
+ <td colspan="2">
+ <div class="form-row">
+ <input type="submit" name="history_rename_btn" value="${_('Rename Histories')}">
+ </div>
+ </td>
+ </tr>
+ </table>
+ </form>
+ </div>
</div>
-</body>
-</html>
\ No newline at end of file
diff -r 73847f425801 -r 9a71b89082fe templates/history/share.mako
--- a/templates/history/share.mako Mon Jun 01 10:27:04 2009 -0400
+++ b/templates/history/share.mako Fri Jun 05 11:15:25 2009 -0400
@@ -2,117 +2,173 @@
<%inherit file="/base.mako"/>
<%def name="title()">Share histories</%def>
-%if not can_change and not cannot_change:
- <div class="toolForm">
- <div class="toolFormTitle">${_('Share histories')}</div>
- <table>
+<div class="toolForm">
+ <div class="toolFormTitle">Share ${len( histories)} histories</div>
+ <div class="toolFormBody">
+ %if not can_change and not cannot_change:
<form action="${h.url_for( controller="history", action='share' )}" method="post" >
- <tr><th>${_('History Name:')}</td><th>${_('Number of Datasets:')}</th><th>${_('Share Link')}</th></tr>
%for history in histories:
- <tr>
- <td align="center">${history.name}<input type="hidden" name="id" value="${history.id}"></td>
- <td align="center">
- %if len( history.datasets ) < 1:
- <div class="warningmark">${_('This history contains no data.')}</div>
- %else:
- ${len(history.datasets)}
- %endif
- </td>
- <td align="center"><a href="${h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id) )}">${_('copy link to share')}</a></td>
- </tr>
+ <div class="toolForm">
+ <div class="form-row">
+ <label>${_('History Name:')}</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ ${history.name}<input type="hidden" name="id" value="${history.id}">
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>${_('Number of Datasets:')}</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ %if len( history.datasets ) < 1:
+ <div class="warningmark">${_('This history contains no data.')}</div>
+ %else:
+ ${len(history.datasets)}
+ %endif
+ </div>
+ </div>
+ ## TODO: this feature is not currently working
+ ##<div style="clear: both"></div>
+ ##<div class="form-row">
+ ## <label>${_('Share Link')}</label>
+ ## <div style="float: left; width: 250px; margin-right: 10px;">
+ ## <a href="${h.url_for( controller='history', action='imp', id=trans.security.encode_id(history.id) )}">${_('copy link to share')}</a>
+ ## </div>
+ ##</div>
+ ##<div style="clear: both"></div>
+ <p/>
+ </div>
%endfor
- <tr><td>${_('Email of User to share with:')}</td><td><input type="text" name="email" value="${email}" size="40"></td></tr>
+ <p/>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Galaxy user emails with which to share histories</label>
+ <div style="float: left; width: 250px; margin-right: 10px;">
+ <input type="text" name="email" value="${email}" size="40">
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Enter a Galaxy user email address or a comma-separated list of addresses if sharing with multiple users
+ </div>
+ </div>
%if send_to_err:
- <tr><td colspan="100%"><div class="errormessage">${send_to_err}</div></td></tr>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <div class="errormessage">${send_to_err}</div>
+ </div>
%endif
- <tr><td colspan="2" align="right"><input type="submit" name="history_share_btn" value="Submit"></td></tr>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <input type="submit" name="history_share_btn" value="Submit">
+ </div>
</form>
- </table>
+ %else:
+ <form action="${h.url_for( controller='history', action='share' )}" method="post">
+ %for history in histories:
+ <input type="hidden" name="id" value="${history.id}">
+ %endfor
+ <input type="hidden" name="email" value="${email}">
+ %if no_change_needed:
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <div class="donemessage">
+ The following datasets can be shared with ${email} with no changes
+ </div>
+ </div>
+ %for history, hdas in no_change_needed.items():
+ <div class="form-row">
+ <label>History</label>
+ ${history.name}
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Datasets</label>
+ </div>
+ %for hda in hdas:
+ <div class="form-row">
+ ${hda.name}
+ </div>
+ %endfor
+ %endfor
+ %endif
+ %if can_change:
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <div class="warningmessage">
+ The following datasets can be shared with ${email} by updating their permissions
+ </div>
+ </div>
+ %for history, hdas in can_change.items():
+ <div class="form-row">
+ <label>History</label>
+ ${history.name}
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Datasets</label>
+ </div>
+ %for hda in hdas:
+ <div class="form-row">
+ ${hda.name}
+ </div>
+ %endfor
+ %endfor
+ %endif
+ %if cannot_change:
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <div class="errormessage">
+ The following datasets cannot be shared with ${email} because you are not authorized to
+ change the permissions on them
+ </div>
+ </div>
+ %for history, hdas in cannot_change.items():
+ <div class="form-row">
+ <label>History</label>
+ ${history.name}
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Datasets</label>
+ </div>
+ %for hda in hdas:
+ <div class="form-row">
+ ${hda.name}
+ </div>
+ %endfor
+ %endfor
+ %endif
+ <div class="form-row">
+ <label>How would you like to proceed?</label>
+ </div>
+ %if can_change:
+ <div class="form-row">
+ <input type="radio" name="action" value="public"> Make datasets public so anyone can access them
+ %if cannot_change:
+ (where possible)
+ %endif
+ </div>
+ <div class="form-row">
+ <input type="radio" name="action" value="private"> Make datasets private to me and the user(s) with whom I am sharing
+ %if cannot_change:
+ (where possible)
+ %endif
+ </div>
+ %endif
+ %if no_change_needed:
+ <div class="form-row">
+ <input type="radio" name="action" value="share"> Share anyway
+ %if can_change:
+ (don't change any permissions)
+ %endif
+ </div>
+ %endif
+ <div class="form-row">
+ <input type="radio" name="action" value="no_share"> Don't share
+ </div>
+ <div class="form-row">
+ <input type="submit" name="share_proceed_button" value="Go"><br/>
+ </div>
+ </form>
+ %endif
</div>
-%else:
- <style type="text/css">
- th
- {
- text-align: left;
- }
- td
- {
- vertical-align: top;
- }
- </style>
- <form action="${h.url_for( controller='history', action='share' )}" method="post">
- %for history in histories:
- <input type="hidden" name="id" value="${history.id}">
- %endfor
- <input type="hidden" name="email" value="${email}">
- <div class="warningmessage">
- The history or histories you've chosen to share contain datasets that the user with which you're sharing does not have permission to access.
- These datasets are shown below. Datasets that the user has permission to access are not shown.
- </div>
- <p/>
- %if can_change:
- <div class="donemessage">
- The following datasets can be shared with ${email} by updating their permissions:
- <p/>
- <table cellpadding="0" cellspacing="8" border="0">
- <tr><th>Histories</th><th>Datasets</th></tr>
- %for history, datasets in can_change.items():
- <tr>
- <td>${history.name}</td>
- <td>
- %for dataset in datasets:
- ${dataset.name}<br/>
- %endfor
- </td>
- </tr>
- %endfor
- </table>
- </div>
- <p/>
- %endif
- %if cannot_change:
- <div class="errormessage">
- The following datasets cannot be shared with ${email} because you are not authorized to change the permissions on them.
- <p/>
- <table cellpadding="0" cellspacing="8" border="0">
- <tr><th>Histories</th><th>Datasets</th></tr>
- %for history, datasets in cannot_change.items():
- <tr>
- <td>${history.name}</td>
- <td>
- %for dataset in datasets:
- ${dataset.name}<br/>
- %endfor
- </td>
- </tr>
- %endfor
- </table>
- </div>
- <p/>
- %endif
- <div>
- <b>How would you like to proceed?</b>
- <p/>
- %if can_change:
- <input type="radio" name="action" value="public"> Set datasets above to public access
- %if cannot_change:
- (where possible)
- %endif
- <br/>
- <input type="radio" name="action" value="private"> Set datasets above to private access for me and the user(s) with whom I am sharing
- %if cannot_change:
- (where possible)
- %endif
- <br/>
- %endif
- <input type="radio" name="action" value="share"> Share anyway
- %if can_change:
- (don't change any permissions)
- %endif
- <br/>
- <input type="radio" name="action" value="no_share"> Don't share<br/>
- <br/>
- <input type="submit" name="submit" value="Ok"><br/>
- </div>
- </form>
-%endif
+</div>
diff -r 73847f425801 -r 9a71b89082fe test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Mon Jun 01 10:27:04 2009 -0400
+++ b/test/base/twilltestcase.py Fri Jun 05 11:15:25 2009 -0400
@@ -103,41 +103,42 @@
page = self.last_page()
if page.find( 'error' ) > -1:
raise AssertionError('Errors in the history for user %s' % self.user )
-
def check_history_for_string( self, patt ):
"""Looks for 'string' in history page"""
self.home()
self.visit_page( "history" )
for subpatt in patt.split():
tc.find(subpatt)
-
+ self.home()
def clear_history( self ):
"""Empties a history of all datasets"""
self.visit_page( "clear_history" )
self.check_history_for_string( 'Your history is empty' )
-
- def delete_history( self, id=None ):
- """Deletes a history"""
+ self.home()
+ def delete_history( self, id='' ):
+ """Deletes one or more histories"""
history_list = self.get_histories()
self.assertTrue( history_list )
- if id is None:
+ num_deleted = 1
+ if not id:
history = history_list[0]
id = history.get( 'id' )
- id = str( id )
- self.visit_page( "history/list?operation=delete&id=%s" %(id) )
-
+ else:
+ num_deleted = len( id.split( ',' ) )
+ self.visit_page( "history/list?operation=delete&id=%s" % ( id ) )
+ check_str = 'Deleted %d histories' % num_deleted
+ self.check_page_for_string( check_str )
+ self.home()
def get_histories( self ):
"""Returns all histories"""
tree = self.histories_as_xml_tree()
data_list = [ elem for elem in tree.findall("data") ]
return data_list
-
def get_history( self, show_deleted=False ):
"""Returns a history"""
tree = self.history_as_xml_tree( show_deleted=show_deleted )
data_list = [ elem for elem in tree.findall("data") ]
return data_list
-
def history_as_xml_tree( self, show_deleted=False ):
"""Returns a parsed xml object of a history"""
self.home()
@@ -145,7 +146,6 @@
xml = self.last_page()
tree = ElementTree.fromstring(xml)
return tree
-
def histories_as_xml_tree( self ):
"""Returns a parsed xml object of all histories"""
self.home()
@@ -153,97 +153,96 @@
xml = self.last_page()
tree = ElementTree.fromstring(xml)
return tree
-
- def history_options( self ):
+ def history_options( self, check_str='', upload=False ):
"""Mimics user clicking on history options link"""
self.visit_page( "history_options" )
-
- def new_history( self ):
+ if check_str:
+ self.check_page_for_string( check_str )
+ else:
+ self.check_page_for_string( 'Rename</a> current history' )
+ self.check_page_for_string( 'List</a> previously stored histories' )
+ self.check_page_for_string( 'Construct workflow</a> from the current history' )
+ self.check_page_for_string( 'Share</a> current history' )
+ # Tests for changing default history permissions are done in test_security_and_libraries.py
+ self.check_page_for_string( 'Change default permissions</a> for the current history' )
+ self.check_page_for_string( 'Show deleted</a> datasets in history' )
+ self.check_page_for_string( 'Delete</a> current history' )
+ # Need to add a history item in order to create a new empty history
+ try:
+ self.check_page_for_string( 'Create</a> a new empty history' )
+ raise AssertionError, "Incorrectly able to create a new empty history when the current history is empty."
+ except:
+ pass
+ if upload:
+ self.upload_file( '1.bed', dbkey='hg18' )
+ self.home()
+ self.visit_page( "history_options" )
+ self.check_page_for_string( 'Create</a> a new empty history' )
+ self.home()
+ def new_history( self, name=None ):
"""Creates a new, empty history"""
- self.visit_page( "history_new" )
+ if name:
+ self.visit_url( "%s/history_new?name=%s" % ( self.url, str( name ) ) )
+ else:
+ self.visit_url( "%s/history_new" % self.url )
self.check_history_for_string('Your history is empty')
-
- def rename_history( self, id=None, name='NewTestHistory' ):
+ self.home()
+ def rename_history( self, id, old_name, new_name ):
"""Rename an existing history"""
- history_list = self.get_histories()
- self.assertTrue( history_list )
- if id is None: # take last id
- elem = history_list[-1]
- else:
- i = history_list.index( id )
- self.assertTrue( i )
- elem = history_list[i]
- id = elem.get( 'id' )
- self.assertTrue( id )
- old_name = elem.get( 'name' )
- self.assertTrue( old_name )
- id = str( id )
- self.visit_page( "history/rename?id=%s&name=%s" %(id, name) )
- return id, old_name, name
-
+ self.home()
+ self.visit_page( "history/rename?id=%s&name=%s" %( id, new_name ) )
+ check_str = 'History: %s renamed to: %s' % ( old_name, new_name )
+ self.check_page_for_string( check_str )
+ self.home()
def set_history( self ):
"""Sets the history (stores the cookies for this run)"""
if self.history_id:
self.visit_page( "history?id=%s" % self.history_id )
else:
self.new_history()
- def share_history( self, id=None, email='test2(a)bx.psu.edu' ):
- """Share a history with a different user"""
- history_list = self.get_histories()
- self.assertTrue( history_list )
- if id is None: # take last id
- elem = history_list[-1]
- else:
- i = history_list.index( id )
- self.assertTrue( i )
- elem = history_list[i]
- id = elem.get( 'id' )
- self.assertTrue( id )
- id = str( id )
- name = elem.get( 'name' )
- self.assertTrue( name )
+ self.home()
+ def share_history( self, id, email, check_str, check_str2='', action=None, action_check_str=None ):
+ """Share a history different users"""
self.visit_url( "%s/history/share?id=%s&email=%s&history_share_btn=Submit" % ( self.url, id, email ) )
- self.check_page_for_string( 'History (%s) has been shared with: %s' % ( name, email ) )
- return id, name, email
- def share_history_containing_private_datasets( self, history_id, email='test(a)bx.psu.edu' ):
- """Attempt to share a history containing private datasets with a different user"""
- self.visit_url( "%s/history/share?id=%s&email=%s&history_share_btn=Submit" % ( self.url, history_id, email ) )
- self.last_page()
- self.check_page_for_string( "The history or histories you've chosen to share contain datasets" )
- self.check_page_for_string( "How would you like to proceed?" )
+ self.check_page_for_string( check_str )
+ if check_str2:
+ self.check_page_for_string( check_str2 )
+ if action:
+ # If we have an action, then we are sharing datasets with users that do not have access permissions on them
+ tc.fv( '1', 'action', action )
+ tc.submit( "share_proceed_button" )
+ if action_check_str:
+ self.check_page_for_string( action_check_str )
self.home()
- def make_datasets_public( self, history_id, email='test(a)bx.psu.edu' ):
- """Make private datasets public in order to share a history with a different user"""
- self.visit_url( "%s/history/share?id=%s&email=%s&action=public&submit=Ok" % ( self.url, history_id, email ) )
- self.last_page()
- check_str = "History (Unnamed history) has been shared with: %s" % email
- self.check_page_for_string( check_str )
- self.home()
- def privately_share_dataset( self, history_id, email='test(a)bx.psu.edu' ):
- """Make private datasets public in order to share a history with a different user"""
- self.visit_url( "%s/history/share?id=%s&email=%s&action=private&submit=Ok" % ( self.url, history_id, email ) )
- self.last_page()
- check_str = "History (Unnamed history) has been shared with: %s" % email
- self.check_page_for_string( check_str )
- self.home()
- def switch_history( self, hid=None ):
+ def switch_history( self, id='', name='' ):
"""Switches to a history in the current list of histories"""
data_list = self.get_histories()
self.assertTrue( data_list )
- if hid is None: # take last hid
- elem = data_list[-1]
- hid = elem.get('hid')
- if hid < 0:
- hid = len(data_list) + hid + 1
- hid = str(hid)
- elems = [ elem for elem in data_list if elem.get('hid') == hid ]
- self.assertEqual(len(elems), 1)
- self.visit_page( "history/list?operation=switch&id=%s" % elems[0].get('id') )
-
- def view_stored_histories( self, check_str='' ):
+ if not id:
+ history = data_list[0]
+ id = history.get( 'id' )
+ self.visit_url( "%s/history/list?operation=switch&id=%s" % ( self.url, id ) )
+ if name:
+ self.check_history_for_string( name )
+ self.home()
+ def view_stored_active_histories( self, check_str='' ):
self.visit_page( "history/list" )
+ self.check_page_for_string( 'Stored histories' )
+ self.check_page_for_string( '<input type="checkbox" name="id" value=' )
+ self.check_page_for_string( 'operation=Rename&id' )
+ self.check_page_for_string( 'operation=Switch&id' )
+ self.check_page_for_string( 'operation=Delete&id' )
if check_str:
self.check_page_for_string( check_str )
+ self.home()
+ def view_stored_deleted_histories( self, check_str='' ):
+ self.visit_page( "history/list?f-deleted=True" )
+ self.check_page_for_string( 'Stored histories' )
+ self.check_page_for_string( '<input type="checkbox" name="id" value=' )
+ self.check_page_for_string( 'operation=Undelete&id' )
+ if check_str:
+ self.check_page_for_string( check_str )
+ self.home()
# Functions associated with datasets (history items) and meta data
def get_job_stderr( self, id ):
@@ -299,9 +298,14 @@
self.visit_url( "%s/dataset/undelete?id=%s" % ( self.url, elems[0].get( 'id' ) ) )
if check_str:
self.check_page_for_string( check_str )
+ def display_history_item( self, id, check_str='' ):
+ """Displays a history item - simulates eye icon click"""
+ self.visit_url( '%s/datasets/%s/display/index' % ( self.url, id ) )
+ if check_str:
+ self.check_page_for_string( check_str )
+ self.home()
def edit_metadata( self, hid=None, form_no=0, **kwd ):
- """
- Edits the metadata associated with a history item."""
+ """Edits the metadata associated with a history item."""
# There are currently 4 forms on the edit page:
# 0. name="edit_attributes"
# 1. name="auto_detect"
@@ -324,7 +328,6 @@
button = "change" #Change data type form
if kwd:
self.submit_form( form_no=form_no, button=button, **kwd)
-
def get_dataset_ids_in_history( self ):
"""Returns the ids of datasets in a history"""
data_list = self.get_history()
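Several of the updated tests use a try/raise/except-pass block to assert that a string is absent from a page (for example the 'Create</a> a new empty history' check in history_options() above). The bare except also swallows the AssertionError raised on failure, so such a negative check can never fail. Below is a hedged sketch of an explicit helper, assuming only that check_page_for_string() raises when the pattern is missing; it is not part of the patch.

    # Hedged sketch of an explicit negative page check for TwillTestCase, assuming
    # check_page_for_string() raises when the pattern is not on the last page.
    # This avoids the try/raise/except-pass idiom, whose bare except also catches
    # the AssertionError that is meant to signal failure.
    def check_page_for_absent_string(self, patt):
        """Fail if 'patt' IS present on the last rendered page."""
        found = True
        try:
            self.check_page_for_string(patt)
        except Exception:
            found = False
        if found:
            raise AssertionError("Pattern '%s' unexpectedly found on page" % patt)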
diff -r 73847f425801 -r 9a71b89082fe test/functional/test_history_functions.py
--- a/test/functional/test_history_functions.py Mon Jun 01 10:27:04 2009 -0400
+++ b/test/functional/test_history_functions.py Fri Jun 05 11:15:25 2009 -0400
@@ -4,74 +4,367 @@
class TestHistory( TwillTestCase ):
- def test_00_history_options_when_not_logged_in( self ):
+ def test_000_history_options_when_not_logged_in( self ):
"""Testing history options when not logged in"""
- self.logout() #Ensure we are not logged in
- self.history_options()
- self.check_page_for_string( 'logged in</a> to store or switch histories.' )
- self.login( email='test2(a)bx.psu.edu' ) #Just to make sure we have created this account since it is used to share histories
self.logout()
- def test_05_new_history_then_delete( self ):
- """Testing creating a new history and then deleting it"""
- self.login()
- self.new_history()
- if len(self.get_history()) > 0:
- raise AssertionError("test_new_history_then_delete failed")
- self.delete_history()
- self.check_page_for_string( 'Deleted 1 histories' )
- def test_10_history_options_when_logged_in( self ):
+ check_str = 'logged in</a> to store or switch histories.'
+ self.history_options( check_str=check_str )
+ # Make sure we have created the following accounts
+ self.login( email='test1(a)bx.psu.edu' )
+ global regular_user1
+ regular_user1 = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ).first()
+ assert regular_user1 is not None, 'Problem retrieving user with email "test1(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu' )
+ global regular_user2
+ regular_user2 = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test2(a)bx.psu.edu' ).first()
+ assert regular_user2 is not None, 'Problem retrieving user with email "test2(a)bx.psu.edu" from the database'
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu' )
+ global regular_user3
+ regular_user3 = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test3(a)bx.psu.edu' ).first()
+ assert regular_user3 is not None, 'Problem retrieving user with email "test3(a)bx.psu.edu" from the database'
+ self.logout()
+ def test_005_deleting_histories( self ):
+ """Testing deleting histories"""
+ self.login( email='test(a)bx.psu.edu' )
+ global admin_user
+ admin_user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ).first()
+ assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
+ # Get the admin_user private role
+ global admin_user_private_role
+ admin_user_private_role = None
+ for role in admin_user.all_roles():
+ if role.name == admin_user.email and role.description == 'Private Role for %s' % admin_user.email:
+ admin_user_private_role = role
+ break
+ if not admin_user_private_role:
+ raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
+ latest_history = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert latest_history is not None, "Problem retrieving latest history from database"
+ assert not latest_history.deleted, "After login, associated history is deleted"
+ self.delete_history( str( latest_history.id ) )
+ latest_history.refresh()
+ if not latest_history.deleted:
+ raise AssertionError, "Problem deleting history id %d" % latest_history.id
+ # We'll now test deleting a list of histories
+ # After deleting the current history, a new one should have been created
+ global history1
+ history1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history1 is not None, "Problem retrieving history1 from database"
+ self.upload_file( '1.bed', dbkey='hg18' )
+ self.new_history( name='history2' )
+ global history2
+ history2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history2 is not None, "Problem retrieving history2 from database"
+ self.upload_file( '2.bed', dbkey='hg18' )
+ ids = '%s,%s' % ( str( history1.id ), str( history2.id ) )
+ self.delete_history( ids )
+ try:
+ self.view_stored_active_histories( check_str=history1.name )
+ raise AssertionError, "History %s is displayed in the active history list after it was deleted" % history1.name
+ except:
+ pass
+ self.view_stored_deleted_histories( check_str=history1.name )
+ try:
+ self.view_stored_active_histories( check_str=history2.name )
+ raise AssertionError, "History %s is displayed in the active history list after it was deleted" % history2.name
+ except:
+ pass
+ self.view_stored_deleted_histories( check_str=history2.name )
+ history1.refresh()
+ if not history1.deleted:
+ raise AssertionError, "Problem deleting history id %d" % history1.id
+ if not history1.default_permissions:
+ raise AssertionError, "Default permissions were incorrectly deleted from the db for history id %d when it was deleted" % history1.id
+ history2.refresh()
+ if not history2.deleted:
+ raise AssertionError, "Problem deleting history id %d" % history2.id
+ if not history2.default_permissions:
+ raise AssertionError, "Default permissions were incorrectly deleted from the db for history id %d when it was deleted" % history2.id
+ def test_010_history_options_when_logged_in( self ):
"""Testing history options when logged in"""
self.history_options()
- self.check_page_for_string( 'Rename</a> current history' )
- self.check_page_for_string( 'List</a> previously stored histories' )
- self.check_page_for_string( 'Construct workflow</a> from the current history' )
- self.check_page_for_string( 'Share</a> current history' )
- # Tests for changing default history permissions are done in test_security_and_libraries.py
- self.check_page_for_string( 'Change default permissions</a> for the current history' )
- self.check_page_for_string( 'Show deleted</a> datasets in history' )
- self.check_page_for_string( 'Delete</a> current history' )
- # Need to add a history item in order to create a new empty history
+ def test_015_history_rename( self ):
+ """Testing renaming a history"""
+ global history3
+ history3 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history3 is not None, "Problem retrieving history3 from database"
+ if history3.deleted:
+ raise AssertionError, "History id %d deleted when it should not be" % latest_history.id
+ self.rename_history( str( history3.id ), history3.name, new_name='history3' )
+ def test_020_history_list( self ):
+ """Testing viewing previously stored histories"""
+ self.view_stored_active_histories()
+ def test_025_history_share( self ):
+ """Testing sharing histories containing only public datasets"""
+ history3.refresh()
+ self.upload_file( '1.bed', dbkey='hg18' )
+ # Test sharing a history with yourself
+ check_str = "You can't send histories to yourself."
+ self.share_history( str( history3.id ), 'test(a)bx.psu.edu', check_str )
+ # Share a history with 1 valid user
+ name = history3.name
+ email = 'test1(a)bx.psu.edu'
+ check_str = 'Histories (%s) have been shared with: %s' % ( name, email )
+ self.share_history( str( history3.id ), email, check_str )
+ # We need to keep track of all shared histories so they can later be deleted
+ global history3_copy1
+ history3_copy1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history3_copy1 is not None, "Problem retrieving history3_copy1 from database"
+ self.logout()
+ self.login( email='test1(a)bx.psu.edu' )
+ check_str = '%s from test(a)bx.psu.edu' % history3.name
+ self.view_stored_active_histories( check_str=check_str )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ # Need to delete history3_copy1
+ history3_copy1.deleted = True
+ history3_copy1.flush()
+ # Test sharing a history with an invalid user
+ email = 'jack(a)jill.com'
+ check_str = '%s is not a valid Galaxy user.' % email
+ self.share_history( str( history3.id ), email, check_str )
+ # Test sharing multiple histories with multiple users
+ self.new_history()
+ global history4
+ history4 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history4 is not None, "Problem retrieving history4 from database"
+ self.rename_history( str( history4.id ), history4.name, new_name='history4' )
+ history4.refresh()
+ self.upload_file( '2.bed', dbkey='hg18' )
+ id = '%s,%s' % ( str( history3.id ), str( history4.id ) )
+ name = '%s,%s' % ( history3.name, history4.name )
+ email = 'test2@bx.psu.edu,test3@bx.psu.edu'
+ check_str = 'Histories (%s) have been shared with: %s' % ( name, email )
+ self.share_history( id, email, check_str )
+ # We need to keep track of all shared histories so they can later be deleted
+ history3_copy_name = "%s from %s" % ( history3.name, admin_user.email )
+ history3_copies = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history3_copy_name,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .limit( 2 ) \
+ .all()
+ history3_copy2 = history3_copies[0]
+ history3_copy3 = history3_copies[1]
+ history4_copy_name = "%s from %s" % ( history4.name, admin_user.email )
+ history4_copies = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history4_copy_name,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .limit( 2 ) \
+ .all()
+ history4_copy1 = history4_copies[0]
+ history4_copy2 = history4_copies[1]
+ self.logout()
+ self.login( email='test2(a)bx.psu.edu' )
+ check_str = '%s from %s' % ( history3.name, admin_user.email )
+ self.view_stored_active_histories( check_str=check_str )
+ check_str = '%s from %s' % ( history4.name, admin_user.email )
+ self.view_stored_active_histories( check_str=check_str )
+ self.logout()
+ self.login( email='test3(a)bx.psu.edu' )
+ check_str = '%s from %s' % ( history3.name, admin_user.email )
+ self.view_stored_active_histories( check_str=check_str )
+ check_str = '%s from %s' % ( history4.name, admin_user.email )
+ self.view_stored_active_histories( check_str=check_str )
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ # Need to delete the copied histories, so later test runs are valid
+ history3_copy2.deleted = True
+ history3_copy2.flush()
+ history3_copy3.deleted = True
+ history3_copy3.flush()
+ history4_copy1.deleted = True
+ history4_copy1.flush()
+ history4_copy2.deleted = True
+ history4_copy2.flush()
+ def test_030_change_permissions_on_current_history( self ):
+ """Testing changing permissions on the current history"""
+ global history5
+ history5 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history5 is not None, "Problem retrieving history5 from database"
+ self.rename_history( str( history5.id ), history5.name, new_name='history5' )
+ history5.refresh()
+ # Due to the limitations of twill ( not functional with the permissions forms ), we're forced
+ # to do this manually. At this point, we just want to restrict the access permission on history5
+ # to the admin_user
+ global access_action
+ access_action = galaxy.model.Dataset.permitted_actions.DATASET_ACCESS.action
+ dhp = galaxy.model.DefaultHistoryPermissions( history5, access_action, admin_user_private_role )
+ dhp.flush()
+ self.upload_file( '1.bed', dbkey='hg18' )
+ history5_dataset1 = None
+ for hda in history5.datasets:
+ if hda.name == '1.bed':
+ history5_dataset1 = hda.dataset
+ assert history5_dataset1 is not None, "Problem retrieving history5_dataset1 from the database"
+ # The permissions on the dataset should be restricted from sharing with anyone due to the
+ # inherited history permissions
+ restricted = False
+ for action in history5_dataset1.actions:
+ if action.action == access_action:
+ restricted = True
+ break
+ if not restricted:
+ raise AssertionError, "The 'access' permission is not set for history5_dataset1.actions"
+ def test_035_sharing_history_by_making_datasets_public( self ):
+ """Testing sharing a restricted history by making the datasets public"""
+ check_str = 'The following datasets can be shared with %s by updating their permissions' % regular_user1.email
+ action_check_str = 'Histories (%s) have been shared with: %s' % ( history5.name, regular_user1.email )
+ self.share_history( str( history5.id ), regular_user1.email, check_str, action='public', action_check_str=action_check_str )
+ history5_copy1 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history5_copy1 is not None, "Problem retrieving history5_copy1 from database"
+ self.logout()
+ self.login( email=regular_user1.email )
+ self.visit_url( "%s/history/list" % self.url )
+ self.check_page_for_string( history5_copy1.name )
+ # Need to delete history5_copy1 on the history list page for regular_user1
+ history5_copy1.deleted = True
+ history5_copy1.flush()
+ self.logout()
+ self.login( email=admin_user.email )
+ def test_040_sharing_history_by_making_new_sharing_role( self ):
+ """Testing sharing a restricted history by associating a new sharing role with protected datasets"""
+ self.switch_history( id=str( history5.id ), name=history5.name )
+ # At this point, history5 should have 1 item, 1.bed, which is public. We'll add another
+ # item which will be private to admin_user due to the permissions on history5
+ self.upload_file( '2.bed', dbkey='hg18' )
+ check_str = 'The following datasets can be shared with %s with no changes' % regular_user1.email
+ check_str2 = 'The following datasets can be shared with %s by updating their permissions' % regular_user1.email
+ action_check_str = 'Histories (%s) have been shared with: %s' % ( history5.name, regular_user1.email )
+ self.share_history( str( history5.id ),
+ regular_user1.email,
+ check_str,
+ check_str2=check_str2,
+ action='private',
+ action_check_str=action_check_str )
+ history5_copy2 = galaxy.model.History.query().order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ assert history5_copy2 is not None, "Problem retrieving history5_copy2 from database"
+ # We should now have a new sharing role
+ global sharing_role
+ role_name = 'Sharing role for: %s, %s' % ( admin_user.email, regular_user1.email )
+ sharing_role = galaxy.model.Role.filter( galaxy.model.Role.table.c.name==role_name ).first()
+ assert sharing_role is not None, "Problem retrieving sharing_role from the database"
+ self.logout()
+ self.login( email=regular_user1.email )
+ self.visit_url( "%s/history/list" % self.url )
+ self.check_page_for_string( history5_copy2.name )
+ self.switch_history( id=str( history5_copy2.id ), name=history5_copy2.name )
+ # Make sure both datasets are in the history
+ self.check_history_for_string( '1.bed' )
+ self.check_history_for_string( '2.bed' )
+ # Get both new hdas from the db that were created for the shared history
+ hda_1_bed = galaxy.model.HistoryDatasetAssociation \
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy2.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
+ .first()
+ assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
+ hda_2_bed = galaxy.model.HistoryDatasetAssociation \
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy2.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
+ .first()
+ assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
+ # Make sure 1.bed is accessible since it is public
+ self.display_history_item( str( hda_1_bed.id ), check_str='chr1' )
+ # Make sure 2.bed is accessible since it is associated with a sharing role
+ self.display_history_item( str( hda_2_bed.id ), check_str='chr1' )
+ # Need to delete history5_copy2 on the history list page for regular_user1
+ history5_copy2.deleted = True
+ history5_copy2.flush()
+ self.logout()
+ self.login( email=admin_user.email )
+ def test_045_sharing_private_history_with_multiple_users_by_changing_no_permissions( self ):
+ """Testing sharing a restricted history with multiple users, making no permission changes"""
+ # History5 can be shared with any user, since it contains a public dataset. However, only
+ # regular_user1 should be able to access history5's 2.bed dataset since it is associated with a
+ # sharing role, and regular_user2 should be able to access history5's 1.bed, but not 2.bed even
+ # though they can see it in their shared history.
+ self.switch_history( id=str( history5.id ), name=history5.name )
+ email = '%s,%s' % ( regular_user1.email, regular_user2.email )
+ check_str = 'The following datasets can be shared with %s with no changes' % email
+ check_str2 = 'The following datasets can be shared with %s by updating their permissions' % email
+ action_check_str = 'Histories (%s) have been shared with: %s' % ( history5.name, regular_user1.email )
+ self.share_history( str( history5.id ),
+ email,
+ check_str,
+ check_str2=check_str2,
+ action='share',
+ action_check_str=action_check_str )
+ # We need to keep track of all shared histories so they can later be deleted
+ history5_copy_name = "%s from %s" % ( history5.name, admin_user.email )
+ history5_copies = galaxy.model.History \
+ .filter( and_( galaxy.model.History.table.c.name==history5_copy_name,
+ galaxy.model.History.table.c.deleted==False ) ) \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ) \
+ .limit( 2 ) \
+ .all()
+ history5_copy3 = history5_copies[0]
+ assert history5_copy3 is not None, "Problem retrieving history5_copy3 from database"
+ history5_copy4 = history5_copies[1]
+ assert history5_copy4 is not None, "Problem retrieving history5_copy4 from database"
+ # Make sure test1(a)bx.psu.edu received a copy of history5 with both datasets accessible
+ self.login( email=regular_user1.email )
+ check_str = '%s from %s' % ( history5.name, admin_user.email )
+ self.view_stored_active_histories( check_str=check_str )
+ self.switch_history( id=str( history5_copy3.id ), name=history5_copy3.name )
+ self.check_history_for_string( '1.bed' )
+ self.check_history_for_string( '2.bed' )
+ self.logout()
+ # Make sure test2(a)bx.psu.edu received a copy of history5, with only 1.bed accessible
+ self.login( email=regular_user2.email )
+ self.view_stored_active_histories( check_str=check_str )
+ self.switch_history( id=str( history5_copy4.id ), name=history5_copy4.name )
+ self.check_history_for_string( '1.bed' )
+ self.check_history_for_string( '2.bed' )
+ # Get both new hdas from the db that were created for the shared history
+ hda_1_bed = galaxy.model.HistoryDatasetAssociation \
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy4.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='1.bed' ) ) \
+ .first()
+ assert hda_1_bed is not None, "Problem retrieving hda_1_bed from database"
+ hda_2_bed = galaxy.model.HistoryDatasetAssociation \
+ .filter( and_( galaxy.model.HistoryDatasetAssociation.table.c.history_id==history5_copy4.id,
+ galaxy.model.HistoryDatasetAssociation.table.c.name=='2.bed' ) ) \
+ .first()
+ assert hda_2_bed is not None, "Problem retrieving hda_2_bed from database"
+ # Make sure 1.bed is accessible since it is public
+ self.display_history_item( str( hda_1_bed.id ), check_str='chr1' )
+ # Make sure 2.bed is not accessible since it is protected
try:
- self.check_page_for_string( 'Create</a> a new empty history' )
- raise AssertionError, "Incorrectly able to create a new empty history when the current history is empty."
+ self.display_history_item( str( hda_2_bed.id ), check_str='chr1' )
+ raise AssertionError, "History item 2.bed is accessible by user %s when is should not be" % regular_user2.email
except:
pass
- self.upload_file( '1.bed', dbkey='hg18' )
- self.history_options()
- self.check_page_for_string( 'Create</a> a new empty history' )
- def test_15_history_rename( self ):
- """Testing renaming a history"""
- id, old_name, new_name = self.rename_history()
- self.check_page_for_string( 'History: %s renamed to: %s' %(old_name, new_name) )
- def test_20_history_list( self ):
- """Testing viewing previously stored histories"""
- self.view_stored_histories()
- self.check_page_for_string( 'Stored histories' )
- self.check_page_for_string( '<input type="checkbox" name="id" value=' )
- self.check_page_for_string( 'operation=Rename&id' )
- self.check_page_for_string( 'operation=Switch&id' )
- self.check_page_for_string( 'operation=Delete&id' )
- def test_25_history_share( self ):
- """Testing sharing a history with another user"""
- self.upload_file('1.bed', dbkey='hg18')
- id, name, email = self.share_history()
- self.logout()
- self.login( email=email )
- self.home()
- check_str = 'Unnamed history from test(a)bx.psu.edu'
- self.view_stored_histories( check_str=check_str )
- histories = self.get_histories()
- for history in histories:
- if history.get( 'name' ) == 'Unnamed history from test(a)bx.psu.edu':
- id = history.get( 'id' )
- break
- self.assertTrue( id )
- self.delete_history( id )
+ self.check_history_for_string( 'You do not have permission to view this dataset' )
self.logout()
self.login( email='test(a)bx.psu.edu' )
- def test_30_history_show_and_hide_deleted_datasets( self ):
+ # Need to delete the copied histories, so later test runs are valid
+ history5_copy3.deleted = True
+ history5_copy3.flush()
+ history5_copy4.deleted = True
+ history5_copy4.flush()
+ def test_050_sharing_private_history_by_choosing_to_not_share( self ):
+ """Testing sharing a restricted history with multiple users by choosing not to share"""
+ self.switch_history( id=str( history5.id ), name=history5.name )
+ email = '%s,%s' % ( regular_user1.email, regular_user2.email )
+ check_str = 'The following datasets can be shared with %s with no changes' % email
+ check_str2 = 'The following datasets can be shared with %s by updating their permissions' % email
+ action_check_str = 'History Options'
+ self.share_history( str( history5.id ),
+ email,
+ check_str,
+ check_str2=check_str2,
+ action='no_share' )
+ def test_055_history_show_and_hide_deleted_datasets( self ):
"""Testing displaying deleted history items"""
- self.new_history()
+ self.new_history( name='temp_history1' )
self.upload_file('1.bed', dbkey='hg18')
latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
.order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
@@ -85,9 +378,9 @@
self.home()
self.visit_url( "%s/history/?show_deleted=False" % self.url )
self.check_page_for_string( 'Your history is empty' )
- def test_35_deleting_and_undeleting_history_items( self ):
+ def test_060_deleting_and_undeleting_history_items( self ):
"""Testing deleting and un-deleting history items"""
- self.new_history()
+ self.new_history( name='temp_history2' )
# Add a new history item
self.upload_file( '1.bed', dbkey='hg15' )
self.home()
@@ -110,6 +403,8 @@
self.visit_url( "%s/history/?show_deleted=False" % self.url )
self.check_page_for_string( '1.bed' )
self.check_page_for_string( 'hg15' )
- def test_9999_clean_up( self ):
- self.delete_history()
- self.logout()
+ def test_065_reset_data_for_later_test_runs( self ):
+ """Reseting data to enable later test runs to pass"""
+ self.delete_history( id=str( history3.id ) )
+ self.delete_history( id=str( history4.id ) )
+ self.delete_history( id=str( history5.id ) )
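Both functional test modules look up the automatically created sharing role by its literal name, 'Sharing role for: <email>, <email>', but they assume different email orderings (the admin first in one module, the regular user first in the other). If the ordering is not guaranteed, a lookup can simply try both permutations; the helper below is a hedged, Galaxy-free sketch of that, assuming only the name format used in the tests above.

    # Hedged helper: candidate names for the auto-created sharing role between two
    # users, assuming the 'Sharing role for: a, b' format used in the tests above.
    def sharing_role_name_candidates(email_a, email_b):
        return ['Sharing role for: %s, %s' % (email_a, email_b),
                'Sharing role for: %s, %s' % (email_b, email_a)]

    if __name__ == "__main__":
        print(sharing_role_name_candidates('test@bx.psu.edu', 'test1@bx.psu.edu'))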
diff -r 73847f425801 -r 9a71b89082fe test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Mon Jun 01 10:27:04 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Fri Jun 05 11:15:25 2009 -0400
@@ -40,7 +40,7 @@
global admin_user
admin_user = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test(a)bx.psu.edu' ).first()
assert admin_user is not None, 'Problem retrieving user with email "test(a)bx.psu.edu" from the database'
- # Get the admin user's privat role for later use
+ # Get the admin user's private role for later use
global admin_user_private_role
admin_user_private_role = None
for role in admin_user.all_roles():
@@ -136,7 +136,7 @@
dps.append( dp.action )
# Sort actions for later comparison
dps.sort()
- # Compare DatasetPermissionss with permissions_in - should be the same
+ # Compare DatasetPermissions with permissions_in - should be the same
if dps != actions_in:
raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from changed default permissions "%s"' \
% ( str( dps ), latest_dataset.id, str( actions_in ) ) )
@@ -145,14 +145,26 @@
raise AssertionError( 'DatasetPermissionss "%s" for dataset id %d differ from DefaultHistoryPermissions "%s" for history id %d' \
% ( str( dps ), latest_dataset.id, str( dhps ), latest_history.id ) )
# Since the dataset in the history is now private, we can test sharing with another user
- self.share_history_containing_private_datasets( str( latest_history.id ), email=admin_user.email )
# Test making the dataset in the history public
- self.make_datasets_public( str( latest_history.id ), email=admin_user.email )
+ check_str = 'The following datasets can be shared with %s by updating their permissions' % admin_user.email
+ action_check_str = 'Histories (%s) have been shared with: %s' % ( latest_history.name, admin_user.email )
+ self.share_history( str( latest_history.id ),
+ admin_user.email,
+ check_str,
+ action='public',
+ action_check_str=action_check_str )
# Add another dataset to the history, it should be private since that is now our default
self.upload_file( '2.bed' )
- self.share_history_containing_private_datasets( str( latest_history.id ), email=admin_user.email )
- # Test creating a new sharing role for sharing the private datasets
- self.privately_share_dataset( str( latest_history.id ), email=admin_user.email )
+ # Test creating a new sharing role for the private dataset
+ check_str = 'The following datasets can be shared with %s with no changes' % admin_user.email
+ check_str2 = 'The following datasets can be shared with %s by updating their permissions' % admin_user.email
+ action_check_str = 'Histories (%s) have been shared with: %s' % ( latest_history.name, admin_user.email )
+ self.share_history( str( latest_history.id ),
+ admin_user.email,
+ check_str,
+ check_str2=check_str2,
+ action='private',
+ action_check_str=action_check_str )
role_type = 'sharing'
role_name = 'Sharing role for: %s, %s' % ( regular_user1.email, admin_user.email )
global sharing_role
diff -r 73847f425801 -r 9a71b89082fe tool_conf.xml.sample
--- a/tool_conf.xml.sample Mon Jun 01 10:27:04 2009 -0400
+++ b/tool_conf.xml.sample Fri Jun 05 11:15:25 2009 -0400
@@ -13,6 +13,7 @@
<tool file="data_source/wormbase_test.xml" />
<tool file="data_source/flymine.xml" />
<tool file="data_source/flymine_test.xml" />
+ <tool file="data_source/eupathdb.xml" />
<tool file="data_source/encode_db.xml" />
<tool file="data_source/epigraph_import.xml" />
<tool file="data_source/epigraph_import_test.xml" />
09 Jun '09
details: http://www.bx.psu.edu/hg/galaxy/rev/1a24a530a3ae
changeset: 2431:1a24a530a3ae
user: Greg Von Kuster <greg(a)bx.psu.edu>
date: Mon Jun 08 10:31:59 2009 -0400
description:
Temporarily eliminate access to templates until new forms are finished, and more fixes for functional tests.
7 file(s) affected in this change:
lib/galaxy/web/controllers/admin.py
templates/admin/library/browse_library.mako
templates/library/browse_library.mako
test/base/twilltestcase.py
test/functional/test_DNAse_flanked_genes.py
test/functional/test_metadata_editing.py
test/functional/test_security_and_libraries.py
diffs (1339 lines):
diff -r 373962edbe90 -r 1a24a530a3ae lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py Fri Jun 05 16:48:03 2009 -0400
+++ b/lib/galaxy/web/controllers/admin.py Mon Jun 08 10:31:59 2009 -0400
@@ -553,9 +553,7 @@
# with the user's private role in case we want the ability to unpurge the user
# some time in the future.
# Purging a deleted User deletes all of the following:
- # - DefaultUserPermissions where user_id == User.id EXCEPT FOR THE PRIVATE ROLE
# - History where user_id = User.id
- # - DefaultHistoryPermissions where history_id == History.id EXCEPT FOR THE PRIVATE ROLE
# - HistoryDatasetAssociation where history_id = History.id
# - Dataset where HistoryDatasetAssociation.dataset_id = Dataset.id
# - UserGroupAssociation where user_id == User.id
@@ -568,19 +566,9 @@
msg = "User '%s' has not been deleted, so it cannot be purged." % user.email
trans.response.send_redirect( web.url_for( action='users', msg=util.sanitize_text( msg ), messagetype='error' ) )
private_role = trans.app.security_agent.get_private_user_role( user )
- # Delete DefaultUserPermissions EXCEPT FOR THE PRIVATE ROLE
- for dup in user.default_permissions:
- if dup.role_id != private_role.id:
- dup.delete()
- dup.flush()
# Delete History
for h in user.active_histories:
h.refresh()
- # Delete DefaultHistoryPermissions EXCEPT FOR THE PRIVATE ROLE
- for dp in h.default_permissions:
- if dp.role_id != private_role.id:
- dp.delete()
- dp.flush()
for hda in h.active_datasets:
# Delete HistoryDatasetAssociation
d = trans.app.model.Dataset.get( hda.dataset_id )
diff -r 373962edbe90 -r 1a24a530a3ae templates/admin/library/browse_library.mako
--- a/templates/admin/library/browse_library.mako Fri Jun 05 16:48:03 2009 -0400
+++ b/templates/admin/library/browse_library.mako Mon Jun 08 10:31:59 2009 -0400
@@ -112,13 +112,14 @@
<a class="action-button" href="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=folder.id )}">Add datasets to this folder</a>
<a class="action-button" href="${h.url_for( controller='admin', action='folder', new=True, id=folder.id, library_id=library_id )}">Create a new sub-folder in this folder</a>
<a class="action-button" href="${h.url_for( controller='admin', action='folder', information=True, id=folder.id, library_id=library_id )}">Edit this folder's information</a>
- %if folder.library_folder_info_template_associations:
- <% template = folder.get_library_item_info_templates( template_list=[], restrict=True )[0] %>
- <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library_id, id=template.id, edit_template=True )}">Edit this folder's information template</a>
- %elif not folder.library_folder_info_associations:
- ## Only allow adding a new template to the folder if a previously inherited template has not already been used
- <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library_id, folder_id=folder.id, new_template=True )}">Add an information template to this folder</a>
- %endif
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ ##%if folder.library_folder_info_template_associations:
+ ## <% template = folder.get_library_item_info_templates( template_list=[], restrict=True )[0] %>
+ ## <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library_id, id=template.id, edit_template=True )}">Edit this folder's information template</a>
+ ##%elif not folder.library_folder_info_associations:
+ ## ## Only allow adding a new template to the folder if a previously inherited template has not already been used
+ ## <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library_id, folder_id=folder.id, new_template=True )}">Add an information template to this folder</a>
+ ##%endif
<a class="action-button" href="${h.url_for( controller='admin', action='folder', permissions=True, id=folder.id, library_id=library_id )}">Edit this folder's permissions</a>
<a class="action-button" confirm="Click OK to delete the folder '${folder.name}.'" href="${h.url_for( controller='admin', action='delete_library_item', library_id=library_id, library_item_id=folder.id, library_item_type='folder' )}">Delete this folder and its contents</a>
</div>
@@ -199,21 +200,21 @@
library_item_ids = {}
library_item_ids[ 'library' ] = library.id
%>
-
- <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this library's information</a>
- %if library.library_info_template_associations:
- <% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %>
- <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, id=template.id, edit_template=True )}">Edit this library's information template</a>
- %else:
- <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a>
- %endif
- <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this library's permissions</a>
- <a class="action-button" confirm="Click OK to delete the library named '${library.name}'." href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Delete this library and its contents</a>
- %if show_deleted:
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted library items</a>
- %else:
- <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted library items</a>
- %endif
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ ##<a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, information=True )}">Edit this library's information</a>
+ ##%if library.library_info_template_associations:
+ ## <% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %>
+ ## <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, id=template.id, edit_template=True )}">Edit this library's information template</a>
+ ##%else:
+ ## <a class="action-button" href="${h.url_for( controller='admin', action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a>
+ ##%endif
+ <a class="action-button" href="${h.url_for( controller='admin', action='library', id=library.id, permissions=True )}">Edit this library's permissions</a>
+ <a class="action-button" confirm="Click OK to delete the library named '${library.name}'." href="${h.url_for( controller='admin', action='delete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Delete this library and its contents</a>
+ %if show_deleted:
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=False )}">Hide deleted library items</a>
+ %else:
+ <a class="action-button" href="${h.url_for( controller='admin', action='browse_library', id=library.id, show_deleted=True )}">Show deleted library items</a>
+ %endif
%elif not library.purged:
<a class="action-button" href="${h.url_for( controller='admin', action='undelete_library_item', library_id=library.id, library_item_id=library.id, library_item_type='library' )}">Undelete this library</a>
%endif
diff -r 373962edbe90 -r 1a24a530a3ae templates/library/browse_library.mako
--- a/templates/library/browse_library.mako Fri Jun 05 16:48:03 2009 -0400
+++ b/templates/library/browse_library.mako Mon Jun 08 10:31:59 2009 -0400
@@ -150,17 +150,18 @@
%else:
<a class="action-button" href="${h.url_for( controller='library', action='folder', information=True, id=folder.id, library_id=library_id )}">View this folder's information</a>
%endif
- %if folder.library_folder_info_template_associations:
- %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=folder ):
- <% template = folder.get_library_item_info_templates( template_list=[], restrict=True )[0] %>
- <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, id=template.id, edit_template=True )}">Edit this folder's information template</a>
- %endif
- %elif not folder.library_folder_info_associations:
- ## Only allow adding a new template to the folder if a previously inherited template has not already been used
- %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=folder ):
- <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, folder_id=folder.id, new_template=True )}">Add an information template to this folder</a>
- %endif
- %endif
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ ##%if folder.library_folder_info_template_associations:
+ ## %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=folder ):
+ ## <% template = folder.get_library_item_info_templates( template_list=[], restrict=True )[0] %>
+ ## <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, id=template.id, edit_template=True )}">Edit this folder's information template</a>
+ ## %endif
+ ##%elif not folder.library_folder_info_associations:
+ ## ## Only allow adding a new template to the folder if a previously inherited template has not already been used
+ ## %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=folder ):
+ ## <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, folder_id=folder.id, new_template=True )}">Add an information template to this folder</a>
+ ## %endif
+ ##%endif
%if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=folder ):
<a class="action-button" href="${h.url_for( controller='library', action='folder', permissions=True, id=folder.id, library_id=library_id )}">Edit this folder's permissions</a>
%endif
@@ -229,17 +230,17 @@
%else:
<a class="action-button" href="${h.url_for( controller='library', action='library', information=True, id=library.id )}">View this library's information</a>
%endif
-
- %if library.library_info_template_associations:
- <% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %>
- %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=template ):
- <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, id=template.id, edit_template=True )}">Edit this library's information template</a>
- %endif
- %else:
- %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ):
- <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a>
- %endif
- %endif
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ ##%if library.library_info_template_associations:
+ ## <% template = library.get_library_item_info_templates( template_list=[], restrict=False )[0] %>
+ ## %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MODIFY, library_item=template ):
+ ## <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, id=template.id, edit_template=True )}">Edit this library's information template</a>
+ ## %endif
+ ##%else:
+ ## %if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_ADD, library_item=library ):
+ ## <a class="action-button" href="${h.url_for( controller='library', action='info_template', library_id=library.id, new_template=True )}">Add an information template to this library</a>
+ ## %endif
+ ##%endif
%if trans.app.security_agent.allow_action( trans.user, trans.app.security_agent.permitted_actions.LIBRARY_MANAGE, library_item=library ):
<a class="action-button" href="${h.url_for( controller='library', action='library', permissions=True, id=library.id )}">Edit this library's permissions</a>
%endif
diff -r 373962edbe90 -r 1a24a530a3ae test/base/twilltestcase.py
--- a/test/base/twilltestcase.py Fri Jun 05 16:48:03 2009 -0400
+++ b/test/base/twilltestcase.py Mon Jun 08 10:31:59 2009 -0400
@@ -304,30 +304,49 @@
if check_str:
self.check_page_for_string( check_str )
self.home()
- def edit_metadata( self, hid=None, form_no=0, **kwd ):
- """Edits the metadata associated with a history item."""
- # There are currently 4 forms on the edit page:
- # 0. name="edit_attributes"
- # 1. name="auto_detect"
- # 2. name="convert_data"
- # 3. name="change_datatype"
- data_list = self.get_history()
- self.assertTrue( data_list )
- if hid is None: # take last hid
- elem = data_list[-1]
- hid = int( elem.get('hid') )
- self.assertTrue( hid )
- self.visit_page( 'edit?hid=%d' % hid )
- if form_no == 0:
- button = "save" #Edit Attributes form
- elif form_no == 1:
- button = "detect" #Auto-detect Metadata Attributes
- elif form_no == 2:
- button = "convert_data" #Convert to new format form
- elif form_no == 3:
- button = "change" #Change data type form
- if kwd:
- self.submit_form( form_no=form_no, button=button, **kwd)
+ def edit_hda_attribute_info( self, hda_id, new_name='', new_info='', new_dbkey='', new_startcol='' ):
+ """Edit history_dataset_association attribute information"""
+ self.home()
+ self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.check_page_for_string( 'Edit Attributes' )
+ if new_name:
+ tc.fv( 'edit_attributes', 'name', new_name )
+ if new_info:
+ tc.fv( 'edit_attributes', 'info', new_info )
+ if new_dbkey:
+ tc.fv( 'edit_attributes', 'dbkey', new_dbkey )
+ if new_startcol:
+ tc.fv( 'edit_attributes', 'startCol', new_startcol )
+ tc.submit( 'save' )
+ self.check_page_for_string( 'Attributes updated' )
+ self.home()
+ def auto_detect_metadata( self, hda_id ):
+ """Auto-detect history_dataset_association metadata"""
+ self.home()
+ self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.check_page_for_string( 'This will inspect the dataset and attempt' )
+ tc.fv( 'auto_detect', 'id', hda_id )
+ tc.submit( 'detect' )
+ self.check_page_for_string( 'Attributes updated' )
+ self.home()
+ def convert_format( self, hda_id, target_type ):
+ """Auto-detect history_dataset_association metadata"""
+ self.home()
+ self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.check_page_for_string( 'This will inspect the dataset and attempt' )
+ tc.fv( 'convert_data', 'target_type', target_type )
+ tc.submit( 'convert_data' )
+ self.check_page_for_string( 'The file conversion of Convert BED to GFF on data' )
+ self.home()
+ def change_datatype( self, hda_id, datatype ):
+ """Auto-detect history_dataset_association metadata"""
+ self.home()
+ self.visit_url( "%s/root/edit?id=%s" % ( self.url, hda_id ) )
+ self.check_page_for_string( 'This will change the datatype of the existing dataset but' )
+ tc.fv( 'change_datatype', 'datatype', datatype )
+ tc.submit( 'change' )
+ self.check_page_for_string( 'Edit Attributes' )
+ self.home()
def get_dataset_ids_in_history( self ):
"""Returns the ids of datasets in a history"""
data_list = self.get_history()
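
The hunk above replaces the form-number driven edit_metadata() with four purpose-specific helpers: edit_hda_attribute_info(), auto_detect_metadata(), convert_format() and change_datatype(), each of which visits /root/edit for the given HDA and submits one named form. A hypothetical end-to-end use of the new helpers, modeled on the rewritten test_metadata_editing.py further down in this changeset (it assumes the Galaxy functional-test harness), could look like:

import galaxy.model
from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase

class MetadataHelpersSketch( TwillTestCase ):
    def test_000_edit_dataset_metadata( self ):
        """Hypothetical walk-through of the new metadata-editing helpers"""
        self.login( email='test(a)bx.psu.edu' )
        self.new_history( name='metadata helper sketch' )
        self.upload_file( '1.bed' )
        hda = galaxy.model.HistoryDatasetAssociation.query() \
            .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
        # The committed tests first delete the dataset's DatasetPermissions because
        # twill cannot drive the permissions form on the edit page (see the hunks below).
        for dp in hda.dataset.actions:
            dp.delete()
            dp.flush()
        hda.dataset.refresh()
        hda_id = str( hda.id )
        # 'edit_attributes' form: name, info, dbkey and startCol fields.
        self.edit_hda_attribute_info( hda_id, new_name='Testdata', new_dbkey='hg16', new_startcol='6' )
        # 'auto_detect' form: re-run metadata detection.
        self.auto_detect_metadata( hda_id )
        # 'convert_data' form: convert the BED dataset to GFF.
        self.convert_format( hda_id, target_type='gff' )
        # 'change_datatype' form: change the declared datatype.
        self.change_datatype( hda_id, datatype='gff3' )
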
diff -r 373962edbe90 -r 1a24a530a3ae test/functional/test_DNAse_flanked_genes.py
--- a/test/functional/test_DNAse_flanked_genes.py Fri Jun 05 16:48:03 2009 -0400
+++ b/test/functional/test_DNAse_flanked_genes.py Mon Jun 08 10:31:59 2009 -0400
@@ -1,16 +1,16 @@
+import galaxy.model
+from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
""" A sample analysis"""
-
-
class AnalysisDNAseHSSFlankedGenes( TwillTestCase ):
-
def test_get_DNAseHSS_flanked_genes( self ):
self.login()
-
self.new_history()
-
+ global history1
+ history1 = galaxy.model.History.query() \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
track_params = dict(
db="hg17",
hgta_group="regulation",
@@ -24,14 +24,21 @@
output_params = dict(
fbQual="whole",
)
-
+ # Test 1
self.run_ucsc_main( track_params, output_params )
self.wait()
self.verify_dataset_correctness('DNAseHSS.dat')
-
- self.edit_metadata( name="DNAse HS" )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ # Due to twill not being able to handle the permissions forms, we'll eliminate
+ # DefaultHistoryPermissions prior to uploading a dataset so that the permission
+ # form will not be displayed on the edit attributes page.
+ for dp in latest_hda.dataset.actions:
+ dp.delete()
+ dp.flush()
+ latest_hda.dataset.refresh()
+ self.edit_hda_attribute_info( str( latest_hda.id ), new_name="DNAse HS" )
self.check_metadata_for_string( "DNAse HS" )
-
track_params = dict(
db="hg17",
hgta_group="genes",
@@ -45,35 +52,38 @@
output_params = dict(
fbQual="whole",
)
-
+ # Test 2
self.run_ucsc_main( track_params, output_params )
self.wait()
self.verify_dataset_correctness('hg17chr22KnownGenes.dat')
-
- self.edit_metadata( name="Genes" )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ for dp in latest_hda.dataset.actions:
+ dp.delete()
+ dp.flush()
+ latest_hda.dataset.refresh()
+ self.edit_hda_attribute_info( str( latest_hda.id ), new_name="Genes" )
self.check_metadata_for_string( "Genes" )
-
-
+ # Test 3
self.run_tool( 'get_flanks1', input="2", region="whole", direction="Upstream", offset="0", size="500" )
self.wait()
self.verify_dataset_correctness( 'knownGeneUpstream500Flanks.dat' )
-
- self.edit_metadata( name="Flanks" )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ for dp in latest_hda.dataset.actions:
+ dp.delete()
+ dp.flush()
+ latest_hda.dataset.refresh()
+ self.edit_hda_attribute_info( str( latest_hda.id ), new_name="Flanks" )
self.check_metadata_for_string( "Flanks" )
-
-
+ # Test 4
self.run_tool( 'gops_join_1', input1="3", input2="1", min="1", fill="none" )
self.wait()
- #self.verify_dataset_correctness( 'joinFlanksDNAse.dat' ) #we cannot verify this dataset, because this tool spits out data in a non-deterministic order
-
-
+ # We cannot verify this dataset, because this tool spits out data in a non-deterministic order
+ #self.verify_dataset_correctness( 'joinFlanksDNAse.dat' )
+ # Test 5
self.run_tool( 'Filter1', input="4", cond="c17==1000" )
self.wait()
self.verify_dataset_correctness( 'filteredJoinedFlanksDNAse.dat' )
-
-
self.delete_history()
self.logout()
-
-
-
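
Each of the three edits above repeats the same boilerplate: fetch the newest HistoryDatasetAssociation, delete its DatasetPermissions so the permissions form does not block twill, then refresh the dataset before calling edit_hda_attribute_info(). A small helper could factor that out; the function below is only a sketch of such a refactor (it is not part of the changeset) and assumes the same galaxy.model API used in the hunk.

import galaxy.model
from galaxy.model.orm import *

def strip_dataset_permissions( hda ):
    """Delete all DatasetPermissions on an HDA's dataset (functional-test workaround).

    Twill cannot handle the permissions form shown on the edit-attributes page,
    so the tests remove the permissions before editing metadata.
    """
    for dp in hda.dataset.actions:
        dp.delete()
        dp.flush()
    hda.dataset.refresh()

# Usage inside a test, mirroring the pattern in the hunk above:
#     strip_dataset_permissions( latest_hda )
#     self.edit_hda_attribute_info( str( latest_hda.id ), new_name="DNAse HS" )
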
diff -r 373962edbe90 -r 1a24a530a3ae test/functional/test_metadata_editing.py
--- a/test/functional/test_metadata_editing.py Fri Jun 05 16:48:03 2009 -0400
+++ b/test/functional/test_metadata_editing.py Mon Jun 08 10:31:59 2009 -0400
@@ -1,27 +1,45 @@
+import galaxy.model
+from galaxy.model.orm import *
from base.twilltestcase import TwillTestCase
class TestMetadataEdit( TwillTestCase ):
- def test_00_metadata_edit(self):
+ def test_00_metadata_edit( self ):
"""test_metadata_edit: Testing metadata editing"""
- self.login()
- self.upload_file('1.bed')
- self.check_history_for_string('\? bed')
- self.check_metadata_for_string('1.bed uploaded file unspecified (\?) chromCol value="1" selected endCol value="3" is_strandCol value="true" checked', hid=1)
+ self.logout()
+ self.login( email='test(a)bx.psu.edu' )
+ self.new_history( name='Test Metadata Edit' )
+ global history1
+ history1 = galaxy.model.History.query() \
+ .order_by( desc( galaxy.model.History.table.c.create_time ) ).first()
+ self.upload_file( '1.bed' )
+ latest_hda = galaxy.model.HistoryDatasetAssociation.query() \
+ .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()
+ self.home()
+ # Due to twill not being able to handle the permissions forms, we'll eliminate
+ # DefaultHistoryPermissions prior to uploading a dataset so that the permission
+ # form will not be displayed on the edit attributes page.
+ for dp in latest_hda.dataset.actions:
+ dp.delete()
+ dp.flush()
+ latest_hda.dataset.refresh()
+ self.check_history_for_string( '1.bed' )
+ self.check_metadata_for_string( '1.bed uploaded file unspecified (\?) chromCol value="1" selected endCol value="3" is_strandCol value="true" checked', hid=1 )
"""test editing attributes"""
- self.edit_metadata(hid=1, form_no=0, name='Testdata', info="Uploaded my file", dbkey='hg16', startCol='6')
- self.check_metadata_for_string('Testdata bed Uploaded my file hg16 "bed" selected="yes" "startCol" value="6" selected', hid=1)
+ self.edit_hda_attribute_info( hda_id=str( latest_hda.id ),
+ new_name='Testdata',
+ new_info="Uploaded my file",
+ new_dbkey='hg16',
+ new_startcol='6' )
+ self.check_metadata_for_string( 'Testdata bed Uploaded my file hg16 "bed" selected="yes" "startCol" value="6" selected', hid=1 )
"""test Auto-detecting attributes"""
- self.edit_metadata(hid=1, form_no=1)
- self.check_metadata_for_string('Testdata bed Uploaded my file hg16 "bed" selected="yes" "startCol" value="2" selected', hid=1)
+ self.auto_detect_metadata( hda_id=str( latest_hda.id ) )
+ self.check_metadata_for_string('Testdata bed Uploaded my file hg16 "bed" selected="yes" "startCol" value="2" selected', hid=1 )
"""test converting formats"""
- self.edit_metadata(hid=1, form_no=2, target_type='gff')
- self.check_history_for_string('hg16 Testdata Convert BED to GFF')
- self.check_metadata_for_string('"gff" selected="yes"', hid=1)
+ self.convert_format( hda_id=str( latest_hda.id ), target_type='gff' )
+ self.check_metadata_for_string( '"gff" selected="yes"', hid=1 )
"""test changing data type"""
- self.edit_metadata(hid=1, form_no=3, datatype='gff3')
- self.check_history_for_string('hg16 Testdata Convert BED to GFF format: gff3')
- self.delete_history_item( 1 )
- def test_9999_clean_up( self ):
- self.delete_history()
+ self.change_datatype( hda_id=str( latest_hda.id ), datatype='gff3' )
+ self.check_metadata_for_string( 'gff3', hid=1 )
+ self.delete_history( id=str( history1.id ) )
self.logout()
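
Both rewritten test scripts above, like the history tests earlier in this mail, look up the newest History or HistoryDatasetAssociation by ordering on create_time and taking the first row. For reference, that idiom could be wrapped in two tiny helpers; this is a sketch only, not something the changeset adds.

import galaxy.model
from galaxy.model.orm import *

def latest_history():
    """Return the most recently created History (the ordering idiom used in the tests)."""
    return galaxy.model.History.query() \
        .order_by( desc( galaxy.model.History.table.c.create_time ) ).first()

def latest_hda():
    """Return the most recently created HistoryDatasetAssociation."""
    return galaxy.model.HistoryDatasetAssociation.query() \
        .order_by( desc( galaxy.model.HistoryDatasetAssociation.table.c.create_time ) ).first()

# e.g. history1 = latest_history() right after self.new_history( name='Test Metadata Edit' )
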
diff -r 373962edbe90 -r 1a24a530a3ae test/functional/test_security_and_libraries.py
--- a/test/functional/test_security_and_libraries.py Fri Jun 05 16:48:03 2009 -0400
+++ b/test/functional/test_security_and_libraries.py Mon Jun 08 10:31:59 2009 -0400
@@ -51,7 +51,7 @@
raise AssertionError( "Private role not found for user '%s'" % admin_user.email )
# Make sure DefaultUserPermissions are correct
if len( admin_user.default_permissions ) > 1:
- raise AssertionError( '%d DefaultUserPermissions were created for %s when their account was created ( should have been 1 )' \
+ raise AssertionError( '%d DefaultUserPermissions associated with user %s ( should be 1 )' \
% ( len( admin_user.default_permissions ), admin_user.email ) )
dup = galaxy.model.DefaultUserPermissions.filter( galaxy.model.DefaultUserPermissions.table.c.user_id==admin_user.id ).first()
if not dup.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
@@ -72,6 +72,8 @@
self.logout()
def test_010_login_as_regular_user1( self ):
"""Testing logging in as regular user test1(a)bx.psu.edu - tests private role creation, changing DefaultHistoryPermissions for new histories, and sharing histories with another user"""
+ # Some of the history related tests here are similar to some tests in the
+ # test_history_functions.py script, so we could potentially eliminate 1 or 2 of them.
self.login( email='test1(a)bx.psu.edu' ) # test1(a)bx.psu.edu is not an admin user
global regular_user1
regular_user1 = galaxy.model.User.filter( galaxy.model.User.table.c.email=='test1(a)bx.psu.edu' ).first()
@@ -93,7 +95,7 @@
latest_dataset = galaxy.model.Dataset.query().order_by( desc( galaxy.model.Dataset.table.c.create_time ) ).first()
# Make sure DatasetPermissions is correct - default is 'manage permissions'
if len( latest_dataset.actions ) > 1:
- raise AssertionError( '%d DatasetPermissionss were created for dataset id %d when it was created ( should have been 1 )' \
+ raise AssertionError( '%d DatasetPermissions were created for dataset id %d when it was created ( should have been 1 )' \
% ( len( latest_dataset.actions ), latest_dataset.id ) )
dp = galaxy.model.DatasetPermissions.filter( galaxy.model.DatasetPermissions.table.c.dataset_id==latest_dataset.id ).first()
if not dp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
@@ -494,97 +496,100 @@
library_one.refresh()
self.rename_library( str( library_one.id ), library_one.name, name=name, description=description )
library_one.refresh()
- def test_075_library_template_features( self ):
- """Testing adding a template to a library, along with template features on the admin side"""
- actions = [ v.action for k, v in galaxy.model.Library.permitted_actions.items() ]
- actions.sort()
- # Add a new information template to the library
- template_name = 'Library Template 1'
- ele_name_0 = 'Foo'
- ele_name_1 = 'Doh'
- self.add_library_info_template( library_one.id, library_one.name, name=template_name, num_fields='2', ele_name_0=ele_name_0, ele_name_1=ele_name_1 )
- self.home()
- self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
- self.check_page_for_string( ele_name_0 )
- self.check_page_for_string( ele_name_1 )
- self.home()
- # Get the template for later testing
- global library_one_template
- library_one_template = galaxy.model.LibraryItemInfoTemplate.query().order_by( desc( galaxy.model.LibraryItemInfoTemplate.table.c.id ) ).first()
- assert library_one_template is not None, 'Problem retrieving LibraryItemInfoTemplate for library named "%s" from the database' % str( library_one.name )
- # Make sure the library permissions were inherited by the template
- template_permissions = galaxy.model.LibraryItemInfoTemplatePermissions \
- .query() \
- .filter( galaxy.model.LibraryItemInfoTemplatePermissions.table.c.library_item_info_template_id == library_one_template.id ) \
- .order_by( desc( galaxy.model.LibraryItemInfoTemplatePermissions.table.c.id ) ) \
- .limit( 3 ) \
- .all()
- template_permissions = [ litp_obj.action for litp_obj in template_permissions ]
- template_permissions.sort()
- assert actions == template_permissions, "Template permissions for template %s not correctly inherited from library %s" \
- % ( library_one_template.name, library_one.name )
- # Make sure that the library permissions were inherited by each of the template elements
- for library_item_info in library_one_template.library_item_infos:
- info_permissions = galaxy.model.LibraryItemInfoPermissions \
- .filter( galaxy.model.LibraryItemInfoPermissions.table.c.library_item_info_id == library_item_info.id ) \
+ """
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ def test_075_library_template_features( self ):
+ Testing adding a template to a library, along with template features on the admin side
+ actions = [ v.action for k, v in galaxy.model.Library.permitted_actions.items() ]
+ actions.sort()
+ # Add a new information template to the library
+ template_name = 'Library Template 1'
+ ele_name_0 = 'Foo'
+ ele_name_1 = 'Doh'
+ self.add_library_info_template( library_one.id, library_one.name, name=template_name, num_fields='2', ele_name_0=ele_name_0, ele_name_1=ele_name_1 )
+ self.home()
+ self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
+ self.check_page_for_string( ele_name_0 )
+ self.check_page_for_string( ele_name_1 )
+ self.home()
+ # Get the template for later testing
+ global library_one_template
+ library_one_template = galaxy.model.LibraryItemInfoTemplate.query().order_by( desc( galaxy.model.LibraryItemInfoTemplate.table.c.id ) ).first()
+ assert library_one_template is not None, 'Problem retrieving LibraryItemInfoTemplate for library named "%s" from the database' % str( library_one.name )
+ # Make sure the library permissions were inherited by the template
+ template_permissions = galaxy.model.LibraryItemInfoTemplatePermissions \
+ .query() \
+ .filter( galaxy.model.LibraryItemInfoTemplatePermissions.table.c.library_item_info_template_id == library_one_template.id ) \
+ .order_by( desc( galaxy.model.LibraryItemInfoTemplatePermissions.table.c.id ) ) \
+ .limit( 3 ) \
.all()
- info_permissions = [ liip_obj.action for liip_obj in info_permissions ]
- info_permissions.sort()
- assert actions == info_permissions, "Permissions for library_item_info id %s not correctly inherited from library %s" \
- % ( library_item_info.id, library_one.name )
- element_ids = []
- if library_one.library_info_associations:
- # We have a set of LibraryItemInfoElements
- last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
- .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
- if not last_library_item_info_element:
- element_ids.append( 0 )
- element_ids.append( 1 )
+ template_permissions = [ litp_obj.action for litp_obj in template_permissions ]
+ template_permissions.sort()
+ assert actions == template_permissions, "Template permissions for template %s not correctly inherited from library %s" \
+ % ( library_one_template.name, library_one.name )
+ # Make sure that the library permissions were inherited by each of the template elements
+ for library_item_info in library_one_template.library_item_infos:
+ info_permissions = galaxy.model.LibraryItemInfoPermissions \
+ .filter( galaxy.model.LibraryItemInfoPermissions.table.c.library_item_info_id == library_item_info.id ) \
+ .all()
+ info_permissions = [ liip_obj.action for liip_obj in info_permissions ]
+ info_permissions.sort()
+ assert actions == info_permissions, "Permissions for library_item_info id %s not correctly inherited from library %s" \
+ % ( library_item_info.id, library_one.name )
+ element_ids = []
+ if library_one.library_info_associations:
+ # We have a set of LibraryItemInfoElements
+ last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
+ .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
+ if not last_library_item_info_element:
+ element_ids.append( 0 )
+ element_ids.append( 1 )
+ else:
+ element_ids.append( last_library_item_info_element.id + 1 )
+ element_ids.append( last_library_item_info_element.id + 2 )
else:
- element_ids.append( last_library_item_info_element.id + 1 )
- element_ids.append( last_library_item_info_element.id + 2 )
- else:
- # We only have a set of LibraryItemInfoTemplateElements
- for ele in library_one_template.elements:
- element_ids.append( ele.id )
- element_ids.sort()
- # Add information to the library using the template
- ele_1_field_name = "info_element_%s_%s" % ( str( library_one_template.id ), str( element_ids[0] ) )
- ele_1_contents = 'hello'
- ele_2_field_name = "info_element_%s_%s" % ( str( library_one_template.id ), str( element_ids[1] ) )
- ele_2_contents = 'world'
- self.edit_library_info( str( library_one.id ), library_one.name,
- ele_1_field_name, ele_1_contents,
- ele_2_field_name, ele_2_contents )
- self.home()
- self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
- self.check_page_for_string( ele_1_contents )
- self.check_page_for_string( ele_2_contents )
- self.home()
- self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
- self.check_page_for_string( ele_1_contents )
- self.check_page_for_string( ele_2_contents )
- # Edit the template
- new_name = 'Library Template 1 renamed'
- new_ele_name_1_field = "element_name_%s" % element_ids[0]
- new_ele_name_1 = 'wind'
- new_ele_desc_1_field = "element_description_%s" % element_ids[0]
- ele_desc_1 = 'This is the wind component'
- new_ele_name_2_field = "element_name_%s" % element_ids[1]
- new_ele_name_2 = 'bag'
- new_ele_desc_2_field = "element_description_%s" % element_ids[1]
- ele_desc_2 = 'This is the bag component'
- self.edit_library_info_template( str( library_one.id ), library_one_template.id, new_name,
- new_ele_name_1_field, new_ele_name_1, new_ele_desc_1_field, ele_desc_1,
- new_ele_name_2_field, new_ele_name_2, new_ele_desc_2_field, ele_desc_2 )
- self.home()
- self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
- self.check_page_for_string( new_ele_name_1 )
- self.check_page_for_string( ele_desc_1 )
- self.check_page_for_string( ele_1_contents )
- self.check_page_for_string( new_ele_name_2 )
- self.check_page_for_string( ele_desc_2 )
- self.check_page_for_string( ele_2_contents )
+ # We only have a set of LibraryItemInfoTemplateElements
+ for ele in library_one_template.elements:
+ element_ids.append( ele.id )
+ element_ids.sort()
+ # Add information to the library using the template
+ ele_1_field_name = "info_element_%s_%s" % ( str( library_one_template.id ), str( element_ids[0] ) )
+ ele_1_contents = 'hello'
+ ele_2_field_name = "info_element_%s_%s" % ( str( library_one_template.id ), str( element_ids[1] ) )
+ ele_2_contents = 'world'
+ self.edit_library_info( str( library_one.id ), library_one.name,
+ ele_1_field_name, ele_1_contents,
+ ele_2_field_name, ele_2_contents )
+ self.home()
+ self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
+ self.check_page_for_string( ele_1_contents )
+ self.check_page_for_string( ele_2_contents )
+ self.home()
+ self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
+ self.check_page_for_string( ele_1_contents )
+ self.check_page_for_string( ele_2_contents )
+ # Edit the template
+ new_name = 'Library Template 1 renamed'
+ new_ele_name_1_field = "element_name_%s" % element_ids[0]
+ new_ele_name_1 = 'wind'
+ new_ele_desc_1_field = "element_description_%s" % element_ids[0]
+ ele_desc_1 = 'This is the wind component'
+ new_ele_name_2_field = "element_name_%s" % element_ids[1]
+ new_ele_name_2 = 'bag'
+ new_ele_desc_2_field = "element_description_%s" % element_ids[1]
+ ele_desc_2 = 'This is the bag component'
+ self.edit_library_info_template( str( library_one.id ), library_one_template.id, new_name,
+ new_ele_name_1_field, new_ele_name_1, new_ele_desc_1_field, ele_desc_1,
+ new_ele_name_2_field, new_ele_name_2, new_ele_desc_2_field, ele_desc_2 )
+ self.home()
+ self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
+ self.check_page_for_string( new_ele_name_1 )
+ self.check_page_for_string( ele_desc_1 )
+ self.check_page_for_string( ele_1_contents )
+ self.check_page_for_string( new_ele_name_2 )
+ self.check_page_for_string( ele_desc_2 )
+ self.check_page_for_string( ele_2_contents )
+ """
def test_080_add_public_dataset_to_root_folder( self ):
"""Testing adding a public dataset to the root folder"""
actions = [ v.action for k, v in galaxy.model.Library.permitted_actions.items() ]
@@ -623,7 +628,9 @@
if not dp.action == galaxy.model.Dataset.permitted_actions.DATASET_MANAGE_PERMISSIONS.action:
raise AssertionError( 'The DatasetPermissions.action for dataset id %d is "%s", but it should be "manage permissions"' \
% ( ldda_one.dataset.id, dp.action ) )
+ ## TODO: temporarily eliminating templates until we have the new forms features done
# Make sure the library template was inherited by the ldda
+ """
self.home()
self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
( self.url, str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ) ) )
@@ -631,6 +638,7 @@
self.check_page_for_string( 'This is the wind component' )
self.check_page_for_string( 'bag' )
self.check_page_for_string( 'This is the bag component' )
+ """
# Make sure other users can access the dataset from the Libraries view
self.logout()
self.login( email=regular_user2.email )
@@ -640,20 +648,23 @@
self.logout()
self.login( email=admin_user.email )
self.home()
- def test_085_editing_dataset_information( self ):
- """Testing editing dataset template element information"""
- # Need the current library_item_info_element.id
- last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
- .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
- global ldda_one_ele_2_field_name
- ldda_one_ele_2_field_name = "info_element_%s" % str( last_library_item_info_element.id )
- ele_2_contents = 'pipe'
- global ldda_one_ele_1_field_name
- ldda_one_ele_1_field_name = "info_element_%s" % ( str( last_library_item_info_element.id - 1 ) )
- ele_1_contents = 'blown'
- self.edit_ldda_template_element_info( str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ),
- ldda_one.name, ldda_one_ele_1_field_name, ele_1_contents, ldda_one_ele_2_field_name, ele_2_contents )
- self.home()
+ """
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ def test_085_editing_dataset_information( self ):
+ Testing editing dataset template element information
+ # Need the current library_item_info_element.id
+ last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
+ .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
+ global ldda_one_ele_2_field_name
+ ldda_one_ele_2_field_name = "info_element_%s" % str( last_library_item_info_element.id )
+ ele_2_contents = 'pipe'
+ global ldda_one_ele_1_field_name
+ ldda_one_ele_1_field_name = "info_element_%s" % ( str( last_library_item_info_element.id - 1 ) )
+ ele_1_contents = 'blown'
+ self.edit_ldda_template_element_info( str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ),
+ ldda_one.name, ldda_one_ele_1_field_name, ele_1_contents, ldda_one_ele_2_field_name, ele_2_contents )
+ self.home()
+ """
def test_090_add_new_folder_to_root_folder( self ):
"""Testing adding a folder to a library root folder"""
root_folder = library_one.root_folder
@@ -669,35 +680,41 @@
self.visit_url( '%s/admin/browse_library?id=%s' % ( self.url, str( library_one.id ) ) )
self.check_page_for_string( name )
self.check_page_for_string( description )
+ ## TODO: temporarily eliminating templates until we have the new forms features done
# Make sure the library template is inherited
+ """
self.home()
self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_one.id ), str( library_one.id ) ) )
self.check_page_for_string( 'wind' )
self.check_page_for_string( 'This is the wind component' )
self.check_page_for_string( 'bag' )
self.check_page_for_string( 'This is the bag component' )
+ """
self.home()
- def test_095_add_folder_template( self ):
- """Testing adding a new folder template to a folder"""
- # Add a new information template to the folder
- template_name = 'Folder Template 1'
- ele_name_0 = 'Fu'
- ele_help_0 = 'This is the Fu component'.replace( ' ', '+' )
- ele_name_1 = 'Bar'
- ele_help_1 = 'This is the Bar component'.replace( ' ', '+' )
- self.home()
- self.add_folder_info_template( str( library_one.id ), library_one.name, str( folder_one.id ), folder_one.name,
- name=template_name, num_fields='2', ele_name_0=ele_name_0, ele_help_0=ele_help_0,
- ele_name_1=ele_name_1, ele_help_1=ele_help_1 )
- self.home()
- self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_one.id ), str( library_one.id ) ) )
- self.check_page_for_string( ele_name_0 )
- check_str = ele_help_0.replace( '+', ' ' )
- self.check_page_for_string( check_str )
- self.check_page_for_string( ele_name_1 )
- check_str = ele_help_1.replace( '+', ' ' )
- self.check_page_for_string( check_str )
- self.home()
+ """
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ def test_095_add_folder_template( self ):
+ Testing adding a new folder template to a folder
+ # Add a new information template to the folder
+ template_name = 'Folder Template 1'
+ ele_name_0 = 'Fu'
+ ele_help_0 = 'This is the Fu component'.replace( ' ', '+' )
+ ele_name_1 = 'Bar'
+ ele_help_1 = 'This is the Bar component'.replace( ' ', '+' )
+ self.home()
+ self.add_folder_info_template( str( library_one.id ), library_one.name, str( folder_one.id ), folder_one.name,
+ name=template_name, num_fields='2', ele_name_0=ele_name_0, ele_help_0=ele_help_0,
+ ele_name_1=ele_name_1, ele_help_1=ele_help_1 )
+ self.home()
+ self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_one.id ), str( library_one.id ) ) )
+ self.check_page_for_string( ele_name_0 )
+ check_str = ele_help_0.replace( '+', ' ' )
+ self.check_page_for_string( check_str )
+ self.check_page_for_string( ele_name_1 )
+ check_str = ele_help_1.replace( '+', ' ' )
+ self.check_page_for_string( check_str )
+ self.home()
+ """
def test_100_add_subfolder_to_folder( self ):
"""Testing adding a folder to a library folder"""
name = "Folder One's Subfolder"
@@ -712,8 +729,10 @@
self.visit_url( '%s/admin/browse_library?id=%s' % ( self.url, str( library_one.id ) ) )
self.check_page_for_string( name )
self.check_page_for_string( description )
+ ## TODO: temporarily eliminating templates until we have the new forms features done
# Make sure the parent folder's template is inherited
self.home()
+ """
self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_one.id ), str( library_one.id ) ) )
self.check_page_for_string( 'Fu' )
self.check_page_for_string( 'This is the Fu component' )
@@ -726,81 +745,84 @@
except:
pass
self.home()
- def test_105_add_template_element( self ):
- """Testing adding a new element to an existing library template"""
- library_one_template.refresh()
- element_ids = []
- for ele in library_one_template.elements:
- element_ids.append( ele.id )
- element_ids.sort()
-
- name = 'Library Template 1 renamed'
- ele_field_name_1 = "element_name_%s" % element_ids[0]
- ele_name_1 = 'wind'
- ele_field_desc_1 = "element_description_%s" % element_ids[0]
- ele_desc_1 = 'This is the wind component'
- ele_field_name_2 = "element_name_%s" % element_ids[1]
- ele_name_2 = 'bag'
- ele_field_desc_2 = "element_description_%s" % element_ids[1]
- ele_desc_2 = 'This is the bag component'
- new_ele_name = 'Fubar'
- new_ele_desc = 'This is the Fubar component'
-
- self.add_library_info_template_element( str( library_one.id ),
- str( library_one_template.id ),
- library_one_template.name,
- ele_field_name_1,
- ele_name_1,
- ele_field_desc_1,
- ele_desc_1,
- ele_field_name_2,
- ele_name_2,
- ele_field_desc_2,
- ele_desc_2,
- new_ele_name=new_ele_name,
- new_ele_desc=new_ele_desc )
- # Make sure the new template element shows up on the existing library info page
- self.home()
- self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
- self.check_page_for_string( library_one.name )
- self.check_page_for_string( library_one.description )
- self.check_page_for_string( 'wind' )
- self.check_page_for_string( 'hello' )
- self.check_page_for_string( 'This is the wind component' )
- self.check_page_for_string( 'bag' )
- self.check_page_for_string( 'world' )
- self.check_page_for_string( 'This is the bag component' )
- self.check_page_for_string( 'Fubar' )
- self.check_page_for_string( 'This is the Fubar component' )
- # Make sure the new template element does not show up on existing info pages for folder_one since it has its own template
- self.home()
- self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_one.id ), str( library_one.id ) ) )
- self.check_page_for_string( 'Fu' )
- self.check_page_for_string( 'This is the Fu component' )
- self.check_page_for_string( 'Bar' )
- self.check_page_for_string( 'This is the Bar component' )
- try:
+ """
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
+ def test_105_add_template_element( self ):
+ Testing adding a new element to an existing library template
+ library_one_template.refresh()
+ element_ids = []
+ for ele in library_one_template.elements:
+ element_ids.append( ele.id )
+ element_ids.sort()
+
+ name = 'Library Template 1 renamed'
+ ele_field_name_1 = "element_name_%s" % element_ids[0]
+ ele_name_1 = 'wind'
+ ele_field_desc_1 = "element_description_%s" % element_ids[0]
+ ele_desc_1 = 'This is the wind component'
+ ele_field_name_2 = "element_name_%s" % element_ids[1]
+ ele_name_2 = 'bag'
+ ele_field_desc_2 = "element_description_%s" % element_ids[1]
+ ele_desc_2 = 'This is the bag component'
+ new_ele_name = 'Fubar'
+ new_ele_desc = 'This is the Fubar component'
+ self.add_library_info_template_element( str( library_one.id ),
+ str( library_one_template.id ),
+ library_one_template.name,
+ ele_field_name_1,
+ ele_name_1,
+ ele_field_desc_1,
+ ele_desc_1,
+ ele_field_name_2,
+ ele_name_2,
+ ele_field_desc_2,
+ ele_desc_2,
+ new_ele_name=new_ele_name,
+ new_ele_desc=new_ele_desc )
+ # Make sure the new template element shows up on the existing library info page
+ self.home()
+ self.visit_url( '%s/admin/library?id=%s&information=True' % ( self.url, str( library_one.id ) ) )
+ self.check_page_for_string( library_one.name )
+ self.check_page_for_string( library_one.description )
+ self.check_page_for_string( 'wind' )
+ self.check_page_for_string( 'hello' )
+ self.check_page_for_string( 'This is the wind component' )
+ self.check_page_for_string( 'bag' )
+ self.check_page_for_string( 'world' )
+ self.check_page_for_string( 'This is the bag component' )
self.check_page_for_string( 'Fubar' )
- raise AssertionError( 'Changed library template inherited by folder "%s" when folder had an associated template of its own' )
- except:
- pass
- # Make sure the new template element shows up on existing info pages for ldda_one since it is contained in the root folder
- self.home()
- self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
- ( self.url, str( library_one.id ), str( folder_one.id ), str( ldda_one.id ) ) )
- # Visiting the above page will have resulted in the creation of a new LibraryItemInfoElement, so we'll retrieve it
- # for later use
- last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
- .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
- global ldda_one_ele_3_field_name
- ldda_one_ele_3_field_name = "info_element_%s" % str( last_library_item_info_element.id )
- self.check_page_for_string( 'wind' )
- self.check_page_for_string( 'This is the wind component' )
- self.check_page_for_string( 'bag' )
- self.check_page_for_string( 'This is the bag component' )
- self.check_page_for_string( 'Fubar' )
- self.check_page_for_string( 'This is the Fubar component' )
- self.home()
+ self.check_page_for_string( 'This is the Fubar component' )
+ # Make sure the new template element does not show up on existing info pages for folder_one since it has its own template
+ self.home()
+ self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_one.id ), str( library_one.id ) ) )
+ self.check_page_for_string( 'Fu' )
+ self.check_page_for_string( 'This is the Fu component' )
+ self.check_page_for_string( 'Bar' )
+ self.check_page_for_string( 'This is the Bar component' )
+ try:
+ self.check_page_for_string( 'Fubar' )
+ raise AssertionError( 'Changed library template inherited by folder "%s" when folder had an associated template of its own' )
+ except:
+ pass
+ # Make sure the new template element shows up on existing info pages for ldda_one since it is contained in the root folder
+ self.home()
+ self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
+ ( self.url, str( library_one.id ), str( folder_one.id ), str( ldda_one.id ) ) )
+ # Visiting the above page will have resulted in the creation of a new LibraryItemInfoElement, so we'll retrieve it
+ # for later use
+ last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
+ .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
+ global ldda_one_ele_3_field_name
+ ldda_one_ele_3_field_name = "info_element_%s" % str( last_library_item_info_element.id )
+ self.check_page_for_string( 'wind' )
+ self.check_page_for_string( 'This is the wind component' )
+ self.check_page_for_string( 'bag' )
+ self.check_page_for_string( 'This is the bag component' )
+ self.check_page_for_string( 'Fubar' )
+ self.check_page_for_string( 'This is the Fubar component' )
+ self.home()
+ """
def test_110_add_2nd_new_folder_to_root_folder( self ):
"""Testing adding a 2nd folder to a library root folder"""
root_folder = library_one.root_folder
@@ -816,7 +838,9 @@
self.visit_url( '%s/admin/browse_library?id=%s' % ( self.url, str( library_one.id ) ) )
self.check_page_for_string( name )
self.check_page_for_string( description )
+ ## TODO: temporarily eliminating templates until we have the new forms features done
# Make sure the changed library template is inherited to the new folder
+ """
self.home()
self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_two.id ), str( library_one.id ) ) )
self.check_page_for_string( 'wind' )
@@ -825,6 +849,7 @@
self.check_page_for_string( 'This is the bag component' )
self.check_page_for_string( 'Fubar' )
self.check_page_for_string( 'This is the Fubar component' )
+ """
self.home()
def test_115_add_public_dataset_to_root_folders_2nd_subfolder( self ):
"""Testing adding a public dataset to the root folder's 2nd sub-folder"""
@@ -848,56 +873,59 @@
self.check_page_for_string( "2.bed" )
self.check_page_for_string( message )
self.check_page_for_string( admin_user.email )
- def test_120_add_template_to_root_folders_2nd_subfolder( self ):
- """Testing adding a template to the root folder's 2nd sub-folder"""
- # Before adding the folder template, the inherited library template should be displayed
- self.home()
- self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
- ( self.url, str( library_one.id ), str( folder_two.id ), str( ldda_two.id ) ) )
- self.check_page_for_string( 'wind' )
- self.check_page_for_string( 'This is the wind component' )
- self.check_page_for_string( 'bag' )
- self.check_page_for_string( 'This is the bag component' )
- self.check_page_for_string( 'Fubar' )
- self.check_page_for_string( 'This is the Fubar component' )
- self.home()
- # Add a new folde template
- template_name = 'Folder 2 Template'
- ele_name_0 = 'kill'
- ele_help_0 = 'This is the kill component'.replace( ' ', '+' )
- ele_name_1 = 'bill'
- ele_help_1 = 'This is the bill component'.replace( ' ', '+' )
- self.home()
- self.add_folder_info_template( str( library_one.id ), library_one.name, str( folder_two.id ), folder_two.name,
- name=template_name, num_fields='2', ele_name_0=ele_name_0, ele_help_0=ele_help_0,
- ele_name_1=ele_name_1, ele_help_1=ele_help_1 )
- # Make sure the new template id displayed on the folder information page
- self.home()
- self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_two.id ), str( library_one.id ) ) )
- self.check_page_for_string( ele_name_0 )
- check_str = ele_help_0.replace( '+', ' ' )
- self.check_page_for_string( check_str )
- self.check_page_for_string( ele_name_1 )
- check_str = ele_help_1.replace( '+', ' ' )
- self.check_page_for_string( check_str )
- # The library dataset ldda_two had previously inherited the library template prior to the new folder template
- # being introduced, so the library template should still be displayed on the ldda information page
- self.home()
- self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
- ( self.url, str( library_one.id ), str( folder_two.id ), str( ldda_two.id ) ) )
- self.check_page_for_string( 'wind' )
- self.check_page_for_string( 'This is the wind component' )
- self.check_page_for_string( 'bag' )
- self.check_page_for_string( 'This is the bag component' )
- self.check_page_for_string( 'Fubar' )
- self.check_page_for_string( 'This is the Fubar component' )
- # Make sure the new folder template is not displayed
- try:
- self.check_page_for_string( 'kill' )
- raise AssertionError( 'New folder template elements incorrectly included in information page for ldda "%s"' % ldda_two.name )
- except:
- pass
- self.home()
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
+ def test_120_add_template_to_root_folders_2nd_subfolder( self ):
+ Testing adding a template to the root folder's 2nd sub-folder
+ # Before adding the folder template, the inherited library template should be displayed
+ self.home()
+ self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
+ ( self.url, str( library_one.id ), str( folder_two.id ), str( ldda_two.id ) ) )
+ self.check_page_for_string( 'wind' )
+ self.check_page_for_string( 'This is the wind component' )
+ self.check_page_for_string( 'bag' )
+ self.check_page_for_string( 'This is the bag component' )
+ self.check_page_for_string( 'Fubar' )
+ self.check_page_for_string( 'This is the Fubar component' )
+ self.home()
+        # Add a new folder template
+ template_name = 'Folder 2 Template'
+ ele_name_0 = 'kill'
+ ele_help_0 = 'This is the kill component'.replace( ' ', '+' )
+ ele_name_1 = 'bill'
+ ele_help_1 = 'This is the bill component'.replace( ' ', '+' )
+ self.home()
+ self.add_folder_info_template( str( library_one.id ), library_one.name, str( folder_two.id ), folder_two.name,
+ name=template_name, num_fields='2', ele_name_0=ele_name_0, ele_help_0=ele_help_0,
+ ele_name_1=ele_name_1, ele_help_1=ele_help_1 )
+        # Make sure the new template is displayed on the folder information page
+ self.home()
+ self.visit_url( '%s/admin/folder?id=%s&library_id=%s&information=True' % ( self.url, str( folder_two.id ), str( library_one.id ) ) )
+ self.check_page_for_string( ele_name_0 )
+ check_str = ele_help_0.replace( '+', ' ' )
+ self.check_page_for_string( check_str )
+ self.check_page_for_string( ele_name_1 )
+ check_str = ele_help_1.replace( '+', ' ' )
+ self.check_page_for_string( check_str )
+ # The library dataset ldda_two had previously inherited the library template prior to the new folder template
+ # being introduced, so the library template should still be displayed on the ldda information page
+ self.home()
+ self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
+ ( self.url, str( library_one.id ), str( folder_two.id ), str( ldda_two.id ) ) )
+ self.check_page_for_string( 'wind' )
+ self.check_page_for_string( 'This is the wind component' )
+ self.check_page_for_string( 'bag' )
+ self.check_page_for_string( 'This is the bag component' )
+ self.check_page_for_string( 'Fubar' )
+ self.check_page_for_string( 'This is the Fubar component' )
+ # Make sure the new folder template is not displayed
+ try:
+ self.check_page_for_string( 'kill' )
+ raise AssertionError( 'New folder template elements incorrectly included in information page for ldda "%s"' % ldda_two.name )
+ except:
+ pass
+ self.home()
+ """
def test_125_add_2nd_public_dataset_to_root_folders_2nd_subfolder( self ):
"""Testing adding a 2nd public dataset to the root folder's 2nd sub-folder"""
actions = [ v.action for k, v in galaxy.model.Library.permitted_actions.items() ]
@@ -920,22 +948,25 @@
self.check_page_for_string( "3.bed" )
self.check_page_for_string( message )
self.check_page_for_string( admin_user.email )
- def test_130_editing_dataset_information_with_new_folder_template( self ):
- """Testing editing dataset template element information with new inherited folder template"""
- # Need the current library_item_info_element.id
- last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
- .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
- # Make sure the changed inherited template is included in the ldda information
- ele_2_field_name = "info_element_%s" % str( last_library_item_info_element.id )
- ele_2_contents = 'II'
- ele_2_help = 'This is the bill component'
- ele_1_field_name = "info_element_%s" % ( str( last_library_item_info_element.id - 1 ) )
- ele_1_contents = 'Volume'
- ele_1_help = 'This is the kill component'
- self.edit_ldda_template_element_info( str( library_one.id ), str( folder_two.id ), str( ldda_three.id ),
- ldda_three.name, ele_1_field_name, ele_1_contents, ele_2_field_name, ele_2_contents,
- ele_1_help=ele_1_help, ele_2_help=ele_2_help )
- self.home()
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
+ def test_130_editing_dataset_information_with_new_folder_template( self ):
+ Testing editing dataset template element information with new inherited folder template
+ # Need the current library_item_info_element.id
+ last_library_item_info_element = galaxy.model.LibraryItemInfoElement.query() \
+ .order_by( desc( galaxy.model.LibraryItemInfoElement.table.c.id ) ).first()
+ # Make sure the changed inherited template is included in the ldda information
+ ele_2_field_name = "info_element_%s" % str( last_library_item_info_element.id )
+ ele_2_contents = 'II'
+ ele_2_help = 'This is the bill component'
+ ele_1_field_name = "info_element_%s" % ( str( last_library_item_info_element.id - 1 ) )
+ ele_1_contents = 'Volume'
+ ele_1_help = 'This is the kill component'
+ self.edit_ldda_template_element_info( str( library_one.id ), str( folder_two.id ), str( ldda_three.id ),
+ ldda_three.name, ele_1_field_name, ele_1_contents, ele_2_field_name, ele_2_contents,
+ ele_1_help=ele_1_help, ele_2_help=ele_2_help )
+ self.home()
+ """
def test_135_add_dataset_with_private_role_restriction_to_folder( self ):
"""Testing adding a dataset with a private role restriction to a folder"""
# Add a dataset restricted by the following:
@@ -954,6 +985,8 @@
message ='This is a test of the fourth dataset uploaded'
ele_name_0 = 'Fu'
ele_name_1 = 'Bar'
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.add_library_dataset( '4.bed',
str( library_one.id ),
str( folder_one.id ),
@@ -965,6 +998,16 @@
root=False,
check_template_str1=ele_name_0,
check_template_str2=ele_name_1 )
+ """
+ self.add_library_dataset( '4.bed',
+ str( library_one.id ),
+ str( folder_one.id ),
+ folder_one.name,
+ file_format='bed',
+ dbkey='hg18',
+ roles=[ str( regular_user1_private_role.id ) ],
+ message=message.replace( ' ', '+' ),
+ root=False )
global ldda_four
ldda_four = galaxy.model.LibraryDatasetDatasetAssociation.query() \
.order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ).first()
@@ -1047,27 +1090,30 @@
self.visit_url( '%s/library/browse_library?id=%s' % ( self.url, str( library_one.id ) ) )
self.check_page_for_string( ldda_four.name )
self.home()
- def test_150_editing_restricted_datasets_information( self ):
- """Testing editing a restricted library dataset's template element information"""
- ele_1_contents = ''
- ele_2_contents = ''
- ele_3_contents = 'Adding Fubar text'
- self.edit_ldda_template_element_info( str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ),
- ldda_one.name, ldda_one_ele_1_field_name, ele_1_contents, ldda_one_ele_2_field_name, ele_2_contents,
- ele_1_help='This is the wind component'.replace( ' ', '+' ),
- ele_2_help='This is the bag component'.replace( ' ', '+' ),
- ele_3_field_name=ldda_one_ele_3_field_name, ele_3_contents=ele_3_contents.replace( ' ', '+' ) )
- # Check the updated information from the libraries view
- self.home()
- self.visit_url( '%s/library/library_dataset_dataset_association?info=True&library_id=%s&folder_id=%s&id=%s' \
- % ( self.url, str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ) ) )
- self.check_page_for_string( ele_3_contents )
- try:
- self.check_page_for_string( 'blown' )
- raise AssertionError( 'Element contents were not correctly eliminated when the information was edited for ldda %s' % ldda_one.name)
- except:
- pass
- self.home()
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
+ def test_150_editing_restricted_datasets_information( self ):
+ Testing editing a restricted library dataset's template element information
+ ele_1_contents = ''
+ ele_2_contents = ''
+ ele_3_contents = 'Adding Fubar text'
+ self.edit_ldda_template_element_info( str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ),
+ ldda_one.name, ldda_one_ele_1_field_name, ele_1_contents, ldda_one_ele_2_field_name, ele_2_contents,
+ ele_1_help='This is the wind component'.replace( ' ', '+' ),
+ ele_2_help='This is the bag component'.replace( ' ', '+' ),
+ ele_3_field_name=ldda_one_ele_3_field_name, ele_3_contents=ele_3_contents.replace( ' ', '+' ) )
+ # Check the updated information from the libraries view
+ self.home()
+ self.visit_url( '%s/library/library_dataset_dataset_association?info=True&library_id=%s&folder_id=%s&id=%s' \
+ % ( self.url, str( library_one.id ), str( library_one.root_folder.id ), str( ldda_one.id ) ) )
+ self.check_page_for_string( ele_3_contents )
+ try:
+ self.check_page_for_string( 'blown' )
+ raise AssertionError( 'Element contents were not correctly eliminated when the information was edited for ldda %s' % ldda_one.name)
+ except:
+ pass
+ self.home()
+ """
def test_155_add_dataset_with_role_associated_with_group_and_users( self ):
"""Testing adding a dataset with a role that is associated with a group and users"""
self.login( email='test(a)bx.psu.edu' )
@@ -1077,6 +1123,8 @@
message = 'Testing adding a dataset with a role that is associated with a group and users'
ele_name_0 = 'Fu'
ele_name_1 = 'Bar'
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.add_library_dataset( '5.bed',
str( library_one.id ),
str( folder_one.id ),
@@ -1088,6 +1136,16 @@
root=False,
check_template_str1=ele_name_0,
check_template_str2=ele_name_1 )
+ """
+ self.add_library_dataset( '5.bed',
+ str( library_one.id ),
+ str( folder_one.id ),
+ folder_one.name,
+ file_format='bed',
+ dbkey='hg17',
+ roles=[ str( role_two.id ) ],
+ message=message.replace( ' ', '+' ),
+ root=False )
global ldda_five
ldda_five = galaxy.model.LibraryDatasetDatasetAssociation.query() \
.order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ).first()
@@ -1194,13 +1252,16 @@
.order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ).first()
assert ldda_six is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda_six from the database'
# Make sure the correct template was inherited
+ ## TODO: temporarily eliminating templates until we have the new forms features done
self.home()
+ """
self.visit_url( "%s/admin/library_dataset_dataset_association?edit_info=True&library_id=%s&folder_id=%s&id=%s" % \
( self.url, str( library_one.id ), str( subfolder_one.id ), str( ldda_six.id ) ) )
self.check_page_for_string( 'Fu' )
self.check_page_for_string( 'This is the Fu component' )
self.check_page_for_string( 'Bar' )
self.check_page_for_string( 'This is the Bar component' )
+ """
def test_170_editing_dataset_attribute_info( self ):
"""Testing editing a datasets attribute information"""
new_ldda_name = '6.bed ( version 1 )'
@@ -1213,6 +1274,8 @@
def test_175_uploading_new_dataset_version( self ):
"""Testing uploading a new version of a dataset"""
message = 'Testing uploading a new version of a dataset'
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.upload_new_dataset_version( '6.bed',
str( library_one.id ),
str( subfolder_one.id ),
@@ -1224,6 +1287,16 @@
message=message.replace( ' ', '+' ),
check_template_str1='Fu',
check_template_str2='Bar' )
+ """
+ self.upload_new_dataset_version( '6.bed',
+ str( library_one.id ),
+ str( subfolder_one.id ),
+ str( subfolder_one.name ),
+ str( ldda_six.library_dataset.id ),
+ ldda_six.name,
+ file_format='auto',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ) )
global ldda_six_version_two
ldda_six_version_two = galaxy.model.LibraryDatasetDatasetAssociation.query() \
.order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ).first()
@@ -1233,6 +1306,8 @@
( self.url, str( library_one.id ), str( subfolder_one.id ), str( ldda_six_version_two.id ) ) )
self.check_page_for_string( 'This is the latest version of this library dataset' )
# Make sure the correct template was inherited
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.check_page_for_string( 'Fu' )
self.check_page_for_string( 'This is the Fu component' )
self.check_page_for_string( 'Bar' )
@@ -1240,6 +1315,7 @@
check_str = 'Expired versions of %s' % ldda_six_version_two.name
self.check_page_for_string( check_str )
self.check_page_for_string( ldda_six.name )
+ """
self.home()
        # Make sure the permissions are the same
ldda_six.refresh()
@@ -1262,6 +1338,8 @@
"""Testing uploading new versions of a dataset using a directory of files"""
message = 'Testing uploading new versions of a dataset using a directory of files'
ldda_six_version_two.refresh()
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.upload_new_dataset_versions( str( library_one.id ),
str( subfolder_one.id ),
str( subfolder_one.name ),
@@ -1272,6 +1350,15 @@
message=message.replace( ' ', '+' ),
check_template_str1='Fu',
check_template_str2='Bar' )
+ """
+ self.upload_new_dataset_versions( str( library_one.id ),
+ str( subfolder_one.id ),
+ str( subfolder_one.name ),
+ str( ldda_six_version_two.library_dataset.id ),
+ ldda_six_version_two.name,
+ file_format='auto',
+ dbkey='hg18',
+ message=message.replace( ' ', '+' ) )
global ldda_six_version_five
ldda_six_version_five = galaxy.model.LibraryDatasetDatasetAssociation.query() \
.order_by( desc( galaxy.model.LibraryDatasetDatasetAssociation.table.c.create_time ) ).first()
@@ -1281,10 +1368,13 @@
( self.url, str( library_one.id ), str( subfolder_one.id ), str( ldda_six_version_five.id ) ) )
self.check_page_for_string( 'This is the latest version of this library dataset' )
# Make sure the correct template was inherited
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.check_page_for_string( 'Fu' )
self.check_page_for_string( 'This is the Fu component' )
self.check_page_for_string( 'Bar' )
self.check_page_for_string( 'This is the Bar component' )
+ """
check_str = 'Expired versions of %s' % ldda_six_version_five.name
self.check_page_for_string( check_str )
self.check_page_for_string( ldda_six.name )
@@ -1309,7 +1399,9 @@
def test_185_upload_datasets_from_library_dir( self ):
"""Testing uploading 3 datasets from a library directory to a root folder"""
message = 'This is a test for uploading a directory of files'
- roles_tuple = [ ( str( role_one.id ), role_one.name ) ]
+ roles_tuple = [ ( str( role_one.id ), role_one.name ) ]
+ ## TODO: temporarily eliminating templates until we have the new forms features done
+ """
self.add_datasets_from_library_dir( str( library_one.id ),
str( library_one.root_folder.id ),
library_one.root_folder.name,
@@ -1319,6 +1411,13 @@
check_template_str1='wind',
check_template_str2='bag',
check_template_str3='Fubar' )
+ """
+ self.add_datasets_from_library_dir( str( library_one.id ),
+ str( library_one.root_folder.id ),
+ library_one.root_folder.name,
+ roles_tuple=roles_tuple,
+ message=message.replace( '+', ' ' ),
+ root=True )
self.home()
self.visit_page( 'admin/browse_library?id=%s' % ( str( library_one.id ) ) )
self.check_page_for_string( admin_user.email )
@@ -1552,6 +1651,7 @@
def test_245_purge_user( self ):
"""Testing purging a user account"""
self.mark_user_deleted( user_id=regular_user3.id, email=regular_user3.email )
+ regular_user3.refresh()
self.purge_user( str( regular_user3.id ), regular_user3.email )
regular_user3.refresh()
if not regular_user3.purged:
@@ -1565,15 +1665,9 @@
raise AssertionError( 'DefaultUserPermissions for user %s are not related with the private role.' % regular_user3.email )
# Make sure History deleted
for history in regular_user3.histories:
+ history.refresh()
if not history.deleted:
raise AssertionError( 'User %s has active history id %d after their account was marked as purged.' % ( regular_user3.email, hda.id ) )
- # Make sure DefaultHistoryPermissions deleted EXCEPT FOR THE PRIVATE ROLE
- if len( history.default_permissions ) != 1:
- raise AssertionError( 'DefaultHistoryPermissions for history id %d were not deleted.' % history.id )
- for dhp in history.default_permissions:
- role = galaxy.model.Role.get( dhp.role_id )
- if role.type != 'private':
- raise AssertionError( 'DefaultHistoryPermissions for history id %d are not related with the private role.' % history.id )
# Make sure HistoryDatasetAssociation deleted
for hda in history.datasets:
hda.refresh()
sh run.sh seems to work okay until it hits this block of output...
Any ideas what this socket error is about? It looks to me like it could be something Python-specific.
Starting server in PID 11963.
Traceback (most recent call last):
  File "./scripts/paster.py", line 27, in ?
    command.run()
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/PasteScript-1.3.6-py2.4.egg/paste/script/command.py", line 78, in run
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/PasteScript-1.3.6-py2.4.egg/paste/script/command.py", line 117, in invoke
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/PasteScript-1.3.6-py2.4.egg/paste/script/command.py", line 212, in run
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/PasteScript-1.3.6-py2.4.egg/paste/script/serve.py", line 232, in command
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/PasteDeploy-1.3.1-py2.4.egg/paste/deploy/loadwsgi.py", line 139, in server_wrapper
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/PasteDeploy-1.3.1-py2.4.egg/paste/deploy/util/fixtypeerror.py", line 57, in fix_call
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/Paste-1.5.1-py2.4.egg/paste/httpserver.py", line 1307, in server_runner
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/Paste-1.5.1-py2.4.egg/paste/httpserver.py", line 1257, in serve
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/Paste-1.5.1-py2.4.egg/paste/httpserver.py", line 1107, in __init__
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/Paste-1.5.1-py2.4.egg/paste/httpserver.py", line 1087, in __init__
  File "/root/src/galaxy-f7336991d0ee/eggs/py2.4-noplatform/Paste-1.5.1-py2.4.egg/paste/httpserver.py", line 328, in __init__
  File "/usr/lib64/python2.4/SocketServer.py", line 330, in __init__
    self.server_bind()
  File "/usr/lib64/python2.4/BaseHTTPServer.py", line 101, in server_bind
    SocketServer.TCPServer.server_bind(self)
  File "/usr/lib64/python2.4/SocketServer.py", line 341, in server_bind
    self.socket.bind(self.server_address)
  File "<string>", line 1, in bind
socket.error: (98, 'Address already in use')
galaxy.jobs INFO 2009-06-05 02:15:43,692 sending stop signal to worker thread
galaxy.jobs INFO 2009-06-05 02:15:43,692 job queue stopped
galaxy.jobs.runners.local INFO 2009-06-05 02:15:43,693 sending stop signal to worker threads
galaxy.jobs.runners.local INFO 2009-06-05 02:15:43,694 local job runner stopped
galaxy.jobs INFO 2009-06-05 02:15:43,694 sending stop signal to worker thread
galaxy.jobs INFO 2009-06-05 02:15:43,695 job stopper stopped
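The key line is the last one of the traceback, socket.error: (98, 'Address already in use'). The operating system raises it when paster tries to bind its HTTP port and some other process (often a previous Galaxy/paster instance that never fully shut down) is still listening there, so it is not really Python-specific beyond paster surfacing it as a socket.error. Here is a minimal sketch, not part of Galaxy itself, for checking whether the port has been released before restarting; port 8080 is assumed as the usual default, so substitute whatever "port" is set to in your universe_wsgi.ini.

# Minimal sketch: probe the address paster failed to bind.
# The port value 8080 is an assumption; use the "port" setting
# from your universe_wsgi.ini if it differs.
import socket

def port_in_use( host='127.0.0.1', port=8080 ):
    s = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
    try:
        # connect_ex() returns 0 when something is already accepting
        # connections on host:port, i.e. the address is still taken.
        return s.connect_ex( ( host, port ) ) == 0
    finally:
        s.close()

if __name__ == '__main__':
    if port_in_use():
        print( 'Port is still taken; find and stop the old process first.' )
    else:
        print( 'Port is free; the bind error should not recur.' )

In practice the usual fix is to find whatever is still bound to the port (for example with netstat -tlnp) and stop it, or to change the port setting in universe_wsgi.ini before re-running run.sh.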