[hg] galaxy 3520: remove bogus string.translate
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/262b16c8e277
changeset: 3520:262b16c8e277
user: fubar: ross Lazarus at gmail period com
date: Thu Mar 11 13:51:53 2010 -0500
description:
remove bogus string.translate
diffstat:
lib/galaxy/web/controllers/library_common.py | 2 --
1 files changed, 0 insertions(+), 2 deletions(-)
diffs (12 lines):
diff -r 39e4b119bdf4 -r 262b16c8e277 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Thu Mar 11 13:41:40 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Thu Mar 11 13:51:53 2010 -0500
@@ -1357,8 +1357,6 @@
path = os.path.join( parent_folder.name, path )
parent_folder = parent_folder.parent
path += ldda.name
- if path > '':
- path = path.translate(trantab)
while path in seen:
path += '_'
seen.append( path )
12 years, 2 months
[hg] galaxy 3519: merge with 3518:0c9e154e9176
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/39e4b119bdf4
changeset: 3519:39e4b119bdf4
user: fubar: ross Lazarus at gmail period com
date: Thu Mar 11 13:41:40 2010 -0500
description:
merge with 3518:0c9e154e9176
diffstat:
lib/galaxy/datatypes/sniff.py | 39 ++++++++++++++++++++++++---------------
tools/data_source/upload.py | 24 +++++++++++++++++++-----
2 files changed, 43 insertions(+), 20 deletions(-)
diffs (140 lines):
diff -r e98117dd6054 -r 39e4b119bdf4 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py Thu Mar 11 13:37:49 2010 -0500
+++ b/lib/galaxy/datatypes/sniff.py Thu Mar 11 13:41:40 2010 -0500
@@ -70,7 +70,7 @@
f.close()
return False
-def convert_newlines( fname ):
+def convert_newlines( fname, in_place=True ):
"""
Converts in place a file from universal line endings
to Posix line endings.
@@ -78,7 +78,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1 2\\n3 4\\n'
"""
@@ -87,18 +87,21 @@
for i, line in enumerate( file( fname, "U" ) ):
fp.write( "%s\n" % line.rstrip( "\r\n" ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def sep2tabs(fname, patt="\\s+"):
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Transforms in place a 'sep' separated file to a tab separated one
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\n3 4\\n")
>>> sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -110,11 +113,14 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def convert_newlines_sep2tabs( fname, patt="\\s+" ):
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Combines above methods: convert_newlines() and sep2tabs()
so that files do not need to be read twice
@@ -122,7 +128,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines_sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -134,9 +140,12 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
def get_headers( fname, sep, count=60, is_multi_byte=False ):
"""
diff -r e98117dd6054 -r 39e4b119bdf4 tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Mar 11 13:37:49 2010 -0500
+++ b/tools/data_source/upload.py Thu Mar 11 13:41:40 2010 -0500
@@ -138,6 +138,7 @@
def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
+ converted_path = None
if dataset.type == 'url':
try:
@@ -239,10 +240,15 @@
file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
return
if data_type != 'binary' and data_type != 'zip':
- if dataset.space_to_tab:
- line_count = sniff.convert_newlines_sep2tabs( dataset.path )
- else:
- line_count = sniff.convert_newlines( dataset.path )
+ # don't convert newlines on data we're only going to symlink
+ if not dataset.get( 'link_data_only', False ):
+ in_place = True
+ if dataset.type in ( 'server_dir', 'path_paste' ):
+ in_place = False
+ if dataset.space_to_tab:
+ line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
+ else:
+ line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
if dataset.file_type == 'auto':
ext = sniff.guess_ext( dataset.path )
else:
@@ -257,7 +263,15 @@
if dataset.get( 'link_data_only', False ):
pass # data will remain in place
elif dataset.type in ( 'server_dir', 'path_paste' ):
- shutil.copy( dataset.path, output_path )
+ if converted_path is not None:
+ shutil.copy( converted_path, output_path )
+ try:
+ os.remove( converted_path )
+ except:
+ pass
+ else:
+ # this should not happen, but it's here just in case
+ shutil.copy( dataset.path, output_path )
else:
shutil.move( dataset.path, output_path )
# Write the job info
12 years, 2 months
[hg] galaxy 3517: Modify the newline conversion methods in sniff...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/0c9e154e9176
changeset: 3517:0c9e154e9176
user: Nate Coraor <nate(a)bx.psu.edu>
date: Thu Mar 11 11:56:20 2010 -0500
description:
Modify the newline conversion methods in sniff so converting in place is optional. This is necessary to fix a bug that occurs if using the 'server_dir' or 'path_paste' library upload methods: previously, they would modify the file to be imported in-place if permissions allowed (probably not what the admin wanted) or fail if permissions did not allow. New functionality is to return the converted tempfile if 'server_dir' or 'path_paste' methods are used. Also, no newline conversion will be done if the administrator uses the symlink checkbox.
diffstat:
lib/galaxy/datatypes/sniff.py | 39 ++++++++++++++++++++++++---------------
tools/data_source/upload.py | 24 +++++++++++++++++++-----
2 files changed, 43 insertions(+), 20 deletions(-)
diffs (140 lines):
diff -r 2e97ae04856d -r 0c9e154e9176 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py Thu Mar 11 11:17:11 2010 -0500
+++ b/lib/galaxy/datatypes/sniff.py Thu Mar 11 11:56:20 2010 -0500
@@ -70,7 +70,7 @@
f.close()
return False
-def convert_newlines( fname ):
+def convert_newlines( fname, in_place=True ):
"""
Converts in place a file from universal line endings
to Posix line endings.
@@ -78,7 +78,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1 2\\n3 4\\n'
"""
@@ -87,18 +87,21 @@
for i, line in enumerate( file( fname, "U" ) ):
fp.write( "%s\n" % line.rstrip( "\r\n" ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def sep2tabs(fname, patt="\\s+"):
+def sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Transforms in place a 'sep' separated file to a tab separated one
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\n3 4\\n")
>>> sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -110,11 +113,14 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
-def convert_newlines_sep2tabs( fname, patt="\\s+" ):
+def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
Combines above methods: convert_newlines() and sep2tabs()
so that files do not need to be read twice
@@ -122,7 +128,7 @@
>>> fname = get_test_fname('temp.txt')
>>> file(fname, 'wt').write("1 2\\r3 4")
>>> convert_newlines_sep2tabs(fname)
- 2
+ (2, None)
>>> file(fname).read()
'1\\t2\\n3\\t4\\n'
"""
@@ -134,9 +140,12 @@
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
fp.close()
- shutil.move( temp_name, fname )
- # Return number of lines in file.
- return i + 1
+ if in_place:
+ shutil.move( temp_name, fname )
+ # Return number of lines in file.
+ return ( i + 1, None )
+ else:
+ return ( i + 1, temp_name )
def get_headers( fname, sep, count=60, is_multi_byte=False ):
"""
diff -r 2e97ae04856d -r 0c9e154e9176 tools/data_source/upload.py
--- a/tools/data_source/upload.py Thu Mar 11 11:17:11 2010 -0500
+++ b/tools/data_source/upload.py Thu Mar 11 11:56:20 2010 -0500
@@ -138,6 +138,7 @@
def add_file( dataset, json_file, output_path ):
data_type = None
line_count = None
+ converted_path = None
if dataset.type == 'url':
try:
@@ -239,10 +240,15 @@
file_err( 'The uploaded file contains inappropriate content', dataset, json_file )
return
if data_type != 'binary' and data_type != 'zip':
- if dataset.space_to_tab:
- line_count = sniff.convert_newlines_sep2tabs( dataset.path )
- else:
- line_count = sniff.convert_newlines( dataset.path )
+ # don't convert newlines on data we're only going to symlink
+ if not dataset.get( 'link_data_only', False ):
+ in_place = True
+ if dataset.type in ( 'server_dir', 'path_paste' ):
+ in_place = False
+ if dataset.space_to_tab:
+ line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place )
+ else:
+ line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place )
if dataset.file_type == 'auto':
ext = sniff.guess_ext( dataset.path )
else:
@@ -257,7 +263,15 @@
if dataset.get( 'link_data_only', False ):
pass # data will remain in place
elif dataset.type in ( 'server_dir', 'path_paste' ):
- shutil.copy( dataset.path, output_path )
+ if converted_path is not None:
+ shutil.copy( converted_path, output_path )
+ try:
+ os.remove( converted_path )
+ except:
+ pass
+ else:
+ # this should not happen, but it's here just in case
+ shutil.copy( dataset.path, output_path )
else:
shutil.move( dataset.path, output_path )
# Write the job info
12 years, 2 months
[hg] galaxy 3518: fix for string.translate on empty path in crea...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/e98117dd6054
changeset: 3518:e98117dd6054
user: fubar: ross Lazarus at gmail period com
date: Thu Mar 11 13:37:49 2010 -0500
description:
fix for string.translate on empty path in creating an archive of library composite datatypes
diffstat:
lib/galaxy/web/controllers/library_common.py | 6 ++++--
1 files changed, 4 insertions(+), 2 deletions(-)
diffs (23 lines):
diff -r 2e97ae04856d -r e98117dd6054 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Thu Mar 11 11:17:11 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Thu Mar 11 13:37:49 2010 -0500
@@ -1357,7 +1357,8 @@
path = os.path.join( parent_folder.name, path )
parent_folder = parent_folder.parent
path += ldda.name
- path = path.translate(trantab)
+ if path > '':
+ path = path.translate(trantab)
while path in seen:
path += '_'
seen.append( path )
@@ -1377,7 +1378,8 @@
flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
for fpath in flist:
efp,fname = os.path.split(fpath)
- fname = fname.translate(trantab)
+ if fname > '':
+ fname = fname.translate(trantab)
try:
archive.add( fpath,fname )
except IOError:
12 years, 2 months
[hg] galaxy 3516: Fix for implicit datatype conversion.
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/2e97ae04856d
changeset: 3516:2e97ae04856d
user: Dan Blankenberg <dan(a)bx.psu.edu>
date: Thu Mar 11 11:17:11 2010 -0500
description:
Fix for implicit datatype conversion.
diffstat:
lib/galaxy/datatypes/data.py | 2 +-
1 files changed, 1 insertions(+), 1 deletions(-)
diffs (12 lines):
diff -r 6fa986527398 -r 2e97ae04856d lib/galaxy/datatypes/data.py
--- a/lib/galaxy/datatypes/data.py Wed Mar 10 23:59:25 2010 -0500
+++ b/lib/galaxy/datatypes/data.py Thu Mar 11 11:17:11 2010 -0500
@@ -257,7 +257,7 @@
break
params[input_name] = original_dataset
#Run converter, job is dispatched through Queue
- converted_dataset = converter.execute( trans, incoming = params, set_output_hid = visible )
+ converted_dataset = converter.execute( trans, incoming = params, set_output_hid = visible )[1]
if len(params) > 0:
trans.log_event( "Converter params: %s" % (str(params)), tool_id=converter.id )
if not visible:
12 years, 2 months
[hg] galaxy 3513: Remove old Lmap from datatypes_conf.xml.sample...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/87da8cd1f091
changeset: 3513:87da8cd1f091
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 20:59:11 2010 -0500
description:
Remove old Lmap from datatypes_conf.xml.sample - now deprecated from genetics.py
Causing buildbot to barf.
diffstat:
datatypes_conf.xml.sample | 1 -
lib/galaxy/web/controllers/library_common.py | 3 ++-
run_functional_tests.sh | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diffs (43 lines):
diff -r 53ddb4b728f7 -r 87da8cd1f091 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample Wed Mar 10 19:48:28 2010 -0500
+++ b/datatypes_conf.xml.sample Wed Mar 10 20:59:11 2010 -0500
@@ -194,7 +194,6 @@
<!-- genome graphs ucsc file - first col is always marker then numeric values to plot -->
<datatype extension="gg" type="galaxy.datatypes.genetics:GenomeGraphs"/>
<!-- part of linkage format pedigree -->
- <datatype extension="lmap" type="galaxy.datatypes.genetics:Lmap" display_in_upload="true"/>
<datatype extension="malist" type="galaxy.datatypes.genetics:MAlist" display_in_upload="true"/>
<!-- linkage format pedigree (separate .map file) -->
<datatype extension="lped" type="galaxy.datatypes.genetics:Lped" display_in_upload="true">
diff -r 53ddb4b728f7 -r 87da8cd1f091 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 19:48:28 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 20:59:11 2010 -0500
@@ -1111,7 +1111,7 @@
# is composite - must return a zip of contents and the html file itself - ugh - should be reversible at upload!
# use act_on_multiple_datasets( self, trans, cntrller, library_id, ldda_ids='', **kwd ) since it does what we need
kwd['do_action'] = 'zip'
- return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=id, **kwd )
+ return self.act_on_multiple_datasets( trans, cntrller, library_id, ldda_ids=[id,], **kwd )
else:
mime = trans.app.datatypes_registry.get_mimetype_by_extension( ldda.extension.lower() )
trans.response.set_content_type( mime )
@@ -1258,6 +1258,7 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
+ log.debug('## act on multiple got %s' % ldda_ids)
if action == 'import_to_history':
history = trans.get_history()
if history is None:
diff -r 53ddb4b728f7 -r 87da8cd1f091 run_functional_tests.sh
--- a/run_functional_tests.sh Wed Mar 10 19:48:28 2010 -0500
+++ b/run_functional_tests.sh Wed Mar 10 20:59:11 2010 -0500
@@ -1,7 +1,7 @@
#!/bin/sh
# A good place to look for nose info: http://somethingaboutorange.com/mrl/projects/nose/
-
+export PATH=/usr/local/bin:$PATH
rm -f run_functional_tests.log
if [ ! $1 ]; then
12 years, 2 months
[hg] galaxy 3514: Fixes for downloading library archives - corre...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/06dcf56688ea
changeset: 3514:06dcf56688ea
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 23:58:35 2010 -0500
description:
Fixes for downloading library archives - correct extensions help a lot and removing spaces from ldda.name helps too.
diffstat:
lib/galaxy/web/controllers/library_common.py | 15 +++++++++------
1 files changed, 9 insertions(+), 6 deletions(-)
diffs (63 lines):
diff -r 87da8cd1f091 -r 06dcf56688ea lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 20:59:11 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 23:58:35 2010 -0500
@@ -1,4 +1,4 @@
-import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob
+import os, os.path, shutil, urllib, StringIO, re, gzip, tempfile, shutil, zipfile, copy, glob, string
from galaxy.web.base.controller import *
from galaxy import util, jobs
from galaxy.datatypes import sniff
@@ -1258,7 +1258,6 @@
messagetype = 'error'
else:
ldda_ids = util.listify( ldda_ids )
- log.debug('## act on multiple got %s' % ldda_ids)
if action == 'import_to_history':
history = trans.get_history()
if history is None:
@@ -1312,6 +1311,8 @@
msg = "The selected datasets have been removed from this data library"
else:
error = False
+ killme = string.punctuation + string.whitespace
+ trantab = string.maketrans(killme,'_'*len(killme))
try:
outext = 'zip'
if action == 'zip':
@@ -1325,10 +1326,10 @@
archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
elif action == 'tgz':
archive = util.streamball.StreamBall( 'w|gz' )
- outext = 'gz'
+ outext = 'tgz'
elif action == 'tbz':
archive = util.streamball.StreamBall( 'w|bz2' )
- outext = 'bz2'
+ outext = 'tbz2'
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
@@ -1356,12 +1357,13 @@
path = os.path.join( parent_folder.name, path )
parent_folder = parent_folder.parent
path += ldda.name
+ path = path.translate(trantab)
while path in seen:
path += '_'
seen.append( path )
+ zpath = os.path.split(path)[-1] # comes as base_name/fname
+ outfname,zpathext = os.path.splitext(zpath)
if is_composite: # need to add all the components from the extra_files_path to the zip
- zpath = os.path.split(path)[-1] # comes as base_name/fname
- outfname,zpathext = os.path.splitext(zpath)
if zpathext == '':
zpath = '%s.html' % zpath # fake the real nature of the html file
try:
@@ -1375,6 +1377,7 @@
flist = glob.glob(os.path.join(ldda.dataset.extra_files_path,'*.*')) # glob returns full paths
for fpath in flist:
efp,fname = os.path.split(fpath)
+ fname = fname.translate(trantab)
try:
archive.add( fpath,fname )
except IOError:
12 years, 2 months
[hg] galaxy 3512: Reverted missing python version check for zipf...
by Greg Von Kuster
details: http://www.bx.psu.edu/hg/galaxy/rev/53ddb4b728f7
changeset: 3512:53ddb4b728f7
user: fubar: ross Lazarus at gmail period com
date: Wed Mar 10 19:48:28 2010 -0500
description:
Reverted missing python version check for zipfile setup
Changed download archive names for both libraries and for history items
so they're related to the content - uses data.name rather than one fixed string for all
diffstat:
lib/galaxy/web/controllers/dataset.py | 25 ++++++++++++++++---------
lib/galaxy/web/controllers/library_common.py | 13 ++++++++-----
2 files changed, 24 insertions(+), 14 deletions(-)
diffs (153 lines):
diff -r 2af472aa0844 -r 53ddb4b728f7 lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py Wed Mar 10 17:10:43 2010 -0500
+++ b/lib/galaxy/web/controllers/dataset.py Wed Mar 10 19:48:28 2010 -0500
@@ -1,4 +1,4 @@
-import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob
+import logging, os, string, shutil, re, socket, mimetypes, smtplib, urllib, tempfile, zipfile, glob, sys
from galaxy.web.base.controller import *
from galaxy.web.framework.helpers import time_ago, iff, grids
@@ -11,6 +11,11 @@
pkg_resources.require( "Paste" )
import paste.httpexceptions
+if sys.version_info[:2] < ( 2, 6 ):
+ zipfile.BadZipFile = zipfile.error
+if sys.version_info[:2] < ( 2, 5 ):
+ zipfile.LargeZipFile = zipfile.error
+
tmpd = tempfile.mkdtemp()
comptypes=[]
ziptype = '32'
@@ -204,6 +209,9 @@
def archive_composite_dataset( self, trans, data=None, **kwd ):
# save a composite object into a compressed archive for downloading
params = util.Params( kwd )
+ valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ outfname = data.name[0:150]
+ outfname = ''.join(c in valid_chars and c or '_' for c in outfname)
if (params.do_action == None):
params.do_action = 'zip' # default
msg = util.restore_text( params.get( 'msg', '' ) )
@@ -230,7 +238,7 @@
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
- msg = "Unable to create archive for %s for download, please report this error" % data.name
+ msg = "Unable to create archive for %s for download, please report this error" % outfname
messagetype = 'error'
if not error:
current_user_roles = trans.get_current_user_roles()
@@ -239,7 +247,7 @@
fname = os.path.split(path)[-1]
basename = data.metadata.base_name
efp = data.extra_files_path
- htmlname = os.path.splitext(data.name)[0]
+ htmlname = os.path.splitext(outfname)[0]
if not htmlname.endswith(ext):
htmlname = '%s_%s' % (htmlname,ext)
archname = '%s.html' % htmlname # fake the real nature of the html file
@@ -276,14 +284,14 @@
messagetype = 'error'
if not error:
trans.response.set_content_type( "application/x-zip-compressed" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyCompositeObject.zip"
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.zip" % outfname
return tmpfh
else:
trans.response.set_content_type( "application/x-tar" )
outext = 'tgz'
if params.do_action == 'tbz':
outext = 'tbz'
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % outext
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
@@ -294,7 +302,8 @@
@web.expose
def display(self, trans, dataset_id=None, preview=False, filename=None, to_ext=None, **kwd):
"""Catches the dataset id and displays file contents as directed"""
-
+ composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
+ composite_extensions.append('html') # for archiving composite datatypes
# DEPRECATION: We still support unencoded ids for backward compatibility
try:
dataset_id = int( dataset_id )
@@ -329,8 +338,6 @@
trans.log_event( "Display dataset id: %s" % str( dataset_id ) )
if to_ext: # Saving the file
- composite_extensions = trans.app.datatypes_registry.get_composite_extensions( )
- composite_extensions.append('html')
if data.ext in composite_extensions:
return self.archive_composite_dataset( trans, data, **kwd )
else:
@@ -340,7 +347,7 @@
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
fname = data.name
fname = ''.join(c in valid_chars and c or '_' for c in fname)[0:150]
- trans.response.headers["Content-Disposition"] = "attachment; filename=GalaxyHistoryItem-%s-[%s]%s" % (data.hid, fname, to_ext)
+ trans.response.headers["Content-Disposition"] = "attachment; filename=Galaxy%s-[%s]%s" % (data.hid, fname, to_ext)
return open( data.file_name )
if os.path.exists( data.file_name ):
max_peek_size = 1000000 # 1 MB
diff -r 2af472aa0844 -r 53ddb4b728f7 lib/galaxy/web/controllers/library_common.py
--- a/lib/galaxy/web/controllers/library_common.py Wed Mar 10 17:10:43 2010 -0500
+++ b/lib/galaxy/web/controllers/library_common.py Wed Mar 10 19:48:28 2010 -0500
@@ -1312,6 +1312,7 @@
else:
error = False
try:
+ outext = 'zip'
if action == 'zip':
# Can't use mkstemp - the file must not exist first
tmpd = tempfile.mkdtemp()
@@ -1323,8 +1324,10 @@
archive.add = lambda x, y: archive.write( x, y.encode('CP437') )
elif action == 'tgz':
archive = util.streamball.StreamBall( 'w|gz' )
+ outext = 'gz'
elif action == 'tbz':
archive = util.streamball.StreamBall( 'w|bz2' )
+ outext = 'bz2'
except (OSError, zipfile.BadZipFile):
error = True
log.exception( "Unable to create archive for download" )
@@ -1357,11 +1360,11 @@
seen.append( path )
if is_composite: # need to add all the components from the extra_files_path to the zip
zpath = os.path.split(path)[-1] # comes as base_name/fname
- zpathext = os.path.splitext(zpath)[-1]
+ outfname,zpathext = os.path.splitext(zpath)
if zpathext == '':
zpath = '%s.html' % zpath # fake the real nature of the html file
try:
- archive.add(ldda.dataset.file_name,zpath)
+ archive.add(ldda.dataset.file_name,zpath) # add the primary of a composite set
except IOError:
error = True
log.exception( "Unable to add composite parent %s to temporary library download archive" % ldda.dataset.file_name)
@@ -1375,7 +1378,7 @@
archive.add( fpath,fname )
except IOError:
error = True
- log.exception( "Unable to add %s to temporary library download archive" % fname)
+ log.exception( "Unable to add %s to temporary library download archive %s" % (fname,outfname))
msg = "Unable to create archive for download, please report this error"
messagetype = 'error'
continue
@@ -1402,11 +1405,11 @@
messagetype = 'error'
if not error:
trans.response.set_content_type( "application/x-zip-compressed" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % action
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
return tmpfh
else:
trans.response.set_content_type( "application/x-tar" )
- trans.response.headers[ "Content-Disposition" ] = "attachment; filename=GalaxyLibraryFiles.%s" % action
+ trans.response.headers[ "Content-Disposition" ] = "attachment; filename=%s.%s" % (outfname,outext)
archive.wsgi_status = trans.response.wsgi_status()
archive.wsgi_headeritems = trans.response.wsgi_headeritems()
return archive.stream
12 years, 2 months
Balloon tooltips
by Assaf Gordon
Hi,
I was wondering if there's an easy way to disable the pop-up balloon tooltips (on the save, rerun, view, delete icons) ?
The new beautiful icons (or the old text labels) are informative enough - and the constantly appearing/disappearing black tooltip balloons (when you move the mouse over datasets) are a bit annoying.
Alternatively, is it possible to add a short delay before showing the tooltip (like in all regular GUI client-side applications) ?
Thanks,
-gordon
12 years, 2 months