2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f18f905ceaa7/
Changeset: f18f905ceaa7
Branch: trevorw/add-template_cache_path-to-reports_wsgii-1422042129382
User: trevorw
Date: 2015-01-23 19:42:36+00:00
Summary: add template_cache_path to reports_wsgi.ini.sample
Affected #: 1 file
diff -r 9ef517d8e767037e1633f6063c6edb3023f86499 -r f18f905ceaa7586625c7f7d202a1f0f4ceabc8a1 config/reports_wsgi.ini.sample
--- a/config/reports_wsgi.ini.sample
+++ b/config/reports_wsgi.ini.sample
@@ -26,6 +26,10 @@
# Temporary storage for additional datasets, this should be shared through the cluster
new_file_path = database/tmp
+# Mako templates are compiled as needed and cached for reuse; this directory is
+# used for the cache.
+# template_cache_path = database/compiled_templates
+
# Session support (beaker)
use_beaker_session = True
session_type = memory
https://bitbucket.org/galaxy/galaxy-central/commits/e361766bbcfd/
Changeset: e361766bbcfd
User: jmchilton
Date: 2015-01-23 19:51:13+00:00
Summary: Merged in trevorw/galaxy-central-6/trevorw/add-template_cache_path-to-reports_wsgii-1422042129382 (pull request #645)
add template_cache_path to reports_wsgi.ini.sample
Affected #: 1 file
diff -r d8d20b31c15bec018df93653c3f39e5d41e374f6 -r e361766bbcfd8976ee05cb8826ed239d0f2028f6 config/reports_wsgi.ini.sample
--- a/config/reports_wsgi.ini.sample
+++ b/config/reports_wsgi.ini.sample
@@ -26,6 +26,10 @@
# Temporary storage for additional datasets, this should be shared through the cluster
new_file_path = database/tmp
+# Mako templates are compiled as needed and cached for reuse; this directory is
+# used for the cache.
+# template_cache_path = database/compiled_templates
+
# Session support (beaker)
use_beaker_session = True
session_type = memory
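
For context: Mako compiles each template to a Python module the first time it is rendered and reuses the cached module afterwards, so template_cache_path only needs to point at a writable directory. Below is a minimal sketch of how such an option is typically handed to Mako; it is illustrative only, not Galaxy's actual reports startup code, and it assumes the option lives in the [app:main] section of the ini file.

# Sketch: read template_cache_path from the ini and pass it to Mako.
# TemplateLookup and module_directory are real Mako API; the parsing
# below is an illustration, not the code path Galaxy Reports uses.
from ConfigParser import SafeConfigParser  # Python 2, matching the code base
from mako.lookup import TemplateLookup

parser = SafeConfigParser()
parser.read( 'config/reports_wsgi.ini' )
cache_path = 'database/compiled_templates'  # the sample file's suggested default
if parser.has_option( 'app:main', 'template_cache_path' ):
    cache_path = parser.get( 'app:main', 'template_cache_path' )

lookup = TemplateLookup(
    directories=[ 'templates' ],
    module_directory=cache_path,  # compiled template modules land here for reuse
)
template = lookup.get_template( 'index.mako' )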
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9ef517d8e767/
Changeset: 9ef517d8e767
User: jmchilton
Date: 2015-01-23 19:13:25+00:00
Summary: Merged in BjoernGruening/galaxy-central-1/compressed_upload (pull request #630)
Add CompressedArchive as a datatype and do not uncompress it during upload.
Affected #: 3 files
diff -r a6855d0b02d70cd35ae312f48bd704590805ca15 -r 9ef517d8e767037e1633f6063c6edb3023f86499 config/datatypes_conf.xml.sample
--- a/config/datatypes_conf.xml.sample
+++ b/config/datatypes_conf.xml.sample
@@ -141,6 +141,8 @@
<datatype extension="pbm" type="galaxy.datatypes.images:Pbm" mimetype="image/pbm"/><datatype extension="pgm" type="galaxy.datatypes.images:Pgm" mimetype="image/pgm"/><datatype extension="rna_eps" type="galaxy.datatypes.sequence:RNADotPlotMatrix" mimetype="image/eps" display_in_upload="True"/>
+ <datatype extension="searchgui_archive" type="galaxy.datatypes.binary:CompressedArchive" subclass="True" display_in_upload="True"/>
+ <datatype extension="peptideshaker_archive" type="galaxy.datatypes.binary:CompressedArchive" subclass="True" display_in_upload="True"/><datatype extension="eps" type="galaxy.datatypes.images:Eps" mimetype="image/eps"/><datatype extension="rast" type="galaxy.datatypes.images:Rast" mimetype="image/rast"/><datatype extension="laj" type="galaxy.datatypes.images:Laj"/>
diff -r a6855d0b02d70cd35ae312f48bd704590805ca15 -r 9ef517d8e767037e1633f6063c6edb3023f86499 lib/galaxy/datatypes/binary.py
--- a/lib/galaxy/datatypes/binary.py
+++ b/lib/galaxy/datatypes/binary.py
@@ -106,6 +106,30 @@
Binary.register_unsniffable_binary_ext("ab1")
+class CompressedArchive( Binary ):
+ """
+ Class describing a compressed binary file.
+ This class can be subclassed to implement archive filetypes that will not be unpacked by upload.py.
+ """
+ file_ext = "compressed_archive"
+ compressed = True
+
+ def set_peek( self, dataset, is_multi_byte=False ):
+ if not dataset.dataset.purged:
+ dataset.peek = "Compressed binary file"
+ dataset.blurb = data.nice_size( dataset.get_size() )
+ else:
+ dataset.peek = 'file does not exist'
+ dataset.blurb = 'file purged from disk'
+
+ def display_peek( self, dataset ):
+ try:
+ return dataset.peek
+ except:
+ return "Compressed binary file (%s)" % ( data.nice_size( dataset.get_size() ) )
+
+Binary.register_unsniffable_binary_ext("compressed_archive")
+
class GenericAsn1Binary( Binary ):
"""Class for generic ASN.1 binary format"""
diff -r a6855d0b02d70cd35ae312f48bd704590805ca15 -r 9ef517d8e767037e1633f6063c6edb3023f86499 tools/data_source/upload.py
--- a/tools/data_source/upload.py
+++ b/tools/data_source/upload.py
@@ -120,171 +120,176 @@
data_type = type_info[0]
ext = type_info[1]
if not data_type:
- # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
- is_gzipped, is_valid = check_gzip( dataset.path )
- if is_gzipped and not is_valid:
- file_err( 'The gzipped uploaded file contains inappropriate content', dataset, json_file )
- return
- elif is_gzipped and is_valid:
- if link_data_only == 'copy_files':
- # We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
- CHUNK_SIZE = 2**20 # 1Mb
- fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
- gzipped_file = gzip.GzipFile( dataset.path, 'rb' )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- file_err( 'Problem decompressing gzipped data', dataset, json_file )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- # Replace the gzipped file with the decompressed file if it's safe to do so
- if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
- dataset.path = uncompressed
- else:
- shutil.move( uncompressed, dataset.path )
- os.chmod(dataset.path, 0644)
- dataset.name = dataset.name.rstrip( '.gz' )
- data_type = 'gzip'
- if not data_type and bz2 is not None:
- # See if we have a bz2 file, much like gzip
- is_bzipped, is_valid = check_bz2( dataset.path )
- if is_bzipped and not is_valid:
+ root_datatype = registry.get_datatype_by_extension( dataset.file_type )
+ if getattr( root_datatype, 'compressed', False ):
+ data_type = 'compressed archive'
+ ext = dataset.file_type
+ else:
+ # See if we have a gzipped file, which, if it passes our restrictions, we'll uncompress
+ is_gzipped, is_valid = check_gzip( dataset.path )
+ if is_gzipped and not is_valid:
file_err( 'The gzipped uploaded file contains inappropriate content', dataset, json_file )
return
- elif is_bzipped and is_valid:
+ elif is_gzipped and is_valid:
if link_data_only == 'copy_files':
- # We need to uncompress the temp_name file
+ # We need to uncompress the temp_name file, but BAM files must remain compressed in the BGZF format
CHUNK_SIZE = 2**20 # 1Mb
- fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_bunzip2_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
- bzipped_file = bz2.BZ2File( dataset.path, 'rb' )
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_gunzip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
+ gzipped_file = gzip.GzipFile( dataset.path, 'rb' )
while 1:
try:
- chunk = bzipped_file.read( CHUNK_SIZE )
+ chunk = gzipped_file.read( CHUNK_SIZE )
except IOError:
os.close( fd )
os.remove( uncompressed )
- file_err( 'Problem decompressing bz2 compressed data', dataset, json_file )
+ file_err( 'Problem decompressing gzipped data', dataset, json_file )
return
if not chunk:
break
os.write( fd, chunk )
os.close( fd )
- bzipped_file.close()
- # Replace the bzipped file with the decompressed file if it's safe to do so
+ gzipped_file.close()
+ # Replace the gzipped file with the decompressed file if it's safe to do so
if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
dataset.path = uncompressed
else:
shutil.move( uncompressed, dataset.path )
os.chmod(dataset.path, 0644)
- dataset.name = dataset.name.rstrip( '.bz2' )
- data_type = 'bz2'
- if not data_type:
- # See if we have a zip archive
- is_zipped = check_zip( dataset.path )
- if is_zipped:
- if link_data_only == 'copy_files':
- CHUNK_SIZE = 2**20 # 1Mb
- uncompressed = None
- uncompressed_name = None
- unzipped = False
- z = zipfile.ZipFile( dataset.path )
- for name in z.namelist():
- if name.endswith('/'):
- continue
- if unzipped:
- stdout = 'ZIP file contained more than one file, only the first file was added to Galaxy.'
- break
- fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_zip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
- if sys.version_info[:2] >= ( 2, 6 ):
- zipped_file = z.open( name )
- while 1:
+ dataset.name = dataset.name.rstrip( '.gz' )
+ data_type = 'gzip'
+ if not data_type and bz2 is not None:
+ # See if we have a bz2 file, much like gzip
+ is_bzipped, is_valid = check_bz2( dataset.path )
+ if is_bzipped and not is_valid:
+ file_err( 'The bz2 compressed uploaded file contains inappropriate content', dataset, json_file )
+ return
+ elif is_bzipped and is_valid:
+ if link_data_only == 'copy_files':
+ # We need to uncompress the temp_name file
+ CHUNK_SIZE = 2**20 # 1Mb
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_bunzip2_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
+ bzipped_file = bz2.BZ2File( dataset.path, 'rb' )
+ while 1:
+ try:
+ chunk = bzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ file_err( 'Problem decompressing bz2 compressed data', dataset, json_file )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ bzipped_file.close()
+ # Replace the bzipped file with the decompressed file if it's safe to do so
+ if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
+ dataset.path = uncompressed
+ else:
+ shutil.move( uncompressed, dataset.path )
+ os.chmod(dataset.path, 0644)
+ dataset.name = dataset.name.rstrip( '.bz2' )
+ data_type = 'bz2'
+ if not data_type:
+ # See if we have a zip archive
+ is_zipped = check_zip( dataset.path )
+ if is_zipped:
+ if link_data_only == 'copy_files':
+ CHUNK_SIZE = 2**20 # 1Mb
+ uncompressed = None
+ uncompressed_name = None
+ unzipped = False
+ z = zipfile.ZipFile( dataset.path )
+ for name in z.namelist():
+ if name.endswith('/'):
+ continue
+ if unzipped:
+ stdout = 'ZIP file contained more than one file, only the first file was added to Galaxy.'
+ break
+ fd, uncompressed = tempfile.mkstemp( prefix='data_id_%s_upload_zip_' % dataset.dataset_id, dir=os.path.dirname( output_path ), text=False )
+ if sys.version_info[:2] >= ( 2, 6 ):
+ zipped_file = z.open( name )
+ while 1:
+ try:
+ chunk = zipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ file_err( 'Problem decompressing zipped data', dataset, json_file )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ zipped_file.close()
+ uncompressed_name = name
+ unzipped = True
+ else:
+ # python < 2.6 doesn't have a way to read members in chunks(!)
try:
- chunk = zipped_file.read( CHUNK_SIZE )
+ outfile = open( uncompressed, 'wb' )
+ outfile.write( z.read( name ) )
+ outfile.close()
+ uncompressed_name = name
+ unzipped = True
except IOError:
os.close( fd )
os.remove( uncompressed )
file_err( 'Problem decompressing zipped data', dataset, json_file )
return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- zipped_file.close()
- uncompressed_name = name
- unzipped = True
+ z.close()
+ # Replace the zipped file with the decompressed file if it's safe to do so
+ if uncompressed is not None:
+ if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
+ dataset.path = uncompressed
+ else:
+ shutil.move( uncompressed, dataset.path )
+ os.chmod(dataset.path, 0644)
+ dataset.name = uncompressed_name
+ data_type = 'zip'
+ if not data_type:
+ # TODO refactor this logic. check_binary isn't guaranteed to be
+ # correct since it only looks at whether the first 100 chars are
+ # printable or not. If someone specifies a known unsniffable
+ # binary datatype and check_binary fails, the file gets mangled.
+ if check_binary( dataset.path ) or Binary.is_ext_unsniffable(dataset.file_type):
+ # We have a binary dataset, but it is not Bam, Sff or Pdf
+ data_type = 'binary'
+ #binary_ok = False
+ parts = dataset.name.split( "." )
+ if len( parts ) > 1:
+ ext = parts[-1].strip().lower()
+ if not Binary.is_ext_unsniffable(ext):
+ file_err( 'The uploaded binary file contains inappropriate content', dataset, json_file )
+ return
+ elif Binary.is_ext_unsniffable(ext) and dataset.file_type != ext:
+ err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
+ file_err( err_msg, dataset, json_file )
+ return
+ if not data_type:
+ # We must have a text file
+ if check_html( dataset.path ):
+ file_err( 'The uploaded file contains inappropriate HTML content', dataset, json_file )
+ return
+ if data_type != 'binary':
+ if link_data_only == 'copy_files':
+ if dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
+ in_place = False
+ # Convert universal line endings to Posix line endings, but allow the user to turn it off,
+ # so that it becomes possible to upload gzip, bz2 or zip files with binary data without
+ # corrupting the content of those files.
+ if dataset.to_posix_lines:
+ tmpdir = output_adjacent_tmpdir( output_path )
+ tmp_prefix = 'data_id_%s_convert_' % dataset.dataset_id
+ if dataset.space_to_tab:
+ line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
else:
- # python < 2.6 doesn't have a way to read members in chunks(!)
- try:
- outfile = open( uncompressed, 'wb' )
- outfile.write( z.read( name ) )
- outfile.close()
- uncompressed_name = name
- unzipped = True
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- file_err( 'Problem decompressing zipped data', dataset, json_file )
- return
- z.close()
- # Replace the zipped file with the decompressed file if it's safe to do so
- if uncompressed is not None:
- if dataset.type in ( 'server_dir', 'path_paste' ) or not in_place:
- dataset.path = uncompressed
- else:
- shutil.move( uncompressed, dataset.path )
- os.chmod(dataset.path, 0644)
- dataset.name = uncompressed_name
- data_type = 'zip'
- if not data_type:
- # TODO refactor this logic. check_binary isn't guaranteed to be
- # correct since it only looks at whether the first 100 chars are
- # printable or not. If someone specifies a known unsniffable
- # binary datatype and check_binary fails, the file gets mangled.
- if check_binary( dataset.path ) or Binary.is_ext_unsniffable(dataset.file_type):
- # We have a binary dataset, but it is not Bam, Sff or Pdf
- data_type = 'binary'
- #binary_ok = False
- parts = dataset.name.split( "." )
- if len( parts ) > 1:
- ext = parts[-1].strip().lower()
- if not Binary.is_ext_unsniffable(ext):
- file_err( 'The uploaded binary file contains inappropriate content', dataset, json_file )
- return
- elif Binary.is_ext_unsniffable(ext) and dataset.file_type != ext:
- err_msg = "You must manually set the 'File Format' to '%s' when uploading %s files." % ( ext.capitalize(), ext )
- file_err( err_msg, dataset, json_file )
- return
- if not data_type:
- # We must have a text file
- if check_html( dataset.path ):
- file_err( 'The uploaded file contains inappropriate HTML content', dataset, json_file )
- return
- if data_type != 'binary':
- if link_data_only == 'copy_files':
- if dataset.type in ( 'server_dir', 'path_paste' ) and data_type not in [ 'gzip', 'bz2', 'zip' ]:
- in_place = False
- # Convert universal line endings to Posix line endings, but allow the user to turn it off,
- # so that it becomes possible to upload gzip, bz2 or zip files with binary data without
- # corrupting the content of those files.
- if dataset.to_posix_lines:
- tmpdir = output_adjacent_tmpdir( output_path )
- tmp_prefix = 'data_id_%s_convert_' % dataset.dataset_id
- if dataset.space_to_tab:
- line_count, converted_path = sniff.convert_newlines_sep2tabs( dataset.path, in_place=in_place, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
- else:
- line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
- if dataset.file_type == 'auto':
- ext = sniff.guess_ext( dataset.path, registry.sniff_order )
- else:
- ext = dataset.file_type
- data_type = ext
+ line_count, converted_path = sniff.convert_newlines( dataset.path, in_place=in_place, tmp_dir=tmpdir, tmp_prefix=tmp_prefix )
+ if dataset.file_type == 'auto':
+ ext = sniff.guess_ext( dataset.path, registry.sniff_order )
+ else:
+ ext = dataset.file_type
+ data_type = ext
# Save job info for the framework
if ext == 'auto' and dataset.ext:
ext = dataset.ext
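
The upload.py change boils down to one early exit: before any gzip/bz2/zip handling, look up the datatype the user selected, and if it is marked compressed, keep the bytes untouched. The same logic in isolation, as a sketch with a stubbed registry rather than the real upload machinery:

# Sketch of the new short-circuit; StubRegistry and StubArchive are
# stand-ins for the real datatypes registry and CompressedArchive.
class StubArchive( object ):
    compressed = True  # mirrors CompressedArchive.compressed

class StubRegistry( object ):
    def __init__( self, datatypes ):
        self.datatypes = datatypes
    def get_datatype_by_extension( self, ext ):
        return self.datatypes.get( ext )

def resolve_type( registry, file_type ):
    root_datatype = registry.get_datatype_by_extension( file_type )
    if getattr( root_datatype, 'compressed', False ):
        # Keep the archive packed; skip decompression entirely.
        return 'compressed archive', file_type
    return None, None  # fall through to the gzip/bz2/zip sniffing

registry = StubRegistry( { 'searchgui_archive': StubArchive() } )
assert resolve_type( registry, 'searchgui_archive' ) == ( 'compressed archive', 'searchgui_archive' )
assert resolve_type( registry, 'txt' ) == ( None, None )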
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a6855d0b02d7/
Changeset: a6855d0b02d7
User: dannon
Date: 2015-01-23 18:23:15+00:00
Summary: Remove the arbitrary limit of 50 uploads via FTP. If this turns out to be an issue, we can revisit it. This also fixes an old bug: if you manually selected more than 50 files, unexpected things would happen without erroring.
Affected #: 3 files
diff -r a626b45a14dce92771043ae55e2872b720f15f00 -r a6855d0b02d70cd35ae312f48bd704590805ca15 client/galaxy/scripts/utils/uploadbox.js
--- a/client/galaxy/scripts/utils/uploadbox.js
+++ b/client/galaxy/scripts/utils/uploadbox.js
@@ -12,7 +12,6 @@
url : '',
paramname : 'content',
maxfilesize : 2048,
- maxfilenumber : 50,
dragover : function() {},
dragleave : function() {},
announce : function() {},
@@ -129,9 +128,6 @@
// add files to queue
for (var i = 0; i < files.length; i++)
{
- // check
- if(queue_length >= opts.maxfilenumber)
- break;
// new identifier
var index = String(queue_index++);
diff -r a626b45a14dce92771043ae55e2872b720f15f00 -r a6855d0b02d70cd35ae312f48bd704590805ca15 static/scripts/packed/utils/uploadbox.js
--- a/static/scripts/packed/utils/uploadbox.js
+++ b/static/scripts/packed/utils/uploadbox.js
@@ -1,1 +1,1 @@
-(function(c){jQuery.event.props.push("dataTransfer");var h={url:"",paramname:"content",maxfilesize:2048,maxfilenumber:50,dragover:function(){},dragleave:function(){},announce:function(){},initialize:function(){},progress:function(){},success:function(){},error:function(k,l,m){alert(m)},complete:function(){},error_filesize:"File exceeds 2GB. Please use an FTP client.",error_default:"Please make sure the file is available.",error_server:"Upload request failed.",error_login:"Uploads require you to log in.",error_missing:"No upload content available."};var a={};var e={};var f=0;var j=0;var d=false;var g=false;var b=null;var i=null;c.fn.uploadbox=function(A){a=c.extend({},h,A);b=this;b.append('<input id="uploadbox_input" type="file" style="display: none" multiple>');b.on("drop",o);b.on("dragover",p);b.on("dragleave",x);c("#uploadbox_input").change(function(B){z(B.target.files);c(this).val("")});function o(B){if(!B.dataTransfer){return}z(B.dataTransfer.files);B.preventDefault();return false}function p(B){B.preventDefault();a.dragover.call(B)}function x(B){B.stopPropagation();a.dragleave.call(B)}function k(B){if(B.lengthComputable){a.progress(this.index,this.file,Math.round((B.loaded*100)/B.total))}}function z(D){if(d){return}var E=f;for(var C=0;C<D.length;C++){if(j>=a.maxfilenumber){break}var B=String(f++);e[B]=D[C];a.announce(B,e[B],"");j++}return E}function r(B){if(e[B]){delete e[B];j--}}function m(){if(j==0||g){g=false;d=false;a.complete();return}else{d=true}var D=-1;for(var F in e){D=F;break}var E=e[D];r(D);var C=E.size;var H=E.mode;var B=1048576*a.maxfilesize;if(C<B||E.mode=="ftp"){var G=a.initialize(D,E);if(G){q(D,E,G)}else{u(D,E,a.error_missing)}}else{u(D,E,a.error_filesize)}}function q(B,D,E){var F=new FormData();for(var C in E){F.append(C,E[C])}if(D.size>0&&a.paramname){F.append(a.paramname,D,D.name)}i=new XMLHttpRequest();i.open("POST",a.url,true);i.setRequestHeader("Accept","application/json");i.setRequestHeader("Cache-Control","no-cache");i.setRequestHeader("X-Requested-With","XMLHttpRequest");i.onreadystatechange=function(){if(i.readyState!=i.DONE){return}var G=null;if(i.responseText){try{G=jQuery.parseJSON(i.responseText)}catch(H){G=i.responseText}}if(i.status<200||i.status>299){var I=i.statusText;if(i.status==403){I=a.error_login}else{if(i.status==0){I=a.error_server}else{if(!I){I=a.error_default}}}u(B,D,I+" ("+i.status+")")}else{y(B,D,G)}};i.upload.index=B;i.upload.file=D;i.upload.addEventListener("progress",k,false);i.send(F)}function y(B,C,D){a.success(B,C,D);m()}function u(B,C,D){a.error(B,C,D);m()}function w(){c("#uploadbox_input").trigger("click")}function t(B){for(B in e){r(B)}}function l(){if(!d){d=true;m()}}function v(){g=true}function n(B){a=c.extend({},a,B);return a}function s(){return window.File&&window.FormData&&window.XMLHttpRequest&&window.FileList}return{select:w,add:z,remove:r,start:l,stop:v,reset:t,configure:n,compatible:s}}})(jQuery);
\ No newline at end of file
+(function(c){jQuery.event.props.push("dataTransfer");var h={url:"",paramname:"content",maxfilesize:2048,dragover:function(){},dragleave:function(){},announce:function(){},initialize:function(){},progress:function(){},success:function(){},error:function(k,l,m){alert(m)},complete:function(){},error_filesize:"File exceeds 2GB. Please use an FTP client.",error_default:"Please make sure the file is available.",error_server:"Upload request failed.",error_login:"Uploads require you to log in.",error_missing:"No upload content available."};var a={};var e={};var f=0;var j=0;var d=false;var g=false;var b=null;var i=null;c.fn.uploadbox=function(A){a=c.extend({},h,A);b=this;b.append('<input id="uploadbox_input" type="file" style="display: none" multiple>');b.on("drop",o);b.on("dragover",p);b.on("dragleave",x);c("#uploadbox_input").change(function(B){z(B.target.files);c(this).val("")});function o(B){if(!B.dataTransfer){return}z(B.dataTransfer.files);B.preventDefault();return false}function p(B){B.preventDefault();a.dragover.call(B)}function x(B){B.stopPropagation();a.dragleave.call(B)}function k(B){if(B.lengthComputable){a.progress(this.index,this.file,Math.round((B.loaded*100)/B.total))}}function z(D){if(d){return}var E=f;for(var C=0;C<D.length;C++){var B=String(f++);e[B]=D[C];a.announce(B,e[B],"");j++}return E}function r(B){if(e[B]){delete e[B];j--}}function m(){if(j==0||g){g=false;d=false;a.complete();return}else{d=true}var D=-1;for(var F in e){D=F;break}var E=e[D];r(D);var C=E.size;var H=E.mode;var B=1048576*a.maxfilesize;if(C<B||E.mode=="ftp"){var G=a.initialize(D,E);if(G){q(D,E,G)}else{u(D,E,a.error_missing)}}else{u(D,E,a.error_filesize)}}function q(B,D,E){var F=new FormData();for(var C in E){F.append(C,E[C])}if(D.size>0&&a.paramname){F.append(a.paramname,D,D.name)}i=new XMLHttpRequest();i.open("POST",a.url,true);i.setRequestHeader("Accept","application/json");i.setRequestHeader("Cache-Control","no-cache");i.setRequestHeader("X-Requested-With","XMLHttpRequest");i.onreadystatechange=function(){if(i.readyState!=i.DONE){return}var G=null;if(i.responseText){try{G=jQuery.parseJSON(i.responseText)}catch(H){G=i.responseText}}if(i.status<200||i.status>299){var I=i.statusText;if(i.status==403){I=a.error_login}else{if(i.status==0){I=a.error_server}else{if(!I){I=a.error_default}}}u(B,D,I+" ("+i.status+")")}else{y(B,D,G)}};i.upload.index=B;i.upload.file=D;i.upload.addEventListener("progress",k,false);i.send(F)}function y(B,C,D){a.success(B,C,D);m()}function u(B,C,D){a.error(B,C,D);m()}function w(){c("#uploadbox_input").trigger("click")}function t(B){for(B in e){r(B)}}function l(){if(!d){d=true;m()}}function v(){g=true}function n(B){a=c.extend({},a,B);return a}function s(){return window.File&&window.FormData&&window.XMLHttpRequest&&window.FileList}return{select:w,add:z,remove:r,start:l,stop:v,reset:t,configure:n,compatible:s}}})(jQuery);
\ No newline at end of file
diff -r a626b45a14dce92771043ae55e2872b720f15f00 -r a6855d0b02d70cd35ae312f48bd704590805ca15 static/scripts/utils/uploadbox.js
--- a/static/scripts/utils/uploadbox.js
+++ b/static/scripts/utils/uploadbox.js
@@ -12,7 +12,6 @@
url : '',
paramname : 'content',
maxfilesize : 2048,
- maxfilenumber : 50,
dragover : function() {},
dragleave : function() {},
announce : function() {},
@@ -129,9 +128,6 @@
// add files to queue
for (var i = 0; i < files.length; i++)
{
- // check
- if(queue_length >= opts.maxfilenumber)
- break;
// new identifier
var index = String(queue_index++);
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a059e0544235/
Changeset: a059e0544235
User: carlfeberhard
Date: 2015-01-23 17:50:21+00:00
Summary: Plugins, Scatterplot: fix library imports to use the proper paths after the ui.js breakup
Affected #: 1 file
diff -r a95230bfcbe7777562ccf808cb86a471aac4bda4 -r a059e05442354478bfa551621718afbc18cc6263 config/plugins/visualizations/scatterplot/templates/scatterplot.mako
--- a/config/plugins/visualizations/scatterplot/templates/scatterplot.mako
+++ b/config/plugins/visualizations/scatterplot/templates/scatterplot.mako
@@ -31,9 +31,8 @@
'libs/backbone/backbone',
'libs/d3',
'libs/handlebars.runtime',
- 'mvc/ui',
- 'jq-plugins/ui/peek-column-selector',
- 'jq-plugins/ui/pagination',
+ 'ui/peek-column-selector',
+ 'ui/pagination',
'mvc/visualization/visualization-model' )}
${h.javascript_link( root + 'plugins/visualizations/scatterplot/static/scatterplot-edit.js' )}