使用 resumable.js 添加散列
Adding hashing with resumable.js
我正在尝试使用 resumable.js 为每个分块计算散列，为此挂钩了 "fileAdded" 事件。
我们如何为每个块附加哈希?
可以通过如下方式挂钩 'fileAdded' 事件来实现。
// Hook resumable.js's 'fileAdded' event to compute an MD5 hash for every
// upload chunk before the upload starts. computeHashes calls itself
// recursively (from the FileReader's onloadend callback) once per chunk;
// when the last chunk has been hashed it kicks off r.upload().
r.on('fileAdded', function computeHashes(resumableFile, offset, fileReader) {
  var chunkSize = resumableFile.resumableObj.opts.chunkSize;
  var numChunks = Math.max(Math.floor(resumableFile.file.size / chunkSize), 1);
  var forceChunkSize = false; // mirrors resumable.js's 'forceChunkSize' option
  var startByte;
  var endByte;
  var bytes;
  // Pick whichever Blob.slice implementation this browser supports.
  var sliceFunc = (resumableFile.file.slice ? 'slice'
    : (resumableFile.file.mozSlice ? 'mozSlice'
      : (resumableFile.file.webkitSlice ? 'webkitSlice' : 'slice')));

  resumableFile.hashes = resumableFile.hashes || [];
  fileReader = fileReader || new FileReader();
  offset = offset || 0;

  startByte = offset * chunkSize;
  endByte = Math.min(resumableFile.file.size, (offset + 1) * chunkSize);
  // Fold a trailing partial chunk into the last full chunk, matching how
  // resumable.js itself chunks the file when forceChunkSize is off.
  if (resumableFile.file.size - endByte < chunkSize && !forceChunkSize) {
    endByte = resumableFile.file.size;
  }
  bytes = resumableFile.file[sliceFunc](startByte, endByte);

  fileReader.onloadend = function (e) {
    // Hash this chunk and record it at its offset position.
    resumableFile.hashes.push(SparkMD5.ArrayBuffer.hash(e.target.result));
    if (numChunks > offset + 1) {
      // More chunks left: hash the next one, reusing the same FileReader.
      computeHashes(resumableFile, offset + 1, fileReader);
    } else if (numChunks === offset + 1) {
      // All chunks hashed: start the actual upload.
      r.upload();
    }
    // The patched resumable.js invokes opts.query per chunk with the chunk
    // offset as the second argument (see the modification below), so we can
    // return that chunk's checksum with each request.
    resumableFile.resumableObj.opts.query = function (file, chunkOffset) {
      return { 'checksum': file.hashes[chunkOffset] };
    };
  };
  fileReader.readAsArrayBuffer(bytes);

  // One-time UI setup. Guarded by offset === 0: without this guard the
  // recursion would re-show the progress bar and append one list entry
  // PER CHUNK instead of per file.
  if (offset === 0) {
    // Show progress bar
    $('.resumable-progress, .resumable-list').show();
    // Show pause, hide resume
    $('.resumable-progress .progress-resume-link').hide();
    $('.resumable-progress .progress-pause-link').show();
    // Add the file to the list
    $('.resumable-list').append('<li class="resumable-file-' + resumableFile.uniqueIdentifier + '">Uploading <span class="resumable-file-name"></span> <span class="resumable-file-progress"></span>');
    $('.resumable-file-' + resumableFile.uniqueIdentifier + ' .resumable-file-name').html(resumableFile.fileName);
  }
});
同时把 resumable.js 源码中 var query 的取值改为：
(typeof $.resumableObj.opts.query == "function") ? $.resumableObj.opts.query($.fileObj, offset) : $.resumableObj.opts.query;
这样 query 函数在被调用时就能接收到当前分块的偏移量。
我正在尝试使用 resumable.js 为每个分块计算散列，为此挂钩了 "fileAdded" 事件。我们如何为每个块附加哈希？
可以通过如下方式挂钩 'fileAdded' 事件来实现。
// Hook resumable.js's 'fileAdded' event to compute an MD5 hash for every
// upload chunk before the upload starts. computeHashes calls itself
// recursively (from the FileReader's onloadend callback) once per chunk;
// when the last chunk has been hashed it kicks off r.upload().
r.on('fileAdded', function computeHashes(resumableFile, offset, fileReader) {
  var chunkSize = resumableFile.resumableObj.opts.chunkSize;
  var numChunks = Math.max(Math.floor(resumableFile.file.size / chunkSize), 1);
  var forceChunkSize = false; // mirrors resumable.js's 'forceChunkSize' option
  var startByte;
  var endByte;
  var bytes;
  // Pick whichever Blob.slice implementation this browser supports.
  var sliceFunc = (resumableFile.file.slice ? 'slice'
    : (resumableFile.file.mozSlice ? 'mozSlice'
      : (resumableFile.file.webkitSlice ? 'webkitSlice' : 'slice')));

  resumableFile.hashes = resumableFile.hashes || [];
  fileReader = fileReader || new FileReader();
  offset = offset || 0;

  startByte = offset * chunkSize;
  endByte = Math.min(resumableFile.file.size, (offset + 1) * chunkSize);
  // Fold a trailing partial chunk into the last full chunk, matching how
  // resumable.js itself chunks the file when forceChunkSize is off.
  if (resumableFile.file.size - endByte < chunkSize && !forceChunkSize) {
    endByte = resumableFile.file.size;
  }
  bytes = resumableFile.file[sliceFunc](startByte, endByte);

  fileReader.onloadend = function (e) {
    // Hash this chunk and record it at its offset position.
    resumableFile.hashes.push(SparkMD5.ArrayBuffer.hash(e.target.result));
    if (numChunks > offset + 1) {
      // More chunks left: hash the next one, reusing the same FileReader.
      computeHashes(resumableFile, offset + 1, fileReader);
    } else if (numChunks === offset + 1) {
      // All chunks hashed: start the actual upload.
      r.upload();
    }
    // The patched resumable.js invokes opts.query per chunk with the chunk
    // offset as the second argument (see the modification below), so we can
    // return that chunk's checksum with each request.
    resumableFile.resumableObj.opts.query = function (file, chunkOffset) {
      return { 'checksum': file.hashes[chunkOffset] };
    };
  };
  fileReader.readAsArrayBuffer(bytes);

  // One-time UI setup. Guarded by offset === 0: without this guard the
  // recursion would re-show the progress bar and append one list entry
  // PER CHUNK instead of per file.
  if (offset === 0) {
    // Show progress bar
    $('.resumable-progress, .resumable-list').show();
    // Show pause, hide resume
    $('.resumable-progress .progress-resume-link').hide();
    $('.resumable-progress .progress-pause-link').show();
    // Add the file to the list
    $('.resumable-list').append('<li class="resumable-file-' + resumableFile.uniqueIdentifier + '">Uploading <span class="resumable-file-name"></span> <span class="resumable-file-progress"></span>');
    $('.resumable-file-' + resumableFile.uniqueIdentifier + ' .resumable-file-name').html(resumableFile.fileName);
  }
});
同时把 resumable.js 源码中 var query 的取值改为：
(typeof $.resumableObj.opts.query == "function") ? $.resumableObj.opts.query($.fileObj, offset) : $.resumableObj.opts.query;
这样 query 函数在被调用时就能接收到当前分块的偏移量。