第一步:设计 worker。引入 md5 依赖(SparkMD5),计算整个文件的 md5,并将文件切片后计算每个切片的 md5 与对应的 chunk 数据。
importScripts('./md5.js');
/**
 * Read a File/Blob fully into memory and compute its MD5 digest.
 *
 * @param {Blob} file - the whole file or a single chunk to hash
 * @returns {Promise<string>} hex MD5 digest of the content
 */
const handleFileReader = (file) => {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    const spark = new SparkMD5.ArrayBuffer();
    reader.onload = (e) => {
      spark.append(e.target.result); // feed the full ArrayBuffer to the hasher
      resolve(spark.end());          // hex digest of everything appended
    };
    // Bug fix: the original never rejected, so a failed read left this
    // promise pending forever and stalled the whole hashing pipeline.
    reader.onerror = () => {
      reject(reader.error ?? new Error('FileReader failed'));
    };
    reader.readAsArrayBuffer(file);
  });
};
/**
 * Worker entry point: hash the whole file first, then hash and emit every
 * chunk. Messages posted back to the main thread:
 *   { type: 'file_md5', md5, total }     - whole-file hash + chunk count
 *   { type: 'chunk', md5, chunk, index } - one slice and its hash
 *   { type: 'done', hash }               - all chunks processed
 */
self.onmessage = async (event) => {
  const { file, chunkSize } = event.data;
  const chunks = Math.ceil(file.size / chunkSize);
  // Hash the complete file up front so the main thread can ask the server
  // which chunks were already uploaded (resume support) before chunks arrive.
  const fileMd5 = await handleFileReader(file);
  self.postMessage({ type: 'file_md5', md5: fileMd5, total: chunks });
  for (let i = 0; i < chunks; i += 1) {
    const start = i * chunkSize;
    const end = Math.min(file.size, start + chunkSize);
    const chunk = file.slice(start, end);
    const md5 = await handleFileReader(chunk);
    self.postMessage({ type: 'chunk', md5, chunk, index: i });
  }
  // Bug fix: the original posted spark.end() from a SparkMD5 instance that
  // never had any data appended (i.e. the MD5 of empty input). Report the
  // real whole-file hash instead.
  self.postMessage({ type: 'done', hash: fileMd5 });
  self.close();
};
第二步:设计上传。通过任务队列控制并发上传数量,并结合服务端已上传切片列表实现断点续传。
// ---- shared upload state (module scope) ----
const preparedChunks = [];  // chunks produced by the worker: { md5, chunk, index }
let fileMd5 = '';           // MD5 of the whole file
let totalSize = 0;          // total number of chunks
let uploadedCount = 0;      // chunks uploaded so far
let doingCount = 0;         // uploads currently in flight
const maxNum = 5;           // max concurrent uploads
const taskList = [];        // queued upload tasks: { task, resolve, reject }
let uploadedChunkList = []; // chunks the server already has (resume support)
/**
 * Concurrency pump: start queued upload tasks until `maxNum` are in flight.
 * Each finished task (success or failure) frees a slot and re-runs the pump.
 */
const doTask = () => {
  while (doingCount < maxNum && taskList.length > 0) {
    const { task, resolve, reject } = taskList.shift();
    doingCount += 1;
    task()
      .then((res) => {
        if (uploadedCount === totalSize) {
          console.log('全部上传完了--doTask:', 111);
        } else {
          // Bug fix: Math.floor(...).toFixed(1) always printed "NN.0" —
          // floor discarded the decimal that toFixed(1) was meant to show.
          const progress = ((uploadedCount / totalSize) * 100).toFixed(1);
          console.log('上传中--doTask:', `${ progress }%`);
        }
        resolve(res);
      })
      .catch((error) => {
        reject(error);
      })
      .finally(() => {
        doingCount -= 1;
        doTask(); // a slot opened up — pull the next queued task
      });
  }
};
/**
 * Queue an upload task and kick the concurrency pump.
 * The returned promise settles with the task's own outcome once the pump
 * eventually runs it.
 */
const addTask = (task) =>
  new Promise((resolve, reject) => {
    const entry = { task, resolve, reject };
    taskList.push(entry);
    doTask();
  });
/**
 * Upload one chunk, or skip it if the server already has it.
 *
 * @param {Blob} chunk - the slice of the file to upload
 * @param {string} md5 - the chunk's MD5, used as its identity for resuming
 */
const uploadChunks = async (chunk, md5) => {
  // Ensure the server's "already uploaded" list has been fetched (cached).
  await getUploadedChunks(fileMd5);
  // Resume support: skip chunks the server already has. Accept either plain
  // md5 strings or { chunk: md5 } records — the original used includes() on
  // an object list, which could never match a string md5.
  const alreadyUploaded = uploadedChunkList.some(
    (item) => item === md5 || item?.chunk === md5,
  );
  if (alreadyUploaded) {
    uploadedCount += 1; // counts toward progress without a network round-trip
    return;             // no need to wrap this in a new Promise
  }
  // Mock upload: replace the setTimeout with a real request in production.
  const task = () =>
    new Promise((resolve) => {
      setTimeout(() => {
        uploadedCount += 1;
        resolve();
      }, Math.round(Math.random() * 2 * 1000));
    });
  try {
    await addTask(task);
    console.log('taskList--uploadChunks:', taskList.length);
  } catch (error) {
    // Bug fix: the original left addTask's rejection unhandled (floating
    // promise with no .catch), which surfaces as an unhandled rejection.
    console.error('chunk upload failed--uploadChunks:', error);
  }
};
/**
 * Hand the file to the worker, collect the whole-file and per-chunk hashes,
 * and start uploading each chunk as its hash arrives.
 *
 * @param {File} file - file selected by the user
 * @returns {Promise<void>} resolves once every chunk has been received from
 *     the worker (not once every chunk has finished uploading)
 */
async function calculateFileHash(file) {
  return new Promise((resolve, reject) => {
    const worker = new Worker('worker.js');
    const chunkSize = 100 * 1024 * 1024; // 100 MB per slice
    // Bug fix: the original also posted a SparkMD5 instance here; class
    // instances with methods don't survive structured clone, and the worker
    // never read that field anyway.
    worker.postMessage({ file, chunkSize });
    let expectedTotal = 0; // chunk count announced by the 'file_md5' message
    worker.onmessage = (event) => {
      const { type, md5, chunk, total, index } = event.data;
      if (type === 'file_md5') {
        fileMd5 = md5;
        totalSize = total;
        expectedTotal = total;
        // Prefetch the server's already-uploaded list (resume support);
        // uploadChunks() awaits the same cached call before each upload.
        getUploadedChunks(md5);
      } else if (type === 'chunk') {
        console.log('md5, chunk--calculateFileHash:', md5, chunk);
        preparedChunks.push({ md5, chunk, index });
        // Bug fix: the original called uploadChunks(md5, chunk), swapping
        // the (chunk, md5) parameter order the function declares.
        uploadChunks(chunk, md5);
        // Bug fix: the original compared against `total`, which is only set
        // on the 'file_md5' message (undefined here), so it never resolved.
        if (preparedChunks.length === expectedTotal) {
          resolve();
        }
      }
    };
    worker.onerror = (error) => {
      reject(error);
    };
  });
}
/**
 * Fetch (mock) the list of chunk md5s the server already has, so uploads can
 * be resumed. The result is cached in `uploadedChunkList`: once the list is
 * non-empty, later calls resolve immediately without refetching.
 *
 * @param {string} fileHash - md5 of the whole file (would key the real API call)
 * @returns {Promise<void>}
 */
async function getUploadedChunks(fileHash) {
  return new Promise((resolve, reject) => {
    // Already fetched — serve from cache.
    // NOTE(review): an empty server response also looks "unfetched" here and
    // would trigger a refetch before every chunk; a separate fetched-flag
    // would be safer — confirm against the real API behavior.
    if (uploadedChunkList.length) {
      resolve();
      return;
    }
    setTimeout(() => {
      // Mock of previously uploaded chunks. Bug fix: the original stored
      // [{ chunk: '' }] objects, which an includes(md5) check on the consumer
      // side could never match; store plain md5 strings instead.
      uploadedChunkList = [
        'd41d8cd98f00b204e9800998ecf8427e', // sample md5 of an uploaded chunk
      ];
      resolve();
    }, Math.round(Math.random() * 2 * 1000));
  });
}
原创声明:本文系作者授权腾讯云开发者社区发表,未经许可,不得转载。
如有侵权,请联系 cloudcommunity@tencent.com 删除。