
Large File Upload

Author: 剁椒鱼鳞
Published 2025-01-10 15:11:21
Included in the column: 前端小学生
  1. Chunked upload: read the file and compute the MD5 of the whole file and of every chunk. This step is relatively slow and blocks the rendering main thread.
  2. Resumable upload: before uploading, send the file MD5, the number of chunks, and each chunk's MD5 to the backend; the backend replies whether the file has been uploaded before and which chunks it already has, and those chunks are skipped (chunks that were only partway through uploading are treated as not uploaded, which is why the chunk size is kept small, at 1 MB). A sketch of this check request follows the list.
  3. Concurrency control: do not fire a hundred upload requests at once; start a new upload task only when a running one completes.
  4. Multi-threaded parsing: because hashing the file takes a long time and blocks the rendering main thread, the file and chunk MD5s are computed in a worker thread.
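
The check described in point 2 is not shown in the code below, so here is a minimal sketch of what it could look like; the `/upload/check` endpoint and the response shape are assumptions for illustration, not part of the original backend contract.

Code language: javascript
// Hypothetical resume check: report the file MD5 and all chunk MD5s, and let
// the backend answer which chunks it already has. Endpoint and response shape
// are assumed, not taken from the article.
async function checkUploaded(fileMd5, chunkMd5List) {
    const res = await fetch('/upload/check', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            fileMd5: fileMd5,            // MD5 of the whole file
            total: chunkMd5List.length,  // number of chunks
            chunks: chunkMd5List,        // MD5 of every chunk
        }),
    });
    // Assumed response: { uploaded: boolean, uploadedChunks: string[] }
    return res.json();
}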

Step 1: set up the worker, import the MD5 dependency, and compute the MD5 of the file and of each chunk.

Code language: javascript
// md5.js is the spark-md5 library, which exposes the SparkMD5 global
importScripts('./md5.js');

// Read a Blob (the whole file or a single chunk) and return its MD5
const handleFileReader = (file) => {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        const spark = new SparkMD5.ArrayBuffer();
        reader.onload = (e) => {
            const arrayBuffer = e.target.result;
            spark.append(arrayBuffer); // feed the buffer into the hash
            const md5 = spark.end();   // finalize and get the MD5 string
            resolve(md5);
        };
        reader.onerror = reject;
        reader.readAsArrayBuffer(file);
    });
};

self.onmessage = async (event) => {
    const { file, chunkSize } = event.data;
    const chunks = Math.ceil(file.size / chunkSize);

    // MD5 of the whole file, reported first so the main thread can ask the
    // backend which chunks already exist
    const md5 = await handleFileReader(file);
    self.postMessage({ type: 'file_md5', md5: md5, total: chunks });

    // Slice the file and report each chunk together with its MD5
    for (let i = 0; i < chunks; i++) {
        const start = i * chunkSize;
        const end = Math.min(file.size, start + chunkSize);
        const chunk = file.slice(start, end);
        const chunkMd5 = await handleFileReader(chunk);
        self.postMessage({ type: 'chunk', md5: chunkMd5, chunk: chunk, index: i });
    }
    self.postMessage({ type: 'done' });
    self.close();
};
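
A note on the design: the worker above reads every byte twice, once for the whole-file MD5 and once per chunk. A possible variant, sketched below, feeds each chunk's buffer into a single incremental SparkMD5 instance; this assumes a browser with `Blob.arrayBuffer()` and means the file MD5 is only known after the last chunk, so the main thread would have to defer its backend check.

Code language: javascript
// Sketch of an incremental variant (assumption: the backend check can wait
// until all chunks have been read).
self.onmessage = async (event) => {
    const { file, chunkSize } = event.data;
    const chunks = Math.ceil(file.size / chunkSize);
    const fileSpark = new SparkMD5.ArrayBuffer(); // accumulates the whole-file hash

    for (let i = 0; i < chunks; i++) {
        const chunk = file.slice(i * chunkSize, Math.min(file.size, (i + 1) * chunkSize));
        const buffer = await chunk.arrayBuffer();            // read the chunk once
        fileSpark.append(buffer);                            // contributes to the file MD5
        const chunkMd5 = SparkMD5.ArrayBuffer.hash(buffer);  // MD5 of this chunk alone
        self.postMessage({ type: 'chunk', md5: chunkMd5, chunk: chunk, index: i, total: chunks });
    }
    self.postMessage({ type: 'file_md5', md5: fileSpark.end(), total: chunks });
    self.close();
};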

Step 2: set up the upload.

Code language: javascript
let preparedChunks = [];     // chunks produced by the worker
let fileMd5 = '';            // MD5 of the whole file
let totalSize = 0;           // total number of chunks
let uploadedCount = 0;       // number of chunks already uploaded
let doingCount = 0;          // number of upload tasks currently running
let maxNum = 5;              // maximum number of concurrent uploads
let taskList = [];           // queued upload tasks
let uploadedChunkList = [];  // MD5s of chunks the backend already has

// Run queued tasks, keeping at most maxNum uploads in flight
const doTask = () => {
    while (doingCount < maxNum && taskList.length) {
        const { task, resolve, reject } = taskList.shift();
        doingCount = doingCount + 1;
        task().then((res) => {
            if (uploadedCount === totalSize) {
                console.log('doTask: all chunks uploaded');
            } else {
                const progress = ((uploadedCount / totalSize) * 100).toFixed(1);
                console.log('doTask: uploading', `${ progress }%`);
            }
            resolve(res);
        }).catch((error) => {
            reject(error);
        }).finally(() => {
            doingCount = doingCount - 1;
            doTask(); // a slot freed up, pull the next task
        });
    }
};
// Queue an upload task
const addTask = (task) => {
    return new Promise((resolve, reject) => {
        taskList.push({
            task,
            resolve,
            reject,
        });
        doTask();
    });
};
// Upload a single chunk (skipped if the backend already has it)
const uploadChunks = async (md5, chunk) => {
    await getUploadedChunks(fileMd5);
    if (uploadedChunkList.includes(md5)) {
        uploadedCount = uploadedCount + 1;
        return Promise.resolve();
    }
    // Mocked upload request; replace with the real chunk upload call
    const task = () => {
        return new Promise((resolve, reject) => {
            setTimeout(() => {
                uploadedCount = uploadedCount + 1;
                resolve();
            }, Math.round(Math.random() * 2 * 1000));
        });
    };
    addTask(task).then((res) => {
        console.log('uploadChunks: tasks still queued:', taskList.length);
    });
};
// Ask the worker for the file MD5 and for every chunk with its MD5
async function calculateFileHash(file) {
    return new Promise((resolve, reject) => {
        const worker = new Worker('worker.js');
        const chunkSize = 1 * 1024 * 1024; // 1 MB per chunk, as described above

        worker.postMessage({ file, chunkSize });

        worker.onmessage = (event) => {
            const { type, md5, chunk, total, index } = event.data;

            if (type === 'file_md5') {
                fileMd5 = md5;
                totalSize = total;
                getUploadedChunks(md5); // fetch the "already uploaded" list
            } else if (type === 'chunk') {
                console.log('calculateFileHash chunk:', md5, chunk);
                preparedChunks.push({
                    md5: md5,
                    chunk: chunk,
                    index: index,
                });
                uploadChunks(md5, chunk);
            }
            if (totalSize && preparedChunks.length === totalSize) {
                resolve();
            }
        };

        worker.onerror = (error) => {
            reject(error);
        };
    });
}
// Fetch the list of chunks the backend already has
async function getUploadedChunks(fileHash) {
    return new Promise((resolve, reject) => {
        // If the list has already been fetched, return directly
        if (uploadedChunkList.length) {
            resolve();
            return;
        }
        setTimeout(() => {
            // Mocked response: the chunk MD5s uploaded in a previous session;
            // replace with a real request keyed by fileHash
            uploadedChunkList = ['mocked-chunk-md5'];
            resolve();
        }, Math.round(Math.random() * 2 * 1000));
    });
}
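
A minimal sketch of how the code above could be driven from a file input; the `#fileInput` element is an assumption for illustration.

Code language: javascript
// Hypothetical entry point: pass the selected file to calculateFileHash, which
// starts the worker and queues the chunk uploads defined above.
document.querySelector('#fileInput').addEventListener('change', async (event) => {
    const file = event.target.files[0];
    if (!file) return;
    await calculateFileHash(file); // resolves once every chunk has been prepared
    console.log('all chunks prepared; remaining uploads run through the task queue');
});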

Original statement: this article was published on the Tencent Cloud Developer Community with the author's authorization and may not be reproduced without permission.

If there is any infringement, please contact cloudcommunity@tencent.com for removal.
