File Chunked Upload Hook (Complete Version)

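This hook wraps the whole flow of uploading a large file in chunks with Vue 3 refs: it fingerprints the file with SparkMD5, asks the server which chunks already exist so an interrupted upload can resume, uploads the remaining chunks one by one with retries and progress reporting, and can be paused through an AbortController. The full hook follows; a usage sketch comes at the end.
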
import SparkMD5 from 'spark-md5';
import { ref } from 'vue';
import createRequest from '@/utils/request';
import { get } from 'lodash';
const baseURL = import.meta.env.VITE_APP_URL;
const request = createRequest(baseURL);

const finish_res = ref({}); // Response returned when the last chunk completes the upload
const RETRY_COUNT = 3; // Maximum retry attempts
const retryCount = ref(0); // Current retry counter
const upload_finish = ref(0); // Flag from the server response marking the upload as finished
const task_id = ref(undefined); // Upload task ID
// State
const file = ref(null); // Selected file
const fileMd5 = ref(''); // File fingerprint (MD5)
const totalChunks = ref(0); // Total number of chunks
const uploadedChunks = ref([]); // Indexes of chunks already uploaded
const progress = ref(0); // Overall progress (0-100)
const isUploading = ref(false); // Whether an upload is in progress
const abortController = ref(null); // Controller used to cancel the in-flight chunk request
const THRESHOLD = 20; // 20 MB threshold
const CHUNK_SIZE = 10 * 1024 * 1024; // Chunk size: 10 MB per chunk. Don't make it too large (memory pressure while hashing, bandwidth limits) or too small (too many requests, too slow)

export default () => {
  // Record the selected file and compute how many chunks it needs
  const sliceChunk = (fileWrapper) => {
    // Note: the parameter must not be named `file`, or it shadows the `file` ref above
    file.value = fileWrapper.file;
    const fileSize = fileWrapper.file.size;
    totalChunks.value = Math.ceil(fileSize / CHUNK_SIZE);
  };

  // 1. File selected: compute its MD5 and resolve with it
  const handleFileSelect = (fileWrapper) => {
    return new Promise((resolve, reject) => {
      file.value = fileWrapper.file;
      if (!file.value) {
        reject(new Error('File does not exist'));
        return;
      }

      // Compute the file MD5 slice by slice to avoid loading the whole file into memory
      const spark = new SparkMD5.ArrayBuffer();
      const fileReader = new FileReader();

      let currentOffset = 0;

      // fileReader.onload fires each time a slice finishes reading
      fileReader.onload = (e) => {
        spark.append(e.target.result);
        currentOffset += CHUNK_SIZE;

        if (currentOffset < file.value.size) {
          // Keep reading the next slice
          readNextChunk();
        } else {
          // The MD5 is complete
          fileMd5.value = spark.end();
          resolve(fileMd5.value);
        }
      };

      fileReader.onerror = () => {
        reject(new Error('Failed to read the file'));
      };

      const readNextChunk = () => {
        const blob = file.value.slice(currentOffset, currentOffset + CHUNK_SIZE);
        fileReader.readAsArrayBuffer(blob);
      };

      readNextChunk();
    });
  };

  // 2. Ask the server which chunks of this file are already uploaded (the basis for resumable uploads)
  const queryUploadProgress = async () => {
    try {
      const res = await request({
        url: '/upload/getProgress',
        method: 'post',
        data: { file_md5: fileMd5.value, file_name: file.value.name },
      });
      uploadedChunks.value = res.data.uploaded_chunks || []; // Indexes of chunks the server already has
      totalChunks.value = res.data.total_chunks; // Total chunk count as computed by the server
      // Restore the current progress
      progress.value = Math.round((uploadedChunks.value.length / totalChunks.value) * 100);
      startUpload();
    } catch (err) {
      console.error('Failed to query upload progress', err);
    }
  };

  // 3. Start uploading: go through every chunk in order and upload the ones that are missing
  const startUpload = async () => {
    if (!file.value || !fileMd5.value) {
      return;
    }
    isUploading.value = true;
    // Walk through all chunks, skipping the ones already uploaded
    for (let chunkIndex = 0; chunkIndex < totalChunks.value; chunkIndex++) {
      if (uploadedChunks.value.includes(chunkIndex)) {
        continue;
      }
      if (!isUploading.value) {
        break;
      } // Paused: exit the loop

      await uploadSingleChunk(chunkIndex);
    }
  };

  // 4. Upload a single chunk
  const uploadSingleChunk = async (chunkIndex) => {
    const start = chunkIndex * CHUNK_SIZE;
    const end = Math.min(start + CHUNK_SIZE, file.value.size);
    const chunkBlob = file.value.slice(start, end);
    const spark = new SparkMD5.ArrayBuffer();
    const arrayBuffer = await chunkBlob.arrayBuffer();
    spark.append(arrayBuffer);
    const chunkMd5 = spark.end();
    // Build the multipart form data
    const formData = new FormData();
    formData.append('task_id', task_id.value);
    formData.append('chunk_md5', chunkMd5);
    formData.append('chunk_id', chunkIndex);
    formData.append('chunk', chunkBlob);

    abortController.value = new AbortController();
    try {
      const res = await request.post('/h5/upload/multi/upload-chunk', formData, {
        signal: abortController.value.signal,
        onUploadProgress: (e) => {
          // Fold this chunk's in-flight progress into the overall progress
          const chunkProgress = e.loaded / e.total;
          const totalUploaded = uploadedChunks.value.length + chunkProgress;
          progress.value = Math.round((totalUploaded / totalChunks.value) * 100);
        },
      });
      upload_finish.value = get(res, 'data.finished', 0);
      if (upload_finish.value) {
        // The server marked the upload as finished: keep the final response
        finish_res.value = res;
      }
      // Chunk uploaded successfully: record it and reset the retry counter
      uploadedChunks.value.push(chunkIndex);
      retryCount.value = 0;
    } catch (err) {
      if (err.name !== 'CanceledError') {
        console.error(`Chunk ${chunkIndex} failed to upload, retrying...`, err);
        retryCount.value++;
        if (retryCount.value < RETRY_COUNT) {
          await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait 1 second before retrying
          await uploadSingleChunk(chunkIndex); // Retry this chunk
        } else {
          console.error(`Chunk ${chunkIndex} still failed after ${RETRY_COUNT} attempts, giving up`);
          retryCount.value = 0; // Reset the retry counter
        }
      }
    }
  };

  // 5. Pause the upload
  const pauseUpload = () => {
    isUploading.value = false;
    abortController.value?.abort(); // Abort the chunk request currently in flight
  };

  return {
    finish_res,
    task_id,
    file,
    fileMd5,
    THRESHOLD,
    totalChunks,
    uploadedChunks,
    progress,
    isUploading,
    abortController,
    CHUNK_SIZE,
    queryUploadProgress,
    handleFileSelect,
    startUpload,
    pauseUpload,
    sliceChunk,
    upload_finish,
  };
};
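
To make the hook easier to pick up, here is a minimal usage sketch (not part of the original hook). It assumes a plain <input type="file">, that the hook file above is saved as @/hooks/useChunkUpload.js, and that task_id comes from a backend endpoint of your own before any chunk is sent; those names are placeholders, so adjust them to your project.

// ExampleUploader.vue, <script setup>: a hypothetical consumer of the hook above
import useChunkUpload from '@/hooks/useChunkUpload'; // assumed path

const {
  task_id,
  THRESHOLD,
  progress, // bind this in the template to show the percentage
  isUploading, // together with pauseUpload, can drive a pause button
  upload_finish,
  handleFileSelect,
  queryUploadProgress,
  pauseUpload,
} = useChunkUpload();

const onInputChange = async (event) => {
  const rawFile = event.target.files[0];
  if (!rawFile) return;

  // The hook expects a wrapper object with a `file` property
  await handleFileSelect({ file: rawFile }); // resolves once the file MD5 is ready

  // Assumption: files at or below the 20 MB THRESHOLD go through a normal single-request upload
  if (rawFile.size <= THRESHOLD * 1024 * 1024) {
    // ...plain upload path, omitted...
    return;
  }

  // Assumption: task_id comes from your own "create upload task" endpoint
  task_id.value = 'your-task-id';

  // Ask the server which chunks it already has; the hook then uploads the missing ones
  await queryUploadProgress();
};

Note that queryUploadProgress calls startUpload itself once the server replies, so the component does not need to call startUpload directly.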