import SparkMD5 from 'spark-md5';
import { ref } from 'vue';
import createRequest from '@/utils/request';
import { get } from 'lodash';
const baseURL = import.meta.env.VITE_APP_URL;
const request = createRequest(baseURL);
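// Module-level state: these refs live outside the exported factory, so every
// consumer of this composable shares a single in-flight upload by design.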
const finish_res = ref({});
const RETRY_COUNT = 3;
const retryCount = ref(0);
const upload_finish = ref(0);
const task_id = ref(undefined);
const file = ref(null);
const fileMd5 = ref('');
const totalChunks = ref(0);
const uploadedChunks = ref([]);
const progress = ref(0);
const isUploading = ref(false);
const abortController = ref(null);
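// THRESHOLD is only consumed by callers (it is exposed via the returned
// object); CHUNK_SIZE fixes each slice at 10 MB.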
const THRESHOLD = 20;
const CHUNK_SIZE = 10 * 1024 * 1024;
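// Composable: resumable chunked upload with whole-file and per-chunk MD5
// fingerprints, pause via AbortController, and a fixed 1 s retry delay.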
export default () => {
// Record the selected file and derive how many CHUNK_SIZE slices it spans.
const sliceChunk = (fileItem) => {
file.value = fileItem.file;
const fileSize = fileItem.file.size;
totalChunks.value = Math.ceil(fileSize / CHUNK_SIZE);
};
const handleFileSelect = (fileItem) => {
return new Promise((resolve, reject) => {
file.value = fileItem.file;
if (!file.value) {
reject(new Error('File does not exist'));
return;
}
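// Compute the whole-file MD5 incrementally: read one CHUNK_SIZE slice at a
// time so large files are never held in memory all at once.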
const spark = new SparkMD5.ArrayBuffer();
const fileReader = new FileReader();
let currentOffset = 0;
fileReader.onload = (e) => {
spark.append(e.target.result);
currentOffset += CHUNK_SIZE;
if (currentOffset < file.value.size) {
readNextChunk();
} else {
fileMd5.value = spark.end();
resolve(fileMd5.value);
}
};
fileReader.onerror = () => {
reject(new Error('Failed to read file'));
};
const readNextChunk = () => {
const blob = file.value.slice(currentOffset, currentOffset + CHUNK_SIZE);
fileReader.readAsArrayBuffer(blob);
};
readNextChunk();
});
};
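// Ask the server which chunks it already has so an interrupted upload can
// resume, then start (or continue) the upload loop.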
const queryUploadProgress = async () => {
try {
const res = await request({
url: '/upload/getProgress',
method: 'post',
data: { file_md5: fileMd5.value, file_name: file.value.name },
});
uploadedChunks.value = res.data.uploaded_chunks || [];
// Keep the locally computed chunk count if the server omits it.
totalChunks.value = res.data.total_chunks ?? totalChunks.value;
progress.value = totalChunks.value ? Math.round((uploadedChunks.value.length / totalChunks.value) * 100) : 0;
await startUpload();
} catch (err) {
console.error('Failed to query upload progress', err);
}
};
const startUpload = async () => {
if (!file.value || !fileMd5.value) {
return;
}
isUploading.value = true;
for (let chunkIndex = 0; chunkIndex < totalChunks.value; chunkIndex++) {
// Skip chunks the server already has (resume support).
if (uploadedChunks.value.includes(chunkIndex)) {
continue;
}
// pauseUpload() clears the flag; stop scheduling further chunks.
if (!isUploading.value) {
break;
}
await uploadSingleChunk(chunkIndex);
}
// Clear the flag once the loop ends, whether completed or paused.
isUploading.value = false;
};
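// Upload one chunk: hash the slice, post it as multipart form data, and fold
// the in-flight chunk's progress into the overall percentage.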
const uploadSingleChunk = async (chunkIndex) => {
const start = chunkIndex * CHUNK_SIZE;
const end = Math.min(start + CHUNK_SIZE, file.value.size);
const chunkBlob = file.value.slice(start, end);
const spark = new SparkMD5.ArrayBuffer();
const arrayBuffer = await chunkBlob.arrayBuffer();
spark.append(arrayBuffer);
const chunkMd5 = spark.end();
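// task_id is expected to be assigned by the caller before uploading; FormData
// would otherwise serialize undefined as the literal string "undefined".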
const formData = new FormData();
formData.append('task_id', task_id.value);
formData.append('chunk_md5', chunkMd5);
formData.append('chunk_id', chunkIndex);
formData.append('chunk', chunkBlob);
abortController.value = new AbortController();
try {
const res = await request.post('/h5/upload/multi/upload-chunk', formData, {
signal: abortController.value.signal,
onUploadProgress: (e) => {
if (!e.total) return; // total can be unavailable; skip the update rather than divide by it
const chunkProgress = e.loaded / e.total;
const totalUploaded = uploadedChunks.value.length + chunkProgress;
progress.value = Math.round((totalUploaded / totalChunks.value) * 100);
},
});
upload_finish.value = get(res, 'data.finished', 0);
if (upload_finish.value) {
finish_res.value = res;
}
uploadedChunks.value.push(chunkIndex);
retryCount.value = 0; // chunk succeeded; reset the shared retry counter
} catch (err) {
// Aborts triggered by pauseUpload surface as CanceledError; only real
// failures are retried.
if (err.name !== 'CanceledError') {
console.error(`Chunk ${chunkIndex} upload failed, retrying...`, err);
retryCount.value++;
if (retryCount.value < RETRY_COUNT) {
await new Promise((resolve) => setTimeout(resolve, 1000));
await uploadSingleChunk(chunkIndex);
} else {
console.error(`Chunk ${chunkIndex} upload still failing after ${RETRY_COUNT} retries, giving up`);
retryCount.value = 0;
}
}
}
};
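// Pause: clear the flag so the loop in startUpload stops scheduling new
// chunks, and abort the chunk request currently in flight.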
const pauseUpload = () => {
isUploading.value = false;
abortController.value?.abort();
};
return {
finish_res,
task_id,
file,
fileMd5,
THRESHOLD,
totalChunks,
uploadedChunks,
progress,
isUploading,
abortController,
CHUNK_SIZE,
queryUploadProgress,
handleFileSelect,
startUpload,
pauseUpload,
sliceChunk,
upload_finish,
};
};
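
// A minimal usage sketch (hypothetical consumer code; the import path and the
// { file } shape of the picker payload are assumptions, not part of this module):
//
//   import useChunkUpload from '@/hooks/useChunkUpload';
//
//   const { handleFileSelect, queryUploadProgress, pauseUpload, progress } = useChunkUpload();
//
//   const onFilePicked = async (fileItem) => {
//     await handleFileSelect(fileItem);   // compute the whole-file MD5 first
//     await queryUploadProgress();        // then resume/upload from server state
//   };
//
//   // pauseUpload() aborts the in-flight chunk; calling queryUploadProgress()
//   // again resumes from the chunks the server already has.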