- 基础页面
<template>
<!-- Demo page: pick a file, start a chunked upload ("上传"), or resume a
     previously interrupted upload ("继续上传") which re-sends only the
     chunks not yet marked as uploaded. -->
<div class="hello">
<input type="file" id="file" @change="getFile" />
<input type="button" id="upload" value="上传" @click="uploadFile" />
<input type="button" id="continue" value="继续上传" @click="continueUpload" />
</div>
</template>
- 选择文件后获取文件数据(文件输入框的 change 事件触发)
// Change handler for the file input: remember the selected file so the
// upload buttons can act on it later.
const getFile = async (event) => {
  const [selected] = event.target.files
  fileRef.value = selected
}
- 获取文件hash,文件唯一标识,服务端根据该标识判断文件是否已上传,需要下载三方包spark-md5
import SparkMD5 from 'spark-md5'
// Compute the whole-file MD5 (via spark-md5). The server uses this hash as
// the file's identity key to decide whether it was already uploaded.
// Wrapping FileReader's callback API in a Promise is the standard adapter
// pattern here.
const getFileHash = (file) => {
  return new Promise((resolve, reject) => {
    const reader = new FileReader()
    reader.onload = (event) => {
      resolve(SparkMD5.ArrayBuffer.hash(event.target.result))
    }
    reader.onerror = (err) => {
      reject(err)
    }
    reader.readAsArrayBuffer(file)
  })
}
- 获取文件切片,每个切片默认1M
// Split `file` into sequential blob slices (default 1 MiB each), tagging
// every slice with its index and an `uploaded` flag used for resuming.
// The chunk list is also cached in the `fileChunks` ref.
const createChunks = async (file, chunkSize = 1024 * 1024 * 1) => {
  const chunks = []
  for (let offset = 0, i = 0; offset < file.size; offset += chunkSize, i += 1) {
    chunks.push({
      blob: file.slice(offset, Math.min(offset + chunkSize, file.size)),
      uploaded: false, // 分片是否上传
      chunkIndex: i, // 分片序号
    })
  }
  fileChunks.value = chunks
  return chunks
}
- 上传接口封装
// POST one chunk to the upload endpoint as multipart form data, together
// with its index and the whole-file hash so the server can place it.
// Returns the axios promise for the request.
const uploadEvent = (chunk) => {
  const formData = new FormData();
  formData.append('file', chunk.blob);
  formData.append('chunkIndex', chunk.chunkIndex);
  formData.append('fileHash', fileHash.value);
  const config = {
    headers: {
      'Content-Type': 'multipart/form-data',
    },
  };
  return axios.post('http://localhost:4000/admin/sys/log/file/upload', formData, config);
};
- 批量上传分片
// Upload all chunks with at most `count` requests in flight at once.
//
// BUG FIX: the original kicked off the queue and returned immediately, so
// `await limitFetch(...)` resolved before any upload finished and callers
// could request a merge while chunks were still in flight. We now return a
// Promise that resolves only when the queue has fully drained.
//
// Failures are best-effort: a failed chunk keeps `uploaded: false` so it can
// be retried via 断点续传 (resume), and the overall promise still resolves.
const limitFetch = (chunks, count = 10) => {
  return new Promise((resolve) => {
    let runningCount = 0
    const run = () => {
      // Queue empty and nothing in flight -> everything is settled.
      if (chunks.length === 0 && runningCount === 0) {
        resolve()
        return
      }
      while (chunks.length > 0 && runningCount < count) {
        const chunk = chunks.shift()
        runningCount++
        uploadEvent(chunk)
          .then(() => {
            chunk.uploaded = true
          })
          .catch(() => {
            // Leave chunk.uploaded === false so continueUpload can retry it.
          })
          .finally(() => {
            runningCount--
            run()
          })
      }
    }
    run()
  })
}
- 合并文件分片
// Ask the server to stitch all uploaded chunks identified by `fileHash`
// back into a single file named `fileName`. Returns the axios promise.
const mergeFile = (fileHash, fileName) => {
  const payload = { fileHash, fileName }
  return axios.post('http://localhost:4000/admin/sys/log/file/merge', payload)
}
- 抽离上传操作,获取总文件hash和切片可以同时进行加快速率,拿到分片后批量上传,最后发起合并请求
// Full upload flow: compute the file hash and slice it into chunks in
// parallel (they are independent), upload all chunks, then request a merge.
//
// BUG FIX: the original mixed a `.then` chain with `await` and never awaited
// (or returned) the chain, so `uploadFile()` resolved immediately and callers
// could not wait for completion. Rewritten as straight async/await; the
// existing error log message is preserved.
const uploadFile = async () => {
  try {
    const [hashId, chunks] = await Promise.all([
      getFileHash(fileRef.value),
      createChunks(fileRef.value),
    ])
    fileHash.value = hashId
    await limitFetch(chunks)
    await mergeFile(fileHash.value, fileRef.value.name)
  } catch (err) {
    console.error('获取文件哈希或分片失败:', err)
  }
}
- 分片断点续传,只上传uploaded为false的切片
// Resume an interrupted upload: re-send only the chunks still marked
// `uploaded: false`, then request a merge. Returns a failure descriptor
// `{ msg, success: false }` on error, undefined otherwise.
//
// BUG FIX: the original passed the whole filtered *array* to uploadEvent(),
// which expects a single chunk (it reads `chunk.blob`), so the resume path
// uploaded one garbage part. Each pending chunk is now uploaded individually
// (in parallel) and flagged on success. Also fixed the `mag` -> `msg` typo.
const continueUpload = async () => {
  // Nothing to resume without cached chunks and a file hash.
  if (fileChunks.value.length === 0 || !fileHash.value) {
    return;
  }
  try {
    const pending = fileChunks.value.filter((chunk) => !chunk.uploaded);
    await Promise.all(
      pending.map(async (chunk) => {
        await uploadEvent(chunk);
        chunk.uploaded = true;
      })
    );
    await mergeFile(fileHash.value, fileRef.value.name);
  } catch (err) {
    return {
      msg: '文件上传错误',
      success: false,
    };
  }
};