步骤包含完整代码,跟着一步步来可以实现!
一,切片上传
客户端
- 使用Blob.prototype.slice方法对文件切片
- 使用spark-md5库计算文件hash,并创建web worker开启新的线程执行。(该库不能直接计算整个文件,需要分片进行计算,具体可以看它官网的例子spark-md5-example。)
- 将各个分片文件上传到服务器,并携带hash
- 当所有文件上传完后,需要发送合并请求,携带文件hash、后缀名、分片大小
服务端
- 根据接收到的hash创建文件夹,将分片文件存储到文件夹中
- 收到合并请求后,读取各个分片文件。根据hash和后缀名,合并生成完整文件
- 删除存储分片文件的文件夹及其内容
二,秒传
客户端
- 上传文件之前,先计算hash,然后将hash和文件名发送到服务器
- 服务器返回文件是否存在的状态
- 如果存在,客户端提示文件上传成功,否则执行上传动作
服务端
- 服务器根据hash和后缀名,在服务器中查找该文件
- 返回该文件存在的状态
web worker踩坑
self.importScripts
函数只能导入绝对路径的文件。虽然文档说可以使用相对路径,但测试多次都不行,所以将spark-md5.min.js文件放在public文件夹中。
分片上传、秒传效果图
三,断点续传
- 在上传文件之前,计算hash,将hash传给服务器
- 服务器根据hash和后缀,查看是否已经上传
- 如果没有完整文件,就查找有无分片数据
- 如果有,则返回已上传的分片列表
- 如果没有,客户端需要执行上传动作
断点续传效果图
- 观察图片左边发现上传了部分文件切片
- 点击上传时,发送请求响应已上传的块序号
- 使用array.filter过滤掉已上传的块。(87段变为了81段)
- 合并完后,左上角变成了完整的文件
代码
客户端
upload.js
import React, { useState, useEffect, useMemo } from "react";
import request from "../utils/request";
import styled from "styled-components";
import hashWorker from "../utils/hash-worker";
import WorkerBuilder from "../utils/worker-build";
const CHUNK_SIZE = 1 * 1024 * 1024;
const UpLoadFile = function () {
const [fileName, setFileName] = useState("");
const [fileHash, setFileHash] = useState("")
const [chunkList, setChunkList] = useState([])
const [hashPercentage, setHashPercentage] = useState(0)
// 获取文件后缀名
const getFileSuffix = (fileName) => {
let arr = fileName.split(".");
if (arr.length > 0) {
return arr[arr.length - 1]
}
return "";
}
// 拆分文件
const splitFile = (file, size = CHUNK_SIZE) => {
const fileChunkList = [];
let curChunkIndex = 0;
while (curChunkIndex <= file.size) {
const chunk = file.slice(curChunkIndex, curChunkIndex + size);
fileChunkList.push({ chunk: chunk, })
curChunkIndex += size;
}
return fileChunkList;
}
// 选择文件
const handleFileChange = (e) => {
const { files } = e.target;
if (files.length === 0) return;
// 保存文件名
setFileName(files[0].name);
// 文件分片
const chunkList = splitFile(files[0])
setChunkList(chunkList);
}
// 发送合并请求
const mergeRequest = (hash) => {
request({
url: "http://localhost:3001/merge",
method: "post",
headers: {
"content-type": "application/json"
},
data: JSON.stringify({
// 服务器存储的文件名:hash+文件后缀名
fileHash: hash,
suffix: getFileSuffix(fileName),
// 用于服务器合并文件
size: CHUNK_SIZE
})
})
}
// 上传分片
const uploadChunks = async (chunksData, hash) => {
const formDataList = chunksData.map(({ chunk, hash }) => {
const formData = new FormData()
formData.append("chunk", chunk);
formData.append("hash", hash);
formData.append("suffix", getFileSuffix(fileName));
return { formData };
})
const requestList = formDataList.map(({ formData }, index) => {
return request({
url: "http://localhost:3001/upload",
data: formData,
onprogress: e => {
let list = [...chunksData];
list[index].progress = parseInt(String((e.loaded / e.total) * 100));
setChunkList(list)
}
})
})
// 上传文件
Promise.all(requestList).then(() => {
// 延迟发送合并请求,方便观察服务器合并文件的步骤
setTimeout(() => {
mergeRequest(hash);
}, 1000);
})
}
// 计算文件hash
const calculateHash = (chunkList) => {
return new Promise(resolve => {
const woker = new WorkerBuilder(hashWorker)
woker.postMessage({ chunkList: chunkList })
woker.onmessage = e => {
const { percentage, hash } = e.data;
setHashPercentage(percentage);
if (hash) {
// 当hash计算完成时,执行resolve
resolve(hash)
}
}
})
}
// 上传文件
const handleUpload = async (e) => {
if (!fileName) {
alert("请先选择文件")
return;
}
if (chunkList.length === 0) {
alert("文件拆分中,请稍后...")
return;
}
// 计算hash
const hash = await calculateHash(chunkList)
console.log("文件的hash为:", hash)
setFileHash(hash)
const { shouldUpload, uploadedChunkList } = await verfileIsExist(hash, getFileSuffix(fileName));
console.log(shouldUpload)
if (!shouldUpload) {
alert("文件已存在,无需重复上传");
return;
}
let uploadedChunkIndexList = [];
if (uploadedChunkList && uploadedChunkList.length > 0) {
uploadedChunkIndexList = uploadedChunkList.map(item => {
const arr = item.split("-");
return parseInt(arr[arr.length - 1])
})
console.log(uploadedChunkIndexList)
alert("已上传的区块号:" + uploadedChunkIndexList.toString())
}
const chunksData = chunkList.map(({ chunk }, index) => ({
chunk: chunk,
hash: hash + "-" + index,
progress: 0
})).filter(item2 => {
// 过滤掉已上传的块
const arr = item2.hash.split("-")
return uploadedChunkIndexList.indexOf(parseInt(arr[arr.length - 1])) === -1;
})
console.log(chunksData)
// 保存分片数据
setChunkList(chunksData)
// 开始上传分片
uploadChunks(chunksData, hash)
}
// 秒传:验证文件是否存在服务器
const verfileIsExist = async (fileHash, suffix) => {
const { data } = await request({
url: "http://localhost:3001/verFileIsExist",
headers: {
"content-type": "application/json"
},
data: JSON.stringify({
fileHash: fileHash,
suffix: suffix
})
})
return JSON.parse(data);
}
return (
<div>
<input type="file" onChange={handleFileChange} /><br />
<button onClick={handleUpload}>上传</button>
<ProgressBox chunkList={chunkList} />
</div>
)
}
// Square tile for a single upload chunk. The :before pseudo-element shows
// the chunk index in the top-left corner; the :after pseudo-element is a
// pink bar whose height tracks the chunk's upload progress (0-100%).
const BlockWraper = styled.div`
width: ${({ size }) => size + "px"};
height: ${({ size }) => size + "px"};
text-align: center;
font-size: 12px;
line-height: ${({ size }) => size + "px"};
border: 1px solid #ccc;
position: relative;
float: left;
&:before {
content: "${({ chunkIndex }) => chunkIndex}";
position: absolute;
width: 100%;
height: 10px;
left: 0;
top: 0;
font-size: 12px;
text-align: left;
line-height: initial;
color: #000
}
&:after {
content: "";
position: absolute;
width: 100%;
height: ${({ progress }) => progress + "%"};
background-color: pink;
left: 0;
top: 0;
z-index: -1;
}
`
// Clearfix container for the floated chunk tiles (*zoom targets old IE).
const ChunksProgress = styled.div`
*zoom: 1;
&:after {
content: "";
display: block;
clear: both;
}
`
// Bare styled wrappers kept for semantic markup; no extra styling.
const Label = styled.h3``
const ProgressWraper = styled.div``
const Block = ({ progress, size, chunkIndex }) => {
return (<BlockWraper size={size} chunkIndex={chunkIndex} progress={progress}>
{progress}%
</BlockWraper>)
}
const ProgressBox = ({ chunkList = [], size = 40 }) => {
const sumProgress = useMemo(() => {
if (chunkList.length === 0) return 0
return chunkList.reduce((pre, cur, sum) => pre + cur.progress / 100, 0) * 100 / (chunkList.length)
}, [chunkList])
return (
<ProgressWraper>
<Label>文件切分为{chunkList.length}段,每段上传进度如下:</Label>
<ChunksProgress>
{chunkList.map(({ progress }, index) => (
<Block key={index} size={size} chunkIndex={index} progress={progress} />
))}
</ChunksProgress>
<Label>总进度:{sumProgress.toFixed(2)}%</Label>
</ProgressWraper >
)
}
export default UpLoadFile;
hash-worker.js
// Factory for the hashing worker. WorkerBuilder stringifies this function
// and runs it inside a web worker, so it must be fully self-contained —
// hence the importScripts call for spark-md5.
const hashWorker = () => {
  // importScripts needs an absolute URL here because the worker runs from a
  // blob: URL (relative paths would resolve against it).
  self.importScripts("http://localhost:3000/spark-md5.min.js")
  self.onmessage = (e) => {
    const { chunkList } = e.data;
    const spark = new self.SparkMD5.ArrayBuffer();
    let count = 0;
    // Read chunk `index`, fold it into the digest, then continue with the
    // next chunk (FileReader is async, so reads are chained recursively).
    const loadNext = (index) => {
      const reader = new FileReader();
      reader.readAsArrayBuffer(chunkList[index].chunk);
      reader.onload = (event) => {
        count++;
        spark.append(event.target.result);
        if (count === chunkList.length) {
          self.postMessage({
            percentage: 100,
            hash: spark.end()
          });
          self.close();
        } else {
          self.postMessage({
            // Derive progress from the chunk count instead of accumulating
            // float increments (the original accumulation drifted slightly).
            percentage: (count / chunkList.length) * 100
          });
          loadNext(count);
        }
      };
    };
    // Guard: an empty chunk list would crash on chunkList[0]; answer with
    // the digest of zero bytes instead.
    if (chunkList.length === 0) {
      self.postMessage({ percentage: 100, hash: spark.end() });
      self.close();
      return;
    }
    loadNext(count);
  };
};
export default hashWorker
worker-build.js
// Builds a Worker from a plain function by stringifying it into an
// immediately-invoked blob script. Note: the constructor *returns* a plain
// Worker, so callers get a Worker instance, not a WorkerBuilder.
export default class WorkerBuilder extends Worker {
  constructor(worker) {
    const code = worker.toString();
    // Give the blob an explicit script MIME type — some browsers refuse to
    // start workers from untyped blobs.
    const blob = new Blob([`(${code})()`], { type: "application/javascript" });
    const url = URL.createObjectURL(blob);
    const instance = new Worker(url);
    // The worker keeps its own reference to the fetched script; release the
    // object URL so the blob can be garbage-collected (the original leaked
    // one URL per worker).
    URL.revokeObjectURL(url);
    return instance;
  }
}
request.js
// Minimal XMLHttpRequest wrapper that returns a Promise.
// Resolves with { data: response } on load. Fixed: the original promise
// never rejected, so a network failure left `await request(...)` hanging
// forever; it also attached onload after send().
const request = ({
  url,
  method = "post",
  data,
  headers = {},
  onprogress
}) => {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open(method, url);
    Object.keys(headers).forEach((key) =>
      xhr.setRequestHeader(key, headers[key])
    );
    // Upload progress callback (drives the per-chunk progress bars).
    xhr.upload.onprogress = onprogress;
    // Attach handlers before send() so no event can be missed.
    xhr.onload = (e) => {
      resolve({
        data: e.target.response
      });
    };
    xhr.onerror = () => reject(new Error(`request to ${url} failed`));
    xhr.send(data);
  });
};
export default request;
服务端
import express from 'express'
import path from "path";
import fse from "fs-extra";
import multiparty from "multiparty";
import bodyParser from "body-parser";
let app = express()
// NOTE(review): path.dirname('') is "." — this resolves against the process
// working directory, not this source file's directory. It works when the
// server is launched from the project root; a real ESM __dirname would use
// fileURLToPath(import.meta.url). Confirm how the server is started.
const __dirname = path.resolve(path.dirname(''));
// All uploads (per-hash chunk folders and merged files) live under ./filelist.
const UPLOAD_FILES_DIR = path.resolve(__dirname, "./filelist")
// JSON body parser for the /verFileIsExist and /merge endpoints.
// ("extended" is a urlencoded-parser option; bodyParser.json ignores it.)
const jsonParser = bodyParser.json({ extended: false });
// Wildcard CORS so the client dev server (port 3000) can call this API.
app.use(function (req, res, next) {
res.setHeader("Access-Control-Allow-Origin", "*");
res.setHeader("Access-Control-Allow-Headers", "*");
next()
})
// 获取已上传的文件列表
// List the chunk files already stored for `fileHash`.
// Returns [] when no chunk directory exists yet.
const getUploadedChunkList = async (fileHash) => {
  const chunksDir = path.resolve(UPLOAD_FILES_DIR, fileHash);
  if (!fse.existsSync(chunksDir)) {
    return [];
  }
  return fse.readdir(chunksDir);
}
// Instant-upload / resume probe: reports whether the merged file already
// exists and, if not, which chunk files have been received so far.
app.post('/verFileIsExist', jsonParser, async (req, res) => {
  const { fileHash, suffix } = req.body;
  const filePath = path.resolve(UPLOAD_FILES_DIR, fileHash + "." + suffix);
  // Complete file already on disk -> client can skip the upload entirely.
  if (fse.existsSync(filePath)) {
    res.send({
      code: 200,
      shouldUpload: false
    });
    return;
  }
  // Otherwise report whatever chunks we already hold (possibly none).
  const uploadedChunkList = await getUploadedChunkList(fileHash);
  res.send({
    code: 200,
    shouldUpload: true,
    uploadedChunkList
  });
})
// Receives one multipart chunk and stores it under a folder named after the
// file hash. The "hash" field is "<fileHash>-<chunkIndex>".
// Fixed: the original sent 200 *outside* the parse callback — before the
// chunk was saved and even when parsing or the move failed.
app.post('/upload', async (req, res) => {
  const multipart = new multiparty.Form();
  multipart.parse(req, async (err, fields, files) => {
    if (err) {
      res.status(500).send("failed to parse file chunk");
      return;
    }
    try {
      const [chunk] = files.chunk;
      const [hash] = fields.hash;
      // hash is "<fileHash>-<index>", so the folder name is the part
      // before the first dash (an md5 hex digest contains no dashes).
      const chunksDir = path.resolve(UPLOAD_FILES_DIR, hash.split("-")[0]);
      if (!fse.existsSync(chunksDir)) {
        await fse.mkdirs(chunksDir);
      }
      // overwrite:true makes re-uploading the same chunk (client retry)
      // idempotent instead of throwing "dest already exists".
      await fse.move(chunk.path, chunksDir + "/" + hash, { overwrite: true });
      res.status(200).send("received file chunk");
    } catch (e) {
      res.status(500).send("failed to store file chunk");
    }
  });
})
// Stream one chunk file into `writeStream`, deleting the chunk once it has
// been fully read. Fixed: the parameter was named `path`, shadowing the
// imported path module, and stream errors were ignored — a failed read or
// write left the returned promise pending forever.
const pipeStream = (srcPath, writeStream) =>
  new Promise((resolve, reject) => {
    const readStream = fse.createReadStream(srcPath);
    readStream.on("error", reject);
    writeStream.on("error", reject);
    readStream.on("end", () => {
      // Chunk content is fully handed to the write stream; drop the slice.
      fse.unlinkSync(srcPath);
      resolve();
    });
    readStream.pipe(writeStream);
  });
// 合并切片
const mergeFileChunk = async (filePath, fileHash, size) => {
const chunksDir = path.resolve(UPLOAD_FILES_DIR, fileHash);
const chunkPaths = await fse.readdir(chunksDir);
chunkPaths.sort((a, b) => a.split("-")[1] - b.split("-")[1]);
console.log("指定位置创建可写流", filePath);
await Promise.all(
chunkPaths.map((chunkPath, index) =>
pipeStream(
path.resolve(chunksDir, chunkPath),
// 指定位置创建可写流
fse.createWriteStream(filePath, {
start: index * size,
end: (index + 1) * size
})
)
)
);
// 合并后删除保存切片的目录
fse.rmdirSync(chunksDir);
};
// Merge all stored chunks of `fileHash` into "<fileHash>.<suffix>".
// Fixed: a failure inside mergeFileChunk previously became an unhandled
// promise rejection and the client never got a response.
app.post('/merge', jsonParser, async (req, res) => {
  const { fileHash, suffix, size } = req.body;
  const filePath = path.resolve(UPLOAD_FILES_DIR, fileHash + "." + suffix);
  try {
    await mergeFileChunk(filePath, fileHash, size);
    res.send({
      code: 200,
      message: "success"
    });
  } catch (err) {
    console.error(err);
    res.status(500).send({ code: 500, message: "merge failed" });
  }
})
// Start the upload API on port 3001 (the client hard-codes this port).
app.listen(3001, () => {
console.log('listen:3001')
})
点赞收藏不迷路