In modern web applications, file upload is a common requirement. When large files are involved, however, a traditional single-request upload runs into problems such as network instability, failed uploads, and the inability to resume an interrupted transfer. This article walks through how to implement large file uploads, covering the key techniques of chunked upload and resumable upload.
Challenges of Large File Uploads
- Network instability: fluctuations during a long upload can cause the whole transfer to fail.
- Long upload times: large files take a long time to upload, which hurts the user experience.
- Resumability: if an upload is interrupted, it should continue from where it stopped instead of starting over.
- Server load: receiving a huge file in a single request puts significant pressure on the server.
Solution: Chunked Upload and Resumable Upload
Chunked Upload
Chunked upload splits a large file into many small pieces (chunks) and uploads them to the server one by one. This mitigates both network instability and long upload times.
Implementation Steps
- Select the file: the user picks the file to upload.
- Slice the file: split the file into multiple small chunks.
- Upload the chunks: upload the chunks one by one and record which ones have been uploaded.
- Merge the chunks: once all chunks have been uploaded, ask the server to merge them (a minimal sketch of the whole flow follows this list).
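The sketch below wires these steps together. It is only an outline; the helper functions it calls (sliceFile, uploadChunks, mergeChunks) are implemented in the sections that follow.
// A minimal sketch of the whole flow, assuming the helpers implemented below.
async function uploadLargeFile(file, uploadUrl, chunkSize = 1024 * 1024) {
  const chunks = sliceFile(file, chunkSize);               // slice the file
  await uploadChunks(file, chunks, chunkSize, uploadUrl);  // upload every chunk
  return mergeChunks(uploadUrl);                           // ask the server to merge
}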
Resumable Upload
Resumable upload means that when an upload is interrupted, it continues from the breakpoint instead of re-uploading the entire file.
Implementation Steps
- Record upload progress: store information about the uploaded chunks locally.
- Restart the upload: read the stored chunk information and upload only the chunks that are still missing.
- Merge the chunks: once all chunks have been uploaded, ask the server to merge them (see the sketch after this list).
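As a rough outline, and assuming the helpers implemented in the next section, the client can decide automatically whether to start fresh or resume from saved progress:
// Sketch: resume if there is saved progress, otherwise start a fresh upload.
async function startOrResumeUpload(file, uploadUrl, chunkSize = 1024 * 1024) {
  if (getUploadedChunks().length > 0) {
    await resumeUpload(file, chunkSize, uploadUrl);          // continue where we left off
  } else {
    const chunks = sliceFile(file, chunkSize);
    await uploadChunks(file, chunks, chunkSize, uploadUrl);  // first attempt
  }
  return mergeChunks(uploadUrl);                             // merge once everything is uploaded
}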
Implementation Details
1. Slicing the File
First, we split the file into multiple small chunks. The Blob.slice method does the work:
function sliceFile(file, chunkSize) {
  const chunks = [];
  let currentChunk = 0;
  // Walk through the file, slicing off chunkSize bytes at a time;
  // slice() clamps the end, so the last chunk may be smaller.
  while (currentChunk < file.size) {
    const chunk = file.slice(currentChunk, currentChunk + chunkSize);
    chunks.push(chunk);
    currentChunk += chunkSize;
  }
  return chunks;
}
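As a quick check, slicing a 2.5 MB file with a 1 MB chunk size yields three chunks, with the last one holding the remaining 0.5 MB:
// Example usage, assuming `file` comes from an <input type="file"> element.
const CHUNK_SIZE = 1024 * 1024; // 1 MB
const chunks = sliceFile(file, CHUNK_SIZE);
console.log(`${chunks.length} chunks, last chunk is ${chunks[chunks.length - 1].size} bytes`);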
2. Uploading the Chunks
Next, we upload the chunks and record which ones have succeeded. Promise.allSettled lets us fire the requests in parallel and still get a per-chunk result. Each chunk's position inside the file is sent in a Content-Range header, so the function also needs the original file (for its total size) and the chunk size:
async function uploadChunks(file, chunks, chunkSize, uploadUrl, startIndex = 0) {
  const uploadPromises = chunks.map((chunk, index) => {
    // Byte range of this chunk inside the original file. chunk.size handles the
    // shorter last chunk; startIndex lets a resumed upload keep correct offsets.
    const start = (startIndex + index) * chunkSize;
    const end = start + chunk.size - 1;
    return fetch(uploadUrl, {
      method: 'POST',
      body: chunk,
      headers: {
        'Content-Range': `bytes ${start}-${end}/${file.size}`
      }
    });
  });
  // allSettled resolves once every chunk has either succeeded or failed.
  const results = await Promise.allSettled(uploadPromises);
  return results;
}
3. Resumable Upload
To support resuming, we store the indexes of the already-uploaded chunks locally and, when the upload restarts, continue with only the chunks that are still missing:
function saveUploadedChunks(chunkIndexes) {
  // Persist the indexes of the chunks that have been uploaded successfully.
  localStorage.setItem('uploadedChunks', JSON.stringify(chunkIndexes));
}

function getUploadedChunks() {
  const saved = localStorage.getItem('uploadedChunks');
  return saved ? JSON.parse(saved) : [];
}

async function resumeUpload(file, chunkSize, uploadUrl) {
  const uploadedChunks = getUploadedChunks();
  // Re-slice the file and skip the chunks that were uploaded in a previous run.
  const remainingChunks = sliceFile(file, chunkSize).slice(uploadedChunks.length);
  const results = await uploadChunks(file, remainingChunks, chunkSize, uploadUrl, uploadedChunks.length);
  // Append the indexes of the chunks that succeeded in this run.
  const succeeded = results
    .map((result, i) => (result.status === 'fulfilled' && result.value.ok ? uploadedChunks.length + i : null))
    .filter((index) => index !== null);
  saveUploadedChunks([...uploadedChunks, ...succeeded]);
}
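Note that a single fixed localStorage key means two different files would overwrite each other's progress. A small improvement, sketched below, is to key the record by a per-file identifier; the complete example later uses the same name-size-lastModified identifier.
// Sketch: key the progress record per file. Assumption: name + size +
// lastModified is a good-enough identifier for this use case.
function getFileId(file) {
  return `${file.name}-${file.size}-${file.lastModified}`;
}

function saveUploadedChunksFor(file, chunkIndexes) {
  localStorage.setItem(`uploadedChunks:${getFileId(file)}`, JSON.stringify(chunkIndexes));
}

function getUploadedChunksFor(file) {
  const saved = localStorage.getItem(`uploadedChunks:${getFileId(file)}`);
  return saved ? JSON.parse(saved) : [];
}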
4. Merging the Chunks
Once every chunk has been uploaded, we notify the server so it can merge them into the original file. This is the minimal version; the complete example later also sends a fileId and fileName in the request body:
async function mergeChunks(uploadUrl) {
  const response = await fetch(`${uploadUrl}/merge`, {
    method: 'POST'
  });
  const result = await response.json();
  return result;
}
Further Optimizations
1. Parallel Uploads with Retries
To improve reliability as well as throughput, we can upload several chunks in parallel and retry a chunk whenever its upload fails:
async function uploadChunksWithRetry(file, chunks, chunkSize, uploadUrl, maxRetries = 3) {
  // Upload a single chunk, retrying up to maxRetries times before giving up.
  async function uploadChunk(chunk, index) {
    const start = index * chunkSize;
    const end = start + chunk.size - 1;
    for (let attempt = 0; attempt < maxRetries; attempt++) {
      try {
        const response = await fetch(uploadUrl, {
          method: 'POST',
          body: chunk,
          headers: {
            'Content-Range': `bytes ${start}-${end}/${file.size}`
          }
        });
        if (response.ok) {
          return response;
        }
      } catch (error) {
        // Network error: fall through and try again.
      }
    }
    throw new Error(`Chunk ${index}: max retries reached`);
  }

  const uploadPromises = chunks.map((chunk, index) => uploadChunk(chunk, index));
  const results = await Promise.allSettled(uploadPromises);
  return results;
}
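Launching every chunk request at the same time can also overwhelm the browser's connection pool and the server. One common refinement, sketched below under the assumption of a hypothetical uploadOne(chunk, index) function (for example the uploadChunk helper above), is to cap how many requests are in flight at once:
// Sketch: run at most `limit` chunk uploads concurrently.
async function uploadWithConcurrencyLimit(chunks, uploadOne, limit = 4) {
  const results = new Array(chunks.length);
  let next = 0;
  // Each worker repeatedly claims the next chunk and uploads it.
  async function worker() {
    while (next < chunks.length) {
      const index = next++;
      results[index] = await uploadOne(chunks[index], index).catch((error) => error);
    }
  }
  await Promise.all(Array.from({ length: Math.min(limit, chunks.length) }, () => worker()));
  return results;
}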
2. Progress Display
To improve the user experience, we can show upload progress while the file is being uploaded:
import React, { useState } from 'react';

const chunkSize = 1024 * 1024; // 1MB

function App() {
  const [file, setFile] = useState(null);
  const [progress, setProgress] = useState(0);
  const uploadUrl = '/upload';

  const handleFileChange = (event) => {
    setFile(event.target.files[0]);
  };

  const handleUpload = async () => {
    if (!file) return;
    const chunks = sliceFile(file, chunkSize);
    const totalChunks = chunks.length;
    let uploadedChunks = 0;
    const results = await uploadChunksWithRetry(file, chunks, chunkSize, uploadUrl);
    // Record the successfully uploaded chunk indexes and update the progress bar.
    const uploadedIndexes = [];
    results.forEach((result, index) => {
      if (result.status === 'fulfilled') {
        uploadedIndexes.push(index);
        uploadedChunks++;
        setProgress((uploadedChunks / totalChunks) * 100);
      }
    });
    saveUploadedChunks(uploadedIndexes);
    const mergeResult = await mergeChunks(uploadUrl);
    console.log('File URL:', mergeResult.fileUrl);
  };

  const handleResumeUpload = async () => {
    if (!file) return;
    await resumeUpload(file, chunkSize, uploadUrl);
    const mergeResult = await mergeChunks(uploadUrl);
    console.log('File URL:', mergeResult.fileUrl);
  };

  return (
    <div>
      <input type="file" onChange={handleFileChange} />
      <button onClick={handleUpload}>Upload</button>
      <button onClick={handleResumeUpload}>Resume Upload</button>
      <div>Progress: {progress}%</div>
    </div>
  );
}

export default App;
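Because Promise.allSettled only resolves after every chunk has finished, the progress bar above effectively jumps to its final value in one step. For chunk-by-chunk feedback, one option is to update the counter as each chunk settles. The sketch below assumes a hypothetical uploadOne(chunk, index) single-chunk upload and an onProgress callback such as setProgress:
// Sketch: report progress as each chunk finishes instead of after all of them.
async function uploadChunksWithProgress(chunks, uploadOne, onProgress) {
  let done = 0;
  const tracked = chunks.map((chunk, index) =>
    uploadOne(chunk, index).finally(() => {
      done++;
      onProgress(Math.round((done / chunks.length) * 100));
    })
  );
  return Promise.allSettled(tracked);
}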
3. Complete Example
The following is a complete client-side example combining chunked upload and resumable upload. This version sends each chunk as FormData along with a fileId, so the server can group the chunks that belong to the same file:
import React, { useState } from 'react';

const chunkSize = 1024 * 1024; // 1MB

function App() {
  const [file, setFile] = useState(null);
  const [progress, setProgress] = useState(0);
  const uploadUrl = '/upload';

  const handleFileChange = (event) => {
    setFile(event.target.files[0]);
  };

  const handleUpload = async () => {
    if (!file) return;
    // A simple per-file identifier, used to group this file's chunks on the
    // server and to key the resume record in localStorage.
    const fileId = `${file.name}-${file.size}-${file.lastModified}`;
    const chunks = sliceFile(file, chunkSize);
    const totalChunks = chunks.length;
    let uploadedChunks = 0;
    const results = await uploadChunksWithRetry(chunks, uploadUrl, fileId);
    const uploadedIndexes = [];
    results.forEach((result, index) => {
      if (result.status === 'fulfilled') {
        uploadedIndexes.push(index);
        uploadedChunks++;
        setProgress((uploadedChunks / totalChunks) * 100);
      }
    });
    saveUploadedChunks(fileId, uploadedIndexes);
    const mergeResult = await mergeChunks(uploadUrl, fileId, file.name);
    console.log('File URL:', mergeResult.fileUrl);
  };

  const handleResumeUpload = async () => {
    if (!file) return;
    const fileId = `${file.name}-${file.size}-${file.lastModified}`;
    await resumeUpload(file, uploadUrl, fileId);
    const mergeResult = await mergeChunks(uploadUrl, fileId, file.name);
    console.log('File URL:', mergeResult.fileUrl);
  };

  return (
    <div>
      <input type="file" onChange={handleFileChange} />
      <button onClick={handleUpload}>Upload</button>
      <button onClick={handleResumeUpload}>Resume Upload</button>
      <div>Progress: {progress}%</div>
    </div>
  );
}
function sliceFile(file, chunkSize) {
  const chunks = [];
  let currentChunk = 0;
  while (currentChunk < file.size) {
    const chunk = file.slice(currentChunk, currentChunk + chunkSize);
    chunks.push(chunk);
    currentChunk += chunkSize;
  }
  return chunks;
}
async function uploadChunksWithRetry(chunks, uploadUrl, fileId, startIndex = 0, maxRetries = 3) {
  // Total number of chunks in the whole file (startIndex is non-zero when resuming).
  const totalChunks = startIndex + chunks.length;

  // Upload a single chunk, retrying up to maxRetries times before giving up.
  async function uploadChunk(chunk, index) {
    for (let attempt = 0; attempt < maxRetries; attempt++) {
      try {
        const formData = new FormData();
        formData.append('file', chunk);
        formData.append('chunkIndex', startIndex + index);
        formData.append('totalChunks', totalChunks);
        formData.append('fileId', fileId);
        const response = await fetch(uploadUrl, {
          method: 'POST',
          body: formData
        });
        if (response.ok) {
          return response;
        }
      } catch (error) {
        // Network error: fall through and try again.
      }
    }
    throw new Error(`Chunk ${startIndex + index}: max retries reached`);
  }

  const uploadPromises = chunks.map((chunk, index) => uploadChunk(chunk, index));
  const results = await Promise.allSettled(uploadPromises);
  return results;
}
function saveUploadedChunks(fileId, chunkIndexes) {
  // Persist the indexes of the successfully uploaded chunks, keyed per file.
  localStorage.setItem(`uploadedChunks:${fileId}`, JSON.stringify(chunkIndexes));
}

function getUploadedChunks(fileId) {
  const saved = localStorage.getItem(`uploadedChunks:${fileId}`);
  return saved ? JSON.parse(saved) : [];
}

async function resumeUpload(file, uploadUrl, fileId) {
  const uploadedChunks = getUploadedChunks(fileId);
  // Re-slice the file and skip the chunks uploaded in a previous session.
  const remainingChunks = sliceFile(file, chunkSize).slice(uploadedChunks.length);
  const results = await uploadChunksWithRetry(remainingChunks, uploadUrl, fileId, uploadedChunks.length);
  // Append the indexes of the chunks that succeeded in this run.
  const succeeded = results
    .map((result, i) => (result.status === 'fulfilled' ? uploadedChunks.length + i : null))
    .filter((index) => index !== null);
  saveUploadedChunks(fileId, [...uploadedChunks, ...succeeded]);
}
async function mergeChunks(uploadUrl, fileId, fileName) {
  const response = await fetch(`${uploadUrl}/merge`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ fileId, fileName })
  });
  const result = await response.json();
  return result;
}
export default App;
4. Server-Side Implementation
On the server, we need to handle both the chunk uploads and the merge request. Here is a simple Node.js (Express) example:
const express = require('express');
const fs = require('fs');
const path = require('path');
const multer = require('multer');

const app = express();
const upload = multer({ dest: 'uploads/' });

app.use(express.json());

// Receive a single chunk and store it under a directory named after the fileId.
app.post('/upload', upload.single('file'), (req, res) => {
  const { chunkIndex, fileId } = req.body;
  const chunkDir = path.join(__dirname, 'uploads', fileId);
  if (!fs.existsSync(chunkDir)) {
    fs.mkdirSync(chunkDir, { recursive: true });
  }
  // Move the uploaded temp file into place, named after its chunk index.
  const chunkPath = path.join(chunkDir, `${chunkIndex}`);
  fs.renameSync(req.file.path, chunkPath);
  res.sendStatus(200);
});

// Concatenate all chunks in index order, then remove the chunk directory.
app.post('/merge', (req, res) => {
  const { fileId, fileName } = req.body;
  const chunkDir = path.join(__dirname, 'uploads', fileId);
  const filePath = path.join(__dirname, 'uploads', fileName);
  const writeStream = fs.createWriteStream(filePath);
  // Sort numerically so chunk 10 comes after chunk 9.
  const chunkFiles = fs.readdirSync(chunkDir).sort((a, b) => Number(a) - Number(b));
  chunkFiles.forEach((chunkFile) => {
    const chunkPath = path.join(chunkDir, chunkFile);
    writeStream.write(fs.readFileSync(chunkPath));
    fs.unlinkSync(chunkPath);
  });
  writeStream.end();
  fs.rmdirSync(chunkDir);
  res.json({ fileUrl: `/uploads/${fileName}` });
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(`Server is running on port ${PORT}`);
});
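The /merge handler returns a URL under /uploads, but nothing above actually serves that directory. If the returned fileUrl should be directly downloadable, one option (assuming the same directory layout as above) is to expose it as static files:
// Serve merged files so the fileUrl returned by /merge can be fetched directly.
app.use('/uploads', express.static(path.join(__dirname, 'uploads')));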