大文件分片上传
大文件分片上传需要前后端一起配合完成,后端部分用 Node 演示。
背景:
在开发中有时会遇到大文件的情况,可能有几百 M;如果直接上传,接口可能因耗时太长而直接失败,这时就需要大文件分片上传了。接下来通过一个小 demo 给大家演示一下吧!!!
前端:
1.上传文件后会得到一个 File(继承自 Blob)对象,利用其 slice 方法进行切片,再分片上传
async handleUpload() { const fileObj = this.fileObj if (!fileObj.file) return
// 严谨一点的话,不使用name, 而是通过文件的hash去比较 const { shouldUpload } = await this.verifyUpload(fileObj.file.name) if (!shouldUpload) { alert('秒传:上传成功') return } const chunkList = this.createChunk(fileObj.file) console.log(chunkList) // 看看chunkList长什么样子 this.fileObj.chunkList = chunkList.map(({ file }, index) => ({ file, size: file.size, percent: 0, chunkName: `${fileObj.file.name}-${index}`, fileName: fileObj.file.name, index, })) console.log(this.fileObj) this.uploadChunks() // 执行上传切片的操作 }, createChunk(file, size = 2 * 1024 * 1024) { const chunkList = [] let cur = 0 while (cur < file.size) { // 使用slice方法切片 chunkList.push({ file: file.slice(cur, cur + size) }) cur += size } return chunkList },
这个切片前会有一个判断文件是否存在的接口,这个后续再说
2.上传切片以及展示进度条
通过 axios 的上传进度事件 onUploadProgress 获取上传文件的进度,显示进度信息
通过 async/await 让 uploadChunks 等待所有切片请求完成后再执行下一步,循环发起所有切片的上传
import axios from 'axios' axiosRequest({ url, method = 'post', data, headers = {}, onUploadProgress = (e) => e, // 进度回调 }) { return new Promise((resolve, reject) => { axios[method](url, data, { headers, onUploadProgress, // 传入监听进度回调 }) .then((res) => { resolve(res) }) .catch((err) => { reject(err) }) }) },
async uploadChunks() { const requestList = this.fileObj.chunkList .map(({ file, fileName, index, chunkName }) => { const formData = new FormData() formData.append('file', file) formData.append('fileName', fileName) formData.append('chunkName', chunkName) console.log({ formData, index }) return { formData, index } }) .map(({ formData, index }) => this.axiosRequest({ url: 'http://localhost:3000/upload', data: formData, onUploadProgress: this.createProgressHandler( this.fileObj.chunkList[index] ), // 传入监听上传进度回调 }) ) const result = await Promise.all(requestList) // 使用Promise.all进行请求 console.log(result) this.mergeChunks() },
createProgressHandler(item) { return (e) => { // 设置每一个切片的进度百分比 item.percent = parseInt(String((e.loaded / e.total) * 100)) } },
3.第二步的 Promise.all 全部完成后,通知后端上传完毕,合并切片
mergeChunks(size = 2 * 1024 * 1024) { this.axiosRequest({ url: 'http://localhost:3000/merge', headers: { 'content-type': 'application/json', }, data: JSON.stringify({ size, fileName: this.fileObj.file.name, }), }) },
4.秒传功能:实际就是后端检索是否已存在该文件,在第一步的开头执行
async verifyUpload(fileName) { const { data } = await this.axiosRequest({ url: 'http://localhost:3000/verify', headers: { 'content-type': 'application/json', }, data: JSON.stringify({ fileName, }), }) return data },
5.断点续传:通过维护一个数组变量,把已上传完成的切片记录进去,下次再上传时跳过这些切片
前端完整代码:
<template>
  <div>
    <input type="file" @change="handleFileChange" />
    <el-button @click="handleUpload"> 上传 </el-button>
    <div style="width: 300px">
      总进度:
      <el-progress :percentage="totalPercent"></el-progress>
      切片进度:
      <!-- fix: v-for keys must be strings/numbers; keying by the chunk
           object itself triggers a Vue warning — chunkName is unique -->
      <div v-for="item in fileObj.chunkList" :key="item.chunkName">
        <span>{{ item.chunkName }}:</span>
        <el-progress :percentage="item.percent"></el-progress>
      </div>
    </div>
  </div>
</template>
<script>
import axios from 'axios'
export default {
  name: '',
  data() {
    return {
      fileObj: {
        file: null, // the File picked by the user
        chunkList: [], // per-chunk state: file, size, percent, names, index
      },
    }
  },
  computed: {
    // Overall progress: size-weighted average of every chunk's percent.
    totalPercent() {
      const { file, chunkList } = this.fileObj
      if (chunkList.length === 0) return 0
      const loaded = chunkList
        .map(({ size, percent }) => size * percent)
        .reduce((pre, next) => pre + next)
      // Math.floor instead of parseInt(...toFixed(2)) — same truncation,
      // no string round-trip
      return Math.floor(loaded / file.size)
    },
  },
  methods: {
    // Thin axios wrapper. Fix: return the axios promise directly instead
    // of re-wrapping it in `new Promise` (explicit-construction anti-pattern).
    axiosRequest({
      url,
      method = 'post',
      data,
      headers = {},
      onUploadProgress = (e) => e, // progress callback
    }) {
      return axios[method](url, data, {
        headers,
        onUploadProgress, // forward the progress listener
      })
    },
    handleFileChange(e) {
      const [file] = e.target.files
      if (!file) return
      this.fileObj.file = file
    },
    // Entry point: instant-upload check first, then slice and upload.
    async handleUpload() {
      const fileObj = this.fileObj
      if (!fileObj.file) return
      // To be rigorous, compare by file-content hash rather than name
      const { shouldUpload } = await this.verifyUpload(fileObj.file.name)
      if (!shouldUpload) {
        alert('秒传:上传成功')
        return
      }
      const chunkList = this.createChunk(fileObj.file)
      console.log(chunkList) // inspect what chunkList looks like
      this.fileObj.chunkList = chunkList.map(({ file }, index) => ({
        file,
        size: file.size,
        percent: 0,
        chunkName: `${fileObj.file.name}-${index}`,
        fileName: fileObj.file.name,
        index,
      }))
      console.log(this.fileObj)
      this.uploadChunks() // start uploading the chunks
    },
    // Split the file into Blob slices of `size` bytes (default 2MB).
    createChunk(file, size = 2 * 1024 * 1024) {
      const chunkList = []
      let cur = 0
      while (cur < file.size) {
        // Blob.slice only records a byte range — cheap even for huge files
        chunkList.push({ file: file.slice(cur, cur + size) })
        cur += size
      }
      return chunkList
    },
    // Upload every chunk in parallel, then ask the backend to merge them.
    async uploadChunks() {
      const requestList = this.fileObj.chunkList
        .map(({ file, fileName, index, chunkName }) => {
          const formData = new FormData()
          formData.append('file', file)
          formData.append('fileName', fileName)
          formData.append('chunkName', chunkName)
          console.log({ formData, index })
          return { formData, index }
        })
        .map(({ formData, index }) =>
          this.axiosRequest({
            url: 'http://localhost:3000/upload',
            data: formData,
            onUploadProgress: this.createProgressHandler(
              this.fileObj.chunkList[index]
            ), // per-chunk upload-progress listener
          })
        )
      const result = await Promise.all(requestList) // wait for all chunks
      console.log(result)
      this.mergeChunks()
    },
    // Ask the backend whether the file already exists (instant upload).
    async verifyUpload(fileName) {
      const { data } = await this.axiosRequest({
        url: 'http://localhost:3000/verify',
        headers: {
          'content-type': 'application/json',
        },
        data: JSON.stringify({
          fileName,
        }),
      })
      return data
    },
    // Progress listener bound to one chunk item; stores 0-100 integer.
    createProgressHandler(item) {
      return (e) => {
        // Math.floor truncates directly — clearer than parseInt(String(...))
        item.percent = Math.floor((e.loaded / e.total) * 100)
      }
    },
    // Notify the backend that all chunks are uploaded so it can merge them.
    mergeChunks(size = 2 * 1024 * 1024) {
      this.axiosRequest({
        url: 'http://localhost:3000/merge',
        headers: {
          'content-type': 'application/json',
        },
        data: JSON.stringify({
          size,
          fileName: this.fileObj.file.name,
        }),
      })
    },
  },
}
</script>
<style scoped></style>
后端用 Node 实现,就不多做解释了
const http = require('http')
const path = require('path')
const fse = require('fs-extra')
const multiparty = require('multiparty')

const server = http.createServer()
const UPLOAD_DIR = path.resolve(__dirname, '.', `qiepian`) // chunk storage directory

server.on('request', async (req, res) => {
  res.setHeader('Access-Control-Allow-Origin', '*')
  res.setHeader('Access-Control-Allow-Headers', '*')
  if (req.method === 'OPTIONS') {
    // CORS preflight. Fix: http.ServerResponse exposes `statusCode`;
    // the original assigned `res.status`, which Node silently ignores.
    res.statusCode = 200
    res.end()
    return
  }
  console.log(req.url)

  if (req.url === '/upload') {
    const multipart = new multiparty.Form()
    multipart.parse(req, async (err, fields, files) => {
      if (err) {
        console.log('errrrr', err)
        return
      }
      const [file] = files.file
      const [fileName] = fields.fileName
      const [chunkName] = fields.chunkName
      // Folder that holds this file's chunks, e.g. 张远-嘉宾.flac-chunks
      const chunkDir = path.resolve(UPLOAD_DIR, `${fileName}-chunks`)
      // Create the chunk folder when the first chunk arrives
      if (!fse.existsSync(chunkDir)) {
        await fse.mkdirs(chunkDir)
      }
      // Move the uploaded temp file into the chunk folder
      await fse.move(file.path, `${chunkDir}/${chunkName}`)
      res.end(
        JSON.stringify({
          code: 0,
          message: '切片上传成功',
        })
      )
    })
  }

  // Collect the request body and resolve with the parsed JSON
  const resolvePost = (req) =>
    new Promise((res) => {
      let chunk = ''
      req.on('data', (data) => {
        chunk += data
      })
      req.on('end', () => {
        res(JSON.parse(chunk))
      })
    })

  // Stream one chunk file into writeStream, deleting the chunk afterwards
  const pipeStream = (chunkPath, writeStream) => {
    console.log('path', chunkPath)
    return new Promise((resolve) => {
      const readStream = fse.createReadStream(chunkPath)
      readStream.on('end', () => {
        fse.unlinkSync(chunkPath)
        resolve()
      })
      readStream.pipe(writeStream)
    })
  }

  // Merge all chunks of `fileName` (each `size` bytes) into `filePath`
  const mergeFileChunk = async (filePath, fileName, size) => {
    const chunkDir = path.resolve(UPLOAD_DIR, `${fileName}-chunks`)
    // readdir gives no guaranteed order, so sort by the trailing chunk index.
    // Fix: use the LAST '-' segment — fileName itself may contain '-'
    // (e.g. 张远-嘉宾.flac), so the original split('-')[1] produced NaN
    // and left the chunk order unstable.
    const chunkPaths = await fse.readdir(chunkDir)
    const chunkIndex = (name) => Number(name.split('-').pop())
    chunkPaths.sort((a, b) => chunkIndex(a) - chunkIndex(b))
    const tasks = chunkPaths.map((chunkPath, index) =>
      pipeStream(
        path.resolve(chunkDir, chunkPath),
        // Write each chunk at its own byte offset; `start` is the only
        // positioning option fs.createWriteStream understands (the
        // original also passed `end`, which write streams ignore)
        fse.createWriteStream(filePath, { start: index * size })
      )
    )
    await Promise.all(tasks)
    fse.rmdirSync(chunkDir) // chunks were unlinked above, so the dir is empty
  }

  if (req.url === '/merge') {
    const data = await resolvePost(req)
    const { fileName, size } = data
    const filePath = path.resolve(UPLOAD_DIR, fileName)
    await mergeFileChunk(filePath, fileName, size)
    res.end(
      JSON.stringify({
        code: 0,
        message: '文件合并成功',
      })
    )
  }

  if (req.url === '/verify') {
    const data = await resolvePost(req)
    const { fileName } = data
    const filePath = path.resolve(UPLOAD_DIR, fileName)
    console.log(filePath)
    if (fse.existsSync(filePath)) {
      // File already present: the client can "instant upload"
      res.end(
        JSON.stringify({
          shouldUpload: false,
        })
      )
    } else {
      res.end(
        JSON.stringify({
          shouldUpload: true,
        })
      )
    }
  }
})

server.listen(3000, () => console.log('正在监听 3000 端口'))