JS large-file chunked upload
The HTML page must be opened through a web server (the VS Code Live Server extension is a convenient choice); opened directly from the filesystem, the page cannot load the local worker.js.
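If you are not using VS Code, any static file server that serves .js files with a JavaScript MIME type works just as well; for example, Python's built-in http.server, started from the folder that contains index.html (the port number here is arbitrary):

python -m http.server 8080

Then open http://localhost:8080/ in the browser instead of a file:// path.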
Get axios.min.js and spark-md5.min.js yourself from npmjs.com and place them next to index.html; the code below references both by relative path.
index.html
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
</head>
<body>
    <input type="file" name="" id="file1">
    <input type="file" name="" id="file2">
    <script src="./axios.min.js"></script>
    <script>
        const PIECE_SIZE = 1024 * 1024 * 20 // 20 MB per chunk

        const ipt = document.querySelector('#file1')
        ipt.addEventListener('change', ev => {
            const file = ev.target.files[0]

            // slice the file into chunks
            const chunkList = []
            for (let i = 0; i < file.size; i += PIECE_SIZE) {
                chunkList.push(file.slice(i, i + PIECE_SIZE))
            }

            // how many logical threads the machine offers
            const threadNum = navigator.hardwareConcurrency || 4
            // how many chunks each worker has to handle
            const chunkNum = Math.ceil(chunkList.length / threadNum)

            let result = []
            console.time('cut')
            for (let i = 0; i < threadNum; i++) {
                const worker = new Worker('./worker.js', { type: 'module' })
                const start = chunkNum * i
                const end = Math.min(chunkNum * (i + 1), chunkList.length)
                worker.postMessage({ chunkList, start, end })
                worker.onmessage = ({ data }) => {
                    result = result.concat(data)
                    // every chunk hashed and uploaded: ask the server to reassemble the file
                    if (result.length === chunkList.length) {
                        console.timeEnd('cut')
                        console.time('upload')
                        uploadFile(result.sort((a, b) => a.index - b.index), file.name)
                    }
                }
            }
        })

        function uploadFile(lis, name) {
            const hashs = lis.map(i => i.hash)
            axios({
                url: 'http://127.0.0.1:5000/build_file',
                method: 'post',
                headers: { 'Content-Type': 'application/json' },
                data: { hashs, name }
            }).then(res => {
                console.timeEnd('upload')
                console.log(res, 'file reassembled')
            }).catch(err => {
                console.timeEnd('upload')
                console.log(err, 'file reassembly failed')
            })
        }
    </script>
    <script>
        // plain whole-file upload in a single request, used to compare timings with the chunked path
        const ipt2 = document.querySelector('#file2')
        ipt2.addEventListener('change', ev => {
            console.time('t1')
            const file = ev.target.files[0]
            const fd = new FormData()
            fd.append('file', file)
            axios({
                url: 'http://127.0.0.1:5000/load',
                method: 'post',
                // no manual Content-Type: the browser sets multipart/form-data with the boundary
                data: fd
            }).then(res => {
                console.timeEnd('t1')
                console.log(res, 'file uploaded')
            }).catch(err => {
                console.timeEnd('t1')
                console.log(err, 'upload failed')
            })
        })
    </script>
</body>
</html>
worker.js
// importing the two UMD bundles for their side effects makes SparkMD5 and axios
// available in the worker's global scope
import './spark-md5.min.js'
import './axios.min.js'

// hash one chunk with MD5, then upload the chunk together with its hash
function createChunk(list, index) {
    const blob = list[index]
    const spark = new SparkMD5.ArrayBuffer()
    return new Promise((resolve) => {
        const fr = new FileReader()
        fr.onload = ev => {
            spark.append(ev.target.result)
            const hash = spark.end()
            const fd = new FormData()
            fd.append('hash', hash)
            fd.append('file', blob)
            axios({
                url: 'http://127.0.0.1:5000/upload',
                method: 'post',
                data: fd
            }).then(() => {
                resolve({ hash, index })
            }).catch(() => {
                // resolve even on failure so Promise.all below never rejects
                resolve({ hash, index })
            })
        }
        fr.readAsArrayBuffer(blob)
    })
}

onmessage = function ({ data }) {
    const { start, end, chunkList } = data
    const taskList = []
    for (let i = start; i < end; i++) {
        taskList.push(createChunk(chunkList, i))
    }
    // send this worker's finished { hash, index } pairs back to the main thread
    Promise.all(taskList).then(res => {
        postMessage(res)
    })
}
Backend Python code
demo.py expects a temp directory in the same folder as the script; create it before uploading (or let the script create it, see the snippet after the listing).
from flask import Flask, request
from flask_cors import CORS
import os
app = Flask(__name__)
CORS(app)
@app.route('/upload', methods=['POST'])
def upload():
    # save one chunk under its MD5 hash
    filename = request.form.get('hash')
    temp_file = request.files.get('file')
    temp_file.save('./temp/%s' % filename)
    return {
        'save': "success"
    }


@app.route('/build_file', methods=['POST'])
def build_file():
    # append the chunks to the target file in the order sent by the client,
    # deleting each temp chunk once it has been written
    # (note: 'ab' appends, so remove any previous copy of the target file before rebuilding)
    filename = request.json.get('name')
    hashs = request.json.get('hashs')
    with open('./%s' % filename, 'ab') as file:
        for file_hash in hashs:
            with open('./temp/%s' % file_hash, 'rb') as temp:
                file.write(temp.read())
            os.remove('./temp/%s' % file_hash)
    return 'success'


@app.route('/load', methods=['POST'])
def load():
    # whole-file upload endpoint; the saved name is hard-coded for the demo
    file = request.files.get('file')
    file.save('./asd.exe')
    return 'success'


if __name__ == '__main__':
    app.run(debug=True)
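If you would rather not create the temp directory by hand, the script can create it at startup; a minimal sketch (this guard is an addition, not part of the original demo.py):

import os

# create ./temp next to the script if it does not exist yet
os.makedirs('./temp', exist_ok=True)

Since demo.py already imports os, the single makedirs line placed anywhere before app.run() is enough.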
I meant to live life like a poem, elegant at times and unrestrained at others; it turned out to be a song instead, unreliable at times and off-key at others.