Code
"""
FastDFS并发测试脚本
Usage::
$ python <me>.py 200 --show
"""
import functools
import itertools
import json
import os
import pickle
import sys
import time
from pathlib import Path
from typing import Callable, NoReturn, TypeVar
import tqdm
from asynctor import bulk_gather, run, timeit
from asynctor.tasks import ThreadGroup
from httpx import AsyncClient
from rich import print
from fastdfs_client import FastdfsClient
T = TypeVar("T")
def catch_cost(func: Callable[..., T]) -> Callable[..., tuple[float, T]]:
@functools.wraps(func)
def wrapper(*args, **kw) -> tuple[float, T]:
start = time.time()
rv = func(*args, **kw)
cost = round(time.time() - start, 1)
return cost, rv
return wrapper
def catch_async_cost(func):
@functools.wraps(func)
async def wrapper(*args, **kw):
start = time.time()
rv = await func(*args, **kw)
cost = round(time.time() - start, 1)
print(rv)
return cost, rv
return wrapper
@timeit
async def show_result(output: Path, dfs: FastdfsClient) -> None:
"""展示上传结果,验证返回的URL"""
results = json.loads(output.read_bytes())
print("Upload result:")
print(results)
urls = [url for _, url in results]
if not (_nf := os.getenv("NO_FETCH")) or _nf == "0":
async with AsyncClient(follow_redirects=True, timeout=80) as client:
checks = (client.get(i) for i in urls)
rs = await bulk_gather(checks, limit=50)
print("URL concurrency result:\nidx\tstatus_code\telapsed\turl\tContentLength")
for i, r in enumerate(rs, 1):
print(
i,
r.status_code,
r.elapsed,
r.url,
len(r.content) if r.status_code == 200 else r.text,
)
else:
print(f"{len(results) = }")
if "-d" in sys.argv or "--delete" in sys.argv:
print("=" * 20)
await delete_all(urls, dfs)
@timeit
async def delete_all(urls: list[str], dfs: FastdfsClient) -> None:
results = await bulk_gather([catch_async_cost(dfs.delete)(url) for url in urls])
for res in results:
print(res)
print(f"total={len(results)}; success={sum(isinstance(i, tuple) for i in results)}")
def multi_threads_delete(urls, dfs):
"""使用多线程批量删除远程文件"""
with ThreadGroup(max_workers=30) as tg:
for url in urls:
tg.soonify(catch_cost(dfs.delete_file))(url)
return tg.results
def abort(msg: str) -> NoReturn:
print(f"[red]ERROR:[/red] {msg}")
sys.exit(1)
def multi_threads_upload(client, total, images):
with ThreadGroup() as tg:
for index, p in tqdm.tqdm(zip(range(total), itertools.cycle(images))):
tg.soonify(catch_cost(client.upload_as_url))(p.read_bytes())
return tg.results
async def upload_many(client, total, images):
return await bulk_gather(
[
catch_async_cost(client.upload)(p.read_bytes())
for _, p in tqdm.tqdm(zip(range(total), itertools.cycle(images)))
]
)
@timeit
async def main() -> None:
total = 10
client = FastdfsClient(["dfs.waketzheng.top"])
if args := sys.argv[1:]:
if (a1 := args[0]).isdigit():
total = int(a1)
elif (p := Path(a1)).is_file():
await show_result(p, client)
return
else:
abort("Invalid argument `{a1}`! Must be int or filepath.")
d = Path.home() / "Pictures"
assert d.exists(), f"文件夹({d})不存在"
images = list(d.rglob("*.jp*g")) + list(d.rglob("*.JP*G"))
assert images, f"{d}中没有jpeg图片"
results = await upload_many(client, total, images)
try:
res = json.dumps(results)
except TypeError:
print(results)
success = [i for i in results if isinstance(i, tuple)]
print(f"total={len(results)}; success={len(success)}")
p = Path("err.pickle")
size = p.write_bytes(pickle.dumps(results))
print(f"Failed to dump results: Write err info to {p} with {size=}")
res = json.dumps(success)
(p := Path("output.json")).write_text(res)
print(f"{total = }\nSave results to '{p}'.")
if "--show" in args:
await show_result(p, client)
if __name__ == "__main__":
run(main)
Usage
python ts.py
python ts.py output.json -d
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 分享一个免费、快速、无限量使用的满血 DeepSeek R1 模型,支持深度思考和联网搜索!
· 基于 Docker 搭建 FRP 内网穿透开源项目(很简单哒)
· ollama系列01:轻松3步本地部署deepseek,普通电脑可用
· 25岁的心里话
· 按钮权限的设计及实现