fastapi 通过依赖注入模式使用apscheduler
就是一个简单记录,方便后续使用
参考代码
- api 服务
import uuid
from fastapi import FastAPI,Header,Depends,HTTPException
from typing import Annotated
async def tenant_id(x_tenant_id: Annotated[str, Header()]):
    """Dependency that validates the X-Tenant-Id request header.

    Returns the header value when it matches the expected tenant ("demo");
    otherwise rejects the request with a 400.
    """
    if x_tenant_id != "demo":
        # Bug fix: the original detail text said "X-Token header invalid",
        # but the header actually being validated is X-Tenant-Id.
        raise HTTPException(status_code=400, detail="X-Tenant-Id header invalid")
    return x_tenant_id
from instance import SingleBackgroudScheduler
def apschedulere():
    """Return the process-wide SingleBackgroudScheduler, starting it on demand.

    Intended for use as a FastAPI dependency (Depends(apschedulere)).
    """
    sched = SingleBackgroudScheduler()
    if sched.running:
        return sched
    sched.start()
    return sched
# Every route requires a valid X-Tenant-Id header via the tenant_id dependency.
app = FastAPI(dependencies=[Depends(tenant_id)])
# Module-level handle to the singleton scheduler (created/started if needed).
scheduler = apschedulere()
@app.on_event("startup")
async def startup_event():
    """Ensure the shared background scheduler is running once the app boots."""
    if scheduler.running:
        print("scheduler started")
        return
    scheduler.start()
    print("scheduler started")
@app.on_event("shutdown")
async def shutdown_event():
    """Stop the shared scheduler cleanly when the application shuts down.

    Fix: the original re-fetched the instance through apschedulere(), whose
    start-if-stopped logic would *start* a stopped scheduler during shutdown
    only to stop it again.  Use the module-level singleton directly, which is
    also consistent with startup_event.
    """
    if scheduler.running:
        scheduler.shutdown()
class DBContext():
    """Demo payload object handed to scheduled jobs via add_job kwargs.

    NOTE(review): instances are persisted by the SQLAlchemy job store along
    with the job, so this class must remain picklable — TODO confirm with the
    job-store configuration.
    """

    def __init__(self):
        # Fixed demo value; jobs read it as kwargs["db_context"].name.
        self.name = "dalong"

    def __repr__(self):
        # Added for debuggability; purely additive, callers are unaffected.
        return f"{type(self).__name__}(name={self.name!r})"
@app.get("/")
async def add_task(scheduler: SingleBackgroudScheduler = Depends(apschedulere)):
    """Schedule the demo interval job and return its id.

    Fixes: the original generated two unrelated UUIDs (one for name, one for
    id) and returned no body, leaving the client no way to learn the job id
    required by /pause/{job_id}.
    """
    job_id = str(uuid.uuid4())
    scheduler.add_job(
        "mytask:my_job",
        name=job_id,
        id=job_id,
        kwargs={
            "name": "dalong",
            "age": 333,
            "job_args": {"name": "demo", "id": 333, "version": "v1"},
            "db_context": DBContext(),
        },
        trigger='interval',
        seconds=5,
    )
    return {"job_id": job_id}
@app.get("/pause/{job_id}")
async def pause_job(job_id: str, scheduler: SingleBackgroudScheduler = Depends(apschedulere)):
    """Pause the job with the given id; respond 404 if it does not exist.

    Fix: an unknown job id previously let apscheduler's JobLookupError escape
    and surface as a 500.
    """
    if scheduler.get_job(job_id) is None:
        raise HTTPException(status_code=404, detail=f"job {job_id} not found")
    scheduler.pause_job(job_id)
    return {"paused": job_id}
@app.get("/jobs")
async def jobs(scheduler: SingleBackgroudScheduler = Depends(apschedulere)):
    """List the id and name of every scheduled job."""
    summaries = []
    for job in scheduler.get_jobs():
        summaries.append({"id": job.id, "name": job.name})
    return summaries
if __name__ == "__main__":
    # Run the API directly with uvicorn for local development.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
- SingleBackgroudScheduler 定义
"""
Module providing a process-wide singleton wrapper around apscheduler's
BackgroundScheduler (SQLAlchemy job store + thread/process executors).
"""
import logging
from apscheduler.schedulers.background import BackgroundScheduler
from pytz import utc
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
# Surface apscheduler's own job-execution logs at INFO level.
logging.basicConfig()
logging.getLogger('apscheduler').setLevel(logging.INFO)
class SingleBackgroudScheduler:
    """Singleton factory for a configured BackgroundScheduler.

    The first call to ``SingleBackgroudScheduler(...)`` builds and caches a
    BackgroundScheduler with a SQLAlchemy job store and thread/process
    executors; every later call returns that same cached instance.

    NOTE(review): keyword arguments are honoured only on the *first* call;
    later calls with different settings silently receive the original
    instance.  (Class name keeps the original "Backgroud" spelling because
    callers import it by that name.)
    """

    _instance = None  # cached BackgroundScheduler, shared across all calls

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            # Fix: the original built the full config dict on every call and
            # discarded it once the singleton existed; build it only when we
            # actually create the scheduler.
            jobstores = {
                'default': SQLAlchemyJobStore(
                    url=kwargs.get("db_connection", "sqlite:///jobs.sqlite"))
            }
            executors = {
                'default': ThreadPoolExecutor(kwargs.get("thread_pool", 20)),
                'processpool': ProcessPoolExecutor(kwargs.get("process_pool", 5)),
            }
            job_defaults = {
                'coalesce': kwargs.get("coalesce", False),
                'max_instances': kwargs.get("max_instances", 3),
            }
            scheduler = BackgroundScheduler(jobstores=jobstores,
                                            executors=executors,
                                            job_defaults=job_defaults,
                                            timezone=utc)
            scheduler.status_message = "BackgroundScheduler Initialized"
            cls._instance = scheduler
        return cls._instance
说明
核心是添加了单例模式,之后通过 Depends 将获取单例 BackgroundScheduler 的方法注入,方便后续业务 API 的使用
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 全程不用写代码,我用AI程序员写了一个飞机大战
· DeepSeek 开源周回顾「GitHub 热点速览」
· 记一次.NET内存居高不下排查解决与启示
· MongoDB 8.0这个新功能碉堡了,比商业数据库还牛
· .NET10 - 预览版1新功能体验(一)
2021-10-10 cube cloud集成git 模式说明二
2020-10-10 nexus Invalid state: DELETED; allowed: [STARTED] 问题解决
2018-10-10 git server side hook 试用
2017-10-10 openfaas 私有镜像配置
2017-10-10 转 OpenFaaS 介绍
2016-10-10 重定向 url cookie