fastapi 通过依赖注入模式使用apscheduler
就是一个简单记录,方便后续使用
参考代码
- api 服务
import uuid
from fastapi import FastAPI,Header,Depends,HTTPException
from typing import Annotated
async def tenant_id(x_tenant_id: Annotated[str, Header()]):
    """FastAPI dependency that validates the ``X-Tenant-Id`` request header.

    Raises:
        HTTPException: 400 when the header does not match the demo tenant.
    """
    if x_tenant_id != "demo":
        # BUG FIX: the original detail said "X-Token header invalid", but the
        # header actually being validated here is X-Tenant-Id.
        raise HTTPException(status_code=400, detail="X-Tenant-Id header invalid")
    return x_tenant_id
from instance import SingleBackgroudScheduler
def apschedulere():
    """Dependency provider: return the singleton scheduler, started.

    Safe to call repeatedly — ``SingleBackgroudScheduler`` always hands back
    the same underlying instance, and we only start it when it isn't running.
    """
    sched = SingleBackgroudScheduler()
    if not sched.running:
        sched.start()
    return sched
# Every route on this app requires a valid tenant header (see tenant_id).
app = FastAPI(dependencies=[Depends(tenant_id)])
# Create (and start) the singleton scheduler eagerly at import time.
scheduler = apschedulere()
@app.on_event("startup")
async def startup_event():
    """Make sure the shared scheduler is running when the app boots."""
    # NOTE(review): on_event is deprecated in newer FastAPI in favor of
    # lifespan handlers — kept as-is to match the rest of this file.
    if not scheduler.running:
        scheduler.start()
    print("scheduler started")
@app.on_event("shutdown")
async def shutdown_event():
    """Stop the shared scheduler when the application exits.

    BUG FIX: the original re-fetched the scheduler via ``apschedulere()``,
    which *starts* the scheduler when it isn't running — exactly the wrong
    side effect inside a shutdown hook. Use the module-level singleton
    created at import time instead.
    """
    if scheduler.running:
        # Default wait=True lets in-flight jobs finish before the process exits.
        scheduler.shutdown()
class DBContext:
    """Minimal stand-in for a database context handed to scheduled jobs."""

    def __init__(self) -> None:
        # Fixed demo value; a real context would carry a connection/session.
        self.name: str = "dalong"
@app.get("/")
async def add_task(scheduler: SingleBackgroudScheduler = Depends(apschedulere)):
    """Register a demo interval job that fires every 5 seconds."""
    # Payload forwarded to mytask:my_job; includes a fresh DBContext instance.
    job_kwargs = {
        "name": "dalong",
        "age": 333,
        "job_args": {"name": "demo", "id": 333, "version": "v1"},
        "db_context": DBContext(),
    }
    scheduler.add_job(
        "mytask:my_job",
        name=str(uuid.uuid4()),  # independent uuids: name and id differ
        id=str(uuid.uuid4()),
        kwargs=job_kwargs,
        trigger='interval',
        seconds=5,
    )
@app.get("/pause/{job_id}")
async def pause_job(job_id: str, scheduler: SingleBackgroudScheduler = Depends(apschedulere)):
    """Pause the scheduled job identified by ``job_id``."""
    # Delegates straight to APScheduler; the job stays registered, just paused.
    scheduler.pause_job(job_id)
@app.get("/jobs")
async def jobs(scheduler: SingleBackgroudScheduler = Depends(apschedulere)):
    """List currently registered jobs as ``{id, name}`` records."""
    return [{"id": job.id, "name": job.name} for job in scheduler.get_jobs()]
if __name__ == "__main__":
    # Dev entry point: run the ASGI app directly with uvicorn.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)
- SingleBackgroudScheduler 定义
"""
init one BackgroundScheduler (process-wide singleton)
"""
import logging
from apscheduler.schedulers.background import BackgroundScheduler
from pytz import utc
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
# Surface APScheduler's own log output (job add/submit/execute events).
logging.basicConfig()
logging.getLogger('apscheduler').setLevel(logging.INFO)
class SingleBackgroudScheduler:
    """Process-wide singleton wrapper around APScheduler's BackgroundScheduler.

    The first instantiation builds the scheduler from the supplied kwargs
    (``db_connection``, ``thread_pool``, ``process_pool``, ``coalesce``,
    ``max_instances``); every later call returns the same instance and its
    kwargs are ignored. NOTE(review): creation is not guarded by a lock —
    fine when first constructed at import/startup time; confirm no
    concurrent first-use.
    """

    # Cached BackgroundScheduler shared by every caller.
    _instance = None

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            # FIX: the original built the whole config dict on *every* call,
            # even after the singleton existed — wasted work that also
            # suggested later kwargs mattered. Resolve config only on first
            # construction.
            jobstores = {
                # SQLAlchemy job store so jobs survive process restarts.
                'default': SQLAlchemyJobStore(
                    url=kwargs.get("db_connection", "sqlite:///jobs.sqlite"))
            }
            executors = {
                'default': ThreadPoolExecutor(kwargs.get("thread_pool", 20)),
                'processpool': ProcessPoolExecutor(kwargs.get("process_pool", 5)),
            }
            job_defaults = {
                'coalesce': kwargs.get("coalesce", False),
                'max_instances': kwargs.get("max_instances", 3),
            }
            scheduler = BackgroundScheduler(jobstores=jobstores,
                                            executors=executors,
                                            job_defaults=job_defaults,
                                            timezone=utc)
            scheduler.status_message = "BackgroundScheduler Initialized"
            cls._instance = scheduler
        return cls._instance
说明
核心上是添加了单例模式,之后通过Depends 将获取单例的BackgroundScheduler的方法进行注入,方便后续业务api 的使用