@router.post('/openai', response_class=EventSourceResponse)
async def simple_chat(user_input=Body(..., embed=True, alias='input'),
                      chat_history=Body(..., embed=True, alias='chat_history')):
    """Stream an OpenAI chat completion back to the client as Server-Sent Events.

    Request body: ``{"input": <user message>, "chat_history": <prior transcript>}``.
    The LLM runs as a background task; each generated token is pushed by the
    streaming callback handler into its queue and drained by the SSE generator.
    """
    callback_handler = StreamingCallbackHandler()
    chat_prompt = PromptTemplate(
        input_variables=['human_input', 'chat_history'],
        # Runtime prompt text — must stay verbatim.
        template='''you are a robot having a conversation with a human being.
previous conversation:
{chat_history}

human: {human_input}
robot:''',
    )
    chain = LLMChain(
        llm=OpenAI(
            temperature=0.8,
            max_retries=1,
            max_tokens=2048,
            streaming=True,
            verbose=True,
        ),
        prompt=chat_prompt,
    )

    # FIX: the original scheduled the coroutine with
    # asyncio.run_coroutine_threadsafe(task, loop) from *inside* the loop's
    # own thread.  That API is for cross-thread submission; in a coroutine the
    # correct call is asyncio.create_task.  Keeping the Task reference also
    # prevents it from being garbage-collected and keeps its exceptions
    # observable instead of silently dropped.
    chain_task = asyncio.create_task(  # noqa: F841 — reference kept on purpose
        chain.arun({'human_input': user_input, 'chat_history': chat_history},
                   callbacks=[callback_handler])
    )

    def resp():
        # Drain tokens until the callback handler enqueues the StopIteration
        # sentinel.  sse-starlette iterates sync generators in a threadpool,
        # so the blocking Queue.get() does not stall the event loop.
        # (Original raised/caught StopIteration just to exit — a plain break
        # is equivalent and avoids PEP 479 foot-guns.)
        while True:
            tk = callback_handler.tokens.get()
            if tk is StopIteration:
                break
            yield ServerSentEvent(data=json.dumps(tk, ensure_ascii=False))

    return EventSourceResponse(resp())
| |
- 后台启动
nohup gunicorn main:app -w 4 -b 0.0.0.0:8081 --worker-class uvicorn.workers.UvicornWorker > test.log 2>&1 &
- libreoffice 安装
参考:https://blog.csdn.net/weixin_43148701/article/details/129403372
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 震惊!C++程序真的从main开始吗?99%的程序员都答错了
· winform 绘制太阳,地球,月球 运作规律
· 【硬核科普】Trae如何「偷看」你的代码?零基础破解AI编程运行原理
· 上周热点回顾(3.3-3.9)
· 超详细:普通电脑也行Windows部署deepseek R1训练数据并当服务器共享给他人