A DeepSeek Q&A page in Python (streaming)
Backend (Flask, streaming the model output to the browser as server-sent events):

from openai import OpenAI
from flask import Flask, request, Response
from flask_cors import CORS

app = Flask(__name__)
CORS(app)

# Initialize the OpenAI-compatible client against the Bailian (DashScope) endpoint.
client = OpenAI(
    # If no environment variable is configured, replace this with your Bailian API key: api_key="sk-xxx"
    api_key="sk-034c7d21eaec4",
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1"
)


@app.route('/chat', methods=['GET'])
def chat():
    q = request.args.get('q', '')

    def generate():
        is_answering = False
        reasoning_content = ""  # collects "thinking" tokens (reasoning models such as deepseek-r1)

        try:
            # Create a streaming chat-completion request.
            completion = client.chat.completions.create(
                model="deepseek-v3",  # change the model name as needed, e.g. deepseek-r1
                messages=[
                    {"role": "user", "content": q}
                ],
                stream=True,
                # With include_usage, the final chunk reports token usage and has an empty choices list.
                stream_options={"include_usage": True}
            )

            for chunk in completion:
                if not chunk.choices:
                    # Usage-only chunk at the end of the stream.
                    print("\nUsage:")
                    print(chunk.usage)
                    continue

                delta = chunk.choices[0].delta
                if hasattr(delta, 'reasoning_content') and delta.reasoning_content is not None:
                    # Accumulate the reasoning trace instead of streaming it to the page.
                    reasoning_content += delta.reasoning_content
                else:
                    content = delta.content or ""
                    if content and not is_answering:
                        # First piece of the actual answer.
                        is_answering = True
                    if content:
                        # An SSE "data:" field must be a single line, so split
                        # multi-line chunks into one data line per text line.
                        for line in content.split("\n"):
                            yield f"data:{line}\n"
                        yield "\n"

            yield "data: [END]\n\n"

        except Exception as e:
            # The generator cannot return an HTTP 500 once streaming has started,
            # so report the failure to the client as a final SSE event instead.
            yield f"data: [ERROR] {str(e)}\n\n"

    return Response(generate(), mimetype='text/event-stream')


if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True, port=5000)
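The comment in the client setup above mentions environment variables; below is a minimal sketch of that variant, assuming the key is exported under the (hypothetical) name DASHSCOPE_API_KEY before starting the app:

import os
from openai import OpenAI

# Read the Bailian API key from the environment instead of hard-coding it.
# DASHSCOPE_API_KEY is an assumed variable name; use whatever name you export.
client = OpenAI(
    api_key=os.environ.get("DASHSCOPE_API_KEY", "sk-xxx"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1"
)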
Frontend (a static HTML page that consumes the stream with EventSource):

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>LLM Q&A Page</title>
    <style>
        body { font-family: Arial, sans-serif; padding: 20px; }
        #question { width: 100%; padding: 10px; margin-bottom: 10px; }
        #submit { padding: 10px 20px; background-color: #007BFF; color: white; border: none; cursor: pointer; }
        #answer { margin-top: 20px; padding: 10px; border: 1px solid #ccc; min-height: 100px; }
    </style>
</head>
<body>
    <input type="text" id="question" placeholder="Enter your question">
    <button id="submit">Submit</button>
    <div id="answer"></div>

    <script>
        document.getElementById('submit').addEventListener('click', function () {
            const question = document.getElementById('question').value;
            if (question) {
                // Clear the previous answer.
                document.getElementById('answer').textContent = '';
                // Open an EventSource connection to the backend streaming endpoint.
                const eventSource = new EventSource(`http://127.0.0.1:5000/chat?q=${encodeURIComponent(question)}`);
                eventSource.onmessage = function (event) {
                    const answerElement = document.getElementById('answer');
                    if (event.data === '[END]') {
                        // The backend signals the end of the stream with [END].
                        console.log('Stream finished');
                        answerElement.textContent += ' [answer complete]';
                        eventSource.close();
                    } else {
                        // Append each newly received chunk to the answer area.
                        answerElement.textContent += event.data;
                    }
                };
                eventSource.onerror = function (error) {
                    console.error('Connection error:', error);
                    eventSource.close();
                    const answerElement = document.getElementById('answer');
                    answerElement.textContent = 'Connection error, please try again later.';
                };
            }
        });
    </script>
</body>
</html>
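Before wiring up the page, the /chat endpoint can also be checked from Python alone. A minimal sketch, assuming the Flask app above is already running locally on port 5000; it uses the requests module listed in the notes below, and the question string is just an example:

import requests

# Stream the SSE response from the local /chat endpoint and print it as it arrives.
resp = requests.get(
    "http://127.0.0.1:5000/chat",
    params={"q": "Hello, introduce yourself briefly."},
    stream=True,
)
for raw in resp.iter_lines(decode_unicode=True):
    if not raw or not raw.startswith("data:"):
        continue  # blank lines just separate SSE events
    data = raw[5:]  # drop the "data:" prefix
    if data.strip() == "[END]":
        break
    print(data, end="", flush=True)
print()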
Notes:
1. The API endpoint is Alibaba Cloud's Bailian platform, and so is the key.
2. Python needs the flask, openai, flask-cors and requests modules installed.
3. For learning purposes only.