LangChain官方示例教程(Build a Simple LLM Application)。重新组织顺序及说明方式,更加适合新手阅读。

pip install langchain
# 接入ollama本地大模型
pip install langchain-ollama
# 接入兼容OpenAI接口的国产大模型
pip install langchain-openai
提示词
输入
- 提示词主要有三种角色,LangChain有相应的Message类
# Chat prompts are built from three roles (system / user|human / assistant|ai).
# LangChain accepts the same conversation in three equivalent shapes.
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
# Shape 1: OpenAI-style role/content dicts.
messages = [
{"role": "system", "content": "将下面的内容翻译成 英语"},
{"role": "user", "content": "你好,吃了吗?"},
{"role": "assistant", "content": "Hello, have you eaten yet?"},
]
# Shape 2: dedicated Message classes from langchain_core.
messages = [
SystemMessage(content="将下面的内容翻译成 英语"),
HumanMessage(content="你好,吃了吗?"),
AIMessage(content="Hello, have you eaten yet?"),
]
# Shape 3: (role, content) tuples; LangChain coerces them to Message objects.
messages = [
("system", "将下面的内容翻译成 英语"),
("human", "你好,吃了吗?"),
("ai", "Hello, have you eaten yet?"),
]
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
message = AIMessage(content="Hello, have you eaten yet?")
# pretty_print() renders the message for human inspection;
# model_dump_json() serializes it (pydantic model) to a JSON string.
message.pretty_print()
print(message.model_dump_json())
模板
from langchain_core.prompts import ChatPromptTemplate
# Values substituted into the {language} / {text} placeholders below.
inputs = {"language": "英语", "text": "你好,吃了吗?"}
template = ChatPromptTemplate(
[("system", "将下面的内容翻译成 {language} "), ("human", "{text}")]
)
# invoke() fills the placeholders and returns the rendered prompt value.
result = template.invoke(inputs)
接入大模型
实例化大模型
# Option A: a local model served by Ollama (default port 11434).
from langchain_ollama import ChatOllama
llm = ChatOllama(base_url="http://localhost:11434", model="qwen2.5:latest")
# Option B: any provider exposing an OpenAI-compatible HTTP API —
# point base/key/model at the provider's values.
from langchain_openai import ChatOpenAI
llm = ChatOpenAI(
openai_api_base="各个大平台兼容OpenAI的地址",
openai_api_key="xxx-xxx",
model_name="模型名称/endpoint等"
)
# Calling the model directly with a list of messages (performs network I/O).
from langchain_ollama import ChatOllama
from langchain_core.messages import HumanMessage
llm = ChatOllama(base_url="http://localhost:11434", model="qwen2.5:latest")
messages = [HumanMessage(content="你好,吃了吗?")]
result = llm.invoke(messages)  # the model's reply, an AIMessage
主线(模板+大模型)
# Main line: prompt template piped into the model via LCEL.
from langchain_ollama import ChatOllama
from langchain_core.prompts import ChatPromptTemplate
inputs = {"language": "英语", "text": "你好,吃了吗?"}
template = ChatPromptTemplate(
[("system", "将下面的内容翻译成 {language} "), ("human", "{text}")]
)
llm = ChatOllama(base_url="http://localhost:11434", model="qwen2.5:latest")
# The | operator composes runnables: template output feeds the model.
chain = template | llm
# One invoke() runs the whole pipeline; result is the model's AIMessage.
result = chain.invoke(inputs)
输出转换
输出转换器
from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.messages import AIMessage
message = AIMessage(content='{"name": "Alice", "age": 30}')
# StrOutputParser extracts the message content as a plain str.
str_parser = StrOutputParser()
result = str_parser.invoke(message)
print(type(result))  # <class 'str'>
print(result)
# JsonOutputParser additionally parses the content as JSON (here -> dict).
json_parser = JsonOutputParser()
result = json_parser.invoke(message)
print(type(result))  # <class 'dict'>
print(result)
主线(模板+大模型+输出)
# Main line: template -> model -> output parser, composed with LCEL.
from langchain_ollama import ChatOllama
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
inputs = {"language": "英语", "text": "你好,吃了吗?"}
template = ChatPromptTemplate(
[("system", "将下面的内容翻译成 {language} "), ("human", "{text}")]
)
llm = ChatOllama(base_url="http://localhost:11434", model="qwen2.5:latest")
parser = StrOutputParser()
# Appending the parser makes the chain return a plain str
# instead of an AIMessage.
chain = template | llm | parser
result = chain.invoke(inputs)
其他补充
模板
from langchain_core.prompts import ChatPromptTemplate
# A "placeholder" entry splices an entire list of messages (a running
# conversation) into the prompt at that position.
template = ChatPromptTemplate(
[("system", "你是导游,回答用户提出的问题"), ("placeholder", "{conversation}")]
)
inputs = {
"conversation": [
("human", "福州"),
("ai", "福州是一个....."),
("human", "什么季节去合适?"),
],
}
# Renders system message + the three conversation messages.
messages = template.invoke(inputs)
from langchain_core.prompts import ChatPromptTemplate
# Same as above, but the system message itself is parameterized with {role},
# so ordinary variables and a "placeholder" list can be mixed in one template.
template = ChatPromptTemplate(
[("system", "你是{role},回答用户提出的问题"), ("placeholder", "{conversation}")]
)
inputs = {
"role": "导游",
"conversation": [
("human", "福州"),
("ai", "福州是一个....."),
("human", "什么季节去合适?"),
],
}
messages = template.invoke(inputs)
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 全程不用写代码,我用AI程序员写了一个飞机大战
· DeepSeek 开源周回顾「GitHub 热点速览」
· 记一次.NET内存居高不下排查解决与启示
· MongoDB 8.0这个新功能碉堡了,比商业数据库还牛
· .NET10 - 预览版1新功能体验(一)