test

Set your HuggingFace API Token

import os
os.environ['HUGGINGFACEHUB_API_TOKEN'] = 'your HuggingFace API Token'
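
If you'd rather not hard-code the token in the source file, a minimal sketch using the standard-library getpass module (my addition, not from the original post) reads it interactively:

from getpass import getpass
# Prompt for the token at runtime instead of embedding it in the script
os.environ['HUGGINGFACEHUB_API_TOKEN'] = getpass('HuggingFace API Token: ')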

Import the required libraries

from langchain import PromptTemplate, HuggingFaceHub, LLMChain

Initialize the HF LLM

llm = HuggingFaceHub(
    repo_id="google/flan-t5-small",
    # repo_id="meta-llama/Llama-2-7b-chat-hf",
)
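
Generation settings can usually be forwarded as well; this is only a sketch assuming the model_kwargs argument of HuggingFaceHub (check your LangChain version), with example values I chose myself:

llm = HuggingFaceHub(
    repo_id="google/flan-t5-small",
    # Optional generation parameters forwarded to the Hugging Face Inference API
    model_kwargs={"temperature": 0.5, "max_length": 64},
)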

Create a simple question-answering prompt template

template = """Question: {question}
Answer: """

Create the Prompt

prompt = PromptTemplate(template=template, input_variables=["question"])
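
To check what will actually be sent to the model, you can render the template by hand; prompt.format fills {question} into the string (the example question below is mine):

# Inspect the final prompt text for a concrete question
print(prompt.format(question="Rose is which type of flower?"))
# Prints:
# Question: Rose is which type of flower?
# Answer: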

Build the LLM Chain --- we'll cover LLM Chain in detail later

llm_chain = LLMChain(
    prompt=prompt,
    llm=llm
)
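
For a single input, the chain is roughly equivalent to formatting the prompt and passing the result straight to the LLM; a hand-rolled sketch (assuming the callable-LLM interface of this LangChain version):

# Roughly what LLMChain does: fill the template, then send the text to the LLM
filled_prompt = prompt.format(question="Rose is which type of flower?")
print(llm(filled_prompt))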

Prepare the question

question = "Rose is which type of flower?"

Call the model and print the result

print(llm_chain.run(question))
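
The same chain can be reused for several questions; a sketch with a second, made-up question:

# Run the chain over a small list of questions
questions = [
    "Rose is which type of flower?",
    "What is the capital of France?",
]
for q in questions:
    print(llm_chain.run(q))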
