摘要:
1. 参考 https://blog.csdn.net/m0_60791400/article/details/140023407 搭建 xinference 服务 2. 启动 xinference 1) 进入 xinference 的 conda 环境 2) xinference-local --host 0 阅读全文
摘要:
import json from langchain.llms.base import LLM from transformers import AutoTokenizer, AutoModel, AutoConfig from typing import List, Optional class 阅读全文
摘要:
from langchain.chains import APIChain from langchain_community.llms.ollama import Ollama llm = Ollama(model="qwen:7b") api_docs = """ BASE URL: https: 阅读全文
摘要:
import os from langchain.chains import RetrievalQA from langchain_community.document_loaders import TextLoader from langchain_community.embeddings imp 阅读全文
摘要:
from langchain.chains import RetrievalQA from langchain.evaluation import QAEvalChain from langchain_community.document_loaders import TextLoader from 阅读全文
摘要:
from langchain.chains import RetrievalQA from langchain_community.document_loaders import TextLoader from langchain_community.embeddings import Ollama 阅读全文
摘要:
from langchain.chains import load_summarize_chain from langchain_community.document_loaders import TextLoader #文本加载器 from langchain_community.llms.oll 阅读全文
摘要:
from langchain.chains import LLMChain, SimpleSequentialChain #简单序列链 from langchain_community.llms.ollama import Ollama from langchain_core.prompts imp 阅读全文
摘要:
from langchain.output_parsers import ResponseSchema, StructuredOutputParser from langchain_community.llms.ollama import Ollama from langchain_core.pro 阅读全文
摘要:
from langchain_community.embeddings import OllamaEmbeddings from langchain_community.llms.ollama import Ollama from langchain_community.vectorstores.f 阅读全文