openai-python本地使用

环境

下载安装: Ollama
拉取大模型: Ollama项目

旧版本转换

embeddings_utils

embeddings_utils 实现代码

import openai
import numpy as np

# OpenAI-compatible client pointed at a locally running Ollama server.
my_client = openai.OpenAI(
    api_key='ollama',  # the field is mandatory, but Ollama ignores its value
    base_url='http://localhost:11434/v1/',
)


def get_embedding(text, engine="text-embedding-ada-002"):
    """Return the embedding vector for *text* computed by model *engine*."""
    # Newlines can degrade embedding quality, so flatten them to spaces first.
    cleaned = text.replace("\n", " ")
    response = my_client.embeddings.create(input=[cleaned], model=engine)
    return response.data[0].embedding


def cosine_similarity(a, b):
    """Return the cosine of the angle between vectors *a* and *b*.

    Result is in [-1, 1]; 1 means identical direction, 0 orthogonal.
    """
    denom = np.linalg.norm(a) * np.linalg.norm(b)
    return np.dot(a, b) / denom

基本使用

import openai

# OpenAI-compatible client for the local Ollama endpoint.
my_client = openai.OpenAI(
    api_key='ollama',  # mandatory field, but Ollama does not check it
    base_url='http://localhost:11434/v1/',
)

# Generation settings: cap on response length and sampling temperature
# (0 -> deterministic output).
token_len = 1024
temperature = 0

# Single-turn question and answer (non-streaming): the full reply is
# returned in one response object.
response = my_client.chat.completions.create(
    model='qwen2.5:0.5b',
    messages=[{'role': 'user', 'content': "I have a question"}],
    temperature=temperature,
    max_tokens=token_len,
)
answer = response.choices[0].message.content
print(answer)

# Streaming mode: tokens are printed as they arrive, suited to long replies.
# Fix: the original referenced an undefined name `client`; the client created
# above is `my_client`.
stream = my_client.chat.completions.create(
        model='qwen2.5:7b',
        messages=[
            {
                'role': 'user',
                'content': "I have a question",
            }
        ],
        stream=True,
        # Without this option the server never sends a usage chunk, so
        # `chunk.usage` below stays None and the token counts never print.
        stream_options={'include_usage': True},
    )

for chunk in stream:
    # The final usage-only chunk has an empty `choices` list, so guard the
    # index access before reading the delta.
    if chunk.choices and chunk.choices[0].delta.content is not None:
        print(chunk.choices[0].delta.content, end="")
    if chunk.usage is not None:
        print('prompt_tokens', chunk.usage.prompt_tokens)
        print('completion_tokens', chunk.usage.completion_tokens)
        print('total_tokens', chunk.usage.total_tokens)
posted @ 2024-10-08 17:30  漫漫长夜何时休  阅读(9)  评论(0编辑  收藏  举报