Pitfalls of running bolt.new locally
Connecting to an OpenAI endpoint

Install the dependency: pnpm add @ai-sdk/openai
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index f0d695c..5217697 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -1,9 +1,34 @@
 import { createAnthropic } from '@ai-sdk/anthropic';
+import { createOpenAI } from '@ai-sdk/openai';
 
 export function getAnthropicModel(apiKey: string) {
   const anthropic = createAnthropic({
-    apiKey,
+    apiKey: 'sk-xxxx',
+    baseURL: 'https://api.openai-proxy.org/anthropic/v1'
   });
 
   return anthropic('claude-3-5-sonnet-20240620');
 }
+
+export function getOpenAIModel(apiKey: string) {
+  const openai = createOpenAI({
+    baseURL: 'https://api.openai-proxy.org/v1',
+    apiKey, // or hardcode the proxy key here, e.g. 'sk-xxxx'
+  });
+
+  return openai('gpt-4o-2024-08-06');
+}
diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts
index cf937fd..2619f89 100644
--- a/app/lib/.server/llm/stream-text.ts
+++ b/app/lib/.server/llm/stream-text.ts
@@ -1,6 +1,6 @@
 import { streamText as _streamText, convertToCoreMessages } from 'ai';
 import { getAPIKey } from '~/lib/.server/llm/api-key';
-import { getAnthropicModel } from '~/lib/.server/llm/model';
+import { getAnthropicModel, getOpenAIModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
 import { getSystemPrompt } from './prompts';
@@ -23,11 +23,12 @@ export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
 export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
   return _streamText({
-    model: getAnthropicModel(getAPIKey(env)),
+    // model: getAnthropicModel(getAPIKey(env)),
+    model: getOpenAIModel(getAPIKey(env)),
     system: getSystemPrompt(),
     maxTokens: MAX_TOKENS,
     headers: {
-      'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
+      // 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
     },
     messages: convertToCoreMessages(messages),
     ...options,
   });
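Note that getAPIKey(env) itself is untouched, so the value handed to getOpenAIModel is still whatever key the project's api-key.ts reads from the environment (the Anthropic key variable in the upstream repo); either put the proxy key there or rely on the hardcoded value in model.ts.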
diff --git a/app/lib/.server/llm/constants.ts b/app/lib/.server/llm/constants.ts
index b24acdf..e8f6b1c 100644
--- a/app/lib/.server/llm/constants.ts
+++ b/app/lib/.server/llm/constants.ts
@@ -1,5 +1,6 @@
// see https://docs.anthropic.com/en/docs/about-claude/models
-export const MAX_TOKENS = 8192;
+export const MAX_TOKENS = 4000;
+// export const MAX_TOKENS = 8192;
// limits the number of model responses that can be returned in a single request
export const MAX_RESPONSE_SEGMENTS = 2;
Next, modify the ai module at node_modules/ai/dist/index.mjs to suppress the "Unsupported chunk type" error by commenting out the throw:
get partialObjectStream() {
  return createAsyncIterableStream(this.originalStream, {
    transform(chunk, controller) {
      switch (chunk.type) {
        case "object":
          controller.enqueue(chunk.object);
          break;
        case "text-delta":
        case "finish":
          break;
        case "error":
          controller.error(chunk.error);
          break;
        default: {
          const _exhaustiveCheck = chunk;
-         throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);
+         // throw new Error(`Unsupported chunk type: ${_exhaustiveCheck}`);
        }
      }
    }
  });
}
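One caveat: edits made directly inside node_modules are wiped out by the next install. If you want to keep this workaround, pnpm's patch workflow can persist it: run pnpm patch ai, repeat the edit in the temporary directory it prints, then run pnpm patch-commit with that directory so the change is recorded in package.json and reapplied on every install.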
Adding a file download feature

First install two dependencies: pnpm add jszip file-saver
diff --git a/app/components/workbench/Workbench.client.tsx b/app/components/workbench/Workbench.client.tsx
index b8142a6..07cb2ab 100644
--- a/app/components/workbench/Workbench.client.tsx
+++ b/app/components/workbench/Workbench.client.tsx
@@ -3,6 +3,8 @@ import { motion, type HTMLMotionProps, type Variants } from 'framer-motion';
 import { computed } from 'nanostores';
 import { memo, useCallback, useEffect } from 'react';
 import { toast } from 'react-toastify';
+import JSZip from 'jszip';
+import FileSaver from 'file-saver';
 import {
   type OnChangeCallback as OnEditorChange,
   type OnScrollCallback as OnEditorScroll,
@@ -99,6 +101,23 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
     workbenchStore.resetCurrentDocument();
   }, []);
 
+  const downloadFiles = () => {
+    const files = workbenchStore.files.get();
+
+    const zip = new JSZip();
+
+    for (const [fsPath, dirent] of Object.entries(files)) {
+      if (dirent?.type === 'file') {
+        const filename = fsPath.replace('/home/project/', '');
+        zip.file(filename, dirent.content);
+      }
+    }
+
+    zip.generateAsync({ type: 'blob' }).then((content) => {
+      FileSaver.saveAs(content, 'download.zip');
+    });
+  };
+
   return (
     chatStarted && (
       <motion.div
@@ -140,6 +159,14 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
                 workbenchStore.showWorkbench.set(false);
               }}
             />
+            <IconButton
+              icon="i-ph:download"
+              className="-mr-1"
+              size="xl"
+              onClick={() => {
+                downloadFiles();
+              }}
+            />
           </div>
           <div className="relative flex-1 overflow-hidden">
             <View
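Because downloadFiles only strips the /home/project/ prefix (the WebContainer working directory) and keeps the rest of the path, JSZip recreates the project's folder structure inside the archive, e.g. src/App.tsx ends up under a src/ folder. The entries in workbenchStore.files store their content as plain strings, which zip.file accepts directly.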