-
Notifications
You must be signed in to change notification settings - Fork 819
Expand file tree
/
Copy pathutils.ts
More file actions
297 lines (261 loc) · 9.05 KB
/
utils.ts
File metadata and controls
297 lines (261 loc) · 9.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
import { toast } from "@/hooks/use-toast";
import { Store } from "@tauri-apps/plugin-store";
import OpenAI from 'openai';
import { AiConfig } from "@/app/core/setting/config";
import { readFile } from "@tauri-apps/plugin-fs";
import { fetch as tauriFetch } from '@tauri-apps/plugin-http';
/**
 * Read the content of the currently selected prompt from the store.
 * Returns an empty string when no prompt is selected or the selected
 * prompt cannot be found.
 */
export async function getPromptContent(): Promise<string> {
  const store = await Store.load('store.json')
  const selectedId = await store.get<string>('currentPromptId')
  if (!selectedId) {
    return ''
  }
  const prompts = await store.get<Array<{id: string, content: string}>>('promptList')
  const selected = prompts?.find(p => p.id === selectedId)
  // Fall back to '' when the prompt is missing or has no content.
  return (selected && selected.content) ? selected.content : ''
}
/**
 * Resolve the AI configuration for a model slot (defaults to 'primaryModel').
 *
 * Looks up the stored model id and searches the configured providers for it.
 * Supports both the new multi-model structure (matching either the bare model
 * id or the combined `${config.key}-${model.id}` key) and the legacy
 * single-model structure where the provider key itself is the id.
 *
 * @param modelType store key naming the model slot to resolve
 * @returns the provider config flattened with the matched model's settings,
 *          or undefined when nothing matches
 */
export async function getAISettings(modelType?: string): Promise<AiConfig | undefined> {
  const store = await Store.load('store.json')
  const configs = await store.get<AiConfig[]>('aiModelList')
  const selectedId = await store.get(modelType || 'primaryModel')
  if (!selectedId || !configs) {
    return undefined
  }
  for (const cfg of configs) {
    const models = cfg.models
    if (models && models.length > 0) {
      // New structure: try a direct model-id match first.
      let match = models.find(m => m.id === selectedId)
      // Then try the combined-key form `${cfg.key}-${model.id}`.
      if (!match && typeof selectedId === 'string' && selectedId.includes('-')) {
        const prefix = `${cfg.key}-`
        if (selectedId.startsWith(prefix)) {
          const bareId = selectedId.slice(prefix.length)
          match = models.find(m => m.id === bareId)
        }
      }
      if (match) {
        // Flatten the matched model's settings onto the provider config.
        return {
          ...cfg,
          model: match.model,
          modelType: match.modelType,
          temperature: match.temperature,
          topP: match.topP,
          voice: match.voice,
          enableStream: match.enableStream
        }
      }
    } else if (cfg.key === selectedId) {
      // Legacy structure: the provider itself is the selected model.
      return cfg
    }
  }
  return undefined
}
/**
 * Check that an AI base URL has been configured.
 * Shows a destructive toast and returns null when it is missing;
 * otherwise returns the URL unchanged.
 */
export async function validateAIService(baseURL: string | undefined): Promise<string | null> {
  if (baseURL) {
    return baseURL
  }
  toast({
    title: 'AI 错误',
    description: '请先设置 AI 地址',
    variant: 'destructive',
  })
  return null
}
/**
 * Convert an image URL into a `data:<mime>;base64,...` URL.
 *
 * Data URLs are returned as-is. Tauri asset URLs (tauri://localhost/...,
 * asset://localhost/..., http://tauri.localhost/...) are resolved back to a
 * file path, read from disk, and base64-encoded. The MIME type is inferred
 * from the file extension (png/jpeg/gif/webp, defaulting to PNG).
 *
 * @param imageUrl data URL or Tauri asset URL
 * @returns the data URL, or null when the file cannot be read
 */
export async function convertImageToBase64(imageUrl: string): Promise<string | null> {
  try {
    // Already a data URL — nothing to convert.
    if (imageUrl.startsWith('data:image')) {
      return imageUrl
    }
    // Strip the Tauri asset protocol prefix to recover the original file path.
    // convertFileSrc produces URLs like tauri://localhost/<path> or asset://localhost/<path>.
    const prefixes = ['tauri://localhost/', 'asset://localhost/', 'http://tauri.localhost/']
    let filePath = imageUrl
    for (const prefix of prefixes) {
      if (imageUrl.startsWith(prefix)) {
        filePath = imageUrl.slice(prefix.length)
        break
      }
    }
    // Paths inside asset URLs are percent-encoded.
    filePath = decodeURIComponent(filePath)
    const fileData = await readFile(filePath)
    // Build the binary string in fixed-size chunks: a per-byte reduce
    // concatenation is O(n^2), and a single String.fromCharCode(...bytes)
    // spread would overflow the call stack on large files.
    const bytes = new Uint8Array(fileData)
    const CHUNK_SIZE = 0x8000
    let binary = ''
    for (let i = 0; i < bytes.length; i += CHUNK_SIZE) {
      binary += String.fromCharCode(...bytes.subarray(i, i + CHUNK_SIZE))
    }
    const base64 = btoa(binary)
    // Infer the MIME type from the (lowercased once) file extension.
    const lowerPath = filePath.toLowerCase()
    let mimeType = 'image/png'
    if (lowerPath.endsWith('.jpg') || lowerPath.endsWith('.jpeg')) {
      mimeType = 'image/jpeg'
    } else if (lowerPath.endsWith('.gif')) {
      mimeType = 'image/gif'
    } else if (lowerPath.endsWith('.webp')) {
      mimeType = 'image/webp'
    }
    return `data:${mimeType};base64,${base64}`
  } catch (error) {
    // Best-effort conversion: log and signal failure instead of throwing.
    console.error('Failed to convert image to base64:', error)
    return null
  }
}
/**
 * Normalize an AI request error into a user-facing message.
 *
 * Aborted requests (user cancellation) are treated as intentional and handled
 * silently. All other errors optionally surface a destructive toast and return
 * a formatted failure string.
 *
 * @param error the thrown value (may be anything, including null/undefined)
 * @param showToast whether to display a toast for non-abort errors
 * @returns null for silently-ignored aborts, otherwise `请求失败: <message>`
 */
export function handleAIError(error: any, showToast = true): string | null {
  // Silently ignore cancelled requests. Optional chaining prevents a
  // TypeError when error is null/undefined (the original `error.message`
  // access would itself throw here).
  if (error?.message === 'Request was aborted.') {
    return null
  }
  const errorMessage = error instanceof Error ? error.message : '未知错误'
  if (showToast) {
    toast({
      description: errorMessage || 'AI错误',
      variant: 'destructive',
    })
  }
  return `请求失败: ${errorMessage}`
}
/**
 * Prepare the chat message array for an AI request.
 *
 * Loads the currently selected system prompt, appends any relevant memory
 * context retrieved for the query, and assembles the final message list —
 * either by merging into an existing conversation history or by building a
 * fresh system+user pair.
 *
 * @param text user input text (when baseMessages is provided, only used as a
 *             fallback query for memory retrieval)
 * @param baseMessages existing conversation history; when provided it is
 *                     merged into the returned message list
 * @returns the assembled messages, plus geminiText (currently always
 *          undefined; kept in the return shape for caller compatibility)
 */
export async function prepareMessages(
  text: string,
  baseMessages?: OpenAI.Chat.ChatCompletionMessageParam[]
): Promise<{
  messages: OpenAI.Chat.ChatCompletionMessageParam[],
  geminiText?: string
}> {
  // Load the user's configured system prompt.
  let promptContent = await getPromptContent()
  // Load memory context and append it to the system prompt.
  try {
    const { contextLoader } = await import('@/lib/context/loader')
    // Determine the query text used to retrieve memories.
    let queryText = text || ''
    if (baseMessages && baseMessages.length > 0) {
      // When history is provided, query memories with the last user message.
      const lastUserMessage = [...baseMessages].reverse().find(m => m.role === 'user')
      if (lastUserMessage) {
        // Only string content is usable as a query; otherwise keep the fallback.
        queryText = typeof lastUserMessage.content === 'string' ? lastUserMessage.content : queryText
      }
    }
    if (queryText) {
      const memoryContext = await contextLoader.getContextForQuery(queryText)
      if (memoryContext.preferences.length > 0 || memoryContext.memory.length > 0) {
        const memoryPrompt = contextLoader.formatMemoriesForPrompt(memoryContext)
        promptContent += '\n\n' + memoryPrompt
      }
    }
  } catch (error) {
    // Memory loading is best-effort: a failure must not break the conversation.
    console.error('Failed to load memory context:', error)
  }
  // If a base message array was provided, build the result around it.
  if (baseMessages && baseMessages.length > 0) {
    // Check whether the history already contains a system message.
    const hasSystemMessage = baseMessages.some(msg => msg.role === 'system')
    const messages: OpenAI.Chat.ChatCompletionMessageParam[] = []
    // No system message yet: prepend the prompt as a new system message.
    if (promptContent && !hasSystemMessage) {
      messages.push({
        role: 'system',
        content: promptContent
      })
    }
    // Copy over all base messages.
    messages.push(...baseMessages)
    // A system message already exists: merge the prompt into the first one.
    if (promptContent && hasSystemMessage) {
      const firstSystemIndex = messages.findIndex(msg => msg.role === 'system')
      if (firstSystemIndex !== -1) {
        // Non-string system content is replaced rather than merged.
        const existingContent = typeof messages[firstSystemIndex].content === 'string'
          ? messages[firstSystemIndex].content
          : ''
        messages[firstSystemIndex] = {
          role: 'system',
          content: existingContent + '\n\n' + promptContent
        }
      }
    }
    return { messages, geminiText: undefined }
  }
  // Legacy path (no history): optional system prompt, then the user's text.
  const messages: OpenAI.Chat.ChatCompletionMessageParam[] = []
  // Never assigned on this path; kept so the return shape stays stable.
  let geminiText: string | undefined
  if (promptContent) {
    messages.push({
      role: 'system',
      content: promptContent
    })
  }
  messages.push({
    role: 'user',
    content: text
  })
  return { messages, geminiText }
}
/**
* 创建OpenAI客户端,适用于所有AI类型
*/
export async function createOpenAIClient(AiConfig?: AiConfig) {
const store = await Store.load('store.json')
let baseURL
let apiKey
if (AiConfig) {
baseURL = AiConfig.baseURL
apiKey = AiConfig.apiKey
} else {
baseURL = await store.get<string>('baseURL')
apiKey = await store.get<string>('apiKey')
}
const proxyUrl = await store.get<string>('proxy')
// 创建OpenAI客户端
return new OpenAI({
apiKey: apiKey || '',
baseURL: baseURL,
dangerouslyAllowBrowser: true,
fetch: tauriFetch as unknown as typeof globalThis.fetch,
defaultHeaders:{
"x-stainless-arch": null,
"x-stainless-lang": null,
"x-stainless-os": null,
"x-stainless-package-version": null,
"x-stainless-retry-count": null,
"x-stainless-runtime": null,
"x-stainless-runtime-version": null,
"x-stainless-timeout": null,
...(AiConfig?.customHeaders || {})
},
...(proxyUrl ? { httpAgent: proxyUrl } : {})
})
}