Skip to content

Commit dccd858

Browse files
committed
fix: 更新请求处理以支持可选的流处理和修复类型声明
1 parent dfad717 commit dccd858

File tree

6 files changed

+83
-72
lines changed

6 files changed

+83
-72
lines changed

packages/lib/core/src/agent/azure.ts

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ import { ImageSupportFormat, renderOpenAIMessages } from './openai';
1010
import { requestChatCompletions } from './request';
1111
import { convertStringToResponseMessages, loadModelsList } from './utils';
1212

13-
1413
export class AzureChatAI implements ChatAgent {
1514
readonly name = 'azure';
1615
readonly modelKey = 'AZURE_CHAT_MODEL';
@@ -35,7 +34,7 @@ export class AzureChatAI implements ChatAgent {
3534
messages: await renderOpenAIMessages(prompt, messages, [ImageSupportFormat.URL, ImageSupportFormat.BASE64]),
3635
stream: onStream != null,
3736
};
38-
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream));
37+
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream, null));
3938
};
4039

4140
readonly modelList = async (context: AgentUserConfig): Promise<string[]> => {
@@ -55,7 +54,7 @@ export class AzureImageAI implements ImageAgent {
5554
return ctx.AZURE_IMAGE_MODEL;
5655
};
5756

58-
readonly request = async (prompt: string, context: AgentUserConfig): Promise<string> => {
57+
readonly request = async (prompt: string, context: AgentUserConfig): Promise<string | Blob> => {
5958
const url = `https://${context.AZURE_RESOURCE_NAME}.openai.azure.com/openai/deployments/${context.AZURE_IMAGE_MODEL}/images/generations?api-version=${context.AZURE_API_VERSION}`;
6059
const header = {
6160
'Content-Type': 'application/json',

packages/lib/core/src/agent/gemini.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ export class Gemini implements ChatAgent {
2929
model: context.GOOGLE_COMPLETIONS_MODEL,
3030
stream: onStream != null,
3131
};
32-
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream));
32+
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream, null));
3333
};
3434

3535
readonly modelList = async (context: AgentUserConfig): Promise<string[]> => {

packages/lib/core/src/agent/mistralai.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ export class Mistral implements ChatAgent {
3030
stream: onStream != null,
3131
};
3232

33-
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream));
33+
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream, null));
3434
};
3535

3636
readonly modelList = async (context: AgentUserConfig): Promise<string[]> => {

packages/lib/core/src/agent/openai.ts

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,7 @@ export enum ImageSupportFormat {
1717
BASE64 = 'base64',
1818
}
1919

20-
async function
21-
renderOpenAIMessage(item: HistoryItem, supportImage?: ImageSupportFormat[] | null): Promise<any> {
20+
async function renderOpenAIMessage(item: HistoryItem, supportImage?: ImageSupportFormat[] | null): Promise<any> {
2221
const res: any = {
2322
role: item.role,
2423
content: item.content,
@@ -101,7 +100,7 @@ export class OpenAI extends OpenAIBase implements ChatAgent {
101100
stream: onStream != null,
102101
};
103102

104-
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream));
103+
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream, null));
105104
};
106105

107106
readonly modelList = async (context: AgentUserConfig): Promise<string[]> => {
@@ -120,15 +119,15 @@ export class OpenAI extends OpenAIBase implements ChatAgent {
120119
export class Dalle extends OpenAIBase implements ImageAgent {
121120
readonly modelKey = 'OPENAI_DALLE_API';
122121

123-
enable = (context: AgentUserConfig): boolean => {
122+
readonly enable = (context: AgentUserConfig): boolean => {
124123
return context.OPENAI_API_KEY.length > 0;
125124
};
126125

127-
model = (ctx: AgentUserConfig): string => {
126+
readonly model = (ctx: AgentUserConfig): string => {
128127
return ctx.DALL_E_MODEL;
129128
};
130129

131-
request = async (prompt: string, context: AgentUserConfig): Promise<string> => {
130+
readonly request = async (prompt: string, context: AgentUserConfig): Promise<string | Blob> => {
132131
const url = `${context.OPENAI_API_BASE}/images/generations`;
133132
const header = {
134133
'Content-Type': 'application/json',

packages/lib/core/src/agent/request.ts

Lines changed: 27 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ export function isEventStreamResponse(resp: Response): boolean {
4141
return false;
4242
}
4343

44-
export async function streamHandler<T>(stream: AsyncIterable<T>, contentExtractor: (data: T) => string | null, onStream: (text: string) => Promise<any>): Promise<string> {
44+
export async function streamHandler<T>(stream: AsyncIterable<T>, contentExtractor: (data: T) => string | null, onStream?: (text: string) => Promise<any>): Promise<string> {
4545
let contentFull = '';
4646
let lengthDelta = 0;
4747
let updateStep = 50;
@@ -64,7 +64,7 @@ export async function streamHandler<T>(stream: AsyncIterable<T>, contentExtracto
6464
}
6565
lengthDelta = 0;
6666
updateStep += 20;
67-
await onStream(`${contentFull}\n...`);
67+
await onStream?.(`${contentFull}\n...`);
6868
}
6969
}
7070
} catch (e) {
@@ -73,28 +73,10 @@ export async function streamHandler<T>(stream: AsyncIterable<T>, contentExtracto
7373
return contentFull;
7474
}
7575

76-
export async function requestChatCompletions(url: string, header: Record<string, string>, body: any, onStream: ChatStreamTextHandler | null, options: SseChatCompatibleOptions | null = null): Promise<string> {
77-
const controller = new AbortController();
78-
const { signal } = controller;
79-
80-
let timeoutID = null;
81-
if (ENV.CHAT_COMPLETE_API_TIMEOUT > 0) {
82-
timeoutID = setTimeout(() => controller.abort(), ENV.CHAT_COMPLETE_API_TIMEOUT);
83-
}
84-
85-
const resp = await fetch(url, {
86-
method: 'POST',
87-
headers: header,
88-
body: JSON.stringify(body),
89-
signal,
90-
});
91-
if (timeoutID) {
92-
clearTimeout(timeoutID);
93-
}
94-
95-
options = fixOpenAICompatibleOptions(options);
76+
export async function mapResponseToAnswer(resp: Response, controller: AbortController, options: SseChatCompatibleOptions | null, onStream: ((text: string) => Promise<any>) | null): Promise<string> {
77+
options = fixOpenAICompatibleOptions(options || null);
9678
if (onStream && resp.ok && isEventStreamResponse(resp)) {
97-
const stream = options.streamBuilder?.(resp, controller);
79+
const stream = options.streamBuilder?.(resp, controller || new AbortController());
9880
if (!stream) {
9981
throw new Error('Stream builder error');
10082
}
@@ -114,3 +96,25 @@ export async function requestChatCompletions(url: string, header: Record<string,
11496

11597
return options.fullContentExtractor?.(result) || '';
11698
}
99+
100+
export async function requestChatCompletions(url: string, header: Record<string, string>, body: any, onStream: ChatStreamTextHandler | null, options: SseChatCompatibleOptions | null): Promise<string> {
101+
const controller = new AbortController();
102+
const { signal } = controller;
103+
104+
let timeoutID = null;
105+
if (ENV.CHAT_COMPLETE_API_TIMEOUT > 0) {
106+
timeoutID = setTimeout(() => controller.abort(), ENV.CHAT_COMPLETE_API_TIMEOUT);
107+
}
108+
109+
const resp = await fetch(url, {
110+
method: 'POST',
111+
headers: header,
112+
body: JSON.stringify(body),
113+
signal,
114+
});
115+
if (timeoutID) {
116+
clearTimeout(timeoutID);
117+
}
118+
119+
return await mapResponseToAnswer(resp, controller, options, onStream);
120+
}

packages/lib/core/src/agent/workersai.ts

Lines changed: 47 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,63 +1,50 @@
1-
import type { AgentUserConfig } from '#/config';
21
import type { SseChatCompatibleOptions } from './request';
32
import type {
43
ChatAgent,
54
ChatAgentResponse,
65
ChatStreamTextHandler,
7-
HistoryItem,
86
ImageAgent,
97
LLMChatParams,
108
} from './types';
9+
import { type AgentUserConfig, ENV } from '#/config';
1110
import { renderOpenAIMessages } from './openai';
12-
import { isJsonResponse, requestChatCompletions } from './request';
11+
import { isJsonResponse, mapResponseToAnswer, requestChatCompletions } from './request';
1312
import { convertStringToResponseMessages, loadModelsList } from './utils';
1413

15-
class WorkerBase {
16-
readonly name = 'workers';
17-
readonly run = async (model: string, body: any, id: string, token: string): Promise<Response> => {
18-
return await fetch(
19-
`https://api.cloudflare.com/client/v4/accounts/${id}/ai/run/${model}`,
20-
{
21-
headers: { Authorization: `Bearer ${token}` },
22-
method: 'POST',
23-
body: JSON.stringify(body),
24-
},
25-
);
26-
};
14+
async function sendWorkerRequest(model: string, body: any, id: string, token: string): Promise<Response> {
15+
return await fetch(
16+
`https://api.cloudflare.com/client/v4/accounts/${id}/ai/run/${model}`,
17+
{
18+
headers: { Authorization: `Bearer ${token}` },
19+
method: 'POST',
20+
body: JSON.stringify(body),
21+
},
22+
);
23+
};
2724

28-
readonly enable = (context: AgentUserConfig): boolean => {
29-
return !!(context.CLOUDFLARE_ACCOUNT_ID && context.CLOUDFLARE_TOKEN);
30-
};
31-
}
25+
function isWorkerAIEnable(context: AgentUserConfig): boolean {
26+
if (ENV.AI_BINDING) {
27+
return true;
28+
}
29+
return !!(context.CLOUDFLARE_ACCOUNT_ID && context.CLOUDFLARE_TOKEN);
30+
};
3231

33-
export class WorkersChat extends WorkerBase implements ChatAgent {
32+
export class WorkersChat implements ChatAgent {
33+
readonly name = 'workers';
3434
readonly modelKey = 'WORKERS_CHAT_MODEL';
35+
readonly enable = isWorkerAIEnable;
3536

3637
readonly model = (ctx: AgentUserConfig): string | null => {
3738
return ctx.WORKERS_CHAT_MODEL;
3839
};
3940

40-
private render = (item: HistoryItem): any => {
41-
return {
42-
role: item.role,
43-
content: item.content,
44-
};
45-
};
46-
4741
readonly request = async (params: LLMChatParams, context: AgentUserConfig, onStream: ChatStreamTextHandler | null): Promise<ChatAgentResponse> => {
4842
const { prompt, messages } = params;
49-
const id = context.CLOUDFLARE_ACCOUNT_ID;
50-
const token = context.CLOUDFLARE_TOKEN;
5143
const model = context.WORKERS_CHAT_MODEL;
52-
const url = `https://api.cloudflare.com/client/v4/accounts/${id}/ai/run/${model}`;
53-
const header = {
54-
Authorization: `Bearer ${token}`,
55-
};
5644
const body = {
5745
messages: await renderOpenAIMessages(prompt, messages, null),
5846
stream: onStream !== null,
5947
};
60-
6148
const options: SseChatCompatibleOptions = {};
6249
options.contentExtractor = function (data: any) {
6350
return data?.response;
@@ -68,6 +55,19 @@ export class WorkersChat extends WorkerBase implements ChatAgent {
6855
options.errorExtractor = function (data: any) {
6956
return data?.errors?.at(0)?.message;
7057
};
58+
59+
if (ENV.AI_BINDING) {
60+
const resp = ENV.AI_BINDING.run(model, body);
61+
const answer = mapResponseToAnswer(resp, new AbortController(), options, onStream);
62+
return convertStringToResponseMessages(answer);
63+
}
64+
65+
const id = context.CLOUDFLARE_ACCOUNT_ID;
66+
const token = context.CLOUDFLARE_TOKEN;
67+
const url = `https://api.cloudflare.com/client/v4/accounts/${id}/ai/run/${model}`;
68+
const header = {
69+
Authorization: `Bearer ${token}`,
70+
};
7171
return convertStringToResponseMessages(requestChatCompletions(url, header, body, onStream, options));
7272
};
7373

@@ -86,20 +86,29 @@ export class WorkersChat extends WorkerBase implements ChatAgent {
8686
};
8787
}
8888

89-
export class WorkersImage extends WorkerBase implements ImageAgent {
89+
export class WorkersImage implements ImageAgent {
90+
readonly name = 'workers';
9091
readonly modelKey = 'WORKERS_IMAGE_MODEL';
92+
readonly enable = isWorkerAIEnable;
9193

9294
readonly model = (ctx: AgentUserConfig): string => {
9395
return ctx.WORKERS_IMAGE_MODEL;
9496
};
9597

96-
readonly request = async (prompt: string, context: AgentUserConfig): Promise<Blob> => {
98+
readonly request = async (prompt: string, context: AgentUserConfig): Promise<string | Blob> => {
9799
const id = context.CLOUDFLARE_ACCOUNT_ID;
98100
const token = context.CLOUDFLARE_TOKEN;
99-
if (!id || !token) {
101+
let raw: Response | null = null;
102+
if (ENV.AI_BINDING) {
103+
raw = ENV.AI_BINDING.run(context.WORKERS_IMAGE_MODEL, { prompt });
104+
} else if (id && token) {
105+
raw = await sendWorkerRequest(context.WORKERS_IMAGE_MODEL, { prompt }, id, token);
106+
} else {
100107
throw new Error('Cloudflare account ID or token is not set');
101108
}
102-
const raw = await this.run(context.WORKERS_IMAGE_MODEL, { prompt }, id, token);
109+
if (!raw) {
110+
throw new Error('Invalid response');
111+
}
103112
if (isJsonResponse(raw)) {
104113
const { result } = await raw.json();
105114
const image = result?.image;

0 commit comments

Comments (0)