-
Notifications
You must be signed in to change notification settings - Fork 807
Expand file tree
/
Copy pathindex.ts
More file actions
118 lines (102 loc) · 3.17 KB
/
index.ts
File metadata and controls
118 lines (102 loc) · 3.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
import { OpenAIClient } from './OpenAIClient'
import { DEFAULT_TEMPERATURE, LLM_MAX_RETRIES } from './constants'
import { InvokeError, InvokeErrorType } from './errors'
import type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool } from './types'
export { InvokeError, InvokeErrorType }
export type { InvokeOptions, InvokeResult, LLMClient, LLMConfig, Message, Tool }
/**
 * Validate an LLMConfig and fill in defaults, producing a config in which
 * every field is guaranteed to be present.
 *
 * @throws Error when any of the three mandatory fields is missing/empty.
 */
export function parseLLMConfig(config: LLMConfig): Required<LLMConfig> {
  const { baseURL, apiKey, model } = config

  // Defensive runtime check: the static types already require these fields,
  // but callers from untyped JS may still omit them.
  if (!baseURL || !apiKey || !model) {
    throw new Error(
      '[PageAgent] LLM configuration required. Please provide: baseURL, apiKey, model. ' +
        'See: https://alibaba.github.io/page-agent/docs/features/models'
    )
  }

  return {
    baseURL,
    apiKey,
    model,
    temperature: config.temperature ?? DEFAULT_TEMPERATURE,
    maxRetries: config.maxRetries ?? LLM_MAX_RETRIES,
    // A bare `fetch` reference loses its receiver and throws
    // "Illegal invocation" when called later, hence the bind.
    customFetch: (config.customFetch ?? fetch).bind(globalThis),
  }
}
/**
 * Thin EventTarget wrapper around an LLM provider client that adds
 * retry-with-events semantics on top of a single API invocation.
 *
 * Events dispatched:
 * - 'retry': before each retry attempt, detail = { attempt, maxAttempts }
 * - 'error': for every failed attempt, detail = { error }
 */
export class LLM extends EventTarget {
  /** Fully-resolved configuration with all optional fields defaulted. */
  config: Required<LLMConfig>
  /** Provider client that performs the actual API calls. */
  client: LLMClient

  constructor(config: LLMConfig) {
    super()
    this.config = parseLLMConfig(config)
    // Default to OpenAI client
    this.client = new OpenAIClient(this.config)
  }

  /**
   * - call llm api *once*
   * - invoke tool call *once*
   * - return the result of the tool
   *
   * Retries transient failures up to `config.maxRetries` times (see withRetry),
   * dispatching 'retry' / 'error' events along the way.
   */
  async invoke(
    messages: Message[],
    tools: Record<string, Tool>,
    abortSignal: AbortSignal,
    options?: InvokeOptions
  ): Promise<InvokeResult> {
    return await withRetry(
      async () => {
        // In case the user aborted before invoking.
        // Fix: throw the signal's abort reason (a DOMException whose `name`
        // is 'AbortError') instead of `new Error('AbortError')`, whose
        // `name` is 'Error' — abort detection that inspects `error.name`
        // would otherwise treat a deliberate abort as a retryable failure.
        abortSignal.throwIfAborted()
        const result = await this.client.invoke(messages, tools, abortSignal, options)
        return result
      },
      // retry settings
      {
        maxRetries: this.config.maxRetries,
        onRetry: (attempt: number) => {
          this.dispatchEvent(
            new CustomEvent('retry', { detail: { attempt, maxAttempts: this.config.maxRetries } })
          )
        },
        onError: (error: Error) => {
          this.dispatchEvent(new CustomEvent('error', { detail: { error } }))
        },
      }
    )
  }
}
/**
 * Run `fn`, retrying on failure up to `settings.maxRetries` additional
 * attempts with a short fixed delay between attempts.
 *
 * Short-circuits (no retry, error rethrown as-is) when:
 * - the error is a user abort — either a raw error whose `name` is
 *   'AbortError', or an InvokeError wrapping one;
 * - the error is an InvokeError marked non-retryable.
 *
 * Every failed (non-abort) attempt is reported via `settings.onError`;
 * `settings.onRetry(attempt)` fires before each retry.
 *
 * Fixes vs. previous version:
 * - the old code slept 100 ms at the end of the catch block AND again at the
 *   top of the next iteration, doubling the intended delay — now sleeps once;
 * - a raw AbortError (not wrapped in InvokeError) is no longer retried.
 */
async function withRetry<T>(
  fn: () => Promise<T>,
  settings: {
    maxRetries: number
    onRetry: (attempt: number) => void
    onError: (error: Error) => void
  }
): Promise<T> {
  const RETRY_DELAY_MS = 100
  let lastError: Error | null = null

  for (let attempt = 0; attempt <= settings.maxRetries; attempt++) {
    if (attempt > 0) {
      settings.onRetry(attempt)
      await new Promise((resolve) => setTimeout(resolve, RETRY_DELAY_MS))
    }
    try {
      return await fn()
    } catch (error: unknown) {
      // Do not retry if aborted by user — raw AbortError...
      if (error instanceof Error && error.name === 'AbortError') throw error
      // ...or an AbortError wrapped in an InvokeError.
      if (
        error instanceof InvokeError &&
        error.rawError instanceof Error &&
        error.rawError.name === 'AbortError'
      ) {
        throw error
      }
      console.error(error)
      const normalized = error instanceof Error ? error : new Error(String(error))
      settings.onError(normalized)
      // Do not retry if error is explicitly marked non-retryable.
      if (error instanceof InvokeError && !error.retryable) throw error
      lastError = normalized
    }
  }
  // Only reachable after at least one failed attempt, so lastError is set.
  throw lastError!
}