-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathai-analysis.ts
More file actions
96 lines (86 loc) · 2.85 KB
/
ai-analysis.ts
File metadata and controls
96 lines (86 loc) · 2.85 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
import * as fs from 'fs';
import { generateText, type LanguageModel } from 'ai';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createDeepSeek } from '@ai-sdk/deepseek';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { createOpenAI } from '@ai-sdk/openai';
import { buildPrompt } from './prompt';
/** Result of a completed AI degradation analysis. */
export interface AIAnalysisResult {
  /** The model-generated analysis text. */
  analysis: string;
  /** Provider the request was routed to (e.g. 'anthropic', 'openai'). */
  provider: string;
  /** Model name that produced the analysis. */
  model: string;
}

/** Providers supported by {@link detectProvider} / model construction. */
type Provider = 'anthropic' | 'openai' | 'google' | 'deepseek' | 'qwen';
/**
 * Infer the hosting provider from a model name's prefix
 * (case-insensitive). Unrecognized prefixes fall back to 'openai'.
 */
function detectProvider(model: string): Provider {
  const prefixTable: ReadonlyArray<readonly [string, Provider]> = [
    ['claude', 'anthropic'],
    ['gemini', 'google'],
    ['deepseek', 'deepseek'],
    ['qwen', 'qwen'],
  ];
  const normalized = model.toLowerCase();
  for (const [prefix, provider] of prefixTable) {
    if (normalized.startsWith(prefix)) {
      return provider;
    }
  }
  return 'openai';
}
/**
 * Construct a LanguageModel handle for the given provider and model name,
 * authenticated with the supplied API token. Qwen is reached through the
 * OpenAI-compatible DashScope endpoint; unknown providers default to OpenAI.
 */
function createModel(provider: Provider, model: string, token: string): LanguageModel {
  if (provider === 'anthropic') {
    return createAnthropic({ apiKey: token })(model);
  }
  if (provider === 'google') {
    return createGoogleGenerativeAI({ apiKey: token })(model);
  }
  if (provider === 'deepseek') {
    return createDeepSeek({ apiKey: token })(model);
  }
  if (provider === 'qwen') {
    const factory = createOpenAI({
      apiKey: token,
      baseURL: 'https://dashscope.aliyuncs.com/compatible-mode/v1',
    });
    return factory(model);
  }
  // 'openai' and any future fallthrough use the plain OpenAI client.
  return createOpenAI({ apiKey: token })(model);
}
/**
 * Run AI degradation analysis on a bundle-diff JSON file.
 *
 * @param diffJsonPath Path to the JSON file produced by `rsdoctor bundle-diff --json`
 * @param token AI API key for the detected provider (Anthropic, OpenAI, Google, DeepSeek, or Qwen)
 * @param model Model name — provider is auto-detected from its prefix (default: claude-3-5-haiku-latest)
 * @returns The analysis result, or `null` when skipped (no token, missing file) or when the request fails.
 */
export async function analyzeWithAI(
  diffJsonPath: string,
  token: string,
  model = 'claude-3-5-haiku-latest',
): Promise<AIAnalysisResult | null> {
  // AI analysis is opt-in: a missing token is a skip, not an error.
  if (!token) {
    console.log('ℹ️ No AI token provided, skipping AI analysis');
    return null;
  }

  if (!fs.existsSync(diffJsonPath)) {
    console.log(`⚠️ Bundle diff JSON not found at ${diffJsonPath}, skipping AI analysis`);
    return null;
  }

  try {
    // Parse as unknown; buildPrompt is responsible for interpreting the shape.
    const diffData: unknown = JSON.parse(fs.readFileSync(diffJsonPath, 'utf8'));
    const prompt = buildPrompt(diffData);
    const provider = detectProvider(model);

    console.log(`🤖 Running AI analysis with ${provider} (${model})...`);

    const llm = createModel(provider, model, token);
    const { text: analysis } = await generateText({
      model: llm,
      maxOutputTokens: 2048,
      prompt,
    });

    console.log('✅ AI analysis completed');
    return { analysis, provider, model };
  } catch (error) {
    // Narrow the unknown catch value so the warning carries the message,
    // not "Error: …" noise or "[object Object]" for non-Error throws.
    const message = error instanceof Error ? error.message : String(error);
    console.warn(`⚠️ AI analysis failed: ${message}`);
    return null;
  }
}