Draft
Changes from all commits
28 commits
1f25d60
refactor: re-implement puter_path coerce and cleanup
KernelDeimos Jul 15, 2025
50c9736
dev: idea for how various LLMs are registered
KernelDeimos Jul 15, 2025
28790b4
refactor: move general OpenAI-style message adaptation
KernelDeimos Jul 16, 2025
024d79c
refactor: separate LLMs from other AI services
KernelDeimos Jul 9, 2025
c923036
refactor: add airouter.js package
KernelDeimos Jul 9, 2025
0934d6d
refactor: use CompletionWriter and rm AIChatStream
KernelDeimos Jul 10, 2025
b345aa8
refactor: move Messages to the new airouter module
KernelDeimos Jul 10, 2025
d5ec01e
refactor: move function calling to airouter module
KernelDeimos Jul 11, 2025
e3e3c9d
refactor: move Claude stream coercion to new module
KernelDeimos Jul 11, 2025
dac9595
fix: require of ES module
KernelDeimos Jul 11, 2025
b9cb032
fix: update more imports
KernelDeimos Jul 11, 2025
ab62b20
refactor: move Claude logic to AnthropicAPIType
KernelDeimos Jul 11, 2025
de191f6
fix: access to undefined symbol
KernelDeimos Jul 13, 2025
e87e287
fix: issues in handle_files
KernelDeimos Jul 13, 2025
d4de754
refactor: move OpenAI stream adapter to new module
KernelDeimos Jul 16, 2025
df9a360
refactor: OpenAIAPIType
KernelDeimos Jul 17, 2025
5bb8e5b
refactor: give models a dedicated directory
KernelDeimos Jul 17, 2025
a9ce968
dev: migrate deepseek to new module
KernelDeimos Jul 18, 2025
49ae39c
fix: openai models.json wasn't comitted?
KernelDeimos Jul 18, 2025
eb977b2
dev: add "how to get X from Y" rules (gateway.js)o
KernelDeimos Jul 21, 2025
29da46b
test: gateway.js registry
KernelDeimos Jul 21, 2025
2b8c54d
refactor: migrate Anthropic provider to registry
KernelDeimos Jul 21, 2025
f691059
dev: improve resolution of information providers
KernelDeimos Jul 21, 2025
a7c2217
refactor: update OpenAI migration
KernelDeimos Jul 21, 2025
d9d83fa
refactor: remove DeepSeek-specific implementation
KernelDeimos Jul 21, 2025
c84a78a
dev: add vibe-coded doc generator; might be replaced
KernelDeimos Jul 22, 2025
f749fce
refactor: tie in the prompt normalizer
KernelDeimos Jul 23, 2025
78b34a7
dev: update process_input_messages in OpenAIUtil
KernelDeimos Aug 6, 2025
14 changes: 14 additions & 0 deletions package-lock.json

49 changes: 49 additions & 0 deletions src/airouter.js/airouter.js
@@ -0,0 +1,49 @@
import { Registry } from './core/Registry.js';


const registry = new Registry();
const define = registry.getDefineAPI();

import convenienceRegistrants from './common/convenience.js';
convenienceRegistrants(define);

import commonRegistrants from './common/index.js';
commonRegistrants(define);

import anthropicRegistrants from './anthropic/index.js';
anthropicRegistrants(define);

import openaiRegistrants from './openai/index.js';
openaiRegistrants(define);

export const obtain = registry.getObtainAPI();

export * from './common/types.js';

// Streaming Utilities
export { CompletionWriter } from './common/stream/CompletionWriter.js';
export { MessageWriter } from './common/stream/MessageWriter.js';
export { ToolUseWriter } from './common/stream/ToolUseWriter.js';
export { TextWriter } from './common/stream/TextWriter.js';
export { BaseWriter } from './common/stream/BaseWriter.js';

// Common prompt processing
export { NormalizedPromptUtil } from './common/prompt/NormalizedPromptUtil.js';
export { UniversalToolsNormalizer } from './common/prompt/UniversalToolsNormalizer.js';

// Conventional Processing
export { OpenAIStyleMessagesAdapter } from './convention/openai/OpenAIStyleMessagesAdapter.js';
export { OpenAIStyleStreamAdapter } from './convention/openai/OpenAIStyleStreamAdapter.js';

// Model-Specific Processing
export { OpenAIToolsAdapter } from './openai/OpenAIToolsAdapter.js';
export { GeminiToolsAdapter } from './gemini/GeminiToolsAdapter.js';

// API Keys
export { ANTHROPIC_API_KEY } from './anthropic/index.js';
export { OPENAI_CLIENT } from './openai/index.js';

import openai_models from './models/openai.json' with { type: 'json' };
export const models = {
openai: openai_models,
};
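
A usage sketch may help orient reviewers: assuming the exported obtain(target, facts) call mirrors the x.obtain(...) calls inside the provider registrants below (that signature is an assumption, not something this diff shows), requesting a synchronous Anthropic completion could look roughly like this.

// Hypothetical usage sketch: the obtain(target, facts) signature is assumed
// from the x.obtain(...) calls in the provider registrants, not confirmed
// by this diff.
import {
    obtain,
    SYNC_RESPONSE,
    PROVIDER_NAME,
    NORMALIZED_LLM_PARAMS,
    ANTHROPIC_API_KEY,
} from './airouter.js';

const response = await obtain(SYNC_RESPONSE, {
    [PROVIDER_NAME]: 'anthropic',
    [ANTHROPIC_API_KEY]: process.env.ANTHROPIC_API_KEY,
    [NORMALIZED_LLM_PARAMS]: {
        model: 'claude-3-5-sonnet-20241022',
        max_tokens: 1024,
        temperature: 0,
        messages: [
            { role: 'user', content: [{ type: 'text', text: 'Hello!' }] },
        ],
    },
});
console.log(response.message, response.usage);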
1 change: 1 addition & 0 deletions src/airouter.js/anthropic/consts.js
@@ -0,0 +1 @@
export const betas = ['files-api-2025-04-14'];
59 changes: 59 additions & 0 deletions src/airouter.js/anthropic/handle_files.js
@@ -0,0 +1,59 @@
import { toFile } from "@anthropic-ai/sdk";
import { betas } from "./consts.js";

export default async ({ client, cleanups, messages }) => {
const file_input_tasks = [];
for ( const message of messages ) {
// We can assume `message.content` is not undefined because
// UniversalPromptNormalizer ensures this.
for ( const contentPart of message.content ) {
if ( contentPart.type !== 'data' ) continue;
const { data } = contentPart;
delete contentPart.data;
file_input_tasks.push({
data,
contentPart,
});
}
}

if ( file_input_tasks.length === 0 ) return false;

const promises = [];
for ( const task of file_input_tasks ) promises.push((async () => {
const stream = await task.data.getStream();
const mimeType = await task.data.getMimeType();

const fileUpload = await client.files.upload({
file: await toFile(stream, undefined, { type: mimeType })
}, { betas });

cleanups.push(() => client.files.delete( fileUpload.id, { betas }));

// We have to copy a table from the documentation here:
// https://docs.anthropic.com/en/docs/build-with-claude/files
const contentBlockTypeForFileBasedOnMime = (() => {
if ( mimeType.startsWith('image/') ) {
return 'image';
}
if ( mimeType.startsWith('text/') ) {
return 'document';
}
if ( mimeType === 'application/pdf' || mimeType === 'application/x-pdf' ) {
return 'document';
}
return 'container_upload';
})();

delete task.contentPart.data;
task.contentPart.type = contentBlockTypeForFileBasedOnMime;
task.contentPart.source = {
type: 'file',
file_id: fileUpload.id,
};
})());

await Promise.all(promises);

return true;
}
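
For orientation, this handler assumes each content part of type 'data' carries an object exposing async getStream() and getMimeType() methods. A hypothetical minimal implementation of that contract, backed by a local file, might look like the following; the names here are illustrative and not part of the PR.

// Hypothetical example of the contract handle_files relies on: a 'data'
// content part whose `data` object exposes getStream() and getMimeType().
// Illustration only; not part of this PR.
import { createReadStream } from 'node:fs';

const makeLocalFileData = (path, mimeType) => ({
    getStream: async () => createReadStream(path),
    getMimeType: async () => mimeType,
});

const messages = [{
    role: 'user',
    content: [
        { type: 'text', text: 'Please summarize the attached PDF.' },
        { type: 'data', data: makeLocalFileData('./report.pdf', 'application/pdf') },
    ],
}];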
110 changes: 110 additions & 0 deletions src/airouter.js/anthropic/index.js
@@ -0,0 +1,110 @@
import { ASYNC_RESPONSE, COERCED_PARAMS, COERCED_TOOLS, COMPLETION_WRITER, NORMALIZED_LLM_PARAMS, NORMALIZED_LLM_TOOLS, PROVIDER_NAME, STREAM, SYNC_RESPONSE, USAGE_WRITER } from "../common/types.js";

import { NormalizedPromptUtil } from '../common/prompt/NormalizedPromptUtil.js';
import Anthropic from "@anthropic-ai/sdk";

import { betas } from "./consts.js";
import handle_files from "./handle_files.js";
import write_to_stream from "./write_to_stream.js";

export const ANTHROPIC_API_KEY = Symbol('ANTHROPIC_API_KEY');
export const ANTHROPIC_CLIENT = Symbol('ANTHROPIC_CLIENT');

export default define => {
// Define how to get parameters for the Anthropic client
define.howToGet(COERCED_PARAMS).from(NORMALIZED_LLM_PARAMS)
.provided(x => x.get(PROVIDER_NAME) == 'anthropic')
.as(async x => {
const params = x.get(NORMALIZED_LLM_PARAMS);
params.tools = await x.obtain(COERCED_TOOLS);

let system_prompts;
[system_prompts, params.messages] = NormalizedPromptUtil.extract_and_remove_system_messages(params.messages);

if ( ! x.memo.cleanups ) x.memo.cleanups = [];
await handle_files({
client: await x.obtain(ANTHROPIC_CLIENT),
cleanups: x.memo.cleanups,
messages: params.messages
});

return {
model: params.model,
max_tokens: Math.floor(params.max_tokens) ||
((
params.model === 'claude-3-5-sonnet-20241022'
|| params.model === 'claude-3-5-sonnet-20240620'
) ? 8192 : 4096), //required
temperature: params.temperature || 0, // required
...(system_prompts ? {
system: system_prompts.length > 1
? JSON.stringify(system_prompts)
: JSON.stringify(system_prompts[0])
} : {}),
messages: params.messages,
...(params.tools ? { tools: params.tools } : {}),
betas,
};
});

// Define how to get tools in the format expected by Anthropic
define.howToGet(COERCED_TOOLS).from(NORMALIZED_LLM_TOOLS)
.provided(x => x.get(PROVIDER_NAME) == 'anthropic')
.as(async x => {
const tools = x.get(NORMALIZED_LLM_TOOLS);
if ( ! tools ) return undefined;
return tools.map(tool => {
const { name, description, parameters } = tool.function;
return {
name,
description,
input_schema: parameters,
};
});
});

define.howToGet(ANTHROPIC_CLIENT).from(ANTHROPIC_API_KEY).as(async x => {
let client = new Anthropic({
apiKey: await x.obtain(ANTHROPIC_API_KEY),
});
return client.beta;
});

define.howToGet(ASYNC_RESPONSE).from(NORMALIZED_LLM_PARAMS)
.provided(x => x.get(PROVIDER_NAME) == 'anthropic')
.as(async x => {
const anthropic_params = await x.obtain(COERCED_PARAMS, {
[NORMALIZED_LLM_PARAMS]: x.get(NORMALIZED_LLM_PARAMS),
});
let client = await x.obtain(ANTHROPIC_CLIENT);

const anthropicStream = await client.messages.stream(anthropic_params);

const completionWriter = x.get(COMPLETION_WRITER);
await write_to_stream({
input: anthropicStream,
completionWriter,
usageWriter: x.get(USAGE_WRITER) ?? { resolve: () => {} },
});
if ( x.memo?.cleanups ) await Promise.all(x.memo.cleanups.map(fn => fn()));
});

define.howToGet(SYNC_RESPONSE).from(NORMALIZED_LLM_PARAMS)
.provided(x => x.get(PROVIDER_NAME) == 'anthropic')
.as(async x => {
const anthropic_params = await x.obtain(COERCED_PARAMS, {
[NORMALIZED_LLM_PARAMS]: x.get(NORMALIZED_LLM_PARAMS),
});
let client = await x.obtain(ANTHROPIC_CLIENT);

const msg = await client.messages.create(anthropic_params);

if ( x.memo?.cleanups ) await Promise.all(x.memo.cleanups.map(fn => fn()));

return {
message: msg,
usage: msg.usage,
finish_reason: 'stop',
};
})
};
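
To make the registrant shape explicit, the sketch below registers a fictional 'echo' provider using the same define.howToGet(...).from(...).provided(...).as(...) chain; it is an illustration of the pattern only, not code in this PR.

// Hypothetical registrant for a fictional 'echo' provider, following the
// same rule-definition pattern as the Anthropic registrant above.
// Illustration only; not part of this PR.
import { SYNC_RESPONSE, NORMALIZED_LLM_PARAMS, PROVIDER_NAME } from "../common/types.js";

export default define => {
    define.howToGet(SYNC_RESPONSE).from(NORMALIZED_LLM_PARAMS)
        .provided(x => x.get(PROVIDER_NAME) === 'echo')
        .as(async x => {
            const params = x.get(NORMALIZED_LLM_PARAMS);
            const lastMessage = params.messages[params.messages.length - 1];
            return {
                message: lastMessage,
                usage: { input_tokens: 0, output_tokens: 0 },
                finish_reason: 'stop',
            };
        });
};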
57 changes: 57 additions & 0 deletions src/airouter.js/anthropic/write_to_stream.js
@@ -0,0 +1,57 @@
export default async ({ input, completionWriter, usageWriter }) => {
let message, contentBlock;
let counts = { input_tokens: 0, output_tokens: 0 };
for await ( const event of input ) {
const input_tokens =
(event?.usage ?? event?.message?.usage)?.input_tokens;
const output_tokens =
(event?.usage ?? event?.message?.usage)?.output_tokens;

if ( input_tokens ) counts.input_tokens += input_tokens;
if ( output_tokens ) counts.output_tokens += output_tokens;

if ( event.type === 'message_start' ) {
message = completionWriter.message();
continue;
}
if ( event.type === 'message_stop' ) {
message.end();
message = null;
continue;
}

if ( event.type === 'content_block_start' ) {
if ( event.content_block.type === 'tool_use' ) {
contentBlock = message.contentBlock({
type: event.content_block.type,
id: event.content_block.id,
name: event.content_block.name,
});
continue;
}
contentBlock = message.contentBlock({
type: event.content_block.type,
});
continue;
}

if ( event.type === 'content_block_stop' ) {
contentBlock.end();
contentBlock = null;
continue;
}

if ( event.type === 'content_block_delta' ) {
if ( event.delta.type === 'input_json_delta' ) {
contentBlock.addPartialJSON(event.delta.partial_json);
continue;
}
if ( event.delta.type === 'text_delta' ) {
contentBlock.addText(event.delta.text);
continue;
}
}
}
completionWriter.end();
usageWriter.resolve(counts);
}
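
The stream handler above assumes a writer contract: completionWriter.message() returns a message writer, message.contentBlock(...) returns a block writer with addText(), addPartialJSON(), and end(), and both the message and the completion writer expose end(). A hypothetical in-memory stub of that contract, useful for unit-testing the event handling (the real CompletionWriter in common/stream/ is not shown in this diff), could look like this.

// Hypothetical in-memory stub of the writer contract assumed above, which
// collects emitted text so the event handling can be unit-tested without
// the real CompletionWriter. Illustration only; not part of this PR.
export const makeStubCompletionWriter = () => {
    const collected = [];
    return {
        collected,
        message: () => ({
            contentBlock: ({ type }) => ({
                addText: text => { if ( type === 'text' ) collected.push(text); },
                addPartialJSON: () => {},
                end: () => {},
            }),
            end: () => {},
        }),
        end: () => {},
    };
};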
14 changes: 14 additions & 0 deletions src/airouter.js/common/convenience.js
@@ -0,0 +1,14 @@
import { NORMALIZED_LLM_MESSAGES, NORMALIZED_LLM_PARAMS, NORMALIZED_LLM_TOOLS, SDK_STYLE, USAGE_SDK_STYLE } from "./types.js"

export default define => {
define.howToGet(NORMALIZED_LLM_TOOLS).from(NORMALIZED_LLM_PARAMS).as(x => {
return x.get(NORMALIZED_LLM_PARAMS).tools;
})
define.howToGet(NORMALIZED_LLM_MESSAGES).from(NORMALIZED_LLM_PARAMS).as(x => {
return x.get(NORMALIZED_LLM_PARAMS).messages;
})

define.howToGet(USAGE_SDK_STYLE).from(SDK_STYLE).as(x => {
return x.get(SDK_STYLE);
});
}
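
These rules feed the "how to get X from Y" resolution mentioned in the commit history (gateway.js). As a conceptual illustration only, since the real resolver lives in core/Registry.js and is not shown in this diff, a toy version of that lookup might look like this.

// Toy illustration of "how to get X from Y" resolution; the actual
// Registry/gateway implementation is not shown in this diff and may differ.
const toyRules = [
    {
        target: 'NORMALIZED_LLM_TOOLS',
        from: 'NORMALIZED_LLM_PARAMS',
        as: x => x.get('NORMALIZED_LLM_PARAMS').tools,
    },
];

const toyObtain = (rules, target, facts) => {
    if ( target in facts ) return facts[target];
    const x = { get: key => facts[key] };
    for ( const rule of rules ) {
        if ( rule.target === target && rule.from in facts ) return rule.as(x);
    }
    throw new Error(`no rule produces ${String(target)}`);
};

// Example: deriving tools when only the params are known.
// toyObtain(toyRules, 'NORMALIZED_LLM_TOOLS',
//     { NORMALIZED_LLM_PARAMS: { tools: [], messages: [] } }) returns []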