Skip to content

Commit 21d4f2e

Browse files
feat: expose model name (#41)
1 parent ea21cc7 commit 21d4f2e

File tree

5 files changed

+24
-9
lines changed

5 files changed

+24
-9
lines changed

.changeset/hot-spoons-worry.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@callstack/byorg-core': minor
3+
---
4+
5+
core: expose `ChatModel.name` property

packages/core/src/ai/types.ts

+1
Original file line numberDiff line numberDiff line change
@@ -16,5 +16,6 @@ export type ModelUsage = {
1616
};
1717

1818
export interface ChatModel {
19+
name: string;
1920
generateResponse(context: RequestContext): Promise<AssistantResponse>;
2021
}

packages/core/src/ai/vercel.ts

+16-8
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ const VERCEL_AI_SHARED_OPTIONS = {
2929
},
3030
};
3131

32-
export type VercelChatModelAdapterOptions = {
32+
export type VercelChatModelAdapterConfig = {
3333
languageModel: LanguageModel;
3434
maxTokens?: number;
3535
maxSteps?: number;
@@ -51,7 +51,15 @@ type AiExecutionResult = {
5151
};
5252

5353
export class VercelChatModelAdapter implements ChatModel {
54-
constructor(private readonly _options: VercelChatModelAdapterOptions) {}
54+
config: VercelChatModelAdapterConfig;
55+
56+
constructor(config: VercelChatModelAdapterConfig) {
57+
this.config = config;
58+
}
59+
60+
get name(): string {
61+
return this.config.languageModel.modelId;
62+
}
5563

5664
async generateResponse(context: RequestContext): Promise<AssistantResponse> {
5765
let systemPrompt = context.systemPrompt();
@@ -123,10 +131,10 @@ export class VercelChatModelAdapter implements ChatModel {
123131
const startTime = performance.now();
124132
const result = await streamText({
125133
...VERCEL_AI_SHARED_OPTIONS,
126-
model: this._options.languageModel,
134+
model: this.config.languageModel,
135+
maxTokens: this.config.maxTokens,
136+
maxSteps: this.config.maxSteps,
127137
messages: context.messages,
128-
maxTokens: this._options.maxTokens,
129-
maxSteps: this._options.maxSteps,
130138
tools: context.tools,
131139
});
132140

@@ -156,10 +164,10 @@ export class VercelChatModelAdapter implements ChatModel {
156164
const startTime = performance.now();
157165
const result = await generateText({
158166
...VERCEL_AI_SHARED_OPTIONS,
159-
model: this._options.languageModel,
167+
model: this.config.languageModel,
168+
maxTokens: this.config.maxTokens,
169+
maxSteps: this.config.maxSteps,
160170
messages: context.messages,
161-
maxTokens: this._options.maxTokens,
162-
maxSteps: this._options.maxSteps,
163171
tools: context.tools,
164172
});
165173
const responseTime = performance.now() - startTime;

packages/core/src/index.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ export { createApp } from './application.js';
2323
export type { Middleware, NextFunction } from './middleware.js';
2424

2525
export type { AssistantResponse, ChatModel, ModelUsage } from './ai/types.js';
26-
export type { VercelChatModelAdapterOptions } from './ai/vercel.js';
26+
export type { VercelChatModelAdapterConfig } from './ai/vercel.js';
2727
export { VercelChatModelAdapter } from './ai/vercel.js';
2828

2929
export type { Command, CommandsPluginConfig } from './plugins/commands.js';

packages/core/src/mock/mock-model.ts

+1
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@ export function createMockChatModel(config?: MockChatModelConfig): MockChatModel
3434
let lastRandom = config?.seed ?? Date.now();
3535
return {
3636
calls,
37+
name: 'mock',
3738
generateResponse: async (context: RequestContext): Promise<AssistantResponse> => {
3839
calls.push([context]);
3940
lastRandom = random(lastRandom);

0 commit comments

Comments (0)