Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 16 additions & 15 deletions packages/client/src/components/agent-action-viewer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ const ITEMS_PER_PAGE = 15;
enum ActionType {
all = 'all',
llm = 'llm',
embedding = 'embedding',
transcription = 'transcription',
image = 'image',
other = 'other',
Expand All @@ -59,7 +60,9 @@ type AgentActionViewerProps = {
// Helper functions
function getModelUsageType(modelType: string): string {
if (
(modelType.includes('TEXT') || modelType.includes('OBJECT')) &&
(modelType.includes('TEXT') ||
modelType.includes('OBJECT') ||
modelType.includes('REASONING')) &&
!modelType.includes('EMBEDDING') &&
!modelType.includes('TRANSCRIPTION')
) {
Expand All @@ -74,15 +77,7 @@ function getModelUsageType(modelType: string): string {
if (modelType.includes('IMAGE')) {
return 'Image';
}
if (
!modelType.includes('TEXT') &&
!modelType.includes('IMAGE') &&
!modelType.includes('EMBEDDING') &&
!modelType.includes('TRANSCRIPTION')
) {
return 'Other';
}
return 'Unknown';
return 'Other';
}

function formatDate(timestamp: number | undefined) {
Expand Down Expand Up @@ -164,9 +159,10 @@ function ActionCard({ action, onDelete }: ActionCardProps) {

const modelType = action.body?.modelType || '';
const modelKey = action.body?.modelKey || '';
const isActionLog = action.type === 'action';
const logType = action.type || '';
const isActionLog = logType === 'action';
const actionName = action.body?.action || '';
const IconComponent = getModelIcon(isActionLog ? 'ACTION' : modelType);
const IconComponent = getModelIcon(isActionLog ? 'ACTION' : modelType || logType);
const usageType = isActionLog ? 'Action' : getModelUsageType(modelType);
const responseObj = typeof action.body?.response === 'object' ? action.body.response : undefined;
const tokenUsage = formatTokenUsage(responseObj?.usage || action.body?.usage);
Expand Down Expand Up @@ -361,7 +357,7 @@ function ActionCard({ action, onDelete }: ActionCardProps) {
<div className="flex items-center gap-2 mb-1">
<h4 className="font-semibold text-sm">{isActionLog ? actionName : usageType}</h4>
<span className="text-xs px-2 py-0.5 rounded bg-muted text-muted-foreground">
{isActionLog ? 'Action' : modelType}
{isActionLog ? 'Action' : modelType || logType}
</span>
{action.body?.promptCount && action.body.promptCount > 1 && (
<Badge variant="secondary" className="text-xs px-1.5">
Expand Down Expand Up @@ -601,8 +597,11 @@ export function AgentActionViewer({ agentId, roomId }: AgentActionViewerProps) {

switch (selectedType) {
case ActionType.llm:
// Include both LLM calls and actions (which often contain LLM prompts)
if (usageType !== 'LLM' && !isActionLog) return false;
// Only show LLM model calls (not actions)
if (usageType !== 'LLM') return false;
break;
case ActionType.embedding:
if (usageType !== 'Embedding') return false;
break;
case ActionType.transcription:
if (usageType !== 'Transcription') return false;
Expand All @@ -611,6 +610,7 @@ export function AgentActionViewer({ agentId, roomId }: AgentActionViewerProps) {
if (usageType !== 'Image') return false;
break;
case ActionType.other:
// "Other" includes actions and unknown model types
if (usageType !== 'Other' && usageType !== 'Unknown' && !isActionLog) return false;
break;
}
Expand Down Expand Up @@ -761,6 +761,7 @@ export function AgentActionViewer({ agentId, roomId }: AgentActionViewerProps) {
<SelectContent>
<SelectItem value={ActionType.all}>All Actions</SelectItem>
<SelectItem value={ActionType.llm}>LLM Calls</SelectItem>
<SelectItem value={ActionType.embedding}>Embeddings</SelectItem>
<SelectItem value={ActionType.transcription}>Transcriptions</SelectItem>
<SelectItem value={ActionType.image}>Image Operations</SelectItem>
<SelectItem value={ActionType.other}>Other</SelectItem>
Expand Down
11 changes: 9 additions & 2 deletions packages/client/src/hooks/use-query-hooks.ts
Original file line number Diff line number Diff line change
Expand Up @@ -614,8 +614,15 @@ export function useAgentActions(agentId: UUID, roomId?: UUID, excludeTypes?: str
const response = await getClient().agents.getAgentLogs(agentId, {
limit: 50,
});
// Map the API logs to client format
return response ? response.map(mapApiLogToClient) : [];
if (!response) return [];

// Filter to only include model calls (useModel:*) and actions
const relevantLogs = response.filter((log) => {
const logType = log.type || '';
return logType.startsWith('useModel:') || logType === 'action';
});

return relevantLogs.map(mapApiLogToClient);
},
refetchInterval: 1000,
staleTime: 1000,
Expand Down
69 changes: 69 additions & 0 deletions packages/core/src/__tests__/runtime-streaming.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -403,4 +403,73 @@ describe('useModel Streaming', () => {
expect(result).toBe('XYZ');
});
});

describe('database logging', () => {
  it('should log streaming model calls to database', async () => {
    const chunks = ['Hello', ' ', 'World'];
    const adapter = createMockAdapter();

    const runtime = new AgentRuntime({
      agentId: stringToUuid('test-logging-agent'),
      character: mockCharacter,
      adapter,
    });

    // Handler streams when asked to, otherwise returns the joined text.
    runtime.registerModel(
      ModelType.TEXT_LARGE,
      async (_rt, params) =>
        (params as any).stream ? createMockTextStreamResult(chunks) : chunks.join(''),
      'test-provider'
    );

    await runtime.useModel(ModelType.TEXT_LARGE, {
      prompt: 'Test prompt',
      onStreamChunk: () => {},
    });

    // The adapter must have received at least one log entry...
    const { calls } = (adapter.log as any).mock;
    expect(calls.length).toBeGreaterThan(0);

    // ...whose first entry describes the TEXT_LARGE call and the assembled stream.
    const [entry] = calls[0];
    expect(entry.type).toBe('useModel:TEXT_LARGE');
    expect(entry.body.modelKey).toBe('TEXT_LARGE');
    expect(entry.body.response).toBe('Hello World');
  });

  it('should log non-streaming model calls to database', async () => {
    const adapter = createMockAdapter();

    const runtime = new AgentRuntime({
      agentId: stringToUuid('test-logging-agent-2'),
      character: mockCharacter,
      adapter,
    });

    runtime.registerModel(ModelType.TEXT_LARGE, async () => 'Non-streamed response', 'test-provider');

    await runtime.useModel(ModelType.TEXT_LARGE, {
      prompt: 'Test prompt',
    });

    // The adapter must have received at least one log entry...
    const { calls } = (adapter.log as any).mock;
    expect(calls.length).toBeGreaterThan(0);

    // ...whose first entry carries the model key and the raw (non-streamed) response.
    const [entry] = calls[0];
    expect(entry.type).toBe('useModel:TEXT_LARGE');
    expect(entry.body.modelKey).toBe('TEXT_LARGE');
    expect(entry.body.response).toBe('Non-streamed response');
  });
});
});
108 changes: 65 additions & 43 deletions packages/core/src/runtime.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2166,6 +2166,61 @@ export class AgentRuntime implements IAgentRuntime {
return Object.keys(modelSettings).length > 0 ? modelSettings : null;
}

/**
 * Records a completed model invocation in two places: the in-flight action
 * context (so action logs can surface the prompts they triggered) and the
 * adapter's persistent log. Shared by the streaming and non-streaming
 * useModel paths.
 *
 * @param modelType     - Model type string stored in the log body.
 * @param modelKey      - Registry key for the model; also forms the log's `type`.
 * @param params        - Raw call parameters; spread into the logged params when a plain object.
 * @param promptContent - Extracted prompt text, or null when none was present.
 * @param elapsedTime   - Wall-clock duration of the call, logged as executionTime.
 * @param provider      - Provider name, falling back to the registered model's provider.
 * @param response      - Model output; numeric arrays (embeddings) are logged as '[array]'.
 */
private logModelCall(
  modelType: string,
  modelKey: string,
  params: unknown,
  promptContent: string | null,
  elapsedTime: number,
  provider: string | undefined,
  response: unknown
): void {
  // Attach the prompt to the action currently executing, if any.
  // Embedding calls are excluded — their "prompt" is just text to embed.
  if (promptContent && modelKey !== ModelType.TEXT_EMBEDDING && this.currentActionContext) {
    this.currentActionContext.prompts.push({
      modelType: modelKey,
      prompt: promptContent,
      timestamp: Date.now(),
    });
  }

  // Numeric-array responses (embedding vectors) would bloat the log; store a marker.
  const loggedResponse =
    Array.isArray(response) && response.every((x) => typeof x === 'number')
      ? '[array]'
      : response;

  // Only spread params when it is a plain (non-null, non-array) object.
  const paramsObject =
    typeof params === 'object' && params !== null && !Array.isArray(params) ? params : {};

  // Persist the call record via the adapter.
  this.adapter.log({
    entityId: this.agentId,
    roomId: this.currentRoomId ?? this.agentId,
    type: `useModel:${modelKey}`,
    body: {
      modelType,
      modelKey,
      params: { ...paramsObject, prompt: promptContent },
      prompt: promptContent,
      systemPrompt: this.character?.system || null,
      runId: this.getCurrentRunId(),
      timestamp: Date.now(),
      executionTime: elapsedTime,
      provider: provider || this.models.get(modelKey)?.[0]?.provider || 'unknown',
      actionContext: this.currentActionContext
        ? {
            actionName: this.currentActionContext.actionName,
            actionId: this.currentActionContext.actionId,
          }
        : undefined,
      response: loggedResponse,
    },
  });
}

async useModel<T extends keyof ModelParamsMap, R = ModelResultMap[T]>(
modelType: T,
params: ModelParamsMap[T],
Expand Down Expand Up @@ -2332,6 +2387,15 @@ export class AgentRuntime implements IAgentRuntime {
'Model output (stream with callback complete)'
);

this.logModelCall(
modelType,
modelKey,
params,
promptContent,
elapsedTime,
provider,
fullText
);
return fullText as R;
}

Expand All @@ -2351,49 +2415,7 @@ export class AgentRuntime implements IAgentRuntime {
'Model output'
);

// Log all prompts except TEXT_EMBEDDING to track agent behavior
if (modelKey !== ModelType.TEXT_EMBEDDING && promptContent) {
// If we're in an action context, collect the prompt
if (this.currentActionContext) {
this.currentActionContext.prompts.push({
modelType: modelKey,
prompt: promptContent,
timestamp: Date.now(),
});
}
}

// Keep the existing model logging for backward compatibility
this.adapter.log({
entityId: this.agentId,
roomId: this.currentRoomId ?? this.agentId,
body: {
modelType,
modelKey,
params: {
...(typeof params === 'object' && !Array.isArray(params) && params ? params : {}),
prompt: promptContent,
},
prompt: promptContent,
systemPrompt: this.character?.system || null,
runId: this.getCurrentRunId(),
timestamp: Date.now(),
executionTime: elapsedTime,
provider: provider || this.models.get(modelKey)?.[0]?.provider || 'unknown',
actionContext: this.currentActionContext
? {
actionName: this.currentActionContext.actionName,
actionId: this.currentActionContext.actionId,
}
: undefined,
response:
Array.isArray(response) && response.every((x) => typeof x === 'number')
? '[array]'
: response,
},
type: `useModel:${modelKey}`,
});

this.logModelCall(modelType, modelKey, params, promptContent, elapsedTime, provider, response);
return response as R;
}

Expand Down
Loading