Skip to content

Commit ac77395

Browse files
committed
Google Gen AI llm event refactor
1 parent b314096 commit ac77395

File tree

22 files changed

+282
-260
lines changed

22 files changed

+282
-260
lines changed

lib/llm-events-new/base.js

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ const { makeId } = require('../util/hashes')
1010

1111
/**
1212
* The base LLM event class that contains logic and properties
13-
* that are common to all LLM events (e.g. `LlmChatCompletionMessage`).
13+
* (e.g. `trace_id`, `vendor`) that are common to all LLM events.
1414
*
1515
* @property {string} id UUID or identifier for the event
1616
* @property {string} request_id ID from request/response headers
@@ -19,7 +19,7 @@ const { makeId } = require('../util/hashes')
1919
* @property {string} response.model Model name from response
2020
* @property {string} vendor Lowercased vendor name, e.g. "openai"
2121
* @property {string} ingest_source Always set to 'Node'
22-
* @property {boolean|undefined} error set to `true` if an error occurred during creation call, omitted if no error occurred
22+
* @property {boolean|undefined} error Set to `true` if an error occurred during creation call, omitted if no error occurred
2323
*/
2424
class LlmEvent {
2525
ingest_source = 'Node'
@@ -33,20 +33,24 @@ class LlmEvent {
3333
* @param {string} params.vendor Lowercase vendor name, e.g. "openai"
3434
* @param {string} params.responseModel Model name from response
3535
* @param {string} params.requestId ID from request/response headers
36-
* @param {boolean} [params.error] set to `true` if an error occurred during creation call, omitted if no error occurred
36+
* @param {boolean} [params.error] Set to `true` if an error occurred during creation call, omitted if no error occurred
3737
*/
38-
constructor({ agent, segment, transaction, vendor, responseModel, requestId, error = null }) {
38+
constructor({ agent, segment, transaction, vendor, responseModel, requestId, error }) {
3939
this.id = makeId(36)
40-
this.request_id = requestId
4140
this.span_id = segment?.id
4241
this.trace_id = transaction?.traceId
43-
this['response.model'] = responseModel
4442
this.vendor = vendor
4543
this.metadata = agent
4644

45+
// Omit `error` property if no error occurred
4746
if (error === true) {
4847
this.error = error
4948
}
49+
50+
// If a certain attribute value is not accessible via instrumentation,
51+
// it can be omitted from the event.
52+
if (requestId) this.request_id = requestId
53+
if (responseModel) this['response.model'] = responseModel
5054
}
5155

5256
// eslint-disable-next-line accessor-pairs

lib/llm-events-new/chat-summary.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,8 @@ class LlmChatCompletionSummary extends LlmEvent {
4545
this['request.max_tokens'] = maxTokens
4646
this['request.temperature'] = temperature
4747
this['response.number_of_messages'] = numMsgs
48-
this['response.choices.finish_reason'] = finishReason
49-
this['response.organization'] = responseOrg
48+
if (finishReason) this['response.choices.finish_reason'] = finishReason
49+
if (responseOrg) this['response.organization'] = responseOrg
5050
this.timestamp = segment.timer.start
5151
this.duration = segment.getDurationInMillis()
5252
}

lib/llm-events-new/embedding.js

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,9 @@ class LlmEmbedding extends LlmEvent {
3333
*/
3434
constructor({ agent, segment, transaction, requestId, requestInput, requestModel, responseModel, responseOrg, vendor, error }) {
3535
super({ agent, segment, requestId, responseModel, transaction, vendor, error })
36-
this['request.model'] = requestModel
37-
this['response.organization'] = responseOrg
38-
this.duration = segment?.getDurationInMillis()
36+
if (requestModel) this['request.model'] = requestModel
37+
if (responseOrg) this['response.organization'] = responseOrg
38+
this.duration = segment.getDurationInMillis()
3939

4040
if (agent.config.ai_monitoring.record_content.enabled === true) {
4141
this.input = requestInput
Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
/*
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
3+
* SPDX-License-Identifier: Apache-2.0
4+
*/
5+
6+
const LlmChatCompletionMessage = require('../chat-message')
7+
const { getUsageTokens } = require('./utils')
8+
9+
/**
 * Encapsulates a Google Gen AI (Gemini) LlmChatCompletionMessage.
 */
class GoogleGenAiLlmChatCompletionMessage extends LlmChatCompletionMessage {
  /**
   * @param {object} params constructor parameters
   * @param {object} params.agent NR agent instance
   * @param {object} params.segment active segment
   * @param {object} params.transaction active transaction
   * @param {object} [params.request] Gen AI request object
   * @param {object} [params.response] Gen AI response object
   * @param {number} [params.sequence] zero-based index of the message in the exchange
   * @param {string} params.content message text
   * @param {string} params.role message role
   * @param {string} params.completionId ID of the owning completion summary event
   * @param {boolean} params.isResponse whether this message came from the model
   */
  constructor({
    agent,
    segment,
    transaction,
    request = {},
    response = {},
    sequence = 0,
    content,
    role,
    completionId,
    isResponse
  }) {
    super({
      agent,
      segment,
      transaction,
      vendor: 'gemini',
      sequence,
      content,
      role,
      completionId,
      isResponse,
      responseModel: response?.modelVersion
    })

    this.setTokenCount(agent, request, response)
  }

  /**
   * Assigns the token count for this message, preferring a user-supplied
   * `tokenCountCallback` and falling back to the usage metadata reported
   * in the API response.
   *
   * @param {object} agent NR agent instance
   * @param {object} request Gen AI request object
   * @param {object} response Gen AI response object
   */
  setTokenCount(agent, request, response) {
    const callback = agent.llm?.tokenCountCallback

    if (!callback) {
      // No callback registered: use the counts the API itself reported.
      this.setTokenInCompletionMessage(getUsageTokens(response))
      return
    }

    // `contents` may be a plain string or an array of prompt parts.
    const contents = request?.contents
    const prompt = typeof contents === 'string' ? contents : contents?.join(' ')

    const parts = response?.candidates?.[0]?.content?.parts
    const completion = parts?.map((part) => part.text).join(' ')

    // Only invoke the callback when both sides of the exchange are present.
    if (prompt && completion) {
      this.setTokenFromCallback({
        tokenCB: callback,
        reqModel: request.model,
        resModel: this['response.model'],
        promptContent: prompt,
        completionContent: completion
      })
    }
  }
}
65+
66+
module.exports = GoogleGenAiLlmChatCompletionMessage

lib/llm-events/google-genai/chat-completion-summary.js renamed to lib/llm-events-new/google-genai/chat-summary.js

Lines changed: 22 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,35 @@
11
/*
2-
* Copyright 2025 New Relic Corporation. All rights reserved.
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
33
* SPDX-License-Identifier: Apache-2.0
44
*/
55

6-
'use strict'
7-
const LlmEvent = require('./event')
6+
const LlmChatCompletionSummary = require('../chat-summary')
7+
const { getUsageTokens } = require('./utils')
8+
9+
/**
10+
* Encapsulates a Google Gen AI LlmChatCompletionSummary.
11+
*/
12+
class GoogleGenAiLlmChatCompletionSummary extends LlmChatCompletionSummary {
13+
constructor({ agent, segment, transaction, request, response, error }) {
14+
super({ agent,
15+
segment,
16+
transaction,
17+
responseModel: response?.modelVersion,
18+
requestModel: request?.model,
19+
finishReason: response?.candidates?.[0]?.finishReason,
20+
maxTokens: request.config?.maxOutputTokens,
21+
temperature: request.config?.temperature,
22+
vendor: 'gemini',
23+
error })
824

9-
module.exports = class LlmChatCompletionSummary extends LlmEvent {
10-
constructor({ agent, segment, request = {}, response = {}, withError = false, transaction }) {
11-
super({ agent, segment, request, response, responseAttrs: true, transaction })
12-
this.error = withError
1325
let requestMessagesLength = 0
1426
if (Array.isArray(request?.contents)) {
1527
requestMessagesLength = request.contents.length
1628
} else if (typeof request?.contents === 'string') {
1729
requestMessagesLength = 1
1830
}
1931
this['response.number_of_messages'] = requestMessagesLength + (response?.candidates?.length || 0)
20-
this['response.choices.finish_reason'] = response?.candidates?.[0]?.finishReason
21-
this['request.max_tokens'] = request.config?.maxOutputTokens
22-
this['request.temperature'] = request.config?.temperature
2332

24-
this.timestamp = segment.timer.start
2533
this.setTokens(agent, request, response)
2634
}
2735

@@ -51,7 +59,9 @@ module.exports = class LlmChatCompletionSummary extends LlmEvent {
5159
return
5260
}
5361

54-
const tokens = this.getUsageTokens(response)
62+
const tokens = getUsageTokens(response)
5563
this.setTokensInResponse(tokens)
5664
}
5765
}
66+
67+
module.exports = GoogleGenAiLlmChatCompletionSummary
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
/*
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
3+
* SPDX-License-Identifier: Apache-2.0
4+
*/
5+
6+
const LlmEmbedding = require('../embedding')
7+
8+
/**
 * Encapsulates a Google Gen AI (Gemini) LlmEmbedding.
 */
class GoogleGenAiLlmEmbedding extends LlmEmbedding {
  /**
   * @param {object} params constructor parameters
   * @param {object} params.agent NR agent instance
   * @param {object} params.segment active segment
   * @param {object} params.transaction active transaction
   * @param {object} [params.request] Gen AI embedding request; supplies input and model
   * @param {object} [params.response] Gen AI embedding response; supplies response model
   * @param {boolean} [params.error] `true` if the embedding call errored
   */
  constructor({ agent, segment, transaction, request = {}, response = {}, error }) {
    super({
      agent,
      segment,
      transaction,
      vendor: 'gemini',
      requestInput: request?.contents,
      requestModel: request?.model,
      responseModel: response?.modelVersion,
      error
    })
  }
}
23+
24+
module.exports = GoogleGenAiLlmEmbedding
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
/*
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
3+
* SPDX-License-Identifier: Apache-2.0
4+
*/
5+
6+
'use strict'
7+
8+
const LlmChatCompletionMessage = require('./chat-message')
9+
const LlmChatCompletionSummary = require('./chat-summary')
10+
const LlmEmbedding = require('./embedding')
11+
12+
module.exports = {
13+
LlmChatCompletionMessage,
14+
LlmChatCompletionSummary,
15+
LlmEmbedding
16+
}
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
/*
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
3+
* SPDX-License-Identifier: Apache-2.0
4+
*/
5+
6+
/**
 * Extracts token counts from a Google Gen AI response's `usageMetadata`.
 *
 * @param {object} [response] Gen AI API response object
 * @returns {{promptTokens: number, completionTokens: number, totalTokens: number}}
 *   Numeric token counts; any count absent from the response is `NaN`
 *   (`Number(undefined)`), matching the original contract.
 */
function getUsageTokens(response) {
  const usage = response?.usageMetadata

  return {
    promptTokens: Number(usage?.promptTokenCount),
    completionTokens: Number(usage?.candidatesTokenCount),
    totalTokens: Number(usage?.totalTokenCount)
  }
}
12+
13+
module.exports = { getUsageTokens }

lib/llm-events/google-genai/chat-completion-message.js

Lines changed: 0 additions & 72 deletions
This file was deleted.

lib/llm-events/google-genai/embedding.js

Lines changed: 0 additions & 21 deletions
This file was deleted.

0 commit comments

Comments
 (0)