
Commit f4dd09e

feat: Add timestamp to Google Gen AI LlmChatCompletionMessages (newrelic#3686)
1 parent 08cd38b commit f4dd09e

5 files changed: +10 -5 lines

lib/llm-events/google-genai/chat-completion-message.js

Lines changed: 5 additions & 0 deletions
@@ -33,6 +33,11 @@ module.exports = class LlmChatCompletionMessage extends LlmEvent {
       this.content = this.is_response ? message?.parts?.[0]?.text : message
     }

+    // only add timestamp for request/input messages
+    if (this.is_response === false) {
+      this.timestamp = segment.timer.start
+    }
+
     this.setTokenCount(agent, request, response)
   }
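
For orientation, the change above can be read in isolation as a small sketch (stubbed inputs only; the real LlmChatCompletionMessage constructor also receives agent, request and response, and the timer start is assumed to be a millisecond epoch value, as the versioned test below implies):

// Minimal sketch, not the real event class: only request/input messages
// get a timestamp, taken from the start of the active segment's timer.
class ChatMessageSketch {
  constructor({ segment, isResponse }) {
    this.is_response = isResponse
    if (this.is_response === false) {
      this.timestamp = segment.timer.start // assumed millisecond epoch value
    }
  }
}

// Hypothetical usage with a stubbed segment:
const segment = { timer: { start: Date.now() } }
console.log(new ChatMessageSketch({ segment, isResponse: false }).timestamp) // e.g. 1717000000000
console.log(new ChatMessageSketch({ segment, isResponse: true }).timestamp)  // undefined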

lib/subscribers/google-genai/embed-content.js

Lines changed: 1 addition & 3 deletions
@@ -6,9 +6,7 @@
 const { AiMonitoringEmbeddingSubscriber } = require('../ai-monitoring')
 const { AI } = require('../../../lib/metrics/names')
 const { GEMINI } = AI
-const {
-  LlmEmbedding
-} = require('../../../lib/llm-events/google-genai')
+const { LlmEmbedding } = require('#agentlib/llm-events/google-genai/index.js')

 class GoogleGenAIEmbedContentSubscriber extends AiMonitoringEmbeddingSubscriber {
   constructor ({ agent, logger }) {
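
The LlmEmbedding require now goes through a Node.js subpath import instead of a relative path. A rough sketch of the equivalence, assuming the agent's package.json maps the alias roughly as "imports": { "#agentlib/*": "./lib/*" } (that mapping is an assumption and is not part of this diff):

// From lib/subscribers/google-genai/embed-content.js, both specifiers would
// resolve to the same module under the assumed "#agentlib/*" -> "./lib/*" mapping:
const viaRelativePath = require('../../../lib/llm-events/google-genai')
const viaSubpathImport = require('#agentlib/llm-events/google-genai/index.js')
console.log(viaRelativePath.LlmEmbedding === viaSubpathImport.LlmEmbedding) // true

// Subpath imports resolve to exact files, which is why the new specifier spells
// out index.js instead of relying on directory index resolution.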

lib/subscribers/google-genai/generate-content-stream.js

Lines changed: 1 addition & 2 deletions
@@ -73,12 +73,11 @@ class GoogleGenAIGenerateContentStreamSubscriber extends GoogleGenAIGenerateCont
   }

   asyncEnd(data) {
+    // Check config to see if ai_monitoring is still enabled
     if (!this.enabled) {
       this.logger.debug('`ai_monitoring.enabled` is set to false, stream will not be instrumented.')
       return
     }
-
-    // Check config is ai_monitoring is still enabled
     if (!this.streamingEnabled) {
       this.logger.warn(
         '`ai_monitoring.streaming.enabled` is set to `false`, stream will not be instrumented.'

test/unit/llm-events/google-genai/chat-completion-message.test.js

Lines changed: 1 addition & 0 deletions
@@ -43,6 +43,7 @@ test('should create a LlmChatCompletionMessage event', (t, end) => {
     index: 0
   })
   const expected = getExpectedResult(tx, chatMessageEvent, 'message', summaryId)
+  expected.timestamp = segment.timer.start
   assert.deepEqual(chatMessageEvent, expected)
   end()
 })

test/versioned/google-genai/common.js

Lines changed: 2 additions & 0 deletions
@@ -37,11 +37,13 @@ function assertChatCompletionMessages(
     expectedChatMsg.id = /[a-f0-9]{36}/
     expectedChatMsg.content = reqContent
     expectedChatMsg.token_count = 0
+    expectedChatMsg.timestamp = /\d{13}/
   } else if (msg[1].sequence === 1) {
     expectedChatMsg.sequence = 1
     expectedChatMsg.id = /[a-f0-9]{36}/
     expectedChatMsg.content = 'What does 1 plus 1 equal?'
     expectedChatMsg.token_count = 0
+    expectedChatMsg.timestamp = /\d{13}/
   } else {
     expectedChatMsg.sequence = 2
     expectedChatMsg.role = 'model'
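
For reference, the /\d{13}/ pattern matches a 13-digit number, i.e. a millisecond-precision Unix epoch timestamp; this assumes segment.timer.start holds the same kind of value Date.now() produces:

// Quick sanity check of the 13-digit assumption (holds for dates from 2001 through 2286):
const ts = Date.now() // e.g. 1717000000000
console.log(/^\d{13}$/.test(String(ts))) // true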
