Skip to content

Commit 7ff953d

Browse files
refactor: AWS Bedrock llm events refactor (#3759)
1 parent 8503d25 commit 7ff953d

22 files changed

+179
-402
lines changed

lib/instrumentation/aws-sdk/v3/bedrock.js

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -4,22 +4,22 @@
44
*/
55

66
'use strict'
7+
const StreamHandler = require('./stream-handler')
8+
const ConverseStreamHandler = require('./converse-stream-handler')
79
const {
810
LlmChatCompletionMessage,
911
LlmChatCompletionSummary,
1012
LlmEmbedding,
11-
LlmErrorMessage,
1213
BedrockCommand,
13-
BedrockResponse,
14-
StreamHandler
15-
} = require('../../../llm-events/aws-bedrock')
14+
BedrockResponse
15+
} = require('#agentlib/llm-events/aws-bedrock/index.js')
16+
const LlmErrorMessage = require('#agentlib/llm-events/error-message.js')
1617

1718
const { DESTINATIONS } = require('../../../config/attribute-filter')
1819
const { AI } = require('../../../metrics/names')
1920
const { RecorderSpec } = require('../../../shim/specs')
2021
const InstrumentationDescriptor = require('../../../instrumentation-descriptor')
2122
const { extractLlmContext } = require('../../../util/llm-utils')
22-
const ConverseStreamHandler = require('../../../llm-events/aws-bedrock/converse-stream-handler')
2323

2424
let TRACKING_METRIC
2525

@@ -73,7 +73,6 @@ function isStreamingEnabled({ commandName, config }) {
7373
* @param {object} params.msg LLM event
7474
*/
7575
function recordEvent({ agent, type, msg }) {
76-
msg.serialize()
7776
const llmContext = extractLlmContext(agent)
7877
const timestamp = msg?.timestamp ?? Date.now()
7978

@@ -134,7 +133,7 @@ function recordChatCompletionMessages({
134133
bedrockCommand,
135134
transaction,
136135
segment,
137-
isError: err !== null
136+
error: err !== null
138137
})
139138

140139
// Record context message(s)
@@ -149,7 +148,7 @@ function recordChatCompletionMessages({
149148
content: contextMessage.content,
150149
role: contextMessage.role,
151150
bedrockResponse,
152-
index: i,
151+
sequence: i,
153152
completionId: summary.id
154153
})
155154
recordEvent({ agent, type: 'LlmChatCompletionMessage', msg })
@@ -164,7 +163,7 @@ function recordChatCompletionMessages({
164163
bedrockCommand,
165164
bedrockResponse,
166165
isResponse: true,
167-
index: promptContextMessages.length + i,
166+
sequence: promptContextMessages.length + i,
168167
content,
169168
role: 'assistant',
170169
completionId: summary.id
@@ -217,9 +216,9 @@ function recordEmbeddingMessage({
217216
segment,
218217
transaction,
219218
bedrockCommand,
220-
input: prompt.content,
219+
requestInput: prompt.content,
221220
bedrockResponse,
222-
isError: err !== null
221+
error: err !== null
223222
}))
224223

225224
for (const embedding of embeddings) {

lib/llm-events/aws-bedrock/converse-stream-handler.js renamed to lib/instrumentation/aws-sdk/v3/converse-stream-handler.js

File renamed without changes.
File renamed without changes.

lib/llm-events/aws-bedrock/bedrock-command.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
'use strict'
77

8-
const { stringifyClaudeChunkedMessage, stringifyConverseChunkedMessage } = require('./utils')
8+
const { stringifyClaudeChunkedMessage, stringifyConverseChunkedMessage } = require('./stringify-message')
99

1010
/**
1111
* Parses an AWS Bedrock command instance into a re-usable entity,

lib/llm-events/aws-bedrock/bedrock-response.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
'use strict'
77

8-
const { stringifyClaudeChunkedMessage, stringifyConverseChunkedMessage } = require('./utils')
8+
const { stringifyClaudeChunkedMessage, stringifyConverseChunkedMessage } = require('./stringify-message')
99

1010
/**
1111
* @typedef {object} AwsBedrockMiddlewareResponse
Lines changed: 40 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -1,82 +1,59 @@
11
/*
2-
* Copyright 2023 New Relic Corporation. All rights reserved.
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
33
* SPDX-License-Identifier: Apache-2.0
44
*/
55

66
'use strict'
77

8-
const LlmEvent = require('./event')
9-
/**
10-
* @typedef {object} LlmChatCompletionParams
11-
* @augments LlmEventParams
12-
* @property {string} completionId An identifier for the completion message.
13-
* @property {string} content The human readable response from the LLM.
14-
* @property {number} [index=0] The order of the message in the conversation.
15-
* @property {boolean} [isResponse=false] Indicates if the message represents
16-
* a response from the LLM.
17-
* @property {object} message The message sent to the LLM.
18-
* @property {OutgoingMessage} request The outgoing HTTP request used in the
19-
* LLM conversation.
20-
*/
21-
/**
22-
* @type {LlmChatCompletionParams}
23-
*/
24-
const defaultParams = {
25-
completionId: '',
26-
content: '',
27-
index: 0,
28-
isResponse: false,
29-
message: {},
30-
request: {}
31-
}
8+
const LlmChatCompletionMessage = require('../chat-completion-message')
329

33-
/**
34-
* Represents an LLM chat completion.
35-
*/
36-
class LlmChatCompletionMessage extends LlmEvent {
37-
constructor(params = defaultParams) {
38-
params = Object.assign({}, defaultParams, params)
39-
super(params)
40-
41-
const { agent, content, isResponse, index, completionId, role, segment } = params
42-
const recordContent = agent.config?.ai_monitoring?.record_content?.enabled
10+
module.exports = class AwsBedrockLlmChatCompletionMessage extends LlmChatCompletionMessage {
11+
/**
12+
*
13+
* @param {object} params constructor parameters
14+
* @param {Agent} params.agent New Relic agent instance
15+
* @param {object} params.segment Current segment
16+
* @param {object} params.transaction Current and active transaction
17+
* @param {object} params.bedrockCommand AWS Bedrock Command object, represents the request
18+
* @param {object} params.bedrockResponse AWS Bedrock Response object
19+
* @param {string} params.content Content of the message
20+
* @param {string} [params.role] Role of the message creator (e.g. `user`, `assistant`, `tool`)
21+
* @param {string} params.completionId ID of the `LlmChatCompletionSummary` event that
22+
* this message event is connected to
23+
* @param {number} params.sequence Index (beginning at 0) associated with
24+
* each message including the prompt and responses
25+
* @param {boolean} [params.isResponse] Indicates if this message is the response
26+
*/
27+
constructor({ agent, segment, transaction, bedrockCommand, bedrockResponse, content, role, completionId, sequence = 0, isResponse }) {
28+
super({ agent,
29+
segment,
30+
transaction,
31+
vendor: 'bedrock',
32+
content,
33+
role,
34+
sequence,
35+
requestId: bedrockResponse?.requestId,
36+
responseId: bedrockResponse?.id,
37+
responseModel: bedrockCommand?.modelId, // we can assume requestModel==responseModel in bedrock
38+
completionId,
39+
isResponse })
4340

44-
this.is_response = isResponse
45-
this.completion_id = completionId
46-
this.sequence = index
47-
this.content = recordContent === true ? content : undefined
48-
this.role = role
49-
if (this.is_response === false) {
50-
// Only record for request/input messages
51-
this.timestamp = segment.timer.start
52-
}
53-
54-
this.#setId(index)
55-
this.setTokenCount(agent)
41+
this.setTokenCount(agent, bedrockCommand, bedrockResponse)
5642
}
5743

58-
#setId(index) {
59-
const cmd = this.bedrockCommand
60-
if (cmd.isConverse || cmd.isTitan() === true || cmd.isClaude() === true) {
61-
this.id = `${this.id}-${index}`
62-
} else if (cmd.isCohere() === true) {
63-
this.id = `${this.bedrockResponse.id || this.id}-${index}`
64-
}
65-
}
66-
67-
setTokenCount(agent) {
44+
setTokenCount(agent, bedrockCommand, bedrockResponse) {
6845
const tokenCB = agent?.llm?.tokenCountCallback
6946

7047
if (tokenCB) {
71-
const promptContent = this.bedrockCommand?.prompt?.map((msg) => msg.content).join(' ')
72-
const completionContent = this.bedrockResponse?.completions?.join(' ')
48+
const promptContent = bedrockCommand?.prompt?.map((msg) => msg.content).join(' ')
49+
const completionContent = bedrockResponse?.completions?.join(' ')
7350

7451
if (promptContent && completionContent) {
7552
this.setTokenFromCallback(
7653
{
7754
tokenCB,
78-
reqModel: this.bedrockCommand.modelId,
79-
resModel: this.bedrockCommand.modelId,
55+
reqModel: bedrockCommand.modelId,
56+
resModel: bedrockCommand.modelId,
8057
promptContent,
8158
completionContent
8259
}
@@ -85,8 +62,7 @@ class LlmChatCompletionMessage extends LlmEvent {
8562
return
8663
}
8764

88-
this.setTokenInCompletionMessage({ promptTokens: this.bedrockResponse.inputTokenCount, completionTokens: this.bedrockResponse.outputTokenCount })
65+
this.setTokenInCompletionMessage({ promptTokens: bedrockResponse.inputTokenCount,
66+
completionTokens: bedrockResponse.outputTokenCount })
8967
}
9068
}
91-
92-
module.exports = LlmChatCompletionMessage
Lines changed: 37 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -1,68 +1,62 @@
11
/*
2-
* Copyright 2024 New Relic Corporation. All rights reserved.
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
33
* SPDX-License-Identifier: Apache-2.0
44
*/
55

66
'use strict'
77

8-
const LlmEvent = require('./event')
9-
10-
/**
11-
* @typedef {object} LlmChatCompletionSummaryParams
12-
* @augments LlmEventParams
13-
* @property {string} segment the segment associated with this LlmChatCompletionSummary
14-
* @property {boolean} isError whether this event represents an error
15-
*/
16-
/**
17-
* @type {LlmChatCompletionSummaryParams}
18-
*/
19-
const defaultParams = {}
20-
21-
/**
22-
* Represents an LLM chat completion summary.
23-
*/
24-
class LlmChatCompletionSummary extends LlmEvent {
25-
constructor(params = defaultParams) {
26-
super(params)
27-
28-
const { segment, isError, agent } = params
29-
this.error = isError
30-
this.duration = segment.getDurationInMillis()
31-
32-
const cmd = this.bedrockCommand
33-
const res = this.bedrockResponse
34-
35-
this['request.max_tokens'] = cmd.maxTokens
36-
this['response.choices.finish_reason'] = res.finishReason
37-
this['request.temperature'] = cmd.temperature
38-
this['response.number_of_messages'] = (cmd.prompt.length ?? 0) + (res.completions.length ?? 0)
39-
40-
this.timestamp = segment.timer.start
41-
this.setTokens(agent)
8+
const LlmChatCompletionSummary = require('../chat-completion-summary')
9+
10+
module.exports = class AwsBedrockLlmChatCompletionSummary extends LlmChatCompletionSummary {
11+
/**
12+
*
13+
* @param {object} params constructor parameters
14+
* @param {Agent} params.agent New Relic agent instance
15+
* @param {object} params.segment Current segment
16+
* @param {object} params.transaction Current and active transaction
17+
* @param {object} params.bedrockCommand AWS Bedrock Command object, represents the request
18+
* @param {object} params.bedrockResponse AWS Bedrock Response object
19+
* @param {boolean} [params.error] Set to `true` if an error occurred during creation call, omitted if no error occurred
20+
*/
21+
constructor({ agent, segment, transaction, bedrockCommand, bedrockResponse, error }) {
22+
super({ agent,
23+
segment,
24+
transaction,
25+
error,
26+
vendor: 'bedrock',
27+
requestModel: bedrockCommand?.modelId,
28+
requestId: bedrockResponse?.requestId,
29+
responseModel: bedrockCommand?.modelId, // we can assume requestModel==responseModel in bedrock
30+
temperature: bedrockCommand.temperature,
31+
maxTokens: bedrockCommand.maxTokens,
32+
numMsgs: (bedrockCommand.prompt.length ?? 0) + (bedrockResponse.completions.length ?? 0),
33+
finishReason: bedrockResponse?.finishReason })
34+
35+
this.setTokens(agent, bedrockCommand, bedrockResponse)
4236
}
4337

44-
setTokens(agent) {
38+
setTokens(agent, bedrockCommand, bedrockResponse) {
4539
const tokenCB = agent?.llm?.tokenCountCallback
4640

4741
// Prefer callback for prompt and completion tokens; if unavailable, fall back to response data.
4842
if (tokenCB) {
49-
const promptContent = this.bedrockCommand?.prompt?.map((msg) => msg.content).join(' ')
50-
const completionContent = this.bedrockResponse?.completions?.join(' ')
43+
const promptContent = bedrockCommand?.prompt?.map((msg) => msg.content).join(' ')
44+
const completionContent = bedrockResponse?.completions?.join(' ')
5145

5246
this.setTokenUsageFromCallback(
5347
{
5448
tokenCB,
55-
reqModel: this.bedrockCommand.modelId,
56-
resModel: this.bedrockCommand.modelId,
49+
reqModel: bedrockCommand.modelId,
50+
resModel: bedrockCommand.modelId,
5751
promptContent,
5852
completionContent
5953
}
6054
)
6155
return
6256
}
6357

64-
this.setTokensInResponse({ promptTokens: this.bedrockResponse.inputTokenCount, completionTokens: this.bedrockResponse.outputTokenCount, totalTokens: this.bedrockResponse.totalTokenCount })
58+
this.setTokensInResponse({ promptTokens: bedrockResponse.inputTokenCount,
59+
completionTokens: bedrockResponse.outputTokenCount,
60+
totalTokens: bedrockResponse.totalTokenCount })
6561
}
6662
}
67-
68-
module.exports = LlmChatCompletionSummary

0 commit comments

Comments
 (0)