 /*
- * Copyright 2023 New Relic Corporation. All rights reserved.
+ * Copyright 2026 New Relic Corporation. All rights reserved.
  * SPDX-License-Identifier: Apache-2.0
  */

 'use strict'

-const LlmEvent = require('./event')
-/**
- * @typedef {object} LlmChatCompletionParams
- * @augments LlmEventParams
- * @property {string} completionId An identifier for the completion message.
- * @property {string} content The human readable response from the LLM.
- * @property {number} [index=0] The order of the message in the conversation.
- * @property {boolean} [isResponse=false] Indicates if the message represents
- * a response from the LLM.
- * @property {object} message The message sent to the LLM.
- * @property {OutgoingMessage} request The outgoing HTTP request used in the
- * LLM conversation.
- */
-/**
- * @type {LlmChatCompletionParams}
- */
-const defaultParams = {
-  completionId: '',
-  content: '',
-  index: 0,
-  isResponse: false,
-  message: {},
-  request: {}
-}
+const LlmChatCompletionMessage = require('../chat-completion-message')

-/**
- * Represents an LLM chat completion.
- */
-class LlmChatCompletionMessage extends LlmEvent {
-  constructor(params = defaultParams) {
-    params = Object.assign({}, defaultParams, params)
-    super(params)
-
-    const { agent, content, isResponse, index, completionId, role, segment } = params
-    const recordContent = agent.config?.ai_monitoring?.record_content?.enabled
+module.exports = class AwsBedrockLlmChatCompletionMessage extends LlmChatCompletionMessage {
+  /**
+   *
+   * @param {object} params constructor parameters
+   * @param {Agent} params.agent New Relic agent instance
+   * @param {object} params.segment Current segment
+   * @param {object} params.transaction Current and active transaction
+   * @param {object} params.bedrockCommand AWS Bedrock Command object, represents the request
+   * @param {object} params.bedrockResponse AWS Bedrock Response object
+   * @param {string} params.content Content of the message
+   * @param {string} [params.role] Role of the message creator (e.g. `user`, `assistant`, `tool`)
+   * @param {string} params.completionId ID of the `LlmChatCompletionSummary` event that
+   * this message event is connected to
+   * @param {number} params.sequence Index (beginning at 0) associated with
+   * each message including the prompt and responses
+   * @param {boolean} [params.isResponse] Indicates if this message is the response
+   */
+  constructor({ agent, segment, transaction, bedrockCommand, bedrockResponse, content, role, completionId, sequence = 0, isResponse }) {
+    super({ agent,
+      segment,
+      transaction,
+      vendor: 'bedrock',
+      content,
+      role,
+      sequence,
+      requestId: bedrockResponse?.requestId,
+      responseId: bedrockResponse?.id,
+      responseModel: bedrockCommand?.modelId, // we can assume requestModel==responseModel in bedrock
+      completionId,
+      isResponse })

-    this.is_response = isResponse
-    this.completion_id = completionId
-    this.sequence = index
-    this.content = recordContent === true ? content : undefined
-    this.role = role
-    if (this.is_response === false) {
-      // Only record for request/input messages
-      this.timestamp = segment.timer.start
-    }
-
-    this.#setId(index)
-    this.setTokenCount(agent)
+    this.setTokenCount(agent, bedrockCommand, bedrockResponse)
   }

-  #setId(index) {
-    const cmd = this.bedrockCommand
-    if (cmd.isConverse || cmd.isTitan() === true || cmd.isClaude() === true) {
-      this.id = `${this.id}-${index}`
-    } else if (cmd.isCohere() === true) {
-      this.id = `${this.bedrockResponse.id || this.id}-${index}`
-    }
-  }
-
-  setTokenCount(agent) {
+  setTokenCount(agent, bedrockCommand, bedrockResponse) {
     const tokenCB = agent?.llm?.tokenCountCallback

     if (tokenCB) {
-      const promptContent = this.bedrockCommand?.prompt?.map((msg) => msg.content).join(' ')
-      const completionContent = this.bedrockResponse?.completions?.join(' ')
+      const promptContent = bedrockCommand?.prompt?.map((msg) => msg.content).join(' ')
+      const completionContent = bedrockResponse?.completions?.join(' ')

       if (promptContent && completionContent) {
         this.setTokenFromCallback(
           {
             tokenCB,
-            reqModel: this.bedrockCommand.modelId,
-            resModel: this.bedrockCommand.modelId,
+            reqModel: bedrockCommand.modelId,
+            resModel: bedrockCommand.modelId,
             promptContent,
             completionContent
           }
@@ -85,8 +62,7 @@ class LlmChatCompletionMessage extends LlmEvent {
       return
     }

-    this.setTokenInCompletionMessage({ promptTokens: this.bedrockResponse.inputTokenCount, completionTokens: this.bedrockResponse.outputTokenCount })
+    this.setTokenInCompletionMessage({ promptTokens: bedrockResponse.inputTokenCount,
+      completionTokens: bedrockResponse.outputTokenCount })
   }
 }
-
-module.exports = LlmChatCompletionMessage
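
For context on the setTokenCount change: the method uses a token-counting callback registered on the agent when one is available, and otherwise relies on the inputTokenCount/outputTokenCount values reported in the Bedrock response. A minimal sketch of how an application might register such a callback, assuming the agent's public setLlmTokenCountCallback API and a placeholder tokenizer (not part of this diff):

'use strict'

const newrelic = require('newrelic')

// Register a token-count callback; the agent exposes it internally as
// agent.llm.tokenCountCallback, which setTokenCount() above consults before
// falling back to the token counts reported by Bedrock.
newrelic.setLlmTokenCountCallback(function countTokens(model, content) {
  // Placeholder estimate only; substitute a real tokenizer for the model in use.
  return Math.ceil(content.length / 4)
})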