
Commit 1a57c80

LangChain chat msg and summary refactor
1 parent 87c3ca8 commit 1a57c80

File tree

14 files changed: +150 additions, -145 deletions


lib/llm-events-new/base.js

Lines changed: 3 additions & 0 deletions
@@ -41,6 +41,9 @@ class LlmEvent {
     this.trace_id = transaction?.traceId
     this.vendor = vendor
     this.metadata = agent
+    // TODO: Does not appear in AIM spec, but was a
+    // requirement for LangChain instrumentation back in 2024?
+    // this.appName = agent.config.applications()[0]
 
     // Omit `error` property if no error occurred
     if (error === true) {

lib/llm-events-new/chat-summary.js

Lines changed: 5 additions & 4 deletions
@@ -41,12 +41,13 @@ class LlmChatCompletionSummary extends LlmEvent {
     responseOrg, temperature, maxTokens, numMsgs, finishReason }) {
     super({ agent, segment, transaction, vendor, responseModel, requestId, error })
 
-    this['request.model'] = requestModel
-    this['request.max_tokens'] = maxTokens
-    this['request.temperature'] = temperature
-    this['response.number_of_messages'] = numMsgs
+    if (requestModel) this['request.model'] = requestModel
+    if (maxTokens) this['request.max_tokens'] = maxTokens
+    if (temperature) this['request.temperature'] = temperature
     if (finishReason) this['response.choices.finish_reason'] = finishReason
     if (responseOrg) this['response.organization'] = responseOrg
+
+    this['response.number_of_messages'] = numMsgs
     this.timestamp = segment.timer.start
     this.duration = segment.getDurationInMillis()
   }
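
Note: a minimal standalone sketch (not part of this commit) of what the new guards change. Request attributes are now only written when the corresponding value is truthy, instead of always being assigned and possibly ending up undefined. The variable values below are hypothetical.

'use strict'

// Hypothetical inputs: the instrumented library reported max_tokens but no model.
const summary = {}
const requestModel = undefined
const maxTokens = 1024

if (requestModel) summary['request.model'] = requestModel
if (maxTokens) summary['request.max_tokens'] = maxTokens

console.log(summary) // { 'request.max_tokens': 1024 }  (no 'request.model' key)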

lib/llm-events-new/langchain/chat-message.js

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2026 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+const LlmChatCompletionMessage = require('../chat-message')
+const { isSimpleObject } = require('../../util/objects')
+
+/**
+ * Encapsulates a LangChain LlmChatCompletionMessage.
+ */
+class LangChainLlmChatCompletionMessage extends LlmChatCompletionMessage {
+  virtual_llm = true
+  /**
+   * @param {object} params constructor parameters
+   * @param {Agent} params.agent New Relic agent instance
+   * @param {object} params.segment Current segment
+   * @param {object} params.transaction Current and active transaction
+   * @param {string} params.runId LangChain run ID (will be used as response ID)
+   * @param {number} params.sequence Index (beginning at 0) associated with
+   * each message including the prompt and responses
+   * @param {string} params.content Content of the message
+   * @param {string} [params.role] Role of the message creator (e.g. `user`, `assistant`, `tool`)
+   * @param {string} params.completionId ID of the `LlmChatCompletionSummary` event that
+   * this message event is connected to
+   * @param {boolean} [params.isResponse] `true` if a message is the result of a chat
+   * completion and not an input message - omitted in `false` cases
+   * @param {object} params.metadata LangChain metadata object
+   * @param {object[]} params.tags LangChain tags
+   */
+  constructor({ agent, segment, transaction, runId, sequence, role, content, completionId, isResponse, metadata, tags }) {
+    super({ agent,
+      segment,
+      transaction,
+      vendor: 'langchain',
+      responseId: runId,
+      requestId: runId,
+      sequence,
+      content,
+      role,
+      completionId,
+      isResponse })
+
+    // TODO: Does not appear in AIM spec, but was a
+    // requirement for LangChain instrumentation back in 2024?
+    this.appName = agent.config.applications()[0]
+    this.langchainMeta = metadata
+    this.tags = Array.isArray(tags) ? tags.join(',') : tags
+  }
+
+  // eslint-disable-next-line accessor-pairs
+  set langchainMeta(value) {
+    if (isSimpleObject(value) === false) {
+      return
+    }
+    for (const [key, val] of Object.entries(value)) {
+      this[`metadata.${key}`] = val
+    }
+  }
+}
+
+module.exports = LangChainLlmChatCompletionMessage
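
Note: a short standalone sketch (not part of this commit) of what the langchainMeta setter above does. It flattens a plain metadata object into metadata.* attributes on the event and silently ignores non-object values. The flattenMeta helper below is hypothetical, and the agent's isSimpleObject() guard is approximated with a plain-object check.

'use strict'

// Hypothetical helper mirroring the `set langchainMeta(value)` accessor above.
function flattenMeta(target, value) {
  // Approximation of the agent's isSimpleObject() guard: only plain objects pass.
  if (Object.prototype.toString.call(value) !== '[object Object]') {
    return
  }
  for (const [key, val] of Object.entries(value)) {
    target[`metadata.${key}`] = val
  }
}

const event = {}
flattenMeta(event, { foo: 'foo', userId: 42 })
console.log(event) // { 'metadata.foo': 'foo', 'metadata.userId': 42 }

flattenMeta(event, ['not', 'a', 'plain', 'object']) // ignored, no keys added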

lib/llm-events-new/langchain/chat-summary.js

Lines changed: 42 additions & 0 deletions
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2026 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+const LlmChatCompletionSummary = require('../chat-summary')
+const { isSimpleObject } = require('../../util/objects')
+
+/**
+ * Encapsulates a LangChain LlmChatCompletionSummary.
+ */
+class LangChainLlmChatCompletionSummary extends LlmChatCompletionSummary {
+  virtual_llm = true
+  constructor({ agent, segment, transaction, error, numMsgs = 0, runId, metadata = {}, tags = '' }) {
+    super({ agent,
+      segment,
+      transaction,
+      vendor: 'langchain',
+      requestId: runId,
+      error,
+      numMsgs })
+
+    // TODO: Does not appear in AIM spec, but was a
+    // requirement for LangChain instrumentation back in 2024?
+    this.appName = agent.config.applications()[0]
+    this.langchainMeta = metadata
+    this.tags = Array.isArray(tags) ? tags.join(',') : tags
+  }
+
+  // eslint-disable-next-line accessor-pairs
+  set langchainMeta(value) {
+    if (isSimpleObject(value) === false) {
+      return
+    }
+    for (const [key, val] of Object.entries(value)) {
+      this[`metadata.${key}`] = val
+    }
+  }
+}
+
+module.exports = LangChainLlmChatCompletionSummary

lib/llm-events-new/langchain/index.js

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+/*
+ * Copyright 2026 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
+const LlmChatCompletionMessage = require('./chat-message')
+const LlmChatCompletionSummary = require('./chat-summary')
+
+module.exports = {
+  LlmChatCompletionMessage,
+  LlmChatCompletionSummary,
+}

lib/llm-events/langchain/chat-completion-message.js

Lines changed: 0 additions & 72 deletions
This file was deleted.

lib/llm-events/langchain/chat-completion-summary.js

Lines changed: 0 additions & 42 deletions
This file was deleted.

lib/llm-events/langchain/index.js

Lines changed: 0 additions & 2 deletions
@@ -7,8 +7,6 @@
 
 module.exports = {
   LangChainEvent: require('./event'),
-  LangChainCompletionMessage: require('./chat-completion-message'),
-  LangChainCompletionSummary: require('./chat-completion-summary'),
   LangChainVectorSearch: require('./vector-search'),
   LangChainVectorSearchResult: require('./vector-search-result'),
   LangChainTool: require('./tool')

lib/subscribers/langchain/runnable.js

Lines changed: 8 additions & 8 deletions
@@ -6,9 +6,9 @@
 const { AiMonitoringChatSubscriber } = require('../ai-monitoring')
 const { AI: { LANGCHAIN } } = require('../../metrics/names')
 const {
-  LangChainCompletionMessage,
-  LangChainCompletionSummary
-} = require('#agentlib/llm-events/langchain/index.js')
+  LlmChatCompletionMessage,
+  LlmChatCompletionSummary
+} = require('#agentlib/llm-events-new/langchain/index.js')
 const { langchainRunId } = require('#agentlib/symbols.js')
 
 class LangchainRunnableSubscriber extends AiMonitoringChatSubscriber {
@@ -42,15 +42,15 @@ class LangchainRunnableSubscriber extends AiMonitoringChatSubscriber {
     // Stream calls that error on initial call lack a response message
     // so create an empty array in that case
     const messages = response ? [response] : []
-    return new LangChainCompletionSummary({
+    return new LlmChatCompletionSummary({
       agent: this.agent,
       segment,
       transaction,
       error: !!err,
-      messages,
+      numMsgs: messages?.length,
+      runId: segment[langchainRunId],
       metadata,
-      tags,
-      runId: segment[langchainRunId]
+      tags
     })
   }
 
@@ -85,7 +85,7 @@ class LangchainRunnableSubscriber extends AiMonitoringChatSubscriber {
     const isResponse = message === response
     const { content, role } = this.extractContentAndRole(message)
 
-    return new LangChainCompletionMessage({
+    return new LlmChatCompletionMessage({
       sequence: index,
       agent: this.agent,
       content,

test/unit/llm-events/langchain/chat-completion-message.test.js

Lines changed: 10 additions & 9 deletions
@@ -7,7 +7,7 @@
 
 const test = require('node:test')
 const assert = require('node:assert')
-const LangChainCompletionMessage = require('../../../../lib/llm-events/langchain/chat-completion-message')
+const LlmChatCompletionMessage = require('#agentlib/llm-events-new/langchain/chat-message.js')
 
 test.beforeEach((ctx) => {
   ctx.nr = {}
@@ -49,12 +49,13 @@ test.beforeEach((ctx) => {
     }
   }
 
+  ctx.nr.completionId = '4bea415a30e702d45f5dd521c74b6216d209'
   ctx.nr.runId = 'run-1'
   ctx.nr.metadata = { foo: 'foo' }
 })
 
 test('creates entity', async (t) => {
-  const msg = new LangChainCompletionMessage({
+  const msg = new LlmChatCompletionMessage({
     ...t.nr,
     sequence: 1,
     content: 'hello world',
@@ -73,12 +74,12 @@ test('creates entity', async (t) => {
   assert.equal(msg.sequence, 1)
   assert.equal(msg.role, 'assistant', 'should assume assistant role based on isResponse=true')
   assert.equal(msg.content, 'hello world')
-  assert.match(msg.completion_id, /[a-z0-9-]{36}/)
+  assert.equal(msg.completion_id, t.nr.completionId)
   assert.equal(msg.timestamp, undefined, 'should not have a timestamp defined if isResponse=true')
 })
 
 test('assigns role if given', async(t) => {
-  const msg = new LangChainCompletionMessage({
+  const msg = new LlmChatCompletionMessage({
     ...t.nr,
     sequence: 1,
     content: 'hello world',
@@ -88,7 +89,7 @@ test('assigns role if given', async(t) => {
 })
 
 test('assigns role and timestamp correctly if isResponse is false', async(t) => {
-  const msg = new LangChainCompletionMessage({
+  const msg = new LlmChatCompletionMessage({
     ...t.nr,
     sequence: 0,
     content: 'hello world',
@@ -99,16 +100,16 @@ test('assigns role and timestamp correctly if isResponse is false', async(t) =>
 })
 
 test('assigns id correctly', async (t) => {
-  let msg = new LangChainCompletionMessage({ ...t.nr, runId: '', sequence: 1 })
-  assert.match(msg.id, /[a-z0-9-]{36}-1/)
+  let msg = new LlmChatCompletionMessage({ ...t.nr, runId: '', sequence: 1 })
+  assert.match(msg.id, /[a-z0-9-]{36}/)
 
-  msg = new LangChainCompletionMessage({ ...t.nr, runId: '123456', sequence: 42 })
+  msg = new LlmChatCompletionMessage({ ...t.nr, runId: '123456', sequence: 42 })
   assert.equal(msg.id, '123456-42')
 })
 
 test('respects record_content setting', async (t) => {
   t.nr.agent.config.ai_monitoring.record_content.enabled = false
-  const search = new LangChainCompletionMessage({
+  const search = new LlmChatCompletionMessage({
     ...t.nr,
     sequence: 1,
     content: 'hello world'
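
Note: the updated test file uses Node's built-in node:test module, so, assuming no repo-specific setup is required, it can be run directly with `node --test test/unit/llm-events/langchain/chat-completion-message.test.js` (the repository may also wire it into its own test scripts).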
