Skip to content

Commit f339675

Browse files
feat: Add instrumentation support for @langchain/langgraph (#3645)
1 parent 6ff6961 commit f339675

File tree

14 files changed

+1128
-75
lines changed

14 files changed

+1128
-75
lines changed

lib/llm-events/error-message.js

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,20 +21,26 @@ module.exports = class LlmErrorMessage {
2121
* @param {LlmVectorStoreSearch} [params.vectorsearch] Details about the vector
2222
* search if it was a vector search event.
2323
* @param {LlmTool} [params.tool] Details about the tool event if it was a tool event.
24+
* @param {object} [params.aiAgent] Details about the AI agent event if it was an AI agent event.
2425
* @param {boolean} [params.useNameAsCode] defaults to false, only Bedrock sets it to true so far
2526
*/
26-
constructor({ response, cause, summary = {}, embedding = {}, vectorsearch = {}, tool = {}, useNameAsCode = false } = {}) {
27+
constructor({ response, cause, summary = {}, embedding = {}, vectorsearch = {}, tool = {}, aiAgent = {}, useNameAsCode = false } = {}) {
2728
this['http.statusCode'] = response?.statusCode ?? response?.status ?? cause?.status
2829
this['error.message'] = cause?.message
29-
this['error.code'] = response?.code ?? cause?.error?.code
30+
this['error.code'] = response?.code ?? cause?.error?.code ?? cause?.code
3031
if (useNameAsCode) {
3132
this['error.code'] = cause?.name
3233
}
34+
if (cause?.['lc_error_code']) {
35+
// this is where langchain error codes live
36+
this['error.code'] = cause['lc_error_code']
37+
}
3338
this['error.param'] = response?.param ?? cause?.error?.param
3439
this.completion_id = summary?.id
3540
this.embedding_id = embedding?.id
3641
this.vector_store_id = vectorsearch?.id
3742
this.tool_id = tool?.id
43+
this.agent_id = aiAgent?.id
3844

3945
if (embedding?.vendor === 'gemini' || summary?.vendor === 'gemini') {
4046
this._handleGemini(cause)

lib/llm-events/langgraph/agent.js

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
/*
2+
* Copyright 2026 New Relic Corporation. All rights reserved.
3+
* SPDX-License-Identifier: Apache-2.0
4+
*/
5+
6+
const BaseLlmEvent = require('../event')
7+
const { makeId } = require('../../util/hashes')
8+
9+
/**
10+
* @typedef {object} LangGraphAgentEventParams
11+
* @property {Agent} agent The New Relic agent instance.
12+
* @property {string} name The name of the LangGraph agent, defaults to 'agent'.
13+
* @property {object} segment The associated NR segment.
14+
* @property {object} transaction The associated NR transaction.
15+
* @property {boolean} error A boolean flag to indicate if an error occurred.
16+
*/
17+
18+
module.exports = class LangGraphAgentEvent extends BaseLlmEvent {
19+
id = makeId(36)
20+
span_id
21+
trace_id
22+
ingest_source = 'Node'
23+
vendor = 'langgraph'
24+
25+
/**
26+
* @param {LangGraphAgentEventParams} params should contain all necessary and optional LangGraph data
27+
*/
28+
constructor(params) {
29+
super(params)
30+
const { agent, segment, transaction, error = false, name = 'agent' } = params
31+
32+
this.name = name
33+
this.span_id = segment.id
34+
this.trace_id = transaction.traceId
35+
this.error = error
36+
37+
// Setting `metadata` as the NR agent instance will allow `BaseLlmEvent`
38+
// to extract the relevant `llm.<user_defined_metadata>`.
39+
this.metadata = agent
40+
}
41+
}

lib/metrics/names.js

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -171,7 +171,8 @@ const AI = {
171171
COMPLETION: 'Llm/completion',
172172
TOOL: 'Llm/tool',
173173
CHAIN: 'Llm/chain',
174-
VECTORSTORE: 'Llm/vectorstore'
174+
VECTORSTORE: 'Llm/vectorstore',
175+
AGENT: 'Llm/agent'
175176
}
176177

177178
AI.GEMINI = {
@@ -199,6 +200,11 @@ AI.LANGCHAIN = {
199200
VECTORSTORE: `${AI.VECTORSTORE}/LangChain`
200201
}
201202

203+
AI.LANGGRAPH = {
204+
TRACKING_PREFIX: `${AI.TRACKING_PREFIX}/LangGraph`,
205+
AGENT: `${AI.AGENT}/LangGraph`
206+
}
207+
202208
const MCP = {
203209
TRACKING_PREFIX: `${AI.TRACKING_PREFIX}/MCP`,
204210
TOOL: `${AI.TOOL}/MCP`,

lib/subscriber-configs.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ const subscribers = {
1818
...require('./subscribers/ioredis/config'),
1919
...require('./subscribers/iovalkey/config'),
2020
...require('./subscribers/langchain/config'),
21+
...require('./subscribers/langgraph/config'),
2122
...require('./subscribers/mcp-sdk/config'),
2223
...require('./subscribers/mysql/config'),
2324
...require('./subscribers/mysql2/config'),

lib/subscribers/langchain/runnable-stream.js

Lines changed: 76 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -16,37 +16,39 @@ class LangchainRunnableStreamSubscriber extends LangchainRunnableSubscriber {
1616
this.logger.debug('`ai_monitoring.enabled` is set to false, stream will not be instrumented.')
1717
return
1818
}
19-
2019
if (!this.streamingEnabled) {
2120
this.logger.debug('`ai_monitoring.streaming.enabled` is set to false, stream will not be instrumented.')
2221
this.agent.metrics.getOrCreateMetric(STREAMING_DISABLED).incrementCallCount()
2322
return
2423
}
2524

2625
const ctx = this.agent.tracer.getContext()
27-
2826
const { transaction } = ctx
2927
if (transaction?.isActive() !== true) {
3028
return
3129
}
3230

33-
// Extract data.
3431
const request = data?.arguments?.[0]
32+
// Requests via LangGraph API have the `messages` property with the
33+
// information we need, otherwise it just lives on the `request`
34+
// object directly.
35+
const userRequest = request?.messages ? request.messages?.[0] : request
3536
const params = data?.arguments?.[1] || {}
3637
const metadata = params?.metadata ?? {}
3738
const tags = params?.tags ?? []
3839
const { result: response, error: err } = data
3940

40-
// Instrument stream.
41-
if (response?.next) {
42-
this.wrapNextHandler({ response, ctx, request, metadata, tags })
41+
// Note: as of 18.x `ReadableStream` is a global
42+
// eslint-disable-next-line n/no-unsupported-features/node-builtins
43+
if (response instanceof ReadableStream) {
44+
this.instrumentStream({ response, ctx, request: userRequest, metadata, tags })
4345
} else {
4446
// Input error occurred which means a stream was not created.
4547
// Skip instrumenting streaming and create Llm Events from
4648
// the data we have
4749
this.recordChatCompletionEvents({
4850
ctx,
49-
request,
51+
request: userRequest,
5052
err,
5153
metadata,
5254
tags
@@ -55,7 +57,7 @@ class LangchainRunnableStreamSubscriber extends LangchainRunnableSubscriber {
5557
}
5658

5759
/**
58-
* Wraps the next method on the IterableReadableStream. It will also record the Llm
60+
* Wraps the `read` method on the ReadableStream reader. It will also record the Llm
5961
* events when the stream is done processing.
6062
*
6163
* @param {object} params function params
@@ -65,56 +67,82 @@ class LangchainRunnableStreamSubscriber extends LangchainRunnableSubscriber {
6567
* @param {object} params.metadata metadata for the call
6668
* @param {Array} params.tags tags for the call
6769
*/
68-
wrapNextHandler({ ctx, response, request, metadata, tags }) {
70+
instrumentStream({ ctx, response, request, metadata, tags }) {
6971
const self = this
70-
const orig = response.next
71-
let content = ''
72-
const { segment } = ctx
73-
74-
async function wrappedIterator(...args) {
75-
try {
76-
const result = await orig.apply(this, args)
77-
// only create Llm events when stream iteration is done
78-
if (result?.done) {
72+
const orig = response.getReader
73+
response.getReader = function wrappedGetReader() {
74+
const reader = orig.apply(this, arguments)
75+
const origRead = reader.read
76+
let responseContent = ''
77+
reader.read = async function wrappedRead(...args) {
78+
try {
79+
const result = await origRead.apply(this, args)
80+
if (result?.done) {
81+
// only create Llm events when stream iteration is done
82+
self.recordChatCompletionEvents({
83+
ctx,
84+
response: responseContent,
85+
request,
86+
metadata,
87+
tags
88+
})
89+
} else {
90+
// Concat the streamed content
91+
responseContent = self.concatResponseContent(result, responseContent)
92+
}
93+
return result
94+
} catch (error) {
7995
self.recordChatCompletionEvents({
8096
ctx,
8197
request,
82-
response: content,
98+
response: responseContent,
8399
metadata,
84-
tags
100+
tags,
101+
err: error
85102
})
86-
} else {
87-
// Concat the streamed content
88-
if (typeof result?.value?.content === 'string') {
89-
// LangChain BaseMessageChunk case
90-
content += result.value.content
91-
} else if (typeof result?.value === 'string') {
92-
// Base LangChain case
93-
content += result.value
94-
} else if (typeof result?.value?.[0] === 'string') {
95-
// Array parser case
96-
content += result.value[0]
97-
}
103+
throw error
104+
} finally {
105+
// update segment duration on every stream
106+
// iteration to extend the timer
107+
ctx.segment.touch()
98108
}
99-
return result
100-
} catch (error) {
101-
self.recordChatCompletionEvents({
102-
ctx,
103-
request,
104-
response: content,
105-
metadata,
106-
tags,
107-
err: error
108-
})
109-
throw error
110-
} finally {
111-
// update segment duration on every stream iteration to extend
112-
// the timer
113-
segment.touch()
114109
}
110+
return reader
111+
}
112+
}
113+
114+
/**
115+
* Concats streamed content from various LangChain/LangGraph result formats.
116+
*
117+
* @param {object} result the stream result chunk
118+
* @param {string|object} content the response so far
119+
* @returns {string|object} updated response content. For LangGraph, it will return an object
120+
* (e.g. AIMessage), so we have more info if we need to drop this response if it is incomplete
121+
* (e.g outgoing tool call).
122+
*/
123+
concatResponseContent(result, content) {
124+
if (result?.value?.messages || result?.value?.agent?.messages) {
125+
// LangGraph case:
126+
// The result.value.%messages field contains all messages,
127+
// request and response, and appends new events at the
128+
// end of the array. Therefore, the last message is the
129+
// relevant response object.
130+
const langgraphMessages = result?.value?.messages ?? result?.value?.agent?.messages
131+
if (langgraphMessages.length > 0) {
132+
content = langgraphMessages[langgraphMessages.length - 1]
133+
}
134+
} else if (typeof result?.value?.content === 'string') {
135+
// LangChain MessageChunk case
136+
content += result.value.content
137+
} else if (typeof result?.value === 'string') {
138+
// Base LangChain case
139+
content += result.value
140+
} else if (typeof result?.value?.[0] === 'string') {
141+
// LangChain array parser case
142+
content += result.value[0]
115143
}
116144

117-
response.next = this.agent.tracer.bindFunction(wrappedIterator, ctx, false)
145+
return content
118146
}
119147
}
120148

0 commit comments

Comments
 (0)