Skip to content

Commit a8e0444

Browse files
committed
Don't create OpenAI segment if ai_monitoring is disabled
1 parent d1e90e4 commit a8e0444

File tree

6 files changed

+101
-13
lines changed

6 files changed

+101
-13
lines changed

lib/subscribers/openai/base.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ class OpenAISubscriber extends Subscriber {
1414
}
1515

1616
get enabled() {
17-
return super.enabled && this.config.ai_monitoring?.enabled
17+
return super.enabled && this.agent.config.ai_monitoring.enabled
1818
}
1919

2020
/**

lib/subscribers/openai/chat.js

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,10 @@ class OpenAIChatCompletions extends OpenAISubscriber {
3232
this.logger.warn(`Instrumenting chat completion streams is only supported with openai version ${MIN_STREAM_VERSION}+.`)
3333
return ctx
3434
}
35+
if (!this.enabled) {
36+
this.logger.debug('OpenAI instrumentation is disabled, not creating segment.')
37+
return
38+
}
3539

3640
const segment = this.agent.tracer.createSegment({
3741
name: OPENAI.COMPLETION,
@@ -43,6 +47,10 @@ class OpenAIChatCompletions extends OpenAISubscriber {
4347
}
4448

4549
asyncEnd(data) {
50+
if (!this.enabled) {
51+
this.logger.debug('OpenAI instrumentation is disabled, not recording Llm events.')
52+
return
53+
}
4654
const ctx = this.agent.tracer.getContext()
4755
if (!ctx?.segment || !ctx?.transaction) {
4856
return

lib/subscribers/openai/embeddings.js

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,10 @@ class OpenAIEmbeddings extends OpenAISubscriber {
1818
}
1919

2020
handler(data, ctx) {
21+
if (!this.enabled) {
22+
this.logger.debug('OpenAI instrumentation is disabled, not creating segment.')
23+
return
24+
}
2125
const segment = this.agent.tracer.createSegment({
2226
name: OPENAI.EMBEDDING,
2327
parent: ctx.segment,
@@ -28,6 +32,10 @@ class OpenAIEmbeddings extends OpenAISubscriber {
2832
}
2933

3034
asyncEnd(data) {
35+
if (!this.enabled) {
36+
this.logger.debug('OpenAI instrumentation is disabled, not recording Llm events.')
37+
return
38+
}
3139
const ctx = this.agent.tracer.getContext()
3240
if (!ctx?.segment || !ctx?.transaction) {
3341
return

test/versioned/openai/chat-completions-res-api.test.js

Lines changed: 30 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -191,6 +191,25 @@ test('responses.create', async (t) => {
191191
assert.equal(events.length, 0, 'should not create llm events')
192192
})
193193

194+
await t.test('should not create segment or llm events when ai_monitoring.enabled is false', (t, end) => {
195+
const { client, agent } = t.nr
196+
agent.config.ai_monitoring.enabled = false
197+
helper.runInTransaction(agent, async (tx) => {
198+
await client.responses.create({
199+
input: [{ role: 'user', content: 'You are a mathematician.' }]
200+
})
201+
202+
const events = agent.customEventAggregator.events.toArray()
203+
assert.equal(events.length, 0, 'should not create llm events when ai_monitoring is disabled')
204+
205+
const children = tx.trace.segments.root.children
206+
assert.equal(children.length, 0, 'should not create OpenAI completion segment')
207+
208+
tx.end()
209+
end()
210+
})
211+
})
212+
194213
await t.test('auth errors should be tracked', (t, end) => {
195214
const { client, agent } = t.nr
196215
helper.runInTransaction(agent, async (tx) => {
@@ -468,7 +487,7 @@ test('responses.create', async (t) => {
468487
})
469488

470489
await t.test('should not create llm events when ai_monitoring.streaming.enabled is false', (t, end) => {
471-
const { client, agent } = t.nr
490+
const { client, agent, host, port } = t.nr
472491
agent.config.ai_monitoring.streaming.enabled = false
473492
helper.runInTransaction(agent, async (tx) => {
474493
const content = 'Streamed response'
@@ -488,7 +507,16 @@ test('responses.create', async (t) => {
488507
assert.equal(res, expectedRes.body.response.output[0].content[0].text)
489508

490509
const events = agent.customEventAggregator.events.toArray()
491-
assert.equal(events.length, 0, 'should not llm events when streaming is disabled')
510+
assert.equal(events.length, 0, 'should not create llm events when streaming is disabled')
511+
512+
// Should still create the OPENAI.COMPLETION segment since ai_monitoring is enabled
513+
assertSegments(
514+
tx.trace,
515+
tx.trace.root,
516+
[OPENAI.COMPLETION, [`External/${host}:${port}/responses`]],
517+
{ exact: false }
518+
)
519+
492520
const metrics = agent.metrics.getOrCreateMetric(TRACKING_METRIC)
493521
assert.equal(metrics.callCount > 0, true)
494522
const attributes = tx.trace.attributes.get(DESTINATIONS.TRANS_EVENT)

test/versioned/openai/chat-completions.test.js

Lines changed: 34 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -34,11 +34,11 @@ test('chat.completions.create', async (t) => {
3434
ctx.nr.server = server
3535
ctx.nr.agent = helper.instrumentMockedAgent({
3636
ai_monitoring: {
37-
enabled: true
37+
enabled: true,
38+
streaming: {
39+
enabled: true
40+
}
3841
},
39-
streaming: {
40-
enabled: true
41-
}
4242
})
4343
const OpenAI = require('openai')
4444
ctx.nr.client = new OpenAI({
@@ -398,7 +398,7 @@ test('chat.completions.create', async (t) => {
398398
})
399399

400400
test('should not create llm events when ai_monitoring.streaming.enabled is false', (t, end) => {
401-
const { client, agent } = t.nr
401+
const { client, agent, host, port } = t.nr
402402
agent.config.ai_monitoring.streaming.enabled = false
403403
helper.runInTransaction(agent, async (tx) => {
404404
const content = 'Streamed response'
@@ -421,11 +421,16 @@ test('chat.completions.create', async (t) => {
421421
assert.equal(res, expectedRes.streamData)
422422

423423
const events = agent.customEventAggregator.events.toArray()
424-
assert.equal(events.length, 0, 'should not llm events when streaming is disabled')
425-
const metrics = agent.metrics.getOrCreateMetric(TRACKING_METRIC)
426-
assert.equal(metrics.callCount > 0, true)
427-
const attributes = tx.trace.attributes.get(DESTINATIONS.TRANS_EVENT)
428-
assert.equal(attributes.llm, true)
424+
assert.equal(events.length, 0, 'should not create llm events when streaming is disabled')
425+
426+
// Should still create the OPENAI.COMPLETION segment since ai_monitoring is enabled
427+
assertSegments(
428+
tx.trace,
429+
tx.trace.root,
430+
[OPENAI.COMPLETION, [`External/${host}:${port}/chat/completions`]],
431+
{ exact: false }
432+
)
433+
429434
const streamingDisabled = agent.metrics.getOrCreateMetric(
430435
'Supportability/Nodejs/ML/Streaming/Disabled'
431436
)
@@ -478,6 +483,25 @@ test('chat.completions.create', async (t) => {
478483
assert.equal(events.length, 0, 'should not create llm events')
479484
})
480485

486+
await t.test('should not create segment or llm events when ai_monitoring.enabled is false', (t, end) => {
487+
const { client, agent } = t.nr
488+
agent.config.ai_monitoring.enabled = false
489+
helper.runInTransaction(agent, async (tx) => {
490+
await client.chat.completions.create({
491+
messages: [{ role: 'user', content: 'You are a mathematician.' }]
492+
})
493+
494+
const events = agent.customEventAggregator.events.toArray()
495+
assert.equal(events.length, 0, 'should not create llm events when ai_monitoring is disabled')
496+
497+
const children = tx.trace.segments.root.children
498+
assert.equal(children.length, 0, 'should not create OpenAI completion segment')
499+
500+
tx.end()
501+
end()
502+
})
503+
})
504+
481505
await t.test('auth errors should be tracked', async (t) => {
482506
const { client, agent } = t.nr
483507
const plan = tspl(t, { plan: 13 })

test/versioned/openai/embeddings.test.js

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -188,3 +188,23 @@ test('should add llm attribute to transaction', (t, end) => {
188188
end()
189189
})
190190
})
191+
192+
test('should not create segment or llm events when ai_monitoring.enabled is false', (t, end) => {
193+
const { client, agent } = t.nr
194+
agent.config.ai_monitoring.enabled = false
195+
helper.runInTransaction(agent, async (tx) => {
196+
await client.embeddings.create({
197+
input: 'This is an embedding test.',
198+
model: 'text-embedding-ada-002'
199+
})
200+
201+
const events = agent.customEventAggregator.events.toArray()
202+
assert.equal(events.length, 0, 'should not create llm events when ai_monitoring is disabled')
203+
204+
const children = tx.trace.segments.root.children
205+
assert.equal(children.length, 0, 'should not create OpenAI embedding segment')
206+
207+
tx.end()
208+
end()
209+
})
210+
})

0 commit comments

Comments (0)