Skip to content

Commit 9d8ec07

Browse files
committed
wip: generateContentStreamInternal
1 parent dfe4687 commit 9d8ec07

File tree

1 file changed

+49
-27
lines changed

1 file changed

+49
-27
lines changed

lib/instrumentation/@google/genai.js

Lines changed: 49 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ function addLlmMeta({ agent, transaction }) {
6161
* @param {Shim} params.shim the current shim instance
6262
* @param {TraceSegment} params.segment active segment from chat completion
6363
* @param {object} params.request chat completion params
64-
* @param {object} params.response chat completion response
64+
* @param {GenerateContentResponse} params.response chat completion response
6565
* @param {boolean} [params.err] err if it exists
6666
* @param {Transaction} params.transaction active transaction
6767
*/
@@ -127,6 +127,7 @@ module.exports = function initialize(agent, googleGenAi, moduleName, shim) {
127127
shim.logger.debug('config.ai_monitoring.enabled is set to false.')
128128
return
129129
}
130+
130131
// Update the tracking metric name with the version of the library
131132
// being instrumented. We do not have access to the version when
132133
// initially declaring the variable.
@@ -159,10 +160,6 @@ module.exports = function initialize(agent, googleGenAi, moduleName, shim) {
159160
})
160161

161162
const models = googleGenAi.Models
162-
// TODO: why is generateContentInternal and generateContentStreamInternal
163-
// exposed but not generateContent or generateContentStream?
164-
165-
// TODO: look at computeTokens and countTokens?
166163

167164
/**
168165
* Instruments chat completion creation
@@ -192,14 +189,51 @@ module.exports = function initialize(agent, googleGenAi, moduleName, shim) {
192189
}
193190
)
194191

195-
/*
196-
* Chat completions create can return a stream once promise resolves
197-
* This wraps the iterator which is a generator function
198-
* We will call the original iterator, intercept chunks and yield
199-
* to the original. On complete we will construct the new message object
200-
* with what we have seen in the stream and create the chat completion
201-
* messages
202-
*/
192+
// Wrap the stream method so we can intercept iterator.next calls on the
// async iterable it returns and record chat completion data as chunks flow.
shim.wrap(models.prototype, 'generateContentStreamInternal', function wrapGenerateContentStream(shim, func) {
  /**
   * Replacement for generateContentStreamInternal. Resolves the original
   * stream, then monkey-patches its `next` so every chunk (and any stream
   * error) can be observed without changing what the caller sees.
   *
   * @param {...*} args original arguments forwarded untouched
   * @returns {Promise<AsyncIterable>} the (possibly instrumented) stream
   */
  return async function wrappedGenerateContentStream(...args) {
    if (!agent.config.ai_monitoring.streaming.enabled) {
      shim.logger.warn(
        '`ai_monitoring.streaming.enabled` is set to `false`, stream will not be instrumented.'
      )
      agent.metrics.getOrCreateMetric(AI.STREAMING_DISABLED).incrementCallCount()
      // Still invoke the original so the caller gets their stream back;
      // returning undefined here would break user code entirely.
      return func.apply(this, args)
    }

    const iterator = await func.apply(this, args)

    if (iterator && typeof iterator[Symbol.asyncIterator] === 'function') {
      const originalNext = iterator.next
      let err
      let result

      iterator.next = async function wrappedNext(...nextArgs) {
        try {
          result = await originalNext.apply(iterator, nextArgs)
        } catch (streamErr) {
          err = streamErr
          // Rethrow so the consumer's own error handling still fires.
          throw err
        } finally {
          // TODO: How to integrate this with RecorderSpec?
          // segment.touch()
          // NOTE(review): this records on every chunk; presumably it should
          // only fire once, on `result.done` or on error — confirm intent.
          recordChatCompletionMessages({
            agent: shim.agent,
            shim,
            // segment,
            // transaction,
            // request,
            // Optional chaining: `result` is undefined if the very first
            // next() call rejects, which would otherwise throw a TypeError
            // here and mask the real stream error.
            response: result?.value,
            // err
          })
        }

        return result
      }
    }

    return iterator
  }
})
203237

204238
// TODO: might need to instrument processAfcStream too
205239
// https://github.com/googleapis/js-genai/blob/cd0454862b4a0251d2606eeca8500b3b76004944/src/models.ts#L183
@@ -212,23 +246,11 @@ module.exports = function initialize(agent, googleGenAi, moduleName, shim) {
212246
agent.metrics.getOrCreateMetric(AI.STREAMING_DISABLED).incrementCallCount()
213247
return
214248
}
215-
const [request] = args
216249

217250
return new RecorderSpec({
218251
name: GEMINI.COMPLETION,
219252
promise: true,
220-
after({ error: err, result: response, segment, transaction }) {
221-
// TODO: actually need to handle the stream
222-
recordChatCompletionMessages({
223-
agent,
224-
shim,
225-
segment,
226-
transaction,
227-
request,
228-
response,
229-
err
230-
})
231-
253+
after({ result: response, segment, transaction }) {
232254
addLlmMeta({ agent, transaction })
233255
}
234256
})
@@ -277,7 +299,7 @@ module.exports = function initialize(agent, googleGenAi, moduleName, shim) {
277299
}
278300

279301
// cleanup keys on response before returning to user code
280-
// delete response.headers
302+
delete response.headers
281303
}
282304
})
283305
}

0 commit comments

Comments
 (0)