Skip to content

Commit 143bc66

Browse files
authored
fix(sdk): properly initialize token enrich value for instrumentations (#384)
1 parent 1e404b7 commit 143bc66

File tree

5 files changed

+39
-12
lines changed

5 files changed

+39
-12
lines changed

packages/sample-app/package.json

+1
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
"run:prompt_mgmt": "npm run build && node dist/src/sample_prompt_mgmt.js",
2020
"run:sample_vision": "npm run build && node dist/src/sample_vision_prompt.js",
2121
"run:sample_azure": "npm run build && node dist/src/sample_azure.js",
22+
"run:openai_streaming": "npm run build && node dist/src/sample_openai_streaming.js",
2223
"run:sampler": "npm run build && node dist/src/sample_sampler.js",
2324
"run:llamaindex": "npm run build && node dist/src/sample_llamaindex.js",
2425
"run:pinecone": "npm run build && node dist/src/sample_pinecone.js",
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import * as traceloop from "@traceloop/node-server-sdk";
2+
import OpenAI from "openai";
3+
4+
traceloop.initialize({
5+
appName: "sample_openai_streaming",
6+
apiKey: process.env.TRACELOOP_API_KEY,
7+
disableBatch: true,
8+
});
9+
const openai = new OpenAI();
10+
11+
async function create_joke() {
12+
const responseStream = await traceloop.withTask(
13+
{ name: "joke_creation" },
14+
() => {
15+
return openai.chat.completions.create({
16+
model: "gpt-3.5-turbo",
17+
messages: [
18+
{ role: "user", content: "Tell me a joke about opentelemetry" },
19+
],
20+
stream: true,
21+
});
22+
},
23+
);
24+
let result = "";
25+
for await (const chunk of responseStream) {
26+
result += chunk.choices[0]?.delta?.content || "";
27+
}
28+
console.log(result);
29+
return result;
30+
}
31+
32+
create_joke();

packages/traceloop-sdk/src/lib/configuration/index.ts

-4
Original file line numberDiff line numberDiff line change
@@ -54,10 +54,6 @@ export const initialize = (options: InitializeOptions) => {
5454
options.traceloopSyncDevPollingInterval =
5555
Number(process.env.TRACELOOP_SYNC_DEV_POLLING_INTERVAL) || 5;
5656
}
57-
58-
if (options.shouldEnrichMetrics === undefined) {
59-
options.shouldEnrichMetrics = true;
60-
}
6157
}
6258

6359
validateConfiguration(options);

packages/traceloop-sdk/src/lib/interfaces/initialize-options.interface.ts

-6
Original file line numberDiff line numberDiff line change
@@ -49,12 +49,6 @@ export interface InitializeOptions {
4949
*/
5050
logLevel?: "debug" | "info" | "warn" | "error";
5151

52-
/**
53-
* Whether to enrich metrics with additional data like OpenAI token usage for streaming requests. Optional.
54-
* Defaults to true.
55-
*/
56-
shouldEnrichMetrics?: boolean;
57-
5852
/**
5953
* Whether to log prompts, completions and embeddings on traces. Optional.
6054
* Defaults to true.

packages/traceloop-sdk/src/lib/tracing/index.ts

+6-2
Original file line numberDiff line numberDiff line change
@@ -56,9 +56,11 @@ const instrumentations: Instrumentation[] = [];
5656

5757
export const initInstrumentations = () => {
5858
const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
59+
const enrichTokens =
60+
(process.env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";
5961

6062
openAIInstrumentation = new OpenAIInstrumentation({
61-
enrichTokens: _configuration?.shouldEnrichMetrics,
63+
enrichTokens,
6264
exceptionLogger,
6365
});
6466
instrumentations.push(openAIInstrumentation);
@@ -109,13 +111,15 @@ export const manuallyInitInstrumentations = (
109111
instrumentModules: InitializeOptions["instrumentModules"],
110112
) => {
111113
const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
114+
const enrichTokens =
115+
(process.env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";
112116

113117
// Clear the instrumentations array that was initialized by default
114118
instrumentations.length = 0;
115119

116120
if (instrumentModules?.openAI) {
117121
openAIInstrumentation = new OpenAIInstrumentation({
118-
enrichTokens: _configuration?.shouldEnrichMetrics,
122+
enrichTokens,
119123
exceptionLogger,
120124
});
121125
instrumentations.push(openAIInstrumentation);

0 commit comments

Comments (0)