File tree: 5 files changed, +39 −12 lines changed

Sample app package.json (scripts):
"run:prompt_mgmt" : " npm run build && node dist/src/sample_prompt_mgmt.js" ,
20
20
"run:sample_vision" : " npm run build && node dist/src/sample_vision_prompt.js" ,
21
21
"run:sample_azure" : " npm run build && node dist/src/sample_azure.js" ,
22
+ "run:openai_streaming" : " npm run build && node dist/src/sample_openai_streaming.js" ,
22
23
"run:sampler" : " npm run build && node dist/src/sample_sampler.js" ,
23
24
"run:llamaindex" : " npm run build && node dist/src/sample_llamaindex.js" ,
24
25
"run:pinecone" : " npm run build && node dist/src/sample_pinecone.js" ,
New sample source file, sample_openai_streaming (added in full):
+ import * as traceloop from "@traceloop/node-server-sdk";
+ import OpenAI from "openai";
+
+ traceloop.initialize({
+   appName: "sample_openai_streaming",
+   apiKey: process.env.TRACELOOP_API_KEY,
+   disableBatch: true,
+ });
+ const openai = new OpenAI();
+
+ async function create_joke() {
+   const responseStream = await traceloop.withTask(
+     { name: "joke_creation" },
+     () => {
+       return openai.chat.completions.create({
+         model: "gpt-3.5-turbo",
+         messages: [
+           { role: "user", content: "Tell me a joke about opentelemetry" },
+         ],
+         stream: true,
+       });
+     },
+   );
+   let result = "";
+   for await (const chunk of responseStream) {
+     result += chunk.choices[0]?.delta?.content || "";
+   }
+   console.log(result);
+   return result;
+ }
+
+ create_joke();
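For context rather than as part of the change: the sample wraps only the completions call in a "joke_creation" task span and consumes the stream afterwards, and the new run:openai_streaming script builds the package and runs the compiled file. A minimal sketch of nesting the same call under a workflow span, assuming traceloop.withWorkflow accepts the same ({ name }, fn) shape as withTask does above:

import * as traceloop from "@traceloop/node-server-sdk";
import OpenAI from "openai";

const openai = new OpenAI();

// Illustrative variant (not in this commit): nest the "joke_creation" task
// under a parent workflow span. Assumes withWorkflow mirrors withTask's
// ({ name }, fn) signature and returns the callback's result.
async function createJokeWorkflow(): Promise<string> {
  return traceloop.withWorkflow({ name: "joke_workflow" }, async () => {
    const responseStream = await traceloop.withTask(
      { name: "joke_creation" },
      () =>
        openai.chat.completions.create({
          model: "gpt-3.5-turbo",
          messages: [
            { role: "user", content: "Tell me a joke about opentelemetry" },
          ],
          stream: true,
        }),
    );
    let result = "";
    for await (const chunk of responseStream) {
      // Each streamed chunk carries an incremental delta; concatenate them.
      result += chunk.choices[0]?.delta?.content || "";
    }
    return result;
  });
}

createJokeWorkflow().then(console.log);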
SDK configuration (initialize):
@@ -54,10 +54,6 @@ export const initialize = (options: InitializeOptions) => {
      options.traceloopSyncDevPollingInterval =
        Number(process.env.TRACELOOP_SYNC_DEV_POLLING_INTERVAL) || 5;
    }
-
-    if (options.shouldEnrichMetrics === undefined) {
-      options.shouldEnrichMetrics = true;
-    }
  }

  validateConfiguration(options);
InitializeOptions interface:
@@ -49,12 +49,6 @@ export interface InitializeOptions {
   */
  logLevel?: "debug" | "info" | "warn" | "error";

-  /**
-   * Whether to enrich metrics with additional data like OpenAI token usage for streaming requests. Optional.
-   * Defaults to true.
-   */
-  shouldEnrichMetrics?: boolean;
-
  /**
   * Whether to log prompts, completions and embeddings on traces. Optional.
   * Defaults to true.
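For SDK users, removing this option together with the environment-variable gate in the instrumentation diff below implies a small migration; a hedged before/after sketch (the app name is illustrative, not taken from the diff):

import * as traceloop from "@traceloop/node-server-sdk";

// Before this change (illustrative; no longer compiles after the diff above):
//   traceloop.initialize({ appName: "my_app", shouldEnrichMetrics: false });

// After this change: the toggle moves to the TRACELOOP_ENRICH_TOKENS
// environment variable, read where the OpenAI instrumentation is constructed
// (see the instrumentation diff below). Set TRACELOOP_ENRICH_TOKENS=false in
// the process environment to disable enrichment; leaving it unset keeps the
// previous default of enabled.
traceloop.initialize({ appName: "my_app" });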
Instrumentation setup:
@@ -56,9 +56,11 @@ const instrumentations: Instrumentation[] = [];
export const initInstrumentations = () => {
  const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
+  const enrichTokens =
+    (process.env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";

  openAIInstrumentation = new OpenAIInstrumentation({
-    enrichTokens: _configuration?.shouldEnrichMetrics,
+    enrichTokens,
    exceptionLogger,
  });
  instrumentations.push(openAIInstrumentation);
@@ -109,13 +111,15 @@ export const manuallyInitInstrumentations = (
  instrumentModules: InitializeOptions["instrumentModules"],
) => {
  const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
+  const enrichTokens =
+    (process.env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";

  // Clear the instrumentations array that was initialized by default
  instrumentations.length = 0;

  if (instrumentModules?.openAI) {
    openAIInstrumentation = new OpenAIInstrumentation({
-      enrichTokens: _configuration?.shouldEnrichMetrics,
+      enrichTokens,
      exceptionLogger,
    });
    instrumentations.push(openAIInstrumentation);
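Restating the gate added in both functions above (the helper name is illustrative, not SDK API): enrichment stays on by default and is only disabled when TRACELOOP_ENRICH_TOKENS is set to something other than a case-insensitive "true".

// Illustrative helper (not part of the SDK) restating the gating expression
// used in both hunks above.
const isEnrichTokensEnabled = (env: NodeJS.ProcessEnv = process.env): boolean =>
  (env.TRACELOOP_ENRICH_TOKENS || "true").toLowerCase() === "true";

console.log(isEnrichTokensEnabled({}));                                   // true  (unset defaults to enabled)
console.log(isEnrichTokensEnabled({ TRACELOOP_ENRICH_TOKENS: "TRUE" }));  // true  (comparison is case-insensitive)
console.log(isEnrichTokensEnabled({ TRACELOOP_ENRICH_TOKENS: "false" })); // false
console.log(isEnrichTokensEnabled({ TRACELOOP_ENRICH_TOKENS: "0" }));     // false (any value other than "true" disables)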