
Commit fef5bf0

add provider field to chats (#4959)
1 parent 8e2efc6 commit fef5bf0

36 files changed: +83 −23 lines changed

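The change follows the same pattern in every provider shown below: each LLM class identifies itself via a `className` property (added to the constructor where it did not already exist) and now forwards that value as a `provider` field, both in the metrics object returned from non-streaming completions and in the options passed to the stream performance monitor. The sketch below is a minimal illustration of that pattern; `ExampleProviderLLM`, `buildMetrics`, and `buildStreamOptions` are hypothetical names, not taken from this commit.

class ExampleProviderLLM {
  constructor(modelPreference = null) {
    // Names the provider that produced a chat; carried alongside the metrics.
    this.className = "ExampleProviderLLM";
    this.model = modelPreference || "example-default-model";
  }

  // Non-streaming path: the returned metrics now carry the provider name.
  buildMetrics(result, completionTokens) {
    return {
      textResponse: result.output,
      metrics: {
        outputTps: completionTokens / result.duration,
        duration: result.duration,
        model: this.model,
        provider: this.className, // new field added by this commit
        timestamp: new Date(),
      },
    };
  }

  // Streaming path: the same provider tag is forwarded to the stream monitor.
  buildStreamOptions(messages) {
    return {
      messages,
      runPromptTokenCalculation: false,
      modelTag: this.model,
      provider: this.className, // new field added by this commit
    };
  }
}
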
server/utils/AiProviders/anthropic/index.js

Lines changed: 2 additions & 0 deletions
@@ -172,6 +172,7 @@ class AnthropicLLM {
         outputTps: completionTokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -194,6 +195,7 @@ class AnthropicLLM {
       messages,
       runPromptTokenCalculation: false,
       modelTag: this.model,
+      provider: this.className,
     });
 
     return measuredStreamRequest;

server/utils/AiProviders/apipie/index.js

Lines changed: 2 additions & 0 deletions
@@ -221,6 +221,7 @@ class ApiPieLLM {
           (result.output.usage?.completion_tokens || 0) / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -242,6 +243,7 @@ class ApiPieLLM {
       messages,
       runPromptTokenCalculation: true,
       modelTag: this.model,
+      provider: this.className,
     });
     return measuredStreamRequest;
   }

server/utils/AiProviders/azureOpenAi/index.js

Lines changed: 3 additions & 0 deletions
@@ -15,6 +15,7 @@ class AzureOpenAiLLM {
     if (!process.env.AZURE_OPENAI_KEY)
       throw new Error("No Azure API key was set.");
 
+    this.className = "AzureOpenAiLLM";
     this.openai = new OpenAI({
       apiKey: process.env.AZURE_OPENAI_KEY,
       baseURL: AzureOpenAiLLM.formatBaseUrl(process.env.AZURE_OPENAI_ENDPOINT),
@@ -175,6 +176,7 @@ class AzureOpenAiLLM {
         outputTps: result.output.usage.completion_tokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -197,6 +199,7 @@ class AzureOpenAiLLM {
       messages,
       runPromptTokenCalculation: true,
       modelTag: this.model,
+      provider: this.className,
     });
 
     return measuredStreamRequest;

server/utils/AiProviders/bedrock/index.js

Lines changed: 3 additions & 0 deletions
@@ -72,6 +72,7 @@ class AWSBedrockLLM {
         throw new Error(`Required environment variable ${envVar} is not set.`);
     }
 
+    this.className = "AWSBedrockLLM";
     this.model =
       modelPreference || process.env.AWS_BEDROCK_LLM_MODEL_PREFERENCE;
 
@@ -448,6 +449,7 @@ class AWSBedrockLLM {
         outputTps: outputTps,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -493,6 +495,7 @@ class AWSBedrockLLM {
         messages,
         runPromptTokenCalculation: false,
         modelTag: this.model,
+        provider: this.className,
       });
       return measuredStreamRequest;
     } catch (e) {

server/utils/AiProviders/cohere/index.js

Lines changed: 2 additions & 0 deletions
@@ -125,6 +125,7 @@ class CohereLLM {
         outputTps: completionTokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -143,6 +144,7 @@ class CohereLLM {
       messages,
       runPromptTokenCalculation: false,
       modelTag: this.model,
+      provider: this.className,
     });
 
     return measuredStreamRequest;

server/utils/AiProviders/cometapi/index.js

Lines changed: 2 additions & 0 deletions
@@ -226,6 +226,7 @@ class CometApiLLM {
         outputTps: result.output.usage.completion_tokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -247,6 +248,7 @@ class CometApiLLM {
       messages,
       runPromptTokenCalculation: true,
       modelTag: this.model,
+      provider: this.className,
     });
     return measuredStreamRequest;
   }

server/utils/AiProviders/deepseek/index.js

Lines changed: 2 additions & 0 deletions
@@ -131,6 +131,7 @@ class DeepSeekLLM {
         outputTps: result.output.usage.completion_tokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -152,6 +153,7 @@ class DeepSeekLLM {
       messages,
       runPromptTokenCalculation: false,
       modelTag: this.model,
+      provider: this.className,
     });
 
     return measuredStreamRequest;

server/utils/AiProviders/dellProAiStudio/index.js

Lines changed: 2 additions & 0 deletions
@@ -166,6 +166,7 @@ class DellProAiStudioLLM {
         outputTps: result.output.usage?.completion_tokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -187,6 +188,7 @@ class DellProAiStudioLLM {
       messages,
       runPromptTokenCalculation: true,
      modelTag: this.model,
+      provider: this.className,
     });
     return measuredStreamRequest;
   }

server/utils/AiProviders/dockerModelRunner/index.js

Lines changed: 3 additions & 0 deletions
@@ -26,6 +26,7 @@ class DockerModelRunnerLLM {
     if (!process.env.DOCKER_MODEL_RUNNER_LLM_MODEL_PREF && !modelPreference)
       throw new Error("No Docker Model Runner Model Pref was set.");
 
+    this.className = "DockerModelRunnerLLM";
     this.dmr = new OpenAIApi({
       baseURL: parseDockerModelRunnerEndpoint(
         process.env.DOCKER_MODEL_RUNNER_BASE_PATH
@@ -173,6 +174,7 @@ class DockerModelRunnerLLM {
         outputTps: result.output.usage?.completion_tokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -194,6 +196,7 @@ class DockerModelRunnerLLM {
       messages,
       runPromptTokenCalculation: true,
      modelTag: this.model,
+      provider: this.className,
     });
     return measuredStreamRequest;
   }

server/utils/AiProviders/fireworksAi/index.js

Lines changed: 2 additions & 0 deletions
@@ -164,6 +164,7 @@ class FireworksAiLLM {
         outputTps: result.output.usage.completion_tokens / result.duration,
         duration: result.duration,
         model: this.model,
+        provider: this.className,
         timestamp: new Date(),
       },
     };
@@ -185,6 +186,7 @@ class FireworksAiLLM {
       messages,
      runPromptTokenCalculation: false,
       modelTag: this.model,
+      provider: this.className,
     });
     return measuredStreamRequest;
   }

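Downstream, the `provider` value travels with the rest of the chat metrics. As a hypothetical illustration only (the consuming code is not part of the diffs shown above, and `recordChat`/`saveChat` are invented names), a caller that records chats could read the new field straight off the returned metrics object:

// Hypothetical consumer: none of these names come from this commit.
async function recordChat(result, saveChat) {
  const { textResponse, metrics } = result;
  await saveChat({
    response: textResponse,
    model: metrics.model,
    provider: metrics.provider, // e.g. "AnthropicLLM", "AzureOpenAiLLM", ...
    tps: metrics.outputTps,
  });
}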