
Commit 80b9bd8

Merge branch 'main' into devtoolsplugin-nobrowser
Authored May 1, 2025 · 2 parents: 7a4dad1 + 55bf52b

8 files changed: +644, -175 lines

dev-proxy-abstractions/LanguageModel/OllamaLanguageModelClient.cs

Lines changed: 20 additions & 5 deletions
@@ -134,6 +134,13 @@ private async Task<bool> IsEnabledInternalAsync()
         );
         _logger.LogDebug("Response status: {response}", response.StatusCode);
 
+        if (!response.IsSuccessStatusCode)
+        {
+            var errorResponse = await response.Content.ReadAsStringAsync();
+            _logger.LogDebug("LM error: {errorResponse}", errorResponse);
+            return null;
+        }
+
         var res = await response.Content.ReadFromJsonAsync<OllamaLanguageModelCompletionResponse>();
         if (res is null)
         {
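The guard added here (and repeated in the chat-completion path below, as well as in the OpenAI client) checks the status code before attempting to deserialize: on a non-2xx response the body is an error document rather than a completion, so ReadFromJsonAsync would either throw or produce a misleading empty object, while ReadAsStringAsync preserves the raw error for the debug log. A minimal sketch of the pattern in isolation, assuming a hypothetical endpoint and response type (not the actual Dev Proxy code):

    // Minimal sketch of the status-code guard; the endpoint URL and
    // CompletionResponse type are illustrative assumptions, not the
    // actual Dev Proxy implementation.
    using System;
    using System.Net.Http;
    using System.Net.Http.Json;
    using System.Threading.Tasks;

    record CompletionResponse(string Response);

    static class LanguageModelExample
    {
        static readonly HttpClient _client = new();

        public static async Task<CompletionResponse?> GenerateAsync(object payload)
        {
            // Hypothetical local Ollama endpoint.
            var response = await _client.PostAsJsonAsync("http://localhost:11434/api/generate", payload);
            Console.WriteLine($"Response status: {response.StatusCode}");

            // Read the error body as a plain string for the log instead of
            // deserializing it, then bail out with null.
            if (!response.IsSuccessStatusCode)
            {
                var errorResponse = await response.Content.ReadAsStringAsync();
                Console.Error.WriteLine($"LM error: {errorResponse}");
                return null;
            }

            // Only a successful response is expected to carry completion JSON.
            return await response.Content.ReadFromJsonAsync<CompletionResponse>();
        }
    }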
@@ -173,7 +180,7 @@ private async Task<bool> IsEnabledInternalAsync()
             return null;
         }
 
-        if (_configuration.CacheResponses && _cacheChatCompletion.TryGetValue(messages, out var cachedResponse))
+        if (_configuration.CacheResponses && _cacheChatCompletion.TryGetCacheValue(messages, out var cachedResponse))
         {
             _logger.LogDebug("Returning cached response for message: {lastMessage}", messages.Last().Content);
             return cachedResponse;
@@ -221,9 +228,17 @@ private async Task<bool> IsEnabledInternalAsync()
         );
         _logger.LogDebug("Response: {response}", response.StatusCode);
 
+        if (!response.IsSuccessStatusCode)
+        {
+            var errorResponse = await response.Content.ReadAsStringAsync();
+            _logger.LogDebug("LM error: {errorResponse}", errorResponse);
+            return null;
+        }
+
         var res = await response.Content.ReadFromJsonAsync<OllamaLanguageModelChatCompletionResponse>();
         if (res is null)
         {
+            _logger.LogDebug("Response: null");
             return res;
         }
 
@@ -240,15 +255,15 @@ private async Task<bool> IsEnabledInternalAsync()
 
 internal static class OllamaCacheChatCompletionExtensions
 {
-    public static OllamaLanguageModelChatCompletionMessage[]? GetKey(
-        this Dictionary<OllamaLanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
+    public static ILanguageModelChatCompletionMessage[]? GetKey(
+        this Dictionary<ILanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
         ILanguageModelChatCompletionMessage[] messages)
     {
         return cache.Keys.FirstOrDefault(k => k.SequenceEqual(messages));
     }
 
-    public static bool TryGetValue(
-        this Dictionary<OllamaLanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
+    public static bool TryGetCacheValue(
+        this Dictionary<ILanguageModelChatCompletionMessage[], OllamaLanguageModelChatCompletionResponse> cache,
         ILanguageModelChatCompletionMessage[] messages, out OllamaLanguageModelChatCompletionResponse? value)
     {
         var key = cache.GetKey(messages);
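Two changes land in this extension class: the cache keys are retyped to the shared ILanguageModelChatCompletionMessage[] interface array, and the lookup is renamed from TryGetValue to TryGetCacheValue. The rename matters because C# always prefers an applicable instance method over an extension method: with interface-typed keys, cache.TryGetValue(messages, ...) would silently bind to Dictionary's own TryGetValue, which compares array keys by reference and never matches a freshly built message array. The extension instead scans the keys structurally with SequenceEqual. A small self-contained illustration of the difference, using string arrays as stand-in keys:

    // Illustration only: string[] stands in for the message-array key type
    // used by the Dev Proxy cache.
    using System;
    using System.Collections.Generic;
    using System.Linq;

    var cache = new Dictionary<string[], string>
    {
        [new[] { "system", "hello" }] = "cached answer"
    };

    // An equal-but-distinct key array, like a freshly built message list.
    var lookup = new[] { "system", "hello" };

    // Dictionary's instance method hashes array keys by reference: miss.
    Console.WriteLine(cache.TryGetValue(lookup, out _));                    // False

    // Structural scan, as in the commit's TryGetCacheValue: hit.
    var key = cache.Keys.FirstOrDefault(k => k.SequenceEqual(lookup));
    Console.WriteLine(key is not null && cache.TryGetValue(key, out _));    // True

The scan is linear in the number of cached entries, which is presumably acceptable for a small in-process cache; supplying a custom IEqualityComparer<T[]> to the dictionary would be the constant-time alternative.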

dev-proxy-abstractions/LanguageModel/OpenAILanguageModelClient.cs

Lines changed: 12 additions & 5 deletions
@@ -125,7 +125,7 @@ private async Task<bool> IsEnabledInternalAsync()
             return null;
         }
 
-        if (_configuration.CacheResponses && _cacheChatCompletion.TryGetValue(messages, out var cachedResponse))
+        if (_configuration.CacheResponses && _cacheChatCompletion.TryGetCacheValue(messages, out var cachedResponse))
         {
             _logger.LogDebug("Returning cached response for message: {lastMessage}", messages.Last().Content);
             return cachedResponse;
@@ -173,6 +173,13 @@ private async Task<bool> IsEnabledInternalAsync()
         var response = await client.PostAsJsonAsync(url, payload);
         _logger.LogDebug("Response: {response}", response.StatusCode);
 
+        if (!response.IsSuccessStatusCode)
+        {
+            var errorResponse = await response.Content.ReadAsStringAsync();
+            _logger.LogDebug("LM error: {errorResponse}", errorResponse);
+            return null;
+        }
+
         var res = await response.Content.ReadFromJsonAsync<OpenAIChatCompletionResponse>();
         if (res is null)
         {
@@ -192,15 +199,15 @@ private async Task<bool> IsEnabledInternalAsync()
 
 internal static class OpenAICacheChatCompletionExtensions
 {
-    public static OpenAIChatCompletionMessage[]? GetKey(
-        this Dictionary<OpenAIChatCompletionMessage[], OpenAIChatCompletionResponse> cache,
+    public static ILanguageModelChatCompletionMessage[]? GetKey(
+        this Dictionary<ILanguageModelChatCompletionMessage[], OpenAIChatCompletionResponse> cache,
         ILanguageModelChatCompletionMessage[] messages)
     {
         return cache.Keys.FirstOrDefault(k => k.SequenceEqual(messages));
     }
 
-    public static bool TryGetValue(
-        this Dictionary<OpenAIChatCompletionMessage[], OpenAIChatCompletionResponse> cache,
+    public static bool TryGetCacheValue(
+        this Dictionary<ILanguageModelChatCompletionMessage[], OpenAIChatCompletionResponse> cache,
         ILanguageModelChatCompletionMessage[] messages, out OpenAIChatCompletionResponse? value)
     {
         var key = cache.GetKey(messages);

dev-proxy-plugins/Http.cs

Lines changed: 2 additions & 2 deletions
@@ -234,7 +234,7 @@ internal static class Http
         "secret",
         "x-secret",
         "access-key",
-        "api-key",
-        "apikey"
+        "apikey",
+        "code"
     ];
 }
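This list in Http.cs appears to hold the names of headers and query parameters whose values should be treated as secrets; the commit removes "api-key" and adds "code", the query parameter that commonly carries function keys on Azure Functions endpoints. A hedged sketch of how such a deny-list might be applied when logging a request, with an illustrative Redact helper that is an assumption, not the actual Dev Proxy implementation:

    // Sketch only: sensitiveNames mirrors the list above, but the Redact
    // helper and sample query are illustrative assumptions.
    using System;
    using System.Collections.Generic;
    using System.Linq;

    string[] sensitiveNames = ["secret", "x-secret", "access-key", "apikey", "code"];

    // Replace the value of any parameter whose name is on the deny-list.
    string Redact(string name, string value) =>
        sensitiveNames.Contains(name, StringComparer.OrdinalIgnoreCase)
            ? "***"
            : value;

    var query = new Dictionary<string, string>
    {
        ["api-version"] = "2024-05-01",
        ["code"] = "supersecretfunctionkey",
    };

    foreach (var (name, value) in query)
    {
        Console.WriteLine($"{name}={Redact(name, value)}");
        // api-version=2024-05-01
        // code=***
    }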
