Skip to content

Commit 29c9be5

Browse files
authored
Merge pull request microsoft#396 from microsoft/alzollin/removedCustomIChatClient
Removed custom IChatClient.
2 parents 2bd7876 + d087015 commit 29c9be5

File tree

1 file changed

+1
-34
lines changed

1 file changed

+1
-34
lines changed

AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs

Lines changed: 1 addition & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -20,39 +20,6 @@ internal static class OnnxRuntimeGenAIChatClientFactory
2020

2121
private static readonly SemaphoreSlim _createSemaphore = new(1, 1);
2222

23-
// This is a workaround to ensure that the Config object is disposed
24-
// Remove after https://github.com/microsoft/onnxruntime-genai/pull/1364 is merged.
25-
private sealed class ConfigDisposingOnnxRuntimeGenAIChatClient : IChatClient, IDisposable
26-
{
27-
private Config _config;
28-
private IChatClient _innerChatClient;
29-
30-
public ConfigDisposingOnnxRuntimeGenAIChatClient(Config config, OnnxRuntimeGenAIChatClientOptions options)
31-
{
32-
_config = config;
33-
#pragma warning disable CA2000 // Dispose objects before losing scope
34-
_innerChatClient = new OnnxRuntimeGenAIChatClient(new Model(config), true, options);
35-
#pragma warning restore CA2000 // Dispose objects before losing scope
36-
}
37-
38-
public Task<ChatResponse> GetResponseAsync(
39-
IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
40-
_innerChatClient.GetResponseAsync(messages, options, cancellationToken);
41-
42-
public IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
43-
IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
44-
_innerChatClient.GetStreamingResponseAsync(messages, options, cancellationToken);
45-
46-
object? IChatClient.GetService(Type serviceType, object? serviceKey) =>
47-
_innerChatClient.GetService(serviceType, serviceKey);
48-
49-
public void Dispose()
50-
{
51-
_innerChatClient.Dispose();
52-
_config.Dispose();
53-
}
54-
}
55-
5623
public static async Task<IChatClient?> CreateAsync(string modelDir, LlmPromptTemplate? template = null, string? provider = null, CancellationToken cancellationToken = default)
5724
{
5825
var options = new OnnxRuntimeGenAIChatClientOptions
@@ -79,7 +46,7 @@ await Task.Run(
7946
config.AppendProvider(provider);
8047
}
8148

82-
chatClient = new ConfigDisposingOnnxRuntimeGenAIChatClient(config, options);
49+
chatClient = new OnnxRuntimeGenAIChatClient(config, true, options);
8350
cancellationToken.ThrowIfCancellationRequested();
8451
},
8552
cancellationToken);

0 commit comments

Comments (0)