-
Notifications
You must be signed in to change notification settings - Fork 13
Expand file tree
/
Copy pathOpenAiCompatibleProvider.cs
More file actions
138 lines (119 loc) · 5.15 KB
/
OpenAiCompatibleProvider.cs
File metadata and controls
138 lines (119 loc) · 5.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
using System.ClientModel;
using System.Runtime.CompilerServices;
using System.Threading;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using OpenAI;
using SharpClaw.Code.Infrastructure.Abstractions;
using SharpClaw.Code.Providers.Abstractions;
using SharpClaw.Code.Providers.Configuration;
using SharpClaw.Code.Providers.Internal;
using SharpClaw.Code.Providers.Models;
using SharpClaw.Code.Protocol.Models;
namespace SharpClaw.Code.Providers;
/// <summary>
/// Streams responses from an OpenAI-compatible chat completions API using Microsoft.Extensions.AI and the OpenAI .NET SDK.
/// </summary>
public sealed class OpenAiCompatibleProvider(
IOptions<OpenAiCompatibleProviderOptions> options,
ISystemClock systemClock,
ILogger<OpenAiCompatibleProvider> logger) : IModelProvider
{
private readonly OpenAiCompatibleProviderOptions _options = options.Value;

// Lazily-created client for the default (non-profile) configuration.
// Published via Interlocked.CompareExchange so concurrent first requests
// converge on a single shared instance instead of each caching its own.
private OpenAIClient? _cachedOpenAiClient;

/// <summary>Request-metadata key used to select a local runtime profile.</summary>
internal const string RuntimeProfileMetadataKey = "openai-compatible.profile";

/// <inheritdoc />
public string ProviderName => _options.ProviderName;

/// <inheritdoc />
public Task<AuthStatus> GetAuthStatusAsync(CancellationToken cancellationToken)
    => Task.FromResult(Internal.ProviderAuthStatusFactory.FromConfiguration(
        ProviderName,
        _options.ApiKey,
        _options.AuthMode,
        // True when any local runtime profile uses an auth mode other than
        // a plain API key; surfaced to the status factory as a hint.
        _options.LocalRuntimes.Values.Any(static runtime => runtime.AuthMode != ProviderAuthMode.ApiKey)));

/// <inheritdoc />
public Task<ProviderStreamHandle> StartStreamAsync(ProviderRequest request, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    logger.LogInformation("Starting OpenAI-compatible MEAI stream for request {RequestId}.", request.Id);

    // The enumerable is lazy: no network I/O happens until the caller starts
    // consuming the handle's event stream.
    return Task.FromResult(new ProviderStreamHandle(request, StreamEventsAsync(request, cancellationToken)));
}

/// <summary>
/// Streams provider events for <paramref name="request"/> by adapting the
/// Microsoft.Extensions.AI streaming response into <see cref="ProviderEvent"/> values.
/// </summary>
private async IAsyncEnumerable<ProviderEvent> StreamEventsAsync(
    ProviderRequest request,
    [EnumeratorCancellation] CancellationToken cancellationToken)
{
    var profile = ResolveProfile(request.Metadata);

    // Model precedence: explicit request model, then the profile's default
    // chat model, then the provider-wide default.
    var modelId = Internal.ProviderHttpHelpers.ResolveModelOrDefault(
        request.Model,
        profile?.DefaultChatModel ?? _options.DefaultModel);

    var openAiClient = GetOrCreateOpenAiClient(profile);
    var nativeClient = openAiClient.GetChatClient(modelId);
    using var chatClient = nativeClient.AsIChatClient();

    // Prefer the structured message history when present; otherwise build a
    // minimal system-prompt + user-prompt transcript.
    var messages = request.Messages is not null
        ? OpenAiMessageBuilder.BuildMessages(request.Messages, request.SystemPrompt)
        : BuildChatMessages(request);

    var chatOptions = new ChatOptions();
    if (request.Temperature is { } temp)
    {
        chatOptions.Temperature = (float)temp;
    }

    if (request.MaxTokens is { } maxTokens)
    {
        chatOptions.MaxOutputTokens = maxTokens;
    }

    if (request.Tools is { Count: > 0 } toolDefs)
    {
        chatOptions.Tools = OpenAiMessageBuilder.BuildTools(toolDefs);
    }

    var updates = chatClient.GetStreamingResponseAsync(messages, chatOptions, cancellationToken);

    await foreach (var ev in OpenAiMeaiStreamAdapter.AdaptAsync(updates, request.Id, systemClock, cancellationToken)
                       .WithCancellation(cancellationToken)
                       .ConfigureAwait(false))
    {
        yield return ev;
    }
}

/// <summary>
/// Returns the <see cref="OpenAIClient"/> for <paramref name="profile"/>, or the
/// lazily-created, cached default client when <paramref name="profile"/> is null.
/// Profile-specific clients are never cached.
/// </summary>
private OpenAIClient GetOrCreateOpenAiClient(LocalRuntimeProfileOptions? profile)
{
    // Fast path: default-configuration client already published.
    if (profile is null && _cachedOpenAiClient is { } cached)
    {
        return cached;
    }

    var openAiOptions = new OpenAIClientOptions();
    var normalized = Internal.ProviderHttpHelpers.NormalizeBaseUrl(profile?.BaseUrl ?? _options.BaseUrl);
    if (normalized is not null)
    {
        openAiOptions.Endpoint = new Uri(normalized);
    }

    // The OpenAI SDK requires a non-empty credential even for local runtimes
    // that ignore authentication, hence the "local-runtime" placeholder.
    var apiKey = profile?.ApiKey ?? _options.ApiKey ?? "local-runtime";
    var credential = new ApiKeyCredential(apiKey);
    var client = new OpenAIClient(credential, openAiOptions);

    if (profile is null)
    {
        // Publish atomically: if another thread won the race to cache the
        // default client, discard ours and return the winner's instance so
        // every caller shares a single client.
        return Interlocked.CompareExchange(ref _cachedOpenAiClient, client, null) ?? client;
    }

    return client;
}

/// <summary>
/// Builds a minimal chat transcript from the request's system prompt (when
/// present and non-blank) followed by its user prompt.
/// </summary>
private static List<Microsoft.Extensions.AI.ChatMessage> BuildChatMessages(ProviderRequest request)
{
    var messages = new List<Microsoft.Extensions.AI.ChatMessage>();
    if (!string.IsNullOrWhiteSpace(request.SystemPrompt))
    {
        messages.Add(new Microsoft.Extensions.AI.ChatMessage(ChatRole.System, request.SystemPrompt));
    }

    messages.Add(new Microsoft.Extensions.AI.ChatMessage(ChatRole.User, request.Prompt));
    return messages;
}

/// <summary>
/// Resolves the local runtime profile named by request metadata under
/// <see cref="RuntimeProfileMetadataKey"/>, or null when the metadata is
/// absent, blank, or names an unknown profile.
/// </summary>
private LocalRuntimeProfileOptions? ResolveProfile(IReadOnlyDictionary<string, string>? metadata)
{
    if (metadata is null
        || !metadata.TryGetValue(RuntimeProfileMetadataKey, out var profileName)
        || string.IsNullOrWhiteSpace(profileName))
    {
        return null;
    }

    return _options.LocalRuntimes.TryGetValue(profileName, out var profile)
        ? profile
        : null;
}
}