Open
Description
Problem
There is currently no Ollama module.
I realize that my POC here needs a discussion of design and features, but it's a fun little thing.
Solution
Make a module. Here's a prototype of Ollama working with Testcontainers in LINQPad 8.
It depends on the NuGet packages Testcontainers and OllamaSharp:
// Build the Ollama container: publish the default Ollama API port and persist
// downloaded models in a named volume so they survive container restarts.
var container = new ContainerBuilder()
    .WithName("ollama-container")
    .WithImage("ollama/ollama")
    .WithPortBinding(11434, 11434)
    .WithVolumeMount("ollama", "/root/.ollama")
    .WithExposedPort(11434)
    .Build();

// Keeps the polling loop below alive until the chat round-trip has completed.
var keepAlive = true;

container.Started += async (sender, args) =>
{
    Console.WriteLine("Container started");
    try
    {
        Console.WriteLine("Start ollama service");
        // ExecAsync expects the command as separate argv tokens (docker exec
        // style), not a single shell string — a one-element list containing
        // "ollama run llama2" would try to exec a binary with that literal name.
        await container.ExecAsync(new List<string> { "ollama", "run", "llama2" });
        var result = await RunAsync();
        result.Dump();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        // Always release the wait loop, even if the chat call threw.
        keepAlive = false;
    }
};

await container.StartAsync();

// Poll until the Started handler finishes or the LINQPad query is cancelled.
while (!QueryCancelToken.IsCancellationRequested && keepAlive)
{
    await Task.Delay(1000);
}

// Surface the container logs before tearing down (previously fetched but never shown).
var logs = await container.GetLogsAsync();
Console.WriteLine(logs);

await container.StopAsync();
await container.DisposeAsync();
/// <summary>
/// Sends a single chat message to the local Ollama instance and returns the
/// concatenated assistant reply, or the exception text on failure.
/// </summary>
async Task<string> RunAsync()
{
    try
    {
        // OllamaApiClient expects the server base URL; the client appends the
        // API paths (/api/chat, /api/generate, ...) itself, so passing the
        // /api/generate endpoint here would produce malformed request URLs.
        var client = new OllamaApiClient("http://localhost:11434", "llama2");

        var request = new ChatRequest
        {
            Model = "llama2",
            Stream = false,
        };
        request.Messages ??= new List<Message>();
        request.Messages.Add(new Message(ChatRole.User, "Write me a dad-joke about cats please?", Enumerable.Empty<string>().ToArray()));

        // Collects streamed fragments; unused with Stream = false, but keeps
        // the callback overload working if streaming is re-enabled.
        var messages = new List<ChatResponseStream>();
        var responses = await client.SendChat(request, x => messages.Add(x));
        return string.Join("\n\n", responses.Select(r => r.Content));
    }
    catch (Exception ex)
    {
        // Best-effort prototype: surface the failure as the result string.
        return ex.ToString();
    }
}
Benefit
It's really cool.
Alternatives
To be less cool
Would you like to help contributing this enhancement?
Yes
Activity