Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
aca1f88
prevent endless loop if llm creation fails
amakropoulos Jan 13, 2026
81bafce
fallback to slow data movement if current doesn't work
amakropoulos Jan 13, 2026
4c25d05
bundle httplib in static lib
amakropoulos Jan 14, 2026
e4083ba
adjust tests
amakropoulos Jan 14, 2026
60d3a6a
use NDK 27
amakropoulos Jan 15, 2026
2524613
download test models only when needed
amakropoulos Jan 15, 2026
5b5f243
remove build dir before zipping artifacts
amakropoulos Jan 15, 2026
cce6016
implement LLMService builder for easier access to the arguments
amakropoulos Jan 15, 2026
3213475
improve the cpp examples
amakropoulos Jan 15, 2026
f433cf7
documentation WIP
amakropoulos Jan 15, 2026
32dfe3d
C++ documentation
amakropoulos Jan 15, 2026
d0f1043
c++ guide small modifications
amakropoulos Jan 15, 2026
3f8b012
add mobile target frameworks
amakropoulos Jan 16, 2026
348f2dd
bump version to 2.0.1
amakropoulos Jan 16, 2026
e67f02e
remove static_cast
amakropoulos Jan 16, 2026
179130f
c++ guide changes
amakropoulos Jan 16, 2026
5adac80
c# guide
amakropoulos Jan 16, 2026
5edfcfc
remove llamafile license
amakropoulos Jan 19, 2026
1a1b2ee
c# dotnet build tests
amakropoulos Jan 19, 2026
110503e
fix c# runtime building
amakropoulos Jan 19, 2026
a575067
disable all archs if one selected
amakropoulos Jan 19, 2026
cd16c03
allow to disable cpu and gpu based archs
amakropoulos Jan 19, 2026
b7b870c
chmod servers
amakropoulos Jan 19, 2026
7d4e025
implement CPU/GPU disabling in c#
amakropoulos Jan 19, 2026
c9d64bf
allow for LLAMALIB flags in cmake
amakropoulos Jan 19, 2026
8b5b9ed
LLAMALIB support, no component find_package support
amakropoulos Jan 20, 2026
5201762
replace LlamaLib with LLAMALIB in examples
amakropoulos Jan 20, 2026
bfae1b3
replace LlamaLib with LLAMALIB in workflows
amakropoulos Jan 20, 2026
74db1f7
add c# dotnet test yaml
amakropoulos Jan 20, 2026
e8a0904
replace LlamaLib with LLAMALIB in tests
amakropoulos Jan 20, 2026
80ea298
update cmake/c# flags
amakropoulos Jan 20, 2026
1fab88a
adapt c# examples
amakropoulos Jan 20, 2026
12ed806
remove deprecated md
amakropoulos Jan 20, 2026
8a48d59
add missing line in update
amakropoulos Jan 20, 2026
65204d7
bump llama.cpp to b7779 (365a3e8c3)
amakropoulos Jan 20, 2026
766d2ee
bump llama.cpp to b7777 (959ecf7)
amakropoulos Jan 20, 2026
0ca1797
include arg.h
amakropoulos Jan 20, 2026
bd0f252
script to change version
amakropoulos Jan 20, 2026
39c5c4d
bump version to 2.0.2
amakropoulos Jan 20, 2026
af61ee9
update Readme
amakropoulos Jan 28, 2026
e276530
fix preprocessor directives for mobile in Unity
amakropoulos Jan 28, 2026
9036ea7
update readme
amakropoulos Jan 28, 2026
39bfcd9
update csproj
amakropoulos Jan 28, 2026
09bda00
don't install openssl for windows, use default
amakropoulos Jan 28, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/doxygen/Doxyfile
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ PROJECT_NAME = "LlamaLib"
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = v2.0.0
PROJECT_NUMBER = v2.0.2

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
Expand Down
53 changes: 53 additions & 0 deletions .github/scripts/change_version.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
#!/bin/bash
# Bump the project version: rewrites ./VERSION and updates the embedded
# version string in every project file that carries one.
#
# Usage: change_version.sh <new_version>

set -e

if [ $# -eq 0 ]; then
    echo "Usage: $0 <new_version>"
    exit 1
fi

NEW_VERSION=$1
CURRENT_VERSION=$(tr -d '[:space:]' < ./VERSION)

echo "Updating version from $CURRENT_VERSION to $NEW_VERSION"

# Update the canonical VERSION file first
echo "$NEW_VERSION" > ./VERSION

# Escape '.' so sed matches the version literally
# (otherwise "2.0.1" would also match e.g. "2a0b1").
ESCAPED_VERSION=$(printf '%s' "$CURRENT_VERSION" | sed 's/\./\\./g')

# Files to update
FILES=(
    "./tests/csharp/LlamaLib.Test.csproj"
    "./examples/csharp/agent/LlamaLibExamples.csproj"
    "./examples/csharp/remote_client/LlamaLibExamples.csproj"
    "./examples/csharp/basic_embeddings/LlamaLibExamples.csproj"
    "./examples/csharp/basic/LlamaLibExamples.csproj"
    "./.github/tests/csharp-dotnet/Program.csproj"
    "./csharp/LlamaLib.csproj"
    "./csharp/LlamaLib.targets"
    "./cmake/LlamaLibConfigVersion.cmake"
    "./.github/doxygen/Doxyfile"
)

# Patterns to replace, as "old|new" pairs ('|' never appears in a version)
PATTERNS=(
    "Version=\"${ESCAPED_VERSION}\"|Version=\"${NEW_VERSION}\""
    "<PackageVersion>${ESCAPED_VERSION}<\/PackageVersion>|<PackageVersion>${NEW_VERSION}<\/PackageVersion>"
    "<LlamaLibVersion>${ESCAPED_VERSION}<\/LlamaLibVersion>|<LlamaLibVersion>${NEW_VERSION}<\/LlamaLibVersion>"
    "set(PACKAGE_VERSION \"${ESCAPED_VERSION}\")|set(PACKAGE_VERSION \"${NEW_VERSION}\")"
    "PROJECT_NUMBER = v${ESCAPED_VERSION}|PROJECT_NUMBER = v${NEW_VERSION}"
)

# Detect the sed flavor once: GNU sed takes -i with no argument,
# BSD/macOS sed requires -i '' (an explicit empty backup suffix).
if sed --version >/dev/null 2>&1; then
    SED_INPLACE=(sed -i)
else
    SED_INPLACE=(sed -i '')
fi

# Apply all patterns to all files; a failing sed now aborts the script
# (set -e) instead of being silently swallowed.
for file in "${FILES[@]}"; do
    if [ -f "$file" ]; then
        for pattern in "${PATTERNS[@]}"; do
            OLD=${pattern%%|*}
            NEW=${pattern#*|}
            "${SED_INPLACE[@]}" "s/${OLD}/${NEW}/g" "$file"
        done
        echo "Updated: $file"
    else
        echo "Skipped (not found): $file" >&2
    fi
done

echo "Done!"
2 changes: 1 addition & 1 deletion .github/scripts/release.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ mkdir servers
for arch in win-x64_noavx linux-x64_noavx osx-arm64_no-acc osx-x64_no-acc;do
unzip -o $arch.zip/$arch.zip -d servers llamalib*server*
done
chmod a+x servers/*

# extract runtimes
for d in *.zip;do
Expand All @@ -31,7 +32,6 @@ cp $root_dir/third_party/llama.cpp/vendor/cpp-httplib/httplib.h include/
# licenses
mkdir -p third_party_licenses
cp $root_dir/third_party/llama.cpp/LICENSE third_party_licenses/llama.cpp.LICENSE.txt
curl -o third_party_licenses/llamafile.LICENSE.txt -L https://raw.githubusercontent.com/Mozilla-Ocho/llamafile/main/LICENSE

# copy files from repo
cp $root_dir/LICENSE ./
Expand Down
1 change: 1 addition & 0 deletions .github/scripts/upgrade_llama.cpp.sh
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ patch -p1 < $BASE_DIR/patches/tinyBLAS.patch
save_patches

# check server
commit=`git rev-parse HEAD`
git log $version..$commit --format="%h" tools/server/server.cpp
git diff $version..$commit tools/server/server.cpp

Expand Down
Binary file not shown.
41 changes: 41 additions & 0 deletions .github/tests/csharp-dotnet/Program.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
using System;
using UndreamAI.LlamaLib;
using Newtonsoft.Json.Linq;

class Program
{
    static string previousText = "";

    // Streaming callback: prints only the suffix of the text that has
    // not been shown yet, then remembers what was printed.
    static void StreamingCallback(string text)
    {
        Console.Write(text.Substring(previousText.Length));
        previousText = text;
    }


    static async Task Main()
    {
        // Spin up a local LLM service and additionally expose it over HTTP.
        var llm = new LLMService("model.gguf", 1, -1, 5);
        llm.Start();
        llm.StartServer("0.0.0.0", 13333);

        // Three equivalent ways to drive the service:
        // 1) an agent bound directly to the service,
        var directAgent = new LLMAgent(llm, "You are a helpful AI assistant. Be concise and friendly.");

        // 2) an agent over an in-process client,
        var inProcessClient = new LLMClient(llm);
        var inProcessAgent = new LLMAgent(inProcessClient, "You are a helpful assistant.");

        // 3) an agent over a remote (HTTP) client.
        var remoteClient = new LLMClient("http://localhost", 13333);
        var remoteAgent = new LLMAgent(remoteClient, "You are a helpful assistant.");

        // Ask each agent the same question, capping generation length.
        LLMAgent[] agents = { directAgent, inProcessAgent, remoteAgent };
        foreach (var agent in agents)
        {
            agent.SetCompletionParameters(new JObject { { "n_predict", 20 } });
            string response = await agent.ChatAsync("What is AI?");
            Console.WriteLine(response);
        }

    }
}
14 changes: 14 additions & 0 deletions .github/tests/csharp-dotnet/Program.csproj
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<!-- Console executable; entry point is Program.cs -->
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>

<ItemGroup>
<!-- LlamaLib NuGet package under test; this Version attribute is kept in
     sync by .github/scripts/change_version.sh -->
<PackageReference Include="LlamaLib" Version="2.0.2" />
</ItemGroup>

</Project>
Loading
Loading