Skip to content

Commit 38ceeb2

Browse files
committed
feat: implement UniFFI mobile bindings for iOS and Android
Add localgpt-mobile crate with full UniFFI proc-macro integration:
- LocalGPTClient object with chat, memory, session, and model APIs
- AgentHandle in core for thread-safe Agent access across threads
- Paths::from_root() and Config::load_from_dir() for mobile sandboxes
- Feature-gate FastEmbedProvider behind `embeddings-local` so mobile builds can exclude the large fastembed dependency
- iOS build script (XCFramework + Swift bindings via uniffi-bindgen)
- Android Gradle config (cargo-ndk + Kotlin bindings)
1 parent d45b138 commit 38ceeb2

11 files changed

Lines changed: 839 additions & 31 deletions

File tree

Cargo.lock

Lines changed: 297 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/core/src/agent/mod.rs

Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1218,6 +1218,101 @@ impl Agent {
12181218
}
12191219
}
12201220

1221+
// ---------------------------------------------------------------------------
1222+
// AgentHandle — thread-safe wrapper for mobile and server consumers
1223+
// ---------------------------------------------------------------------------
1224+
1225+
/// Thread-safe handle to an Agent. Mobile and server code use this instead of
1226+
/// Agent directly. Wraps in `Arc<tokio::sync::Mutex<Agent>>` so it can be
1227+
/// shared across threads and held across `.await` points.
1228+
#[derive(Clone)]
1229+
pub struct AgentHandle {
1230+
inner: Arc<tokio::sync::Mutex<Agent>>,
1231+
}
1232+
1233+
// Compile-time guarantee that AgentHandle is safe to share across threads.
1234+
const _: () = {
1235+
fn assert_send_sync<T: Send + Sync>() {}
1236+
fn check() {
1237+
assert_send_sync::<AgentHandle>();
1238+
}
1239+
};
1240+
1241+
impl AgentHandle {
1242+
/// Create a new handle wrapping an existing Agent.
1243+
pub fn new(agent: Agent) -> Self {
1244+
Self {
1245+
inner: Arc::new(tokio::sync::Mutex::new(agent)),
1246+
}
1247+
}
1248+
1249+
/// Send a chat message and return the full response text.
1250+
pub async fn chat(&self, message: &str) -> Result<String> {
1251+
let mut agent = self.inner.lock().await;
1252+
agent.chat(message).await
1253+
}
1254+
1255+
/// Start a new session.
1256+
pub async fn new_session(&self) -> Result<()> {
1257+
let mut agent = self.inner.lock().await;
1258+
agent.new_session().await
1259+
}
1260+
1261+
/// Search memory files.
1262+
pub async fn memory_search(&self, query: &str, max_results: usize) -> Result<Vec<MemoryChunk>> {
1263+
let agent = self.inner.lock().await;
1264+
agent.search_memory(query).await.map(|mut results| {
1265+
results.truncate(max_results);
1266+
results
1267+
})
1268+
}
1269+
1270+
/// Read a memory file by name.
1271+
pub async fn memory_get(&self, filename: &str) -> Result<String> {
1272+
let agent = self.inner.lock().await;
1273+
let workspace = agent.memory.workspace();
1274+
let path = workspace.join(filename);
1275+
std::fs::read_to_string(&path)
1276+
.map_err(|e| anyhow::anyhow!("Failed to read {}: {}", filename, e))
1277+
}
1278+
1279+
/// Get the current model name.
1280+
pub async fn model(&self) -> String {
1281+
let agent = self.inner.lock().await;
1282+
agent.model().to_string()
1283+
}
1284+
1285+
/// Switch to a different model.
1286+
pub async fn set_model(&self, model: &str) -> Result<()> {
1287+
let mut agent = self.inner.lock().await;
1288+
agent.set_model(model)
1289+
}
1290+
1291+
/// Get context window usage: (used, usable, total).
1292+
pub async fn context_usage(&self) -> (usize, usize, usize) {
1293+
let agent = self.inner.lock().await;
1294+
agent.context_usage()
1295+
}
1296+
1297+
/// Compact the session history.
1298+
pub async fn compact_session(&self) -> Result<(usize, usize)> {
1299+
let mut agent = self.inner.lock().await;
1300+
agent.compact_session().await
1301+
}
1302+
1303+
/// Clear session history.
1304+
pub async fn clear_session(&self) {
1305+
let mut agent = self.inner.lock().await;
1306+
agent.clear_session();
1307+
}
1308+
1309+
/// Export session as markdown.
1310+
pub async fn export_markdown(&self) -> String {
1311+
let agent = self.inner.lock().await;
1312+
agent.export_markdown()
1313+
}
1314+
}
1315+
12211316
/// Welcome message shown on first run (brand new workspace)
12221317
const FIRST_RUN_WELCOME: &str = r#"# Welcome to LocalGPT
12231318

crates/core/src/config/mod.rs

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -635,6 +635,30 @@ impl Config {
635635
Ok(config)
636636
}
637637

638+
/// Load (or create default) config with all directories rooted under `data_dir`.
639+
///
640+
/// Mobile apps use this instead of `load()` since they don't have XDG dirs.
641+
pub fn load_from_dir(data_dir: &str) -> Result<Self> {
642+
let paths = Paths::from_root(data_dir);
643+
paths.ensure_dirs()?;
644+
let path = paths.config_file();
645+
646+
if !path.exists() {
647+
let config = Config {
648+
paths,
649+
..Config::default()
650+
};
651+
config.save()?;
652+
return Ok(config);
653+
}
654+
655+
let content = fs::read_to_string(&path)?;
656+
let mut config: Config = toml::from_str(&content)?;
657+
config.paths = paths;
658+
config.expand_env_vars();
659+
Ok(config)
660+
}
661+
638662
pub fn save(&self) -> Result<()> {
639663
let path = self.paths.config_file();
640664

crates/core/src/memory/embeddings.rs

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -159,14 +159,18 @@ pub fn hash_text(text: &str) -> String {
159159
// Local Embedding Provider (fastembed) - Default provider, no API key needed
160160
// ============================================================================
161161

162+
#[cfg(feature = "embeddings-local")]
162163
use std::sync::{Arc, Mutex as StdMutex};
163164

165+
#[cfg(feature = "embeddings-local")]
166+
164167
pub struct FastEmbedProvider {
165168
model: Arc<StdMutex<fastembed::TextEmbedding>>,
166169
model_name: String,
167170
dimensions: usize,
168171
}
169172

173+
#[cfg(feature = "embeddings-local")]
170174
impl FastEmbedProvider {
171175
pub fn new(model_name: Option<&str>) -> Result<Self> {
172176
Self::new_with_cache_dir(model_name, None)
@@ -243,6 +247,7 @@ impl FastEmbedProvider {
243247
}
244248
}
245249

250+
#[cfg(feature = "embeddings-local")]
246251
#[async_trait]
247252
impl EmbeddingProvider for FastEmbedProvider {
248253
fn id(&self) -> &str {

crates/core/src/memory/mod.rs

Lines changed: 34 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,11 @@ mod search;
44
mod watcher;
55
mod workspace;
66

7+
#[cfg(feature = "embeddings-local")]
8+
pub use embeddings::FastEmbedProvider;
79
#[cfg(feature = "gguf")]
810
pub use embeddings::LlamaCppProvider;
9-
pub use embeddings::{EmbeddingProvider, FastEmbedProvider, OpenAIEmbeddingProvider, hash_text};
11+
pub use embeddings::{EmbeddingProvider, OpenAIEmbeddingProvider, hash_text};
1012
pub use index::{MemoryIndex, ReindexStats};
1113
pub use search::MemoryChunk;
1214
pub use watcher::MemoryWatcher;
@@ -102,31 +104,41 @@ impl MemoryManager {
102104
.as_str()
103105
{
104106
"local" => {
105-
let model_name = if memory_config.embedding_model.is_empty()
106-
|| memory_config.embedding_model == "text-embedding-3-small"
107+
#[cfg(feature = "embeddings-local")]
107108
{
108-
None // Use default local model
109-
} else {
110-
Some(memory_config.embedding_model.as_str())
111-
};
112-
let cache_dir = if memory_config.embedding_cache_dir.is_empty() {
113-
None
114-
} else {
115-
Some(memory_config.embedding_cache_dir.as_str())
116-
};
117-
match FastEmbedProvider::new_with_cache_dir(model_name, cache_dir) {
118-
Ok(provider) => {
119-
info!("Using local embedding provider: {}", provider.model());
120-
Some(Arc::new(provider))
121-
}
122-
Err(e) => {
123-
warn!(
124-
"Failed to initialize local embeddings: {}. Falling back to FTS-only search.",
125-
e
126-
);
109+
let model_name = if memory_config.embedding_model.is_empty()
110+
|| memory_config.embedding_model == "text-embedding-3-small"
111+
{
112+
None // Use default local model
113+
} else {
114+
Some(memory_config.embedding_model.as_str())
115+
};
116+
let cache_dir = if memory_config.embedding_cache_dir.is_empty() {
127117
None
118+
} else {
119+
Some(memory_config.embedding_cache_dir.as_str())
120+
};
121+
match FastEmbedProvider::new_with_cache_dir(model_name, cache_dir) {
122+
Ok(provider) => {
123+
info!("Using local embedding provider: {}", provider.model());
124+
Some(Arc::new(provider))
125+
}
126+
Err(e) => {
127+
warn!(
128+
"Failed to initialize local embeddings: {}. Falling back to FTS-only search.",
129+
e
130+
);
131+
None
132+
}
128133
}
129134
}
135+
#[cfg(not(feature = "embeddings-local"))]
136+
{
137+
warn!(
138+
"Local embeddings requested but `embeddings-local` feature is disabled. Falling back to FTS-only search."
139+
);
140+
None
141+
}
130142
}
131143
"openai" => {
132144
// Need OpenAI config for API key

crates/core/src/paths.rs

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -158,6 +158,22 @@ impl Paths {
158158
self.cache_dir.join("embeddings")
159159
}
160160

161+
/// Create Paths with all directories rooted under a single base path.
162+
///
163+
/// Mobile apps use this to point everything at their app-specific
164+
/// document or library directory.
165+
pub fn from_root(root: impl Into<PathBuf>) -> Self {
166+
let root = root.into();
167+
Self {
168+
config_dir: root.join("config"),
169+
data_dir: root.join("data"),
170+
workspace: root.join("data").join("workspace"),
171+
state_dir: root.join("state"),
172+
cache_dir: root.join("cache"),
173+
runtime_dir: None,
174+
}
175+
}
176+
161177
/// Create all directories with appropriate permissions.
162178
pub fn ensure_dirs(&self) -> Result<()> {
163179
let dirs = [

crates/mobile/Cargo.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,8 @@ name = "localgpt_mobile"
1212

1313
[dependencies]
1414
localgpt-core = { path = "../core", default-features = false, features = ["embeddings-openai"] }
15+
uniffi = "0.29"
16+
tokio = { workspace = true }
1517
anyhow = { workspace = true }
18+
thiserror = { workspace = true }
19+
serde_json = { workspace = true }

0 commit comments

Comments
 (0)