Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion crates/goose-server/src/openapi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ use rmcp::model::{
use utoipa::{OpenApi, ToSchema};

use goose::config::declarative_providers::{
DeclarativeProviderConfig, LoadedProvider, ProviderEngine,
DeclarativeProviderConfig, EnvVarConfig, LoadedProvider, ProviderEngine,
};
use goose::conversation::message::{
ActionRequired, ActionRequiredData, FrontendToolRequest, Message, MessageContent,
Expand Down Expand Up @@ -509,6 +509,7 @@ derive_utoipa!(Icon as IconSchema);
LoadedProvider,
ProviderEngine,
DeclarativeProviderConfig,
EnvVarConfig,
ExtensionEntry,
ExtensionConfig,
ConfigKey,
Expand Down
190 changes: 190 additions & 0 deletions crates/goose/src/config/declarative_providers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,17 @@ pub enum ProviderEngine {
Anthropic,
}

/// Declarative description of one environment/config variable a provider
/// references via a `${NAME}` placeholder (e.g. in `base_url`).
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct EnvVarConfig {
    /// Variable name, e.g. "TANZU_AI_ENDPOINT"; matched against `${name}` placeholders.
    pub name: String,
    /// If true, placeholder expansion fails when the variable cannot be
    /// resolved and no `default` exists. Absent in JSON => false.
    #[serde(default)]
    pub required: bool,
    /// If true, the value is looked up via the secret store
    /// (`Config::get_secret`) instead of plain params. Absent in JSON => false.
    #[serde(default)]
    pub secret: bool,
    /// Optional human-readable description of the variable.
    pub description: Option<String>,
    /// Optional fallback value used when the variable is not set.
    pub default: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct DeclarativeProviderConfig {
pub name: String,
Expand All @@ -46,6 +57,10 @@ pub struct DeclarativeProviderConfig {
pub catalog_provider_id: Option<String>,
#[serde(default)]
pub base_path: Option<String>,
#[serde(default)]
pub env_vars: Option<Vec<EnvVarConfig>>,
#[serde(default)]
pub dynamic_models: Option<bool>,
}

fn default_requires_auth() -> bool {
Expand All @@ -66,6 +81,40 @@ impl DeclarativeProviderConfig {
}
}

/// Expand `${VAR_NAME}` placeholders in a template string using the given env var configs.
/// Resolves values via Config (secret if `secret`, param otherwise), falls back to `default`.
/// Returns an error if a `required` var is missing.
pub fn expand_env_vars(template: &str, env_vars: &[EnvVarConfig]) -> Result<String> {
let config = Config::global();
let mut result = template.to_string();
for var in env_vars {
let placeholder = format!("${{{}}}", var.name);
if !result.contains(&placeholder) {
continue;
}
let value = if var.secret {
config.get_secret::<String>(&var.name).ok()
} else {
config.get_param::<String>(&var.name).ok()
};
let value = match value {
Some(v) => v,
None => match &var.default {
Some(d) => d.clone(),
None if var.required => {
return Err(anyhow::anyhow!(
"Required environment variable {} is not set",
var.name
));
}
None => continue,
},
};
result = result.replace(&placeholder, &value);
}
Ok(result)
}

#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct LoadedProvider {
pub config: DeclarativeProviderConfig,
Expand Down Expand Up @@ -164,6 +213,8 @@ pub fn create_custom_provider(
requires_auth: params.requires_auth,
catalog_provider_id: params.catalog_provider_id,
base_path: params.base_path,
env_vars: None,
dynamic_models: None,
};

let custom_providers_dir = custom_providers_dir();
Expand Down Expand Up @@ -227,6 +278,8 @@ pub fn update_custom_provider(params: UpdateCustomProviderParams) -> Result<()>
requires_auth: params.requires_auth,
catalog_provider_id: params.catalog_provider_id,
base_path: params.base_path,
env_vars: existing_config.env_vars,
dynamic_models: existing_config.dynamic_models,
};

let file_path = custom_providers_dir().join(format!("{}.json", updated_config.name));
Expand Down Expand Up @@ -352,6 +405,13 @@ pub fn register_declarative_provider(
config: DeclarativeProviderConfig,
provider_type: ProviderType,
) {
// Expand env vars in base_url once, so individual engines don't need to
let mut config = config;
if let Some(ref env_vars) = config.env_vars {
if let Ok(resolved) = expand_env_vars(&config.base_url, env_vars) {
config.base_url = resolved;
}
}
let config_clone = config.clone();

match config.engine {
Expand All @@ -378,3 +438,133 @@ pub fn register_declarative_provider(
}
}
}

#[cfg(test)]
mod tests {
    use super::*;

    // Verifies the bundled tanzu.json fixture round-trips through serde and
    // carries the new env_vars / dynamic_models fields.
    #[test]
    fn test_tanzu_json_deserializes() {
        let json = include_str!("../providers/declarative/tanzu.json");
        let config: DeclarativeProviderConfig =
            serde_json::from_str(json).expect("tanzu.json should parse");
        assert_eq!(config.name, "tanzu_ai");
        assert_eq!(config.display_name, "Tanzu AI Services");
        assert!(matches!(config.engine, ProviderEngine::OpenAI));
        assert_eq!(config.api_key_env, "TANZU_AI_API_KEY");
        assert_eq!(
            config.base_url,
            "${TANZU_AI_ENDPOINT}/openai/v1/chat/completions"
        );
        assert_eq!(config.dynamic_models, Some(true));
        assert_eq!(config.supports_streaming, Some(false));

        let env_vars = config.env_vars.as_ref().expect("env_vars should be set");
        assert_eq!(env_vars.len(), 1);
        assert_eq!(env_vars[0].name, "TANZU_AI_ENDPOINT");
        assert!(env_vars[0].required);
        assert!(!env_vars[0].secret);

        assert_eq!(config.models.len(), 1);
        assert_eq!(config.models[0].name, "openai/gpt-oss-120b");
    }

    // Backward compatibility: provider JSON written before env_vars /
    // dynamic_models existed must still deserialize (both fields optional).
    #[test]
    fn test_existing_json_files_still_deserialize_without_new_fields() {
        let json = include_str!("../providers/declarative/groq.json");
        let config: DeclarativeProviderConfig =
            serde_json::from_str(json).expect("groq.json should parse without env_vars");
        assert!(config.env_vars.is_none());
        assert!(config.dynamic_models.is_none());
    }

    // Happy path: a set variable replaces its placeholder in the template.
    #[test]
    fn test_expand_env_vars_replaces_placeholder() {
        // env_lock guard serializes process-env mutation across tests.
        let _guard = env_lock::lock_env([("TEST_EXPAND_HOST", Some("https://example.com/api"))]);

        let env_vars = vec![EnvVarConfig {
            name: "TEST_EXPAND_HOST".to_string(),
            required: true,
            secret: false,
            description: None,
            default: None,
        }];

        let result = expand_env_vars("${TEST_EXPAND_HOST}/v1/chat/completions", &env_vars).unwrap();
        assert_eq!(result, "https://example.com/api/v1/chat/completions");
    }

    // A required var that is unset (and has no default) must produce an error
    // naming the missing variable.
    #[test]
    fn test_expand_env_vars_required_missing_errors() {
        let _guard = env_lock::lock_env([("TEST_EXPAND_MISSING", None::<&str>)]);

        let env_vars = vec![EnvVarConfig {
            name: "TEST_EXPAND_MISSING".to_string(),
            required: true,
            secret: false,
            description: None,
            default: None,
        }];

        let result = expand_env_vars("${TEST_EXPAND_MISSING}/path", &env_vars);
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("TEST_EXPAND_MISSING"));
    }

    // When the var is unset, the declared default value is substituted.
    #[test]
    fn test_expand_env_vars_uses_default_when_missing() {
        let _guard = env_lock::lock_env([("TEST_EXPAND_DEFAULT", None::<&str>)]);

        let env_vars = vec![EnvVarConfig {
            name: "TEST_EXPAND_DEFAULT".to_string(),
            required: false,
            secret: false,
            description: None,
            default: Some("https://fallback.example.com".to_string()),
        }];

        let result =
            expand_env_vars("${TEST_EXPAND_DEFAULT}/v1/chat/completions", &env_vars).unwrap();
        assert_eq!(result, "https://fallback.example.com/v1/chat/completions");
    }

    // Vars whose placeholder does not occur in the template are skipped,
    // even when marked required — the template passes through unchanged.
    #[test]
    fn test_expand_env_vars_no_placeholders_passthrough() {
        let env_vars = vec![EnvVarConfig {
            name: "UNUSED_VAR".to_string(),
            required: true,
            secret: false,
            description: None,
            default: None,
        }];

        let result =
            expand_env_vars("https://static.example.com/v1/chat/completions", &env_vars).unwrap();
        assert_eq!(result, "https://static.example.com/v1/chat/completions");
    }

    // With no env var configs, placeholders are left untouched.
    #[test]
    fn test_expand_env_vars_empty_slice_passthrough() {
        let result = expand_env_vars("${WHATEVER}/path", &[]).unwrap();
        assert_eq!(result, "${WHATEVER}/path");
    }

    // A value resolved from the environment takes precedence over `default`.
    #[test]
    fn test_expand_env_vars_env_value_overrides_default() {
        let _guard = env_lock::lock_env([("TEST_EXPAND_OVERRIDE", Some("https://from-env.com"))]);

        let env_vars = vec![EnvVarConfig {
            name: "TEST_EXPAND_OVERRIDE".to_string(),
            required: false,
            secret: false,
            description: None,
            default: Some("https://from-default.com".to_string()),
        }];

        let result = expand_env_vars("${TEST_EXPAND_OVERRIDE}/path", &env_vars).unwrap();
        assert_eq!(result, "https://from-env.com/path");
    }
}
21 changes: 21 additions & 0 deletions crates/goose/src/providers/declarative/tanzu.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
{
"name": "tanzu_ai",
"engine": "openai",
"display_name": "Tanzu AI Services",
"description": "Enterprise-managed LLM access through VMware Tanzu Platform AI Services",
"api_key_env": "TANZU_AI_API_KEY",
"base_url": "${TANZU_AI_ENDPOINT}/openai/v1/chat/completions",
"env_vars": [
{
"name": "TANZU_AI_ENDPOINT",
"required": true,
"secret": false,
"description": "Your Tanzu AI Services endpoint URL"
}
],
"dynamic_models": true,
"models": [
{ "name": "openai/gpt-oss-120b", "context_limit": 131072 }
],
"supports_streaming": false
}
37 changes: 37 additions & 0 deletions crates/goose/src/providers/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -322,6 +322,43 @@ mod tests {
assert_eq!(result.context_limit, Some(expected_limit));
}

    // End-to-end registry check: tanzu.json must be picked up and surfaced
    // with the expected metadata and config keys (including the env var
    // promoted to a config key by the registry).
    #[tokio::test]
    async fn test_tanzu_declarative_provider_registry_wiring() {
        let providers_list = providers().await;
        let tanzu = providers_list
            .iter()
            .find(|(m, _)| m.name == "tanzu_ai")
            .expect("tanzu_ai provider should be registered");
        let (meta, provider_type) = tanzu;

        // Should be a Declarative (fixed) provider
        assert_eq!(*provider_type, ProviderType::Declarative);

        assert_eq!(meta.display_name, "Tanzu AI Services");
        assert_eq!(meta.default_model, "openai/gpt-oss-120b");

        // A TANZU_AI_API_KEY config key should exist (secret, required);
        // position among config_keys is not asserted.
        let api_key = meta
            .config_keys
            .iter()
            .find(|k| k.name == "TANZU_AI_API_KEY")
            .expect("TANZU_AI_API_KEY config key should exist");
        assert!(
            api_key.required,
            "API key should be required for fixed declarative provider"
        );
        assert!(api_key.secret, "API key should be secret");

        // Should have TANZU_AI_ENDPOINT config key (not secret, required)
        let endpoint = meta
            .config_keys
            .iter()
            .find(|k| k.name == "TANZU_AI_ENDPOINT")
            .expect("TANZU_AI_ENDPOINT config key should exist");
        assert!(endpoint.required, "Endpoint should be required");
        assert!(!endpoint.secret, "Endpoint should not be secret");
    }

#[tokio::test]
async fn test_openai_compatible_providers_config_keys() {
let providers_list = providers().await;
Expand Down
12 changes: 12 additions & 0 deletions crates/goose/src/providers/provider_registry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,18 @@ impl ProviderRegistry {
}
}

if let Some(ref env_vars) = config.env_vars {
for ev in env_vars {
config_keys.push(super::base::ConfigKey::new(
&ev.name,
ev.required,
ev.secret,
ev.default.as_deref(),
false,
));
}
}

let custom_metadata = ProviderMetadata {
name: config.name.clone(),
display_name: config.display_name.clone(),
Expand Down
1 change: 1 addition & 0 deletions documentation/docs/getting-started/providers.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@ goose is compatible with a wide range of LLM providers, allowing you to choose a
| [OVHcloud AI](https://www.ovhcloud.com/en/public-cloud/ai-endpoints/) | Provides access to open-source models including Qwen, Llama, Mistral, and DeepSeek through AI Endpoints service. | `OVHCLOUD_API_KEY` |
| [Ramalama](https://ramalama.ai/) | Local model using native [OCI](https://opencontainers.org/) container runtimes, [CNCF](https://www.cncf.io/) tools, and supporting models as OCI artifacts. Ramalama API is a compatible alternative to Ollama and can be used with the goose Ollama provider. Supports Qwen, Llama, DeepSeek, and other open-source models. **Because this provider runs locally, you must first [download and run a model](#local-llms).** | `OLLAMA_HOST` |
| [Snowflake](https://docs.snowflake.com/user-guide/snowflake-cortex/aisql#choosing-a-model) | Access the latest models using Snowflake Cortex services, including Claude models. **Requires a Snowflake account and programmatic access token (PAT)**. | `SNOWFLAKE_HOST`, `SNOWFLAKE_TOKEN` |
| [Tanzu AI Services](https://techdocs.broadcom.com/us/en/vmware-tanzu/platform/ai-services/10-3/ai/index.html) | Enterprise-managed LLM access through VMware Tanzu Platform AI Services. Models are fetched dynamically from the endpoint. | `TANZU_AI_API_KEY`, `TANZU_AI_ENDPOINT` |
| [Tetrate Agent Router Service](https://router.tetrate.ai) | Unified API gateway for AI models including Claude, Gemini, GPT, open-weight models, and others. Supports PKCE authentication flow for secure API key generation. | `TETRATE_API_KEY`, `TETRATE_HOST` (optional) |
| [Venice AI](https://venice.ai/home) | Provides access to open source models like Llama, Mistral, and Qwen while prioritizing user privacy. **Requires an account and an [API key](https://docs.venice.ai/overview/guides/generating-api-key)**. | `VENICE_API_KEY`, `VENICE_HOST` (optional), `VENICE_BASE_PATH` (optional), `VENICE_MODELS_PATH` (optional) |
| [xAI](https://x.ai/) | Access to xAI's Grok models including grok-3, grok-3-mini, and grok-3-fast with 131,072 token context window. | `XAI_API_KEY`, `XAI_HOST` (optional) |
Expand Down
36 changes: 36 additions & 0 deletions ui/desktop/openapi.json
Original file line number Diff line number Diff line change
Expand Up @@ -4195,9 +4195,20 @@
"display_name": {
"type": "string"
},
"dynamic_models": {
"type": "boolean",
"nullable": true
},
"engine": {
"$ref": "#/components/schemas/ProviderEngine"
},
"env_vars": {
"type": "array",
"items": {
"$ref": "#/components/schemas/EnvVarConfig"
},
"nullable": true
},
"headers": {
"type": "object",
"additionalProperties": {
Expand Down Expand Up @@ -4460,6 +4471,31 @@
}
}
},
"EnvVarConfig": {
"type": "object",
"required": [
"name"
],
"properties": {
"default": {
"type": "string",
"nullable": true
},
"description": {
"type": "string",
"nullable": true
},
"name": {
"type": "string"
},
"required": {
"type": "boolean"
},
"secret": {
"type": "boolean"
}
}
},
"Envs": {
"type": "object",
"additionalProperties": {
Expand Down
Loading
Loading