Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 34 additions & 0 deletions crates/goose/src/config/declarative_providers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,12 +82,18 @@ pub struct DeclarativeProviderConfig {
pub setup_steps: Vec<String>,
#[serde(default, deserialize_with = "deserialize_non_empty_string")]
pub fast_model: Option<String>,
#[serde(default, skip_serializing_if = "is_false")]
pub preserves_thinking: bool,
}

/// Serde default for `requires_auth`: a declarative provider requires
/// authentication unless its JSON explicitly opts out.
fn default_requires_auth() -> bool {
    true
}

/// Serde `skip_serializing_if` predicate: omit boolean fields that are
/// still at their `false` default so the emitted JSON stays minimal.
fn is_false(value: &bool) -> bool {
    // `std::ops::Not` is implemented for `&bool`, so no explicit deref is needed.
    !value
}

impl DeclarativeProviderConfig {
pub fn id(&self) -> &str {
&self.name
Expand Down Expand Up @@ -240,6 +246,7 @@ pub fn create_custom_provider(
model_doc_link: None,
setup_steps: vec![],
fast_model: None,
preserves_thinking: false,
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P2 Badge Allow custom providers to opt into preserved thinking

Fresh evidence after the earlier custom_deepseek concern: this commit marks built-in JSONs as preserves_thinking: true, but newly created custom providers are now hardcoded to preserves_thinking: false here, and the create/update request types still expose no way to set it. Since format_messages_with_options in providers/formats/openai.rs now sends reasoning_content only when that flag is enabled, custom OpenAI-compatible providers that depend on reasoning replay can silently lose tool-call continuation context unless users manually edit provider JSON.

Useful? React with 👍 / 👎.

};

let custom_providers_dir = custom_providers_dir();
Expand Down Expand Up @@ -309,6 +316,7 @@ pub fn update_custom_provider(params: UpdateCustomProviderParams) -> Result<()>
model_doc_link: existing_config.model_doc_link,
setup_steps: existing_config.setup_steps,
fast_model: existing_config.fast_model.clone(),
preserves_thinking: existing_config.preserves_thinking,
};

let file_path = custom_providers_dir().join(format!("{}.json", updated_config.name));
Expand Down Expand Up @@ -597,6 +605,32 @@ mod tests {
assert!(config.dynamic_models.is_none());
assert!(config.model_doc_link.is_none());
assert!(config.setup_steps.is_empty());
assert!(!config.preserves_thinking);
}

#[test]
fn test_zai_json_deserializes() {
    // Parse the bundled Z.AI declarative provider definition and verify that
    // every field the runtime relies on round-trips through serde.
    let raw = include_str!("../providers/declarative/zai.json");
    let cfg: DeclarativeProviderConfig =
        serde_json::from_str(raw).expect("zai.json should parse");

    // Identity and engine wiring.
    assert_eq!(cfg.name, "zai");
    assert_eq!(cfg.display_name, "Z.AI");
    assert!(matches!(cfg.engine, ProviderEngine::Anthropic));
    assert_eq!(cfg.api_key_env, "ZHIPU_API_KEY");
    assert_eq!(cfg.base_url, "${ZAI_BASE_URL}");
    assert_eq!(cfg.catalog_provider_id.as_deref(), Some("zai"));
    assert_eq!(cfg.fast_model.as_deref(), Some("glm-4.5-air"));
    assert!(cfg.preserves_thinking);
    assert_eq!(cfg.supports_streaming, Some(true));
    assert_eq!(cfg.models[0].name, "glm-5.1");

    // The base URL is overridable through a single optional env var with a
    // sensible Anthropic-compatible default.
    let vars = cfg.env_vars.as_ref().expect("env_vars should be set");
    assert_eq!(vars.len(), 1);
    assert_eq!(vars[0].name, "ZAI_BASE_URL");
    assert_eq!(
        vars[0].default.as_deref(),
        Some("https://api.z.ai/api/anthropic")
    );
}

#[test]
Expand Down
45 changes: 43 additions & 2 deletions crates/goose/src/providers/anthropic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, MessageStream, ModelInfo, Provider, ProviderDef, ProviderMetadata};
use super::errors::ProviderError;
use super::formats::anthropic::{
create_request, response_to_streaming_message, thinking_type, ThinkingType,
create_request_with_options, response_to_streaming_message, thinking_type,
AnthropicFormatOptions, ThinkingType,
};
use super::inventory::{config_secret_value, serialize_string_map, InventoryIdentityInput};
use super::openai_compatible::handle_status;
Expand Down Expand Up @@ -58,6 +59,8 @@ pub struct AnthropicProvider {
name: String,
custom_models: Option<Vec<String>>,
skip_canonical_filtering: bool,
#[serde(skip)]
format_options: AnthropicFormatOptions,
}

impl AnthropicProvider {
Expand Down Expand Up @@ -85,6 +88,7 @@ impl AnthropicProvider {
name: ANTHROPIC_PROVIDER_NAME.to_string(),
custom_models: None,
skip_canonical_filtering: false,
format_options: AnthropicFormatOptions::default(),
})
}

Expand All @@ -102,6 +106,8 @@ impl AnthropicProvider {
key: api_key,
};

let format_options = Self::format_options_for_provider(config.preserves_thinking);

let mut api_client = ApiClient::new(config.base_url, auth)?
.with_header("anthropic-version", ANTHROPIC_API_VERSION)?;

Expand Down Expand Up @@ -143,9 +149,17 @@ impl AnthropicProvider {
name: config.name.clone(),
custom_models,
skip_canonical_filtering: config.skip_canonical_filtering,
format_options,
})
}

/// Map a declarative provider's single `preserves_thinking` flag onto the
/// Anthropic formatter options; both thinking-related switches are driven
/// by that one flag.
fn format_options_for_provider(preserves_thinking: bool) -> AnthropicFormatOptions {
    let preserve = preserves_thinking;
    AnthropicFormatOptions {
        preserve_unsigned_thinking: preserve,
        preserve_thinking_context: preserve,
    }
}

fn get_conditional_headers(&self) -> Vec<(&str, &str)> {
let mut headers = Vec::new();

Expand Down Expand Up @@ -306,7 +320,13 @@ impl Provider for AnthropicProvider {
messages: &[Message],
tools: &[Tool],
) -> Result<MessageStream, ProviderError> {
let mut payload = create_request(model_config, system, messages, tools)?;
let mut payload = create_request_with_options(
model_config,
system,
messages,
tools,
self.format_options,
)?;
payload
.as_object_mut()
.unwrap()
Expand Down Expand Up @@ -345,3 +365,24 @@ impl Provider for AnthropicProvider {
}))
}
}

#[cfg(test)]
mod tests {
    use super::*;

    /// With `preserves_thinking` disabled, format options must match the
    /// strict Anthropic defaults.
    #[test]
    fn test_default_provider_uses_strict_anthropic_thinking() {
        let options = AnthropicProvider::format_options_for_provider(false);
        assert_eq!(options, AnthropicFormatOptions::default());
    }

    /// With `preserves_thinking` enabled, both formatter switches turn on.
    #[test]
    fn test_preserves_thinking_provider_preserves_thinking_context() {
        let options = AnthropicProvider::format_options_for_provider(true);

        assert!(options.preserve_unsigned_thinking);
        assert!(options.preserve_thinking_context);
    }
}
34 changes: 34 additions & 0 deletions crates/goose/src/providers/declarative/zai.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
{
"name": "zai",
"engine": "anthropic",
"display_name": "Z.AI",
"description": "Z.AI GLM models via Anthropic-compatible API.",
"api_key_env": "ZHIPU_API_KEY",
"base_url": "${ZAI_BASE_URL}",
"env_vars": [
{
"name": "ZAI_BASE_URL",
"required": false,
"secret": false,
"default": "https://api.z.ai/api/anthropic",
"description": "Z.AI Anthropic-compatible API base URL."
}
],
"catalog_provider_id": "zai",
"model_doc_link": "https://docs.z.ai/devpack/tool/goose",
"fast_model": "glm-4.5-air",
"preserves_thinking": true,
"models": [
{"name": "glm-5.1", "context_limit": 200000},
{"name": "glm-5", "context_limit": 204800},
{"name": "glm-5-turbo", "context_limit": 200000},
{"name": "glm-4.7", "context_limit": 204800},
{"name": "glm-4.7-flash", "context_limit": 200000},
{"name": "glm-4.7-flashx", "context_limit": 200000},
{"name": "glm-4.6", "context_limit": 204800},
{"name": "glm-4.5", "context_limit": 131072},
{"name": "glm-4.5-air", "context_limit": 131072},
{"name": "glm-4.5-flash", "context_limit": 131072}
],
"supports_streaming": true
}
Loading
Loading