Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions crates/goose/src/providers/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ use super::{
ollama::OllamaProvider,
openai::OpenAiProvider,
openrouter::OpenRouterProvider,
perplexity::PerplexityProvider,
pi_acp::PiAcpProvider,
provider_registry::ProviderRegistry,
snowflake::SnowflakeProvider,
Expand Down Expand Up @@ -79,6 +80,7 @@ async fn init_registry() -> RwLock<ProviderRegistry> {
registry.register::<OllamaProvider>(true);
registry.register::<OpenAiProvider>(true);
registry.register::<OpenRouterProvider>(true);
registry.register::<PerplexityProvider>(false);
registry.register::<PiAcpProvider>(false);
#[cfg(feature = "aws-providers")]
registry.register::<SageMakerTgiProvider>(false);
Expand Down
1 change: 1 addition & 0 deletions crates/goose/src/providers/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ pub mod ollama;
pub mod openai;
pub mod openai_compatible;
pub mod openrouter;
pub mod perplexity;
pub mod pi_acp;
pub mod provider_registry;
pub mod provider_test;
Expand Down
151 changes: 151 additions & 0 deletions crates/goose/src/providers/perplexity.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,151 @@
use super::api_client::{ApiClient, AuthMethod};
use super::base::{ConfigKey, ProviderDef, ProviderMetadata};
use super::openai_compatible::OpenAiCompatibleProvider;
use crate::model::ModelConfig;
use anyhow::Result;
use futures::future::BoxFuture;

/// Registry identifier for this provider (matches the registered name in init.rs).
const PERPLEXITY_PROVIDER_NAME: &str = "perplexity";
/// Default base URL of Perplexity's OpenAI-compatible chat completions API.
pub const PERPLEXITY_API_HOST: &str = "https://api.perplexity.ai";
/// Model used when the user has not configured one explicitly.
pub const PERPLEXITY_DEFAULT_MODEL: &str = "sonar-pro";

/// Models exposed via Perplexity's OpenAI-compatible chat completions endpoint.
///
/// Perplexity ships new and renames existing models on its own cadence; this list
/// is a curated default for setup wizards. Users can override
/// `GOOSE_MODEL` to point at any other model the API accepts.
pub const PERPLEXITY_KNOWN_MODELS: &[&str] = &[
    "sonar",
    "sonar-pro",
    "sonar-reasoning",
    "sonar-reasoning-pro",
];

/// Getting-started documentation linked from setup UIs.
pub const PERPLEXITY_DOC_URL: &str = "https://docs.perplexity.ai/docs/getting-started";

/// Provider definition for Perplexity's hosted chat models.
pub struct PerplexityProvider;

impl PerplexityProvider {
    /// Looks up the API key, preferring the canonical `PERPLEXITY_API_KEY` and
    /// falling back to the `PPLX_API_KEY` alias used by Perplexity's SDKs.
    ///
    /// When neither secret is set, the error from the canonical lookup is the
    /// one surfaced to the caller.
    fn resolve_api_key() -> Result<String, crate::config::ConfigError> {
        let config = crate::config::Config::global();
        config
            .get_secret::<String>("PERPLEXITY_API_KEY")
            .or_else(|canonical_err| {
                config
                    .get_secret::<String>("PPLX_API_KEY")
                    .map_err(|_| canonical_err)
            })
    }
}

impl ProviderDef for PerplexityProvider {
type Provider = OpenAiCompatibleProvider;

fn metadata() -> ProviderMetadata {
ProviderMetadata::new(
PERPLEXITY_PROVIDER_NAME,
"Perplexity",
"Perplexity chat models with built-in real-time web search grounding",
PERPLEXITY_DEFAULT_MODEL,
PERPLEXITY_KNOWN_MODELS.to_vec(),
PERPLEXITY_DOC_URL,
vec![
ConfigKey::new("PERPLEXITY_API_KEY", true, true, None, true),
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P2 Badge Mark PPLX_API_KEY as a valid configured credential

from_env explicitly accepts PPLX_API_KEY as a fallback, but metadata declares only PERPLEXITY_API_KEY as the required secret. Configuration status is computed from metadata via default_inventory_configured (crates/goose/src/providers/inventory/mod.rs), and exposed through provider_config_status (crates/goose/src/acp/server.rs), so users who set only PPLX_API_KEY will still be reported as unconfigured (and can be filtered out in configured-provider flows) even though runtime auth would succeed. Please add alias-aware configured checks (or include the alias in required-key semantics) so status matches actual provider behavior.

Useful? React with 👍 / 👎.

ConfigKey::new(
"PERPLEXITY_HOST",
false,
false,
Some(PERPLEXITY_API_HOST),
false,
),
],
)
.with_setup_steps(vec![
"Go to https://www.perplexity.ai/account/api/keys",
"Create or copy an existing API key",
"Paste the key above as PERPLEXITY_API_KEY",
])
}

fn from_env(
model: ModelConfig,
_extensions: Vec<crate::config::ExtensionConfig>,
) -> BoxFuture<'static, Result<OpenAiCompatibleProvider>> {
Box::pin(async move {
let api_key = Self::resolve_api_key()?;
let host: String = crate::config::Config::global()
.get_param("PERPLEXITY_HOST")
.unwrap_or_else(|_| PERPLEXITY_API_HOST.to_string());

let api_client = ApiClient::new(host, AuthMethod::BearerToken(api_key))?;

Ok(OpenAiCompatibleProvider::new(
PERPLEXITY_PROVIDER_NAME.to_string(),
api_client,
model,
String::new(),
))
})
}
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_metadata_structure() {
        let md = PerplexityProvider::metadata();

        // Identity fields come straight from the module constants.
        assert_eq!(md.name, PERPLEXITY_PROVIDER_NAME);
        assert_eq!(md.display_name, "Perplexity");
        assert_eq!(md.default_model, PERPLEXITY_DEFAULT_MODEL);
        assert_eq!(md.model_doc_link, PERPLEXITY_DOC_URL);

        // Exactly two config keys: the required secret and the host override.
        assert_eq!(md.config_keys.len(), 2);

        let key_cfg = &md.config_keys[0];
        assert_eq!(key_cfg.name, "PERPLEXITY_API_KEY");
        assert!(key_cfg.required && key_cfg.secret && key_cfg.primary);

        let host_cfg = &md.config_keys[1];
        assert_eq!(host_cfg.name, "PERPLEXITY_HOST");
        assert!(!(host_cfg.required || host_cfg.secret));
        assert_eq!(host_cfg.default.as_deref(), Some(PERPLEXITY_API_HOST));
    }

    #[test]
    fn test_known_models_non_empty() {
        let md = PerplexityProvider::metadata();
        let has_default = md
            .known_models
            .iter()
            .any(|m| m.name == PERPLEXITY_DEFAULT_MODEL);
        assert!(!md.known_models.is_empty());
        assert!(has_default);
    }

    #[test]
    fn test_setup_steps_present() {
        let md = PerplexityProvider::metadata();
        assert!(!md.setup_steps.is_empty());
        let mentions_key = md
            .setup_steps
            .iter()
            .any(|step| step.contains("PERPLEXITY_API_KEY"));
        assert!(mentions_key);
    }

    #[test]
    fn test_default_model_is_known() {
        // The default must be one of the curated known models.
        let is_known = PERPLEXITY_KNOWN_MODELS
            .iter()
            .any(|m| *m == PERPLEXITY_DEFAULT_MODEL);
        assert!(is_known);
    }

    #[test]
    fn test_doc_url_points_to_perplexity_docs() {
        assert!(PERPLEXITY_DOC_URL.starts_with("https://docs.perplexity.ai"));
    }
}
12 changes: 12 additions & 0 deletions crates/goose/tests/providers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ use goose::providers::errors::ProviderError;
use goose::providers::google::GOOGLE_DEFAULT_MODEL;
use goose::providers::litellm::LITELLM_DEFAULT_MODEL;
use goose::providers::openai::OPEN_AI_DEFAULT_MODEL;
use goose::providers::perplexity::PERPLEXITY_DEFAULT_MODEL;
#[cfg(feature = "aws-providers")]
use goose::providers::sagemaker_tgi::SAGEMAKER_TGI_DEFAULT_MODEL;
use goose::providers::snowflake::SNOWFLAKE_DEFAULT_MODEL;
Expand Down Expand Up @@ -872,6 +873,17 @@ async fn test_xai_provider() -> Result<()> {
.await
}

#[tokio::test]
async fn test_perplexity_provider() -> Result<()> {
    // End-to-end smoke test against the live Perplexity API using the default
    // model. NOTE(review): presumably ProviderTestConfig skips the run when
    // PERPLEXITY_API_KEY is absent from the environment — confirm against the
    // harness; also confirm whether the first argument should be the registry
    // name "perplexity" rather than the display name "Perplexity" (sibling
    // tests like claude-code pass the lowercase registry name).
    ProviderTestConfig::with_llm_provider(
        "Perplexity",
        PERPLEXITY_DEFAULT_MODEL,
        &["PERPLEXITY_API_KEY"],
    )
    .run()
    .await
}

#[tokio::test]
async fn test_claude_code_provider() -> Result<()> {
ProviderTestConfig::with_agentic_provider("claude-code", CLAUDE_CODE_DEFAULT_MODEL, "claude")
Expand Down
1 change: 1 addition & 0 deletions documentation/docs/getting-started/providers.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ goose is compatible with a wide range of LLM providers, allowing you to choose a
| [Ollama Cloud](https://ollama.com/) | Access hosted models on ollama.com via OpenAI-compatible API. Requires an Ollama account and API key. | `OLLAMA_CLOUD_API_KEY` |
| [OpenAI](https://platform.openai.com/api-keys) | Provides gpt-4o, o1, and other advanced language models. Also supports OpenAI-compatible endpoints (e.g., self-hosted LLaMA, vLLM, KServe). **o1-mini and o1-preview are not supported because goose uses tool calling.** | `OPENAI_API_KEY`, `OPENAI_HOST` (optional), `OPENAI_ORGANIZATION` (optional), `OPENAI_PROJECT` (optional), `OPENAI_CUSTOM_HEADERS` (optional) |
| [OpenRouter](https://openrouter.ai/) | API gateway for unified access to various models with features like rate-limiting management. | `OPENROUTER_API_KEY` |
| [Perplexity](https://www.perplexity.ai/) | Chat models with built-in real-time web search grounding. OpenAI-compatible chat completions API at `https://api.perplexity.ai`. | `PERPLEXITY_API_KEY` (or `PPLX_API_KEY`), `PERPLEXITY_HOST` (optional) |
| [OVHcloud AI](https://www.ovhcloud.com/en/public-cloud/ai-endpoints/) | Provides access to open-source models including Qwen, Llama, Mistral, and DeepSeek through AI Endpoints service. | `OVHCLOUD_API_KEY` |
| [Ramalama](https://ramalama.ai/) | Local model using native [OCI](https://opencontainers.org/) container runtimes, [CNCF](https://www.cncf.io/) tools, and supporting models as OCI artifacts. Ramalama API is a compatible alternative to Ollama and can be used with the goose Ollama provider. Supports Qwen, Llama, DeepSeek, and other open-source models. **Because this provider runs locally, you must first [download and run a model](#local-llms).** | `OLLAMA_HOST` |
| [Snowflake](https://docs.snowflake.com/user-guide/snowflake-cortex/aisql#choosing-a-model) | Access the latest models using Snowflake Cortex services, including Claude models. **Requires a Snowflake account and programmatic access token (PAT)**. | `SNOWFLAKE_HOST`, `SNOWFLAKE_TOKEN` |
Expand Down
Loading