diff --git a/README.md b/README.md
index 3a81639..c124bbb 100644
--- a/README.md
+++ b/README.md
@@ -119,7 +119,7 @@ AIScript supports the following AI models:
 
 - [x] OpenAI ((uses `OPENAI_API_KEY` environment variable by default))
 - [x] DeepSeek
-- [ ] Anthropic
+- [x] Anthropic
 
 Configuration by `project.toml`:
 
@@ -133,6 +133,11 @@ model = "gpt-3.5-turbo"
 [ai.deepseek]
 api_key = "YOUR_API_KEY"
 model = "deepseek-chat"
+
+# or use Anthropic
+[ai.anthropic]
+api_key = "YOUR_API_KEY"
+model = "claude-3-5-sonnet-latest"
 ```
 
 ## Roadmap
diff --git a/aiscript-vm/src/ai/mod.rs b/aiscript-vm/src/ai/mod.rs
index ba82193..61a9f80 100644
--- a/aiscript-vm/src/ai/mod.rs
+++ b/aiscript-vm/src/ai/mod.rs
@@ -9,9 +9,14 @@ pub use prompt::{PromptConfig, prompt_with_config};
 
 use serde::Deserialize;
 
+// Deepseek
 const DEEPSEEK_API_ENDPOINT: &str = "https://api.deepseek.com/v1";
 const DEEPSEEK_V3: &str = "deepseek-chat";
 
+// Anthropic
+const ANTHROPIC_API_ENDPOINT: &str = "https://api.anthropic.com/v1";
+const CLAUDE_3_5_SONNET: &str = "claude-3-5-sonnet-latest";
+
 #[derive(Debug, Clone, Deserialize)]
 pub enum AiConfig {
     #[serde(rename = "openai")]
@@ -69,7 +74,11 @@ pub(crate) fn openai_client(config: Option<&AiConfig>) -> OpenAIClient {
             .with_api_key(api_key)
             .build()
             .unwrap(),
-        Some(AiConfig::Anthropic(_)) => unimplemented!("Anthropic API not yet supported"),
+        Some(AiConfig::Anthropic(ModelConfig { api_key, .. })) => OpenAIClient::builder()
+            .with_endpoint(ANTHROPIC_API_ENDPOINT)
+            .with_api_key(api_key)
+            .build()
+            .unwrap(),
     }
 }
 
@@ -82,6 +91,8 @@ pub(crate) fn default_model(config: Option<&AiConfig>) -> String {
         Some(AiConfig::DeepSeek(ModelConfig { model, .. })) => {
            model.clone().unwrap_or(DEEPSEEK_V3.to_string())
         }
-        Some(AiConfig::Anthropic(_)) => unimplemented!("Anthropic API not yet supported"),
+        Some(AiConfig::Anthropic(ModelConfig { model, .. })) => {
+            model.clone().unwrap_or(CLAUDE_3_5_SONNET.to_string())
+        }
     }
 }
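
For reference, a minimal usage sketch of the new Anthropic arm. It assumes `ModelConfig` is a plain struct with `api_key: String` and `model: Option<String>` fields, which this diff does not show:

```rust
// Sketch only: ModelConfig's field names and types are assumed, not confirmed by this diff.
let config = AiConfig::Anthropic(ModelConfig {
    api_key: "YOUR_API_KEY".to_string(),
    model: Some("claude-3-5-sonnet-latest".to_string()),
});

// The new arm reuses the OpenAI-compatible client builder, pointed at
// ANTHROPIC_API_ENDPOINT, mirroring the existing DeepSeek arm.
let _client = openai_client(Some(&config));
assert_eq!(default_model(Some(&config)), "claude-3-5-sonnet-latest");
```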