Skip to content
Open
Show file tree
Hide file tree
Changes from 35 commits
Commits
Show all changes
38 commits
Select commit. Hold shift + click to select a range.
56e477e
add imagen
May 27, 2025
85e0997
Merge branch 'main' of https://github.com/DDU1222/cherry-studio
May 27, 2025
c007ff4
Merge branch 'main' of https://github.com/DDU1222/cherry-studio
Jun 4, 2025
04d9b02
Merge branch 'CherryHQ:main' into main
DDU1222 Jun 9, 2025
099b963
Merge branch 'CherryHQ:main' into main
DDU1222 Jun 16, 2025
55f1c42
Merge branch 'CherryHQ:main' into main
DDU1222 Jun 18, 2025
d4ebac9
Merge branch 'CherryHQ:main' into main
DDU1222 Jun 24, 2025
b92c9d4
Merge branch 'CherryHQ:main' into main
DDU1222 Jul 24, 2025
9ff702d
Merge branch 'CherryHQ:main' into main
DDU1222 Jul 27, 2025
0b121e2
Merge branch 'CherryHQ:main' into main
DDU1222 Jul 29, 2025
d4f38b0
Merge branch 'CherryHQ:main' into main
DDU1222 Aug 6, 2025
976b735
Merge branch 'CherryHQ:main' into main
DDU1222 Aug 15, 2025
cbc2e73
Merge branch 'CherryHQ:main' into main
DDU1222 Oct 20, 2025
6ae7a9d
Merge branch 'CherryHQ:main' into main
DDU1222 Oct 21, 2025
5a9b38e
Merge branch 'CherryHQ:main' into main
DDU1222 Oct 22, 2025
f2bfe87
Merge branch 'CherryHQ:main' into main
DDU1222 Nov 2, 2025
bb27cf5
Merge branch 'CherryHQ:main' into main
DDU1222 Nov 26, 2025
b245761
Merge branch 'CherryHQ:main' into main
DDU1222 Nov 30, 2025
1e89ab9
Merge branch 'CherryHQ:main' into main
DDU1222 Dec 7, 2025
9ec0a5c
Merge branch 'CherryHQ:main' into main
DDU1222 Dec 12, 2025
8c264f7
Merge branch 'CherryHQ:main' into main
DDU1222 Feb 7, 2026
f88f2c8
Merge branch 'CherryHQ:main' into main
DDU1222 Mar 8, 2026
d0f5a33
fix: rewrite OpenClaw determineApiType to use mapping-based protocol …
DDU1222 Mar 11, 2026
a5bac1e
fix: restrict model name inference to providers with anthropicApiHost
DDU1222 Mar 11, 2026
c311e4d
fix: add gemini provider type to OpenClaw protocol mapping
DDU1222 Mar 11, 2026
44652fa
fix: use geminiApiHost for gemini protocol inference on aggregators
DDU1222 Mar 11, 2026
273a34e
fix: use geminiApiHost as base URL for google-generative-ai protocol
DDU1222 Mar 11, 2026
d20aaa2
fix: use provider.geminiApiHost instead of hardcoded URL in AihubmixA…
DDU1222 Mar 11, 2026
c32044b
fix: add migration 200 to populate geminiApiHost for aihubmix provider
DDU1222 Mar 11, 2026
1ec9aca
fix: update geminiApiHost to v1beta and add migration 201
DDU1222 Mar 11, 2026
0ff7095
chore: remove design document
DDU1222 Mar 11, 2026
4186142
chore: remove determineApiType test file
DDU1222 Mar 11, 2026
f456580
chore: revert test mock change in clientCompatibilityTypes
DDU1222 Mar 11, 2026
8ad017c
Merge branch 'CherryHQ:main' into main
DDU1222 Mar 11, 2026
d1ace75
Merge branch 'main' into fix/openclaw-determine-api-type
DDU1222 Mar 11, 2026
8804a5e
Merge branch 'CherryHQ:main' into main
DDU1222 Mar 15, 2026
c82beb0
merge main
DDU1222 Mar 15, 2026
b69c408
Update migrate.ts
DDU1222 Mar 15, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
150 changes: 104 additions & 46 deletions src/main/services/OpenClawService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -79,33 +79,119 @@ export interface OpenClawProviderConfig {
}

/**
* OpenClaw API types
* - 'openai-completions': For OpenAI-compatible chat completions API
* - 'anthropic-messages': For Anthropic Messages API format
* OpenClaw API protocol types.
* Add new protocol mappings here as OpenClaw adds support for them.
*/
const OPENCLAW_API_TYPES = {
OPENAI: 'openai-completions',
OPENAI_RESPONSE: 'openai-responses',
OPENAI_CODEX_RESPONSE: 'openai-codex-responses',
ANTHROPIC: 'anthropic-messages',
OPENAI_RESPOSNE: 'openai-responses'
GOOGLE: 'google-generative-ai',
COPILOT: 'github-copilot',
BEDROCK: 'bedrock-converse-stream',
OLLAMA: 'ollama'
} as const

/**
 * Mapping from Cherry Studio EndpointType to OpenClaw API protocol.
 * Used when the model carries explicit endpoint_type metadata
 * (set by new-api and similar mixed-endpoint providers).
 */
const ENDPOINT_TO_OPENCLAW_API: Record<string, string> = {
  anthropic: OPENCLAW_API_TYPES.ANTHROPIC,
  openai: OPENCLAW_API_TYPES.OPENAI,
  'openai-response': OPENCLAW_API_TYPES.OPENAI_RESPONSE,
  gemini: OPENCLAW_API_TYPES.GOOGLE
}

/**
 * Mapping from Cherry Studio provider type to OpenClaw API protocol.
 * Used for providers that always use a specific protocol regardless of model.
 * NOTE(review): plain 'openai' is intentionally absent — OpenAI-compatible
 * providers fall through to model-name inference and the default in
 * determineApiType; confirm this is the intended routing.
 */
const PROVIDER_TYPE_TO_OPENCLAW_API: Partial<Record<ProviderType, string>> = {
anthropic: OPENCLAW_API_TYPES.ANTHROPIC,
'vertex-anthropic': OPENCLAW_API_TYPES.ANTHROPIC,
gemini: OPENCLAW_API_TYPES.GOOGLE,
ollama: OPENCLAW_API_TYPES.OLLAMA,
'aws-bedrock': OPENCLAW_API_TYPES.BEDROCK,
'openai-response': OPENCLAW_API_TYPES.OPENAI_RESPONSE
}

/**
 * Mapping from Cherry Studio provider id to OpenClaw API protocol.
 * Add provider-specific protocol overrides here as needed.
 * Checked before the provider-type mapping, so an id entry wins over
 * whatever the provider's generic type would imply.
 */
const PROVIDER_ID_TO_OPENCLAW_API: Record<string, string> = {
copilot: OPENCLAW_API_TYPES.COPILOT
}

/**
 * Get the base model name (last segment after '/') in lowercase.
 * e.g. 'openrouter/anthropic/claude-opus-4.6' => 'claude-opus-4.6'
 */
function getModelBaseName(modelId: string): string {
  const lastSlash = modelId.lastIndexOf('/')
  const segment = lastSlash === -1 ? modelId : modelId.slice(lastSlash + 1)
  // Fall back to the full id when the last segment is empty (trailing '/').
  return (segment || modelId).toLowerCase()
}

/**
 * Check if a model is an Anthropic model by its name.
 * Matches on the lowercase base name, so 'openrouter/anthropic/Claude-3-5'
 * and 'claude-opus-4.6' both qualify.
 */
function isAnthropicModel(modelId: string): boolean {
  return getModelBaseName(modelId).startsWith('claude')
}

/**
* Check if a model should use Anthropic API based on endpoint_type
* Check if a model is a Gemini model by its name.
*/
function isAnthropicEndpointType(model: Model): boolean {
const endpointType = model.endpoint_type
return endpointType ? ANTHROPIC_ENDPOINT_TYPES.includes(endpointType) : false
function isGeminiModel(modelId: string): boolean {
return getModelBaseName(modelId).startsWith('gemini')
}

/**
 * Determine the appropriate OpenClaw API protocol for the given provider and model.
 *
 * Priority order:
 * 1. Model's explicit endpoint_type (model knows best — set by new-api, etc.)
 * 2. Provider id (provider-specific protocol overrides)
 * 3. Provider type (anthropic, vertex-anthropic, gemini, ollama, bedrock, openai-response)
 * 4. Model name inference for multi-protocol aggregators
 *    (only when provider has a dedicated host for that protocol)
 * 5. Default to openai-completions
 *
 * @internal Exported for testing only.
 */
export function determineApiType(
  provider: { id: string; type: string; anthropicApiHost?: string; geminiApiHost?: string },
  model: { id: string; endpoint_type?: string }
): string {
  // 1. Model's explicit endpoint_type (highest priority — model declares its own protocol).
  const fromEndpoint = model.endpoint_type ? ENDPOINT_TO_OPENCLAW_API[model.endpoint_type] : undefined
  if (fromEndpoint) {
    return fromEndpoint
  }

  // 2. Provider-id-specific protocol override (e.g. copilot).
  const fromProviderId = PROVIDER_ID_TO_OPENCLAW_API[provider.id]
  if (fromProviderId) {
    return fromProviderId
  }

  // 3. Provider-type-specific protocol (anthropic, vertex-anthropic, gemini, etc.).
  // Single indexed read into a local avoids the original's double lookup and
  // non-null assertion, and stays valid under noUncheckedIndexedAccess.
  const fromProviderType = PROVIDER_TYPE_TO_OPENCLAW_API[provider.type as ProviderType]
  if (fromProviderType) {
    return fromProviderType
  }

  // 4. Infer protocol from model name for multi-protocol aggregators.
  //    Each vendor-specific host (anthropicApiHost, geminiApiHost) independently
  //    signals that the provider can route to that vendor's native API.
  if (provider.anthropicApiHost && isAnthropicModel(model.id)) {
    return OPENCLAW_API_TYPES.ANTHROPIC
  }
  if (provider.geminiApiHost && isGeminiModel(model.id)) {
    return OPENCLAW_API_TYPES.GOOGLE
  }

  // 5. Default to OpenAI-compatible chat completions.
  return OPENCLAW_API_TYPES.OPENAI
}

/**
Expand Down Expand Up @@ -935,38 +1021,8 @@ class OpenClawService {
})
}

/**
 * Determine the OpenClaw API protocol for a provider/model pair.
 * Thin wrapper delegating to the module-level determineApiType so the
 * resolution logic can be unit-tested without instantiating the service.
 */
private determineApiType(provider: Provider, model: Model): string {
  return determineApiType(provider, model)
}

/**
Expand All @@ -976,11 +1032,13 @@ class OpenClawService {
*/
/**
 * Resolve the base URL OpenClaw should use for the chosen API protocol.
 * Falls back to the provider's generic apiHost when no protocol-specific
 * host is configured.
 */
private getBaseUrlForApiType(provider: Provider, apiType: string): string {
if (apiType === OPENCLAW_API_TYPES.ANTHROPIC) {
// Anthropic protocol: prefer the dedicated anthropicApiHost when available.
const host = provider.anthropicApiHost || provider.apiHost
return this.formatAnthropicUrl(host)
}
// Gemini protocol: use the dedicated Gemini host when configured.
if (apiType === OPENCLAW_API_TYPES.GOOGLE && provider.geminiApiHost) {
return withoutTrailingSlash(provider.geminiApiHost)
}
// All remaining protocols currently share the OpenAI-compatible URL format.
// TODO: Add dedicated URL formatters for ollama, bedrock, copilot protocols
return this.formatOpenAIUrl(provider)
}

Expand Down
2 changes: 1 addition & 1 deletion src/main/services/WindowService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,7 @@ export class WindowService {
'https://account.siliconflow.cn/oauth',
'https://cloud.siliconflow.cn/bills',
'https://cloud.siliconflow.cn/expensebill',
'https://console.aihubmix.com/token',
'https://console.aihubmix.com/sign-in',
'https://console.aihubmix.com/topup',
'https://console.aihubmix.com/statistics',
'https://dash.302.ai/sso/login',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,10 @@ export class AihubmixAPIClient extends MixedBaseAPIClient {

// 初始化各个client - 现在有类型安全
const claudeClient = new AnthropicAPIClient(providerExtraHeaders)
const geminiClient = new GeminiAPIClient({ ...providerExtraHeaders, apiHost: 'https://aihubmix.com/gemini' })
const geminiClient = new GeminiAPIClient({
...providerExtraHeaders,
apiHost: 'https://aihubmix.com/gemini'
})
const openaiClient = new OpenAIResponseAPIClient(providerExtraHeaders)
const defaultClient = new OpenAIAPIClient(providerExtraHeaders)

Expand Down
1 change: 1 addition & 0 deletions src/renderer/src/config/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ export const SYSTEM_PROVIDERS_CONFIG: Record<SystemProviderId, SystemProvider> =
apiKey: '',
apiHost: 'https://aihubmix.com',
anthropicApiHost: 'https://aihubmix.com',
geminiApiHost: 'https://aihubmix.com/gemini/v1beta',
models: SYSTEM_MODELS.aihubmix,
isSystem: true,
enabled: false
Expand Down
2 changes: 1 addition & 1 deletion src/renderer/src/store/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ const persistedReducer = persistReducer(
{
key: 'cherry-studio',
storage,
version: 199,
version: 201,
blacklist: ['runtime', 'messages', 'messageBlocks', 'tabs', 'toolPermissions'],
migrate
},
Expand Down
18 changes: 18 additions & 0 deletions src/renderer/src/store/migrate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3255,6 +3255,24 @@ const migrateConfig = {
logger.error('migrate 199 error', error as Error)
return state
}
},
// Populate geminiApiHost for the aihubmix provider.
'200': (state: RootState) => {
  try {
    updateProvider(state, 'aihubmix', { geminiApiHost: 'https://aihubmix.com/gemini/v1beta' })
    return state
  } catch (error) {
    logger.error('migrate 200 error', error as Error)
    return state
  }
},
// Re-applies the same value as 200 for stores that ran an earlier revision
// of migration 200 (which wrote a pre-v1beta URL).
// NOTE(review): 200 and 201 are now identical — consider merging them before
// release if no users have persisted version 200.
'201': (state: RootState) => {
  try {
    updateProvider(state, 'aihubmix', { geminiApiHost: 'https://aihubmix.com/gemini/v1beta' })
    return state
  } catch (error) {
    logger.error('migrate 201 error', error as Error)
    return state
  }
}
}

Expand Down
1 change: 1 addition & 0 deletions src/renderer/src/types/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ export type Provider = {
apiKey: string
apiHost: string
anthropicApiHost?: string
geminiApiHost?: string
isAnthropicModel?: (m: Model) => boolean
apiVersion?: string
models: Model[]
Expand Down
2 changes: 1 addition & 1 deletion src/renderer/src/utils/oauth.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ export const oauthWithSiliconFlow = async (setKey) => {
}

export const oauthWithAihubmix = async (setKey) => {
const authUrl = ` https://console.aihubmix.com/token?client_id=cherry_studio_oauth&lang=${getLanguageCode()}&aff=SJyh`
const authUrl = `https://console.aihubmix.com/sign-in?client_id=cherry_studio_oauth&lang=${getLanguageCode()}&aff=SJyh`

const popup = window.open(
authUrl,
Expand Down
Loading