-
Notifications
You must be signed in to change notification settings - Fork 7.5k
fix: route OpenAI Codex shortcuts to correct endpoint #566
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 6 commits
f45b904
d5a5237
8c1ba91
a0323f7
1825b6e
06f2c96
a566167
bfca356
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -5,7 +5,7 @@ | |
| * Addresses: https://github.com/Gitlawb/openclaude/issues/55 | ||
| */ | ||
|
|
||
| import { isLocalProviderUrl } from '../services/api/providerConfig.js' | ||
| import { isLocalProviderUrl, resolveProviderRequest } from '../services/api/providerConfig.js' | ||
| import { getLocalOpenAICompatibleProviderLabel } from '../utils/providerDiscovery.js' | ||
|
|
||
| declare const MACRO: { VERSION: string; DISPLAY_VERSION?: string } | ||
|
|
@@ -101,10 +101,17 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc | |
|
|
||
| if (useOpenAI) { | ||
| const rawModel = process.env.OPENAI_MODEL || 'gpt-4o' | ||
| const baseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1' | ||
| const resolvedRequest = resolveProviderRequest({ | ||
| model: rawModel, | ||
| baseUrl: process.env.OPENAI_BASE_URL, | ||
| }) | ||
| const baseUrl = resolvedRequest.baseUrl | ||
| const isLocal = isLocalProviderUrl(baseUrl) | ||
| let name = 'OpenAI' | ||
| if (/deepseek/i.test(baseUrl) || /deepseek/i.test(rawModel)) name = 'DeepSeek' | ||
| // Override to Codex when resolved endpoint is Codex | ||
| if (resolvedRequest.transport === 'codex_responses' || baseUrl.includes('chatgpt.com/backend-api/codex')) { | ||
| name = 'Codex' | ||
| } else if (/deepseek/i.test(baseUrl) || /deepseek/i.test(rawModel)) name = 'DeepSeek' | ||
|
Comment on lines
+120
to
+123
|
||
| else if (/openrouter/i.test(baseUrl)) name = 'OpenRouter' | ||
| else if (/together/i.test(baseUrl)) name = 'Together AI' | ||
| else if (/groq/i.test(baseUrl)) name = 'Groq' | ||
|
|
@@ -114,26 +121,9 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc | |
| else if (isLocal) name = getLocalOpenAICompatibleProviderLabel(baseUrl) | ||
|
|
||
| // Resolve model alias to actual model name + reasoning effort | ||
| let displayModel = rawModel | ||
| const codexAliases: Record<string, { model: string; reasoningEffort?: string }> = { | ||
| codexplan: { model: 'gpt-5.4', reasoningEffort: 'high' }, | ||
| 'gpt-5.4': { model: 'gpt-5.4', reasoningEffort: 'high' }, | ||
| 'gpt-5.3-codex': { model: 'gpt-5.3-codex', reasoningEffort: 'high' }, | ||
| 'gpt-5.3-codex-spark': { model: 'gpt-5.3-codex-spark' }, | ||
| codexspark: { model: 'gpt-5.3-codex-spark' }, | ||
| 'gpt-5.2-codex': { model: 'gpt-5.2-codex', reasoningEffort: 'high' }, | ||
| 'gpt-5.1-codex-max': { model: 'gpt-5.1-codex-max', reasoningEffort: 'high' }, | ||
| 'gpt-5.1-codex-mini': { model: 'gpt-5.1-codex-mini' }, | ||
| 'gpt-5.4-mini': { model: 'gpt-5.4-mini', reasoningEffort: 'medium' }, | ||
| 'gpt-5.2': { model: 'gpt-5.2', reasoningEffort: 'medium' }, | ||
| } | ||
| const alias = rawModel.toLowerCase() | ||
| if (alias in codexAliases) { | ||
| const resolved = codexAliases[alias] | ||
| displayModel = resolved.model | ||
| if (resolved.reasoningEffort) { | ||
| displayModel = `${displayModel} (${resolved.reasoningEffort})` | ||
| } | ||
| let displayModel = resolvedRequest.resolvedModel | ||
| if (resolvedRequest.reasoning?.effort) { | ||
| displayModel = `${displayModel} (${resolvedRequest.reasoning.effort})` | ||
| } | ||
|
|
||
| return { name, model: displayModel, baseUrl, isLocal } | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -84,6 +84,18 @@ describe('Codex provider config', () => { | |
| expect(resolved.transport).toBe('codex_responses') | ||
| expect(resolved.resolvedModel).toBe('gpt-5.4') | ||
| expect(resolved.reasoning).toEqual({ effort: 'high' }) | ||
| expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex') | ||
| }) | ||
|
|
||
| test('resolves codexspark alias to Codex transport with Codex base URL', () => { | ||
| delete process.env.OPENAI_BASE_URL | ||
| delete process.env.OPENAI_API_BASE | ||
| delete process.env.CLAUDE_CODE_USE_GITHUB | ||
|
|
||
| const resolved = resolveProviderRequest({ model: 'codexspark' }) | ||
| expect(resolved.transport).toBe('codex_responses') | ||
| expect(resolved.resolvedModel).toBe('gpt-5.3-codex-spark') | ||
| expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex') | ||
| }) | ||
|
|
||
| test('does not force Codex transport when a local non-Codex base URL is explicit', () => { | ||
|
|
@@ -118,6 +130,37 @@ describe('Codex provider config', () => { | |
| expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex') | ||
| }) | ||
|
|
||
| test('default gpt-4o uses OpenAI base URL (no regression)', () => { | ||
| delete process.env.OPENAI_BASE_URL | ||
| delete process.env.CLAUDE_CODE_USE_GITHUB | ||
|
|
||
| const resolved = resolveProviderRequest({ model: 'gpt-4o' }) | ||
| expect(resolved.transport).toBe('chat_completions') | ||
| expect(resolved.baseUrl).toBe('https://api.openai.com/v1') | ||
| expect(resolved.resolvedModel).toBe('gpt-4o') | ||
| }) | ||
|
|
||
| test('resolves codexplan from env var OPENAI_MODEL to Codex endpoint', () => { | ||
| process.env.OPENAI_MODEL = 'codexplan' | ||
| delete process.env.OPENAI_BASE_URL | ||
| delete process.env.CLAUDE_CODE_USE_GITHUB | ||
|
|
||
| const resolved = resolveProviderRequest() | ||
| expect(resolved.transport).toBe('codex_responses') | ||
| expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex') | ||
| expect(resolved.resolvedModel).toBe('gpt-5.4') | ||
|
Comment on lines
+147
to
+155
|
||
| }) | ||
|
|
||
| test('does not override custom base URL for codexplan (e.g., local provider)', () => { | ||
| process.env.OPENAI_MODEL = 'codexplan' | ||
| process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1' | ||
| delete process.env.CLAUDE_CODE_USE_GITHUB | ||
|
|
||
| const resolved = resolveProviderRequest() | ||
| expect(resolved.transport).toBe('chat_completions') | ||
| expect(resolved.baseUrl).toBe('http://localhost:11434/v1') | ||
| }) | ||
|
Comment on lines
+147
to
+166
|
||
|
|
||
| test('loads Codex credentials from auth.json fallback', () => { | ||
| const authPath = createTempAuthJson({ | ||
| tokens: { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -59,6 +59,8 @@ const CODEX_ALIAS_MODELS: Record< | |
| type CodexAlias = keyof typeof CODEX_ALIAS_MODELS | ||
| type ReasoningEffort = 'low' | 'medium' | 'high' | 'xhigh' | ||
|
|
||
| const OPENAI_CODEX_SHORTCUT_ALIASES = new Set(['codexplan', 'codexspark']) | ||
|
|
||
| export type ProviderTransport = 'chat_completions' | 'codex_responses' | ||
|
|
||
| export type ResolvedProviderRequest = { | ||
|
|
@@ -219,6 +221,12 @@ export function isCodexAlias(model: string): boolean { | |
| return base in CODEX_ALIAS_MODELS | ||
| } | ||
|
|
||
| function isOpenAICodexShortcutAlias(model: string): boolean { | ||
| const normalized = model.trim().toLowerCase() | ||
| const base = normalized.split('?', 1)[0] ?? normalized | ||
| return OPENAI_CODEX_SHORTCUT_ALIASES.has(base) | ||
| } | ||
|
|
||
| export function shouldUseCodexTransport( | ||
| model: string, | ||
| baseUrl: string | undefined, | ||
|
|
@@ -363,11 +371,36 @@ export function resolveProviderRequest(options?: { | |
| options?.fallbackModel?.trim() || | ||
| (isGithubMode ? 'github:copilot' : 'gpt-4o') | ||
| const descriptor = parseModelDescriptor(requestedModel) | ||
| const rawBaseUrl = | ||
| asEnvUrl(options?.baseUrl) ?? | ||
| const explicitBaseUrl = asEnvUrl(options?.baseUrl) | ||
| const envBaseUrlRaw = | ||
| asEnvUrl(process.env.OPENAI_BASE_URL) ?? | ||
| asEnvUrl(process.env.OPENAI_API_BASE) | ||
|
|
||
| const isCodexModelForGithub = isGithubMode && isCodexAlias(requestedModel) | ||
| const envBaseUrl = | ||
| isCodexModelForGithub && envBaseUrlRaw && getGithubEndpointType(envBaseUrlRaw) === 'custom' | ||
| ? undefined | ||
| : envBaseUrlRaw | ||
|
|
||
| const rawBaseUrl = explicitBaseUrl ?? envBaseUrl | ||
|
|
||
| const shellModel = process.env.OPENAI_MODEL?.trim() ?? '' | ||
| const envIsCodexShortcut = isOpenAICodexShortcutAlias(shellModel) | ||
| const envResolvedCodexModel = envIsCodexShortcut | ||
| ? parseModelDescriptor(shellModel).baseModel | ||
| : null | ||
| const requestedMatchesEnvCodexShortcut = | ||
| Boolean(options?.model) && | ||
| Boolean(envResolvedCodexModel) && | ||
| descriptor.baseModel === envResolvedCodexModel | ||
| const isCodexAliasModel = | ||
| isOpenAICodexShortcutAlias(requestedModel) || requestedMatchesEnvCodexShortcut | ||
| const hasUserSetBaseUrl = rawBaseUrl && rawBaseUrl !== DEFAULT_OPENAI_BASE_URL | ||
| const finalBaseUrl = | ||
|
Comment on lines
+396
to
+408
|
||
| !isGithubMode && isCodexAliasModel && !hasUserSetBaseUrl | ||
| ? DEFAULT_CODEX_BASE_URL | ||
| : rawBaseUrl | ||
|
Comment on lines
+408
to
+411
|
||
|
|
||
| const githubEndpointType = isGithubMode | ||
| ? getGithubEndpointType(rawBaseUrl) | ||
| : 'custom' | ||
|
|
@@ -380,7 +413,7 @@ export function resolveProviderRequest(options?: { | |
| : requestedModel | ||
|
|
||
| const transport: ProviderTransport = | ||
| shouldUseCodexTransport(requestedModel, rawBaseUrl) || | ||
| shouldUseCodexTransport(requestedModel, finalBaseUrl) || | ||
| (isGithubCopilot && shouldUseGithubResponsesApi(githubResolvedModel)) | ||
| ? 'codex_responses' | ||
| : 'chat_completions' | ||
|
|
@@ -404,7 +437,7 @@ export function resolveProviderRequest(options?: { | |
| requestedModel, | ||
| resolvedModel, | ||
| baseUrl: | ||
| (rawBaseUrl ?? | ||
| (finalBaseUrl ?? | ||
| (isGithubCopilot && transport === 'codex_responses' | ||
| ? GITHUB_COPILOT_BASE_URL | ||
| : (isGithubMode | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The provider label is set to "Codex" when
`resolvedRequest.transport === 'codex_responses'`, but `codex_responses` can be selected for reasons other than hitting the Codex endpoint (e.g., the Responses API on the OpenAI base URL). If the intent is “label as Codex only when using the Codex endpoint”, prefer checking `isCodexBaseUrl(resolvedRequest.baseUrl)` (and avoid the hard-coded `.includes('chatgpt.com/backend-api/codex')`).