Skip to content
2 changes: 1 addition & 1 deletion src/commands/provider/provider.test.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ test('buildCurrentProviderSummary labels generic local openai-compatible provide
expect(summary.endpointLabel).toBe('http://127.0.0.1:8080/v1')
})

test('buildCurrentProviderSummary does not relabel local gpt-5.4 providers as Codex', () => {
test('buildCurrentProviderSummary does not relabel local gpt-5.4 providers as Codex when custom base URL is set', () => {
const summary = buildCurrentProviderSummary({
processEnv: {
CLAUDE_CODE_USE_OPENAI: '1',
Expand Down
36 changes: 13 additions & 23 deletions src/components/StartupScreen.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
* Addresses: https://github.com/Gitlawb/openclaude/issues/55
*/

import { isLocalProviderUrl } from '../services/api/providerConfig.js'
import { isLocalProviderUrl, resolveProviderRequest } from '../services/api/providerConfig.js'
import { getLocalOpenAICompatibleProviderLabel } from '../utils/providerDiscovery.js'

declare const MACRO: { VERSION: string; DISPLAY_VERSION?: string }
Expand Down Expand Up @@ -101,10 +101,17 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc

if (useOpenAI) {
const rawModel = process.env.OPENAI_MODEL || 'gpt-4o'
const baseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1'
const resolvedRequest = resolveProviderRequest({
model: rawModel,
baseUrl: process.env.OPENAI_BASE_URL,
})
const baseUrl = resolvedRequest.baseUrl
const isLocal = isLocalProviderUrl(baseUrl)
let name = 'OpenAI'
if (/deepseek/i.test(baseUrl) || /deepseek/i.test(rawModel)) name = 'DeepSeek'
// Override to Codex when resolved endpoint is Codex
if (resolvedRequest.transport === 'codex_responses' || baseUrl.includes('chatgpt.com/backend-api/codex')) {
name = 'Codex'
Comment on lines +120 to +122
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The provider label is set to "Codex" when resolvedRequest.transport === 'codex_responses', but codex_responses can be selected for reasons other than hitting the Codex endpoint (e.g., Responses API on the OpenAI base URL). If the intent is “label as Codex only when using the Codex endpoint”, prefer checking isCodexBaseUrl(resolvedRequest.baseUrl) (and avoid hard-coded .includes('chatgpt.com/backend-api/codex')).

Copilot uses AI. Check for mistakes.
} else if (/deepseek/i.test(baseUrl) || /deepseek/i.test(rawModel)) name = 'DeepSeek'
Comment on lines +120 to +123
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The Codex relabeling condition uses resolvedRequest.transport === 'codex_responses', but codex_responses is also used for non-Codex endpoints (e.g., OpenAI /responses on https://api.openai.com/v1). This can label the provider as “Codex” even when the resolved endpoint is not the Codex backend, which contradicts the comment “Override to Codex when resolved endpoint is Codex”. Prefer checking the resolved base URL via the existing isCodexBaseUrl(resolvedRequest.baseUrl) (instead of transport and includes(...)) to avoid incorrect labeling and duplicated URL matching logic.

Copilot uses AI. Check for mistakes.
else if (/openrouter/i.test(baseUrl)) name = 'OpenRouter'
else if (/together/i.test(baseUrl)) name = 'Together AI'
else if (/groq/i.test(baseUrl)) name = 'Groq'
Expand All @@ -114,26 +121,9 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc
else if (isLocal) name = getLocalOpenAICompatibleProviderLabel(baseUrl)

// Resolve model alias to actual model name + reasoning effort
let displayModel = rawModel
const codexAliases: Record<string, { model: string; reasoningEffort?: string }> = {
codexplan: { model: 'gpt-5.4', reasoningEffort: 'high' },
'gpt-5.4': { model: 'gpt-5.4', reasoningEffort: 'high' },
'gpt-5.3-codex': { model: 'gpt-5.3-codex', reasoningEffort: 'high' },
'gpt-5.3-codex-spark': { model: 'gpt-5.3-codex-spark' },
codexspark: { model: 'gpt-5.3-codex-spark' },
'gpt-5.2-codex': { model: 'gpt-5.2-codex', reasoningEffort: 'high' },
'gpt-5.1-codex-max': { model: 'gpt-5.1-codex-max', reasoningEffort: 'high' },
'gpt-5.1-codex-mini': { model: 'gpt-5.1-codex-mini' },
'gpt-5.4-mini': { model: 'gpt-5.4-mini', reasoningEffort: 'medium' },
'gpt-5.2': { model: 'gpt-5.2', reasoningEffort: 'medium' },
}
const alias = rawModel.toLowerCase()
if (alias in codexAliases) {
const resolved = codexAliases[alias]
displayModel = resolved.model
if (resolved.reasoningEffort) {
displayModel = `${displayModel} (${resolved.reasoningEffort})`
}
let displayModel = resolvedRequest.resolvedModel
if (resolvedRequest.reasoning?.effort) {
displayModel = `${displayModel} (${resolvedRequest.reasoning.effort})`
}

return { name, model: displayModel, baseUrl, isLocal }
Expand Down
43 changes: 43 additions & 0 deletions src/services/api/codexShim.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,18 @@ describe('Codex provider config', () => {
expect(resolved.transport).toBe('codex_responses')
expect(resolved.resolvedModel).toBe('gpt-5.4')
expect(resolved.reasoning).toEqual({ effort: 'high' })
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
})

// Confirms the short 'codexspark' alias selects the Codex responses
// transport, expands to the full model identifier, and defaults the base
// URL to the Codex backend when no overriding env vars are present.
test('resolves codexspark alias to Codex transport with Codex base URL', () => {
// Clear env overrides so resolution falls back to the alias defaults.
// NOTE(review): suite-level env restoration is not visible in this chunk —
// confirm afterEach puts these variables back.
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_BASE
delete process.env.CLAUDE_CODE_USE_GITHUB

const resolved = resolveProviderRequest({ model: 'codexspark' })
expect(resolved.transport).toBe('codex_responses')
expect(resolved.resolvedModel).toBe('gpt-5.3-codex-spark')
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
})

test('does not force Codex transport when a local non-Codex base URL is explicit', () => {
Expand Down Expand Up @@ -118,6 +130,37 @@ describe('Codex provider config', () => {
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
})

// Regression guard: a plain 'gpt-4o' request must keep the standard
// OpenAI chat-completions transport, base URL, and model name untouched
// by the Codex-alias routing logic.
test('default gpt-4o uses OpenAI base URL (no regression)', () => {
// Clear env overrides so only the requested model drives resolution.
delete process.env.OPENAI_BASE_URL
delete process.env.CLAUDE_CODE_USE_GITHUB

const resolved = resolveProviderRequest({ model: 'gpt-4o' })
expect(resolved.transport).toBe('chat_completions')
expect(resolved.baseUrl).toBe('https://api.openai.com/v1')
expect(resolved.resolvedModel).toBe('gpt-4o')
})

test('resolves codexplan from env var OPENAI_MODEL to Codex endpoint', () => {
process.env.OPENAI_MODEL = 'codexplan'
delete process.env.OPENAI_BASE_URL
delete process.env.CLAUDE_CODE_USE_GITHUB

const resolved = resolveProviderRequest()
expect(resolved.transport).toBe('codex_responses')
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
expect(resolved.resolvedModel).toBe('gpt-5.4')
Comment on lines +147 to +155
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These tests mutate process.env.OPENAI_MODEL but the suite’s env reset logic at the top of the file does not restore it. This can leak OPENAI_MODEL=codexplan into later tests (including other describes in this file) and cause order-dependent failures. Capture the original OPENAI_MODEL and restore/delete it in the existing afterEach cleanup (or add a local beforeEach/afterEach in this describe).

Copilot uses AI. Check for mistakes.
})

// An explicitly configured custom base URL (here a local endpoint) must
// take precedence over the 'codexplan' alias: no Codex transport, and the
// configured URL is passed through unchanged.
test('does not override custom base URL for codexplan (e.g., local provider)', () => {
// NOTE(review): OPENAI_MODEL is set here but not restored within this
// span — confirm suite cleanup covers it to avoid cross-test leakage.
process.env.OPENAI_MODEL = 'codexplan'
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
delete process.env.CLAUDE_CODE_USE_GITHUB

const resolved = resolveProviderRequest()
expect(resolved.transport).toBe('chat_completions')
expect(resolved.baseUrl).toBe('http://localhost:11434/v1')
})
Comment on lines +147 to +166
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These tests set process.env.OPENAI_MODEL but the suite doesn’t restore it in afterEach (only OPENAI_BASE_URL, OPENAI_API_BASE, and CLAUDE_CODE_USE_GITHUB are restored). This can leak state into later tests (including other files, depending on the runner). Capture and restore OPENAI_MODEL similarly to the other env vars or delete it after the test.

Copilot uses AI. Check for mistakes.

test('loads Codex credentials from auth.json fallback', () => {
const authPath = createTempAuthJson({
tokens: {
Expand Down
41 changes: 37 additions & 4 deletions src/services/api/providerConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ const CODEX_ALIAS_MODELS: Record<
type CodexAlias = keyof typeof CODEX_ALIAS_MODELS
type ReasoningEffort = 'low' | 'medium' | 'high' | 'xhigh'

const OPENAI_CODEX_SHORTCUT_ALIASES = new Set(['codexplan', 'codexspark'])

export type ProviderTransport = 'chat_completions' | 'codex_responses'

export type ResolvedProviderRequest = {
Expand Down Expand Up @@ -219,6 +221,12 @@ export function isCodexAlias(model: string): boolean {
return base in CODEX_ALIAS_MODELS
}

// True when the requested model is one of the bare OpenAI Codex shortcut
// aliases (e.g. 'codexplan' / 'codexspark'): the identifier is trimmed,
// lowercased, and any '?...' suffix is stripped before the lookup.
function isOpenAICodexShortcutAlias(model: string): boolean {
  const [base = ''] = model.trim().toLowerCase().split('?', 1)
  return OPENAI_CODEX_SHORTCUT_ALIASES.has(base)
}

export function shouldUseCodexTransport(
model: string,
baseUrl: string | undefined,
Expand Down Expand Up @@ -363,11 +371,36 @@ export function resolveProviderRequest(options?: {
options?.fallbackModel?.trim() ||
(isGithubMode ? 'github:copilot' : 'gpt-4o')
const descriptor = parseModelDescriptor(requestedModel)
const rawBaseUrl =
asEnvUrl(options?.baseUrl) ??
const explicitBaseUrl = asEnvUrl(options?.baseUrl)
const envBaseUrlRaw =
asEnvUrl(process.env.OPENAI_BASE_URL) ??
asEnvUrl(process.env.OPENAI_API_BASE)

const isCodexModelForGithub = isGithubMode && isCodexAlias(requestedModel)
const envBaseUrl =
isCodexModelForGithub && envBaseUrlRaw && getGithubEndpointType(envBaseUrlRaw) === 'custom'
? undefined
: envBaseUrlRaw

const rawBaseUrl = explicitBaseUrl ?? envBaseUrl

const shellModel = process.env.OPENAI_MODEL?.trim() ?? ''
const envIsCodexShortcut = isOpenAICodexShortcutAlias(shellModel)
const envResolvedCodexModel = envIsCodexShortcut
? parseModelDescriptor(shellModel).baseModel
: null
const requestedMatchesEnvCodexShortcut =
Boolean(options?.model) &&
Boolean(envResolvedCodexModel) &&
descriptor.baseModel === envResolvedCodexModel
const isCodexAliasModel =
isOpenAICodexShortcutAlias(requestedModel) || requestedMatchesEnvCodexShortcut
const hasUserSetBaseUrl = rawBaseUrl && rawBaseUrl !== DEFAULT_OPENAI_BASE_URL
const finalBaseUrl =
Comment on lines +396 to +408
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

isCodexAliasModel currently considers process.env.OPENAI_MODEL (shellModel) even when the caller passes an explicit options.model. This can incorrectly force finalBaseUrl to DEFAULT_CODEX_BASE_URL for non-Codex requests (e.g., resolveProviderRequest({ model: 'gpt-4o' }) while the environment has OPENAI_MODEL=codexplan), which would misroute traffic to the Codex endpoint. Consider basing the Codex-shortcut check only on requestedModel, or only consulting shellModel when options?.model is not provided and requestedModel came from the env var.

Copilot uses AI. Check for mistakes.
!isGithubMode && isCodexAliasModel && !hasUserSetBaseUrl
? DEFAULT_CODEX_BASE_URL
: rawBaseUrl
Comment on lines +408 to +411
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The finalBaseUrl override for Codex shortcuts is currently gated on !explicitBaseUrl (i.e., whether the caller passed options.baseUrl), not on whether the user configured a non-Codex endpoint. This makes behavior inconsistent across call sites (e.g., openaiShim calls resolveProviderRequest without baseUrl, so a locally configured OPENAI_BASE_URL=http://127.0.0.1:8080/v1 would be ignored for codexplan/codexspark and silently switched to the Codex endpoint). Consider basing the override on rawBaseUrl's value (e.g., only override when no base URL is set / it’s empty/"undefined" / it’s the official OpenAI v1 base URL), rather than on whether it was passed via options.

Copilot uses AI. Check for mistakes.
Comment on lines +407 to +411
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hasUserSetBaseUrl compares rawBaseUrl to DEFAULT_OPENAI_BASE_URL without normalizing. If a user sets OPENAI_BASE_URL to an equivalent value like https://api.openai.com/v1/ (trailing slash) or different casing, this will be treated as “custom”, preventing the Codex shortcut override and potentially flipping transport back to chat_completions. Consider normalizing before comparison (e.g., trimming trailing slashes or parsing with new URL).

Copilot uses AI. Check for mistakes.
Comment on lines +405 to +411
Copy link

Copilot AI Apr 10, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hasUserSetBaseUrl treats rawBaseUrl === DEFAULT_OPENAI_BASE_URL as “not user set”, so an explicit OPENAI_BASE_URL=https://api.openai.com/v1 (or options.baseUrl) will still be overridden to the Codex endpoint for shortcut aliases. This also creates inconsistent behavior where a trailing slash (e.g. .../v1/) avoids the override. Consider treating any explicitly provided base URL (options/env) as user-set, or normalize/compare URLs rather than raw strings, and only apply the Codex default when no base URL was provided at all.

Copilot uses AI. Check for mistakes.
Comment on lines +406 to +411
Copy link

Copilot AI Apr 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

resolveProviderRequest can override an explicitly provided baseUrl when it equals DEFAULT_OPENAI_BASE_URL. Example: calling resolveProviderRequest({ model: 'codexplan', baseUrl: DEFAULT_OPENAI_BASE_URL }) will currently force finalBaseUrl to DEFAULT_CODEX_BASE_URL because hasUserSetBaseUrl is false for the default URL. This breaks the expected precedence where an explicit option should always win. Consider treating explicitBaseUrl presence as “user-set” (even if it equals the default) and/or adding a regression test for this case.

Copilot uses AI. Check for mistakes.

const githubEndpointType = isGithubMode
? getGithubEndpointType(rawBaseUrl)
: 'custom'
Expand All @@ -380,7 +413,7 @@ export function resolveProviderRequest(options?: {
: requestedModel

const transport: ProviderTransport =
shouldUseCodexTransport(requestedModel, rawBaseUrl) ||
shouldUseCodexTransport(requestedModel, finalBaseUrl) ||
(isGithubCopilot && shouldUseGithubResponsesApi(githubResolvedModel))
? 'codex_responses'
: 'chat_completions'
Expand All @@ -404,7 +437,7 @@ export function resolveProviderRequest(options?: {
requestedModel,
resolvedModel,
baseUrl:
(rawBaseUrl ??
(finalBaseUrl ??
(isGithubCopilot && transport === 'codex_responses'
? GITHUB_COPILOT_BASE_URL
: (isGithubMode
Expand Down
Loading