Skip to content
Open
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions src/integrations/gateways/gitlawb-opengateway.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@ export default defineGateway({
transportConfig: {
kind: 'openai-compatible',
openaiShim: {
headers: {
'Accept-Encoding': 'identity',
},
Comment on lines +18 to +20
defaultAuthHeader: {
name: 'api-key',
scheme: 'raw',
Expand Down
18 changes: 18 additions & 0 deletions src/services/api/errors.openaiCompatibility.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,24 @@ test('maps endpoint_not_found category markers to actionable setup guidance', ()
expect(text).toContain('/v1')
})

test('vision_not_supported shows image-specific guidance for remote host', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=vision_not_supported,host=opengateway.gitlawb.com] Hint: The provider returned 404 for a request containing images.',
new Headers(),
)

const message = getAssistantMessageFromError(error, 'mimo-v2.5-pro')
const text = getFirstText(message)

expect(message.isApiErrorMessage).toBe(true)
expect(text).toContain('images')
expect(text).toContain('mimo-v2.5-pro')
expect(text).toContain('opengateway.gitlawb.com')
expect(text).not.toContain('OPENAI_BASE_URL')
})

test('endpoint_not_found from a remote host shows the actual host, not Ollama (issue #926)', () => {
const error = APIError.generate(
404,
Expand Down
6 changes: 6 additions & 0 deletions src/services/api/errors.ts
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,12 @@ function mapOpenAICompatibilityFailureToAssistantMessage(options: {
error: 'invalid_request',
})

case 'vision_not_supported':
return createAssistantAPIErrorMessage({
content: `The provider at ${options.host} returned 404 for a request containing images. The model (${options.model}) may not support image/vision inputs. Try removing images from your message, or ${switchCmd} to a vision-capable model.`,
error: 'invalid_request',
})

case 'model_not_found':
return createAssistantAPIErrorMessage({
content: `The selected model (${options.model}) is not available on this provider. Run ${switchCmd} to choose another model, or verify installed local models (for Ollama: ollama list).`,
Expand Down
12 changes: 12 additions & 0 deletions src/services/api/openaiErrorClassification.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,18 @@ test('classifies generic 404 responses as endpoint_not_found', () => {
expect(failure.hint).toContain('/v1')
})

test('classifies 404 with images as vision_not_supported', () => {
  // A plain 404 becomes vision_not_supported when the request carried images.
  const input = { status: 404, body: 'Not Found', hasImages: true }
  const classified = classifyOpenAIHttpFailure(input)

  expect(classified.category).toBe('vision_not_supported')
  // Re-sending the same image payload cannot succeed, so not retryable.
  expect(classified.retryable).toBe(false)
  expect(classified.hint).toContain('image')
})

test('classifies context-overflow responses', () => {
const failure = classifyOpenAIHttpFailure({
status: 500,
Expand Down
15 changes: 15 additions & 0 deletions src/services/api/openaiErrorClassification.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export type OpenAICompatibilityFailureCategory =
| 'rate_limited'
| 'model_not_found'
| 'endpoint_not_found'
| 'vision_not_supported'
| 'context_overflow'
| 'tool_call_incompatible'
| 'malformed_provider_response'
Expand Down Expand Up @@ -38,6 +39,7 @@ const OPENAI_COMPATIBILITY_FAILURE_CATEGORIES: ReadonlySet<OpenAICompatibilityFa
'rate_limited',
'model_not_found',
'endpoint_not_found',
'vision_not_supported',
'context_overflow',
'tool_call_incompatible',
'malformed_provider_response',
Expand Down Expand Up @@ -264,6 +266,7 @@ export function classifyOpenAIHttpFailure(options: {
status: number
body: string
url?: string
hasImages?: boolean
}): OpenAICompatibilityFailure {
const body = options.body ?? ''
const hostname = options.url ? getHostname(options.url) : null
Expand Down Expand Up @@ -313,6 +316,18 @@ export function classifyOpenAIHttpFailure(options: {
}
}

if (options.status === 404 && options.hasImages) {
return {
source: 'http',
category: 'vision_not_supported',
retryable: false,
status: options.status,
message: body,
requestUrl: options.url,
hint: 'The provider returned 404 for a request containing images. The model may not support vision/image inputs.',
}
}

if (options.status === 404) {
const isRemote = hostname !== null && !isLocalHost
return {
Expand Down
83 changes: 83 additions & 0 deletions src/services/api/openaiShim.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -705,6 +705,89 @@ test('applies descriptor static headers before client and request headers', asyn
expect(capturedHeaders?.get('x-override-header')).toBe('from-request')
})

test('opengateway sends Accept-Encoding: identity header on chat requests', async () => {
let capturedHeaders: Headers | undefined

registerGateway({
id: 'gitlawb-opengateway-test',
label: 'Gitlawb Opengateway',
category: 'aggregating',
defaultBaseUrl: 'https://opengateway.gitlawb.com/v1/xiaomi-mimo',
defaultModel: 'mimo-v2.5-pro',
setup: {
requiresAuth: false,
authMode: 'none',
},
transportConfig: {
kind: 'openai-compatible',
openaiShim: {
headers: {
'Accept-Encoding': 'identity',
},
defaultAuthHeader: {
name: 'api-key',
scheme: 'raw',
},
preserveReasoningContent: true,
requireReasoningContentOnAssistantMessages: true,
reasoningContentFallback: '',
maxTokensField: 'max_completion_tokens',
supportsApiFormatSelection: false,
supportsAuthHeaders: false,
},
},
})

process.env.CLAUDE_CODE_USE_OPENAI = '1'
process.env.OPENAI_BASE_URL = 'https://opengateway.gitlawb.com/v1/xiaomi-mimo'
process.env.OPENAI_MODEL = 'mimo-v2.5-pro'

globalThis.fetch = (async (_input, init) => {
capturedHeaders = new Headers(init?.headers)

return new Response(
JSON.stringify({
id: 'chatcmpl-1',
model: 'mimo-v2.5-pro',
choices: [
{
message: {
role: 'assistant',
content: 'ok',
},
finish_reason: 'stop',
},
],
usage: {
prompt_tokens: 8,
completion_tokens: 3,
total_tokens: 11,
},
}),
{
headers: {
'Content-Type': 'application/json',
},
},
)
}) as FetchType

const client = createOpenAIShimClient({}) as OpenAIShimClient

await client.beta.messages.create(
{
model: 'mimo-v2.5-pro',
system: 'test system',
messages: [{ role: 'user', content: 'hello' }],
max_tokens: 64,
stream: false,
},
{},
)

expect(capturedHeaders?.get('Accept-Encoding')).toBe('identity')
Comment on lines +708 to +788
})

test('strips Anthropic-specific headers on GitHub Codex transport requests', async () => {
let capturedHeaders: Headers | undefined

Expand Down
22 changes: 22 additions & 0 deletions src/services/api/openaiShim.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2121,6 +2121,25 @@ class OpenAIShimMessages {
// Local backends do not implement prefix caching, so the deep key-sort
// is pure CPU overhead per request (issue #1016). Drop to the native
// `JSON.stringify` fast path when the fast-path config opts out.
const bodyContainsImages = (): boolean => {
if (request.transport === 'responses') {
const responsesBody = buildResponsesBody()
const input = responsesBody.input as Array<Record<string, unknown>> | undefined
if (!Array.isArray(input)) return false
return input.some(item => {
const content = item.content as Array<Record<string, unknown>> | undefined
return Array.isArray(content) && content.some(part => part.type === 'input_image')
})
}
const messages = body.messages as Array<Record<string, unknown>> | undefined
if (!Array.isArray(messages)) return false
return messages.some(msg => {
const content = msg.content
if (!Array.isArray(content)) return false
return content.some((part: Record<string, unknown>) => part.type === 'image_url')
})
}
Comment on lines +2115 to +2132

const serializeBody = (): string => {
const payload =
request.transport === 'responses' ? buildResponsesBody() : body
Expand Down Expand Up @@ -2198,6 +2217,7 @@ class OpenAIShimMessages {
status,
body: errorBody,
url: requestUrl,
hasImages: bodyContainsImages(),
})
const failureWithUrl = { ...failure, requestUrl: failure.requestUrl ?? requestUrl }
const redactedUrl = redactUrlForDiagnostics(requestUrl)
Expand Down Expand Up @@ -2325,6 +2345,7 @@ class OpenAIShimMessages {
const responsesFailure = classifyOpenAIHttpFailure({
status: responsesResponse.status,
body: responsesErrorBody,
hasImages: bodyContainsImages(),
})
let responsesErrorResponse: object | undefined
try { responsesErrorResponse = JSON.parse(responsesErrorBody) } catch { /* raw text */ }
Expand All @@ -2343,6 +2364,7 @@ class OpenAIShimMessages {
const failure = classifyOpenAIHttpFailure({
status: response.status,
body: errorBody,
hasImages: bodyContainsImages(),
})

if (
Expand Down
Loading