Skip to content

Commit 93d636f

Browse files
authored
fix(api-proxy): route OIDC agent traffic + async model fetch
Agent-Logs-Url: https://github.com/github/gh-aw-firewall/sessions/abb4f8bb-9660-48aa-b1d6-12d4501112be
1 parent 9bd38a8 commit 93d636f

5 files changed

Lines changed: 106 additions & 13 deletions

File tree

containers/api-proxy/providers/openai.js

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -109,13 +109,24 @@ function createOpenAIAdapter(env, deps = {}) {
109109
* Returns the model-list fetch config for /reflect model population, or null.
110110
* Uses the configured base path so prefixed OpenAI-compatible deployments
111111
* (e.g. Databricks, Azure) populate /reflect and models.json correctly.
112-
* OIDC auth is async so model fetching is skipped at startup — the model list
113-
* will be populated on the first authenticated request.
114112
*
115-
* @returns {{ url: string, opts: object, cacheKey: string }|null}
113+
* When OIDC is configured, this method is async — it acquires a fresh token so
114+
* that fetchStartupModels() can populate cachedModels.openai. This ensures that
115+
* AWF_MODEL_ALIASES entries targeting `openai/*` resolve correctly and that
116+
* /reflect and models.json show the available Azure OpenAI models.
117+
*
118+
* @returns {Promise<{ url: string, opts: object, cacheKey: string }|null>|{ url: string, opts: object, cacheKey: string }|null}
116119
*/
117-
getModelsFetchConfig() {
118-
if (oidcEnabled) return null; // token not yet available at startup
120+
async getModelsFetchConfig() {
121+
if (oidcEnabled) {
122+
const token = await oidcAuth.getToken();
123+
const modelsPath = basePath ? `${basePath}/models` : '/v1/models';
124+
return {
125+
url: `https://${rawTarget}${modelsPath}`,
126+
opts: { method: 'GET', headers: { 'Authorization': `Bearer ${token}` } },
127+
cacheKey: 'openai',
128+
};
129+
}
119130
if (!apiKey) return null;
120131
const modelsPath = basePath ? `${basePath}/models` : '/v1/models';
121132
return {

containers/api-proxy/server.js

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1108,12 +1108,14 @@ async function fetchStartupModels(adaptersOrOverrides = {}) {
11081108
const fetches = [];
11091109

11101110
for (const adapter of adapters) {
1111-
const config = adapter.getModelsFetchConfig?.();
1112-
if (!config) continue;
1113-
1111+
// getModelsFetchConfig() may return a plain config object or a Promise<config>
1112+
// (e.g. OIDC-backed adapters need to acquire a token asynchronously first).
11141113
fetches.push(
1115-
fetchJson(config.url, config.opts, TIMEOUT_MS).then((json) => {
1116-
cachedModels[config.cacheKey] = extractModelIds(json);
1114+
Promise.resolve(adapter.getModelsFetchConfig?.()).then((config) => {
1115+
if (!config) return;
1116+
return fetchJson(config.url, config.opts, TIMEOUT_MS).then((json) => {
1117+
cachedModels[config.cacheKey] = extractModelIds(json);
1118+
});
11171119
})
11181120
);
11191121
}

containers/api-proxy/server.test.js

Lines changed: 43 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1353,9 +1353,21 @@ describe('createOpenAIAdapter — OIDC auth', () => {
13531353
expect(probe.reason).toMatch(/OIDC/i);
13541354
});
13551355

1356-
it('getModelsFetchConfig() returns null when OIDC is enabled (deferred to runtime)', () => {
1357-
const adapter = createOpenAIAdapter({}, { oidcAuth: makeMockOidcManager({ enabled: true }) });
1358-
expect(adapter.getModelsFetchConfig()).toBeNull();
1356+
it('getModelsFetchConfig() returns async config with Bearer token when OIDC is enabled', async () => {
1357+
const adapter = createOpenAIAdapter(
1358+
{ OPENAI_API_TARGET: 'myaccount.openai.azure.com', OPENAI_API_BASE_PATH: '/openai/deployments/gpt-4' },
1359+
{ oidcAuth: makeMockOidcManager({ token: 'az-token-abc' }) }
1360+
);
1361+
const config = await adapter.getModelsFetchConfig();
1362+
expect(config).not.toBeNull();
1363+
expect(config.cacheKey).toBe('openai');
1364+
expect(config.url).toBe('https://myaccount.openai.azure.com/openai/deployments/gpt-4/models');
1365+
expect(config.opts.headers.Authorization).toBe('Bearer az-token-abc');
1366+
});
1367+
1368+
it('getModelsFetchConfig() propagates OIDC token errors', async () => {
1369+
const adapter = createOpenAIAdapter({}, { oidcAuth: makeMockOidcManager({ error: 'token expired' }) });
1370+
await expect(adapter.getModelsFetchConfig()).rejects.toThrow('token expired');
13591371
});
13601372

13611373
it('getReflectionInfo().configured is true when OIDC is enabled', () => {
@@ -1949,6 +1961,34 @@ describe('fetchStartupModels', () => {
19491961
const reflect = reflectEndpoints();
19501962
expect(reflect.models_fetch_complete).toBe(true);
19511963
});
1964+
1965+
it('adapter-based: supports async getModelsFetchConfig (e.g. OIDC)', async () => {
1966+
mockHttpsRequestWithBody(200, '{"data":[{"id":"gpt-4o"},{"id":"gpt-4o-mini"}]}');
1967+
1968+
// Adapter with async getModelsFetchConfig simulating OIDC token acquisition
1969+
const oidcAdapter = {
1970+
name: 'openai-oidc',
1971+
getModelsFetchConfig: async () => ({
1972+
url: 'https://myaccount.openai.azure.com/openai/deployments/gpt-4/models',
1973+
opts: { method: 'GET', headers: { 'Authorization': 'Bearer az-oidc-token' } },
1974+
cacheKey: 'openai',
1975+
}),
1976+
};
1977+
1978+
await fetchStartupModels([oidcAdapter]);
1979+
expect(cachedModels.openai).toEqual(['gpt-4o', 'gpt-4o-mini']);
1980+
});
1981+
1982+
it('adapter-based: handles null from async getModelsFetchConfig gracefully', async () => {
1983+
const spy = jest.spyOn(https, 'request');
1984+
const adapter = {
1985+
name: 'no-config',
1986+
getModelsFetchConfig: async () => null,
1987+
};
1988+
await fetchStartupModels([adapter]);
1989+
expect(spy).not.toHaveBeenCalled();
1990+
expect(cachedModels).toEqual({});
1991+
});
19521992
});
19531993

19541994
// ── reflectEndpoints ───────────────────────────────────────────────────────

src/compose-generator.ts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1420,6 +1420,15 @@ export function generateDockerCompose(
14201420
environment.OPENAI_API_KEY = 'sk-placeholder-for-api-proxy';
14211421
environment.CODEX_API_KEY = 'sk-placeholder-for-api-proxy';
14221422
logger.debug('OPENAI_API_KEY and CODEX_API_KEY set to placeholder values for credential isolation');
1423+
} else if (process.env.AWF_AUTH_TYPE === 'github-oidc') {
1424+
// OIDC-only mode (no static OPENAI_API_KEY): the api-proxy sidecar will acquire a
1425+
// short-lived Azure AD token via GitHub OIDC federation. We still need to point the
1426+
// agent at the proxy so it routes OpenAI traffic through the sidecar instead of
1427+
// connecting directly to the upstream host.
1428+
environment.OPENAI_BASE_URL = `http://${networkConfig.proxyIp}:${API_PROXY_PORTS.OPENAI}`;
1429+
environment.OPENAI_API_KEY = 'sk-placeholder-for-oidc-proxy';
1430+
environment.CODEX_API_KEY = 'sk-placeholder-for-oidc-proxy';
1431+
logger.debug(`OpenAI API will be proxied through sidecar (OIDC auth) at http://${networkConfig.proxyIp}:${API_PROXY_PORTS.OPENAI}`);
14231432
}
14241433
if (config.anthropicApiKey) {
14251434
environment.ANTHROPIC_BASE_URL = `http://${networkConfig.proxyIp}:${API_PROXY_PORTS.ANTHROPIC}`;

src/docker-manager-compose.test.ts

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2658,6 +2658,37 @@ describe('docker-manager generateDockerCompose', () => {
26582658
expect(env[key]).toBeUndefined();
26592659
}
26602660
});
2661+
2662+
it('should set OPENAI_BASE_URL and placeholder keys in agent when AWF_AUTH_TYPE=github-oidc (no static key)', () => {
2663+
process.env.AWF_AUTH_TYPE = 'github-oidc';
2664+
// No openaiApiKey in config — OIDC-only mode
2665+
const config = { ...mockConfig, enableApiProxy: true };
2666+
const result = generateDockerCompose(config, mockNetworkConfigWithProxy);
2667+
const agentEnv = result.services.agent.environment as Record<string, string>;
2668+
expect(agentEnv.OPENAI_BASE_URL).toBe('http://172.30.0.30:10000');
2669+
expect(agentEnv.OPENAI_API_KEY).toBe('sk-placeholder-for-oidc-proxy');
2670+
expect(agentEnv.CODEX_API_KEY).toBe('sk-placeholder-for-oidc-proxy');
2671+
});
2672+
2673+
it('should not set OPENAI_BASE_URL in agent when enableApiProxy is false even if AWF_AUTH_TYPE=github-oidc', () => {
2674+
process.env.AWF_AUTH_TYPE = 'github-oidc';
2675+
// api-proxy not enabled — OIDC env vars are irrelevant
2676+
const config = { ...mockConfig, enableApiProxy: false };
2677+
const result = generateDockerCompose(config, mockNetworkConfigWithProxy);
2678+
const agentEnv = result.services.agent.environment as Record<string, string>;
2679+
expect(agentEnv.OPENAI_BASE_URL).toBeUndefined();
2680+
});
2681+
2682+
it('should prefer static key placeholder over OIDC placeholder when both are configured', () => {
2683+
process.env.AWF_AUTH_TYPE = 'github-oidc';
2684+
// Static key takes precedence (existing behavior is preserved)
2685+
const config = { ...mockConfig, enableApiProxy: true, openaiApiKey: 'sk-static' };
2686+
const result = generateDockerCompose(config, mockNetworkConfigWithProxy);
2687+
const agentEnv = result.services.agent.environment as Record<string, string>;
2688+
expect(agentEnv.OPENAI_BASE_URL).toBe('http://172.30.0.30:10000');
2689+
expect(agentEnv.OPENAI_API_KEY).toBe('sk-placeholder-for-api-proxy');
2690+
expect(agentEnv.CODEX_API_KEY).toBe('sk-placeholder-for-api-proxy');
2691+
});
26612692
});
26622693

26632694
it('should set OPENAI_API_TARGET in api-proxy when openaiApiTarget is provided', () => {

0 commit comments

Comments (0)