fix: route OpenAI Codex shortcuts to correct endpoint (#566)

* feat: enhance codex provider resolution with shortcut aliases and improved base URL handling

* fix: enhance codex alias resolution to include shell model

* feat: enhance Codex provider resolution to support new aliases and base URL handling

* fix: update base URL resolution logic for Codex models in GitHub mode

* fix: update provider transport logic to enforce Codex responses and adjust base URL handling

* fix: update provider request resolution to respect custom base URLs and adjust transport logic

* fix: restore OPENAI_MODEL environment variable handling in tests and provider config
This commit is contained in:
Meetpatel006
2026-04-13 16:01:15 +05:30
committed by GitHub
parent 64298a663f
commit 7c8bdcc3e2
4 changed files with 102 additions and 29 deletions

View File

@@ -18,6 +18,7 @@ const originalEnv = {
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
OPENAI_API_BASE: process.env.OPENAI_API_BASE,
CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
OPENAI_MODEL: process.env.OPENAI_MODEL,
}
afterEach(() => {
@@ -30,6 +31,9 @@ afterEach(() => {
if (originalEnv.CLAUDE_CODE_USE_GITHUB === undefined) delete process.env.CLAUDE_CODE_USE_GITHUB
else process.env.CLAUDE_CODE_USE_GITHUB = originalEnv.CLAUDE_CODE_USE_GITHUB
if (originalEnv.OPENAI_MODEL === undefined) delete process.env.OPENAI_MODEL
else process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL
while (tempDirs.length > 0) {
const dir = tempDirs.pop()
if (dir) rmSync(dir, { recursive: true, force: true })
@@ -84,6 +88,18 @@ describe('Codex provider config', () => {
expect(resolved.transport).toBe('codex_responses')
expect(resolved.resolvedModel).toBe('gpt-5.4')
expect(resolved.reasoning).toEqual({ effort: 'high' })
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
})
test('resolves codexspark alias to Codex transport with Codex base URL', () => {
// Clear all base-URL and mode overrides so resolution uses built-in defaults.
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_BASE
delete process.env.CLAUDE_CODE_USE_GITHUB
const resolved = resolveProviderRequest({ model: 'codexspark' })
// The shortcut alias must select the Codex Responses transport, expand to
// its full model name, and point at the Codex backend endpoint.
expect(resolved.transport).toBe('codex_responses')
expect(resolved.resolvedModel).toBe('gpt-5.3-codex-spark')
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
})
test('does not force Codex transport when a local non-Codex base URL is explicit', () => {
@@ -118,6 +134,37 @@ describe('Codex provider config', () => {
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
})
test('default gpt-4o uses OpenAI base URL (no regression)', () => {
// Regression guard: a plain (non-Codex) OpenAI model must keep the
// standard chat-completions transport and the default OpenAI endpoint.
delete process.env.OPENAI_BASE_URL
delete process.env.CLAUDE_CODE_USE_GITHUB
const resolved = resolveProviderRequest({ model: 'gpt-4o' })
expect(resolved.transport).toBe('chat_completions')
expect(resolved.baseUrl).toBe('https://api.openai.com/v1')
expect(resolved.resolvedModel).toBe('gpt-4o')
})
test('resolves codexplan from env var OPENAI_MODEL to Codex endpoint', () => {
// The shortcut alias can also arrive via the OPENAI_MODEL environment
// variable (no explicit model option passed to resolveProviderRequest).
process.env.OPENAI_MODEL = 'codexplan'
delete process.env.OPENAI_BASE_URL
delete process.env.CLAUDE_CODE_USE_GITHUB
const resolved = resolveProviderRequest()
expect(resolved.transport).toBe('codex_responses')
expect(resolved.baseUrl).toBe('https://chatgpt.com/backend-api/codex')
expect(resolved.resolvedModel).toBe('gpt-5.4')
})
test('does not override custom base URL for codexplan (e.g., local provider)', () => {
// An explicitly configured base URL must win over the alias: no forced
// Codex endpoint and no forced codex_responses transport.
process.env.OPENAI_MODEL = 'codexplan'
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
delete process.env.CLAUDE_CODE_USE_GITHUB
const resolved = resolveProviderRequest()
expect(resolved.transport).toBe('chat_completions')
expect(resolved.baseUrl).toBe('http://localhost:11434/v1')
})
test('loads Codex credentials from auth.json fallback', () => {
const authPath = createTempAuthJson({
tokens: {

View File

@@ -60,6 +60,8 @@ const CODEX_ALIAS_MODELS: Record<
type CodexAlias = keyof typeof CODEX_ALIAS_MODELS
type ReasoningEffort = 'low' | 'medium' | 'high' | 'xhigh'
const OPENAI_CODEX_SHORTCUT_ALIASES = new Set(['codexplan', 'codexspark'])
export type ProviderTransport = 'chat_completions' | 'codex_responses'
export type ResolvedProviderRequest = {
@@ -220,6 +222,12 @@ export function isCodexAlias(model: string): boolean {
return base in CODEX_ALIAS_MODELS
}
// True when the given model string names one of the OpenAI Codex shortcut
// aliases (members of OPENAI_CODEX_SHORTCUT_ALIASES), ignoring case,
// surrounding whitespace, and any "?…" options suffix on the model name.
function isOpenAICodexShortcutAlias(model: string): boolean {
  const cleaned = model.trim().toLowerCase()
  const optionsIdx = cleaned.indexOf('?')
  const aliasCandidate = optionsIdx === -1 ? cleaned : cleaned.slice(0, optionsIdx)
  return OPENAI_CODEX_SHORTCUT_ALIASES.has(aliasCandidate)
}
export function shouldUseCodexTransport(
model: string,
baseUrl: string | undefined,
@@ -367,13 +375,41 @@ export function resolveProviderRequest(options?: {
options?.fallbackModel?.trim() ||
(isGithubMode ? 'github:copilot' : 'gpt-4o')
const descriptor = parseModelDescriptor(requestedModel)
const rawBaseUrl =
asEnvUrl(options?.baseUrl) ??
const explicitBaseUrl = asEnvUrl(options?.baseUrl)
const envBaseUrlRaw =
explicitBaseUrl ??
asEnvUrl(
isMistralMode ? (process.env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL) : process.env.OPENAI_BASE_URL,
isMistralMode
? (process.env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL)
: process.env.OPENAI_BASE_URL
) ??
asEnvUrl(process.env.OPENAI_API_BASE)
const isCodexModelForGithub = isGithubMode && isCodexAlias(requestedModel)
const envBaseUrl =
isCodexModelForGithub && envBaseUrlRaw && getGithubEndpointType(envBaseUrlRaw) === 'custom'
? undefined
: envBaseUrlRaw
const rawBaseUrl = explicitBaseUrl ?? envBaseUrl
const shellModel = process.env.OPENAI_MODEL?.trim() ?? ''
const envIsCodexShortcut = isOpenAICodexShortcutAlias(shellModel)
const envResolvedCodexModel = envIsCodexShortcut
? parseModelDescriptor(shellModel).baseModel
: null
const requestedMatchesEnvCodexShortcut =
Boolean(options?.model) &&
Boolean(envResolvedCodexModel) &&
descriptor.baseModel === envResolvedCodexModel
const isCodexAliasModel =
isOpenAICodexShortcutAlias(requestedModel) || requestedMatchesEnvCodexShortcut
const hasUserSetBaseUrl = rawBaseUrl && rawBaseUrl !== DEFAULT_OPENAI_BASE_URL
const finalBaseUrl =
!isGithubMode && isCodexAliasModel && !hasUserSetBaseUrl
? DEFAULT_CODEX_BASE_URL
: rawBaseUrl
const githubEndpointType = isGithubMode
? getGithubEndpointType(rawBaseUrl)
: 'custom'
@@ -386,7 +422,7 @@ export function resolveProviderRequest(options?: {
: requestedModel
const transport: ProviderTransport =
shouldUseCodexTransport(requestedModel, rawBaseUrl) ||
shouldUseCodexTransport(requestedModel, finalBaseUrl) ||
(isGithubCopilot && shouldUseGithubResponsesApi(githubResolvedModel))
? 'codex_responses'
: 'chat_completions'
@@ -410,7 +446,7 @@ export function resolveProviderRequest(options?: {
requestedModel,
resolvedModel,
baseUrl:
(rawBaseUrl ??
(finalBaseUrl ??
(isGithubCopilot && transport === 'codex_responses'
? GITHUB_COPILOT_BASE_URL
: (isGithubMode