Feature: Add local OpenAI-compatible model discovery to /model (#201)

* Add local OpenAI-compatible model discovery to /model

* Guard local OpenAI model discovery from Codex routing

* Preserve remote OpenAI Codex alias behavior
This commit is contained in:
Technomancer702
2026-04-05 15:46:06 -07:00
committed by GitHub
parent 60d3d8961a
commit c534aa5771
15 changed files with 539 additions and 39 deletions

View File

@@ -1,5 +1,6 @@
// biome-ignore-all assist/source/organizeImports: internal-only import markers must not be reordered
import { getInitialMainLoopModel } from '../../bootstrap/state.js'
import { getAdditionalModelOptionsCacheScope } from '../../services/api/providerConfig.js'
import {
isClaudeAISubscriber,
isMaxSubscriber,
@@ -44,6 +45,25 @@ export type ModelOption = {
descriptionForModel?: string
}
function getScopedAdditionalModelOptions(): ModelOption[] {
  // Only surface the cached additional model options when the cache was
  // written for the provider scope that is currently active; otherwise
  // stale entries from a different provider would leak into the picker.
  const activeScope = getAdditionalModelOptionsCacheScope()
  if (!activeScope) {
    return []
  }
  const config = getGlobalConfig()
  const cachedScope = config.additionalModelOptionsCacheScope
  // Caches written before scoping existed carry no recorded scope;
  // treat those legacy entries as belonging to 'firstParty' only.
  const scopeMatches =
    cachedScope !== undefined
      ? cachedScope === activeScope
      : activeScope === 'firstParty'
  return scopeMatches ? (config.additionalModelOptionsCache ?? []) : []
}
export function getDefaultOptionForUser(fastMode = false): ModelOption {
if (process.env.USER_TYPE === 'ant') {
const currentModel = renderDefaultModelSetting(
@@ -408,6 +428,16 @@ function getModelOptionsBase(fastMode = false): ModelOption[] {
return standardOptions
}
if (getAdditionalModelOptionsCacheScope()?.startsWith('openai:')) {
const activeOpenAIOptions = getActiveOpenAIModelOptionsCache()
return [
getDefaultOptionForUser(fastMode),
...(activeOpenAIOptions.length > 0
? activeOpenAIOptions
: getScopedAdditionalModelOptions()),
]
}
// PAYG 1P API: Default (Sonnet) + Sonnet 1M + Opus 4.6 + Opus 1M + Haiku
if (getAPIProvider() === 'firstParty') {
const payg1POptions = [getDefaultOptionForUser(fastMode)]
@@ -566,13 +596,8 @@ export function getModelOptions(fastMode = false): ModelOption[] {
})
}
const additionalOptions =
getAPIProvider() === 'openai'
? getActiveOpenAIModelOptionsCache()
: getGlobalConfig().additionalModelOptionsCache ?? []
// Append additional model options fetched during bootstrap/endpoints.
for (const opt of additionalOptions) {
// Append additional model options fetched during bootstrap
for (const opt of getScopedAdditionalModelOptions()) {
if (!options.some(existing => existing.value === opt.value)) {
options.push(opt)
}

View File

@@ -7,6 +7,9 @@ const originalEnv = {
CLAUDE_CODE_USE_BEDROCK: process.env.CLAUDE_CODE_USE_BEDROCK,
CLAUDE_CODE_USE_VERTEX: process.env.CLAUDE_CODE_USE_VERTEX,
CLAUDE_CODE_USE_FOUNDRY: process.env.CLAUDE_CODE_USE_FOUNDRY,
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
OPENAI_API_BASE: process.env.OPENAI_API_BASE,
OPENAI_MODEL: process.env.OPENAI_MODEL,
}
afterEach(() => {
@@ -16,6 +19,9 @@ afterEach(() => {
process.env.CLAUDE_CODE_USE_BEDROCK = originalEnv.CLAUDE_CODE_USE_BEDROCK
process.env.CLAUDE_CODE_USE_VERTEX = originalEnv.CLAUDE_CODE_USE_VERTEX
process.env.CLAUDE_CODE_USE_FOUNDRY = originalEnv.CLAUDE_CODE_USE_FOUNDRY
process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL
process.env.OPENAI_API_BASE = originalEnv.OPENAI_API_BASE
process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL
})
async function importFreshProvidersModule() {
@@ -29,6 +35,9 @@ function clearProviderEnv(): void {
delete process.env.CLAUDE_CODE_USE_BEDROCK
delete process.env.CLAUDE_CODE_USE_VERTEX
delete process.env.CLAUDE_CODE_USE_FOUNDRY
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_BASE
delete process.env.OPENAI_MODEL
}
test('first-party provider keeps Anthropic account setup flow enabled', () => {
@@ -69,3 +78,29 @@ test('GEMINI takes precedence over GitHub when both are set', async () => {
expect(getAPIProvider()).toBe('gemini')
})
test('explicit local openai-compatible base URLs stay on the openai provider', () => {
  clearProviderEnv()
  // A localhost base URL signals a local OpenAI-compatible server, not Codex.
  Object.assign(process.env, {
    CLAUDE_CODE_USE_OPENAI: '1',
    OPENAI_BASE_URL: 'http://127.0.0.1:8080/v1',
    OPENAI_MODEL: 'gpt-5.4',
  })
  expect(getAPIProvider()).toBe('openai')
})
test('codex aliases still resolve to the codex provider without a non-codex base URL', () => {
  clearProviderEnv()
  // With no base URL override, a Codex alias model name routes to Codex.
  Object.assign(process.env, {
    CLAUDE_CODE_USE_OPENAI: '1',
    OPENAI_MODEL: 'codexplan',
  })
  expect(getAPIProvider()).toBe('codex')
})
test('official OpenAI base URLs now keep provider detection on openai for aliases', () => {
  clearProviderEnv()
  // An explicit official OpenAI base URL pins detection to the openai provider.
  Object.assign(process.env, {
    CLAUDE_CODE_USE_OPENAI: '1',
    OPENAI_BASE_URL: 'https://api.openai.com/v1',
    OPENAI_MODEL: 'gpt-5.4',
  })
  expect(getAPIProvider()).toBe('openai')
})

View File

@@ -1,5 +1,5 @@
import type { AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS } from '../../services/analytics/index.js'
import { isCodexAlias } from '../../services/api/providerConfig.js'
import { shouldUseCodexTransport } from '../../services/api/providerConfig.js'
import { isEnvTruthy } from '../envUtils.js'
export type APIProvider =
@@ -34,11 +34,10 @@ export function usesAnthropicAccountFlow(): boolean {
return getAPIProvider() === 'firstParty'
}
function isCodexModel(): boolean {
  // NOTE(review): the stripped diff fused the removed body (which called
  // isCodexAlias — no longer imported per the updated import at the top of
  // this file) with the added body, leaving an unreachable second return.
  // Keep only the new implementation.
  //
  // Delegate to the canonical transport check in providerConfig so the two
  // Codex detection systems (provider type + transport) stay in sync. The
  // base URL is consulted so that local/non-official OpenAI-compatible
  // endpoints are not misrouted to the Codex transport.
  return shouldUseCodexTransport(
    process.env.OPENAI_MODEL || '',
    process.env.OPENAI_BASE_URL ?? process.env.OPENAI_API_BASE,
  )
}
export function getAPIProviderForStatsig(): AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS {