From e346b8d5ec2d58a4e8db337918d52d844ee52766 Mon Sep 17 00:00:00 2001 From: 0xfandom <50949929+0xfandom@users.noreply.github.com> Date: Thu, 23 Apr 2026 23:22:27 +0530 Subject: [PATCH] fix(startup): url authoritative over model name in banner provider detect (#864) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The banner provider branch tested model-name substrings (`/deepseek/`, `/kimi/`, `/mistral/`, `/llama/`) before aggregator base-URL substrings (`/openrouter/`, `/together/`, `/groq/`, `/azure/`). When running OpenRouter/Together/Groq with vendor-prefixed model IDs (e.g. `deepseek/deepseek-chat`, `moonshotai/kimi-k2`, `deepseek-r1-distill-llama-70b`), the banner mislabelled the provider. Reorder: explicit env flags (NVIDIA_NIM, MINIMAX_API_KEY) and codex transport win first; base-URL host checks run before rawModel fallback; rawModel only fires when the base URL is generic/custom. Add unit tests covering the aggregator × vendor-prefixed-model matrix plus direct-vendor regressions. 
Closes #855 --- src/components/StartupScreen.test.ts | 158 +++++++++++++++++++++++++++ src/components/StartupScreen.ts | 52 +++++---- 2 files changed, 186 insertions(+), 24 deletions(-) create mode 100644 src/components/StartupScreen.test.ts diff --git a/src/components/StartupScreen.test.ts b/src/components/StartupScreen.test.ts new file mode 100644 index 00000000..bd1f9ba7 --- /dev/null +++ b/src/components/StartupScreen.test.ts @@ -0,0 +1,158 @@ +import { afterEach, beforeEach, describe, expect, test } from 'bun:test' +import { detectProvider } from './StartupScreen.js' + +const ENV_KEYS = [ + 'CLAUDE_CODE_USE_OPENAI', + 'CLAUDE_CODE_USE_GEMINI', + 'CLAUDE_CODE_USE_GITHUB', + 'CLAUDE_CODE_USE_BEDROCK', + 'CLAUDE_CODE_USE_VERTEX', + 'CLAUDE_CODE_USE_MISTRAL', + 'OPENAI_BASE_URL', + 'OPENAI_API_KEY', + 'OPENAI_MODEL', + 'GEMINI_MODEL', + 'MISTRAL_MODEL', + 'ANTHROPIC_MODEL', + 'NVIDIA_NIM', + 'MINIMAX_API_KEY', +] + +const originalEnv: Record<string, string | undefined> = {} + +beforeEach(() => { + for (const key of ENV_KEYS) { + originalEnv[key] = process.env[key] + delete process.env[key] + } +}) + +afterEach(() => { + for (const key of ENV_KEYS) { + if (originalEnv[key] === undefined) { + delete process.env[key] + } else { + process.env[key] = originalEnv[key] + } + } +}) + +function setupOpenAIMode(baseUrl: string, model: string): void { + process.env.CLAUDE_CODE_USE_OPENAI = '1' + process.env.OPENAI_BASE_URL = baseUrl + process.env.OPENAI_MODEL = model + process.env.OPENAI_API_KEY = 'test-key' +} + +// --- Issue #855: aggregator URL must win over vendor-prefixed model name --- + +describe('detectProvider — aggregator URL authoritative over model-name substring (#855)', () => { + test('OpenRouter + deepseek/deepseek-chat labels as OpenRouter', () => { + setupOpenAIMode('https://openrouter.ai/api/v1', 'deepseek/deepseek-chat') + expect(detectProvider().name).toBe('OpenRouter') + }) + + test('OpenRouter + moonshotai/kimi-k2 labels as OpenRouter', () => { + 
setupOpenAIMode('https://openrouter.ai/api/v1', 'moonshotai/kimi-k2') + expect(detectProvider().name).toBe('OpenRouter') + }) + + test('OpenRouter + mistralai/mistral-large labels as OpenRouter', () => { + setupOpenAIMode('https://openrouter.ai/api/v1', 'mistralai/mistral-large') + expect(detectProvider().name).toBe('OpenRouter') + }) + + test('OpenRouter + meta-llama/llama-3.3 labels as OpenRouter', () => { + setupOpenAIMode('https://openrouter.ai/api/v1', 'meta-llama/llama-3.3-70b-instruct') + expect(detectProvider().name).toBe('OpenRouter') + }) + + test('Together + deepseek-ai/DeepSeek-V3 labels as Together AI', () => { + setupOpenAIMode('https://api.together.xyz/v1', 'deepseek-ai/DeepSeek-V3') + expect(detectProvider().name).toBe('Together AI') + }) + + test('Together + meta-llama/Llama-3.3 labels as Together AI', () => { + setupOpenAIMode('https://api.together.xyz/v1', 'meta-llama/Llama-3.3-70B-Instruct-Turbo') + expect(detectProvider().name).toBe('Together AI') + }) + + test('Groq + deepseek-r1-distill-llama-70b labels as Groq', () => { + setupOpenAIMode('https://api.groq.com/openai/v1', 'deepseek-r1-distill-llama-70b') + expect(detectProvider().name).toBe('Groq') + }) + + test('Groq + llama-3.3-70b-versatile labels as Groq', () => { + setupOpenAIMode('https://api.groq.com/openai/v1', 'llama-3.3-70b-versatile') + expect(detectProvider().name).toBe('Groq') + }) + + test('Azure + any deepseek deployment labels as Azure OpenAI', () => { + setupOpenAIMode('https://my-resource.openai.azure.com/', 'deepseek-chat') + expect(detectProvider().name).toBe('Azure OpenAI') + }) +}) + +// --- Direct vendor endpoints still label correctly (regression) --- + +describe('detectProvider — direct vendor endpoints', () => { + test('api.deepseek.com labels as DeepSeek', () => { + setupOpenAIMode('https://api.deepseek.com/v1', 'deepseek-chat') + expect(detectProvider().name).toBe('DeepSeek') + }) + + test('api.moonshot.cn labels as Moonshot (Kimi)', () => { + 
setupOpenAIMode('https://api.moonshot.cn/v1', 'moonshot-v1-8k') + expect(detectProvider().name).toBe('Moonshot (Kimi)') + }) + + test('api.mistral.ai labels as Mistral', () => { + setupOpenAIMode('https://api.mistral.ai/v1', 'mistral-large-latest') + expect(detectProvider().name).toBe('Mistral') + }) + + test('default OpenAI URL + gpt-4o labels as OpenAI', () => { + setupOpenAIMode('https://api.openai.com/v1', 'gpt-4o') + expect(detectProvider().name).toBe('OpenAI') + }) +}) + +// --- rawModel fallback for generic/custom endpoints --- + +describe('detectProvider — rawModel fallback when URL is generic', () => { + test('custom proxy + deepseek-chat falls back to DeepSeek', () => { + setupOpenAIMode('https://my-proxy.internal/v1', 'deepseek-chat') + expect(detectProvider().name).toBe('DeepSeek') + }) + + test('custom proxy + kimi-k2 falls back to Moonshot (Kimi)', () => { + setupOpenAIMode('https://my-proxy.internal/v1', 'kimi-k2-instruct') + expect(detectProvider().name).toBe('Moonshot (Kimi)') + }) + + test('custom proxy + llama-3.3 falls back to Meta Llama', () => { + setupOpenAIMode('https://my-proxy.internal/v1', 'llama-3.3-70b') + expect(detectProvider().name).toBe('Meta Llama') + }) + + test('custom proxy + mistral-large falls back to Mistral', () => { + setupOpenAIMode('https://my-proxy.internal/v1', 'mistral-large-latest') + expect(detectProvider().name).toBe('Mistral') + }) +}) + +// --- Explicit env flags win over URL heuristics --- + +describe('detectProvider — explicit dedicated-provider env flags', () => { + test('NVIDIA_NIM=1 overrides aggregator URL', () => { + setupOpenAIMode('https://openrouter.ai/api/v1', 'some-nim-model') + process.env.NVIDIA_NIM = '1' + expect(detectProvider().name).toBe('NVIDIA NIM') + }) + + test('MINIMAX_API_KEY overrides aggregator URL', () => { + setupOpenAIMode('https://openrouter.ai/api/v1', 'any-model') + process.env.MINIMAX_API_KEY = 'test-key' + expect(detectProvider().name).toBe('MiniMax') + }) +}) diff --git 
a/src/components/StartupScreen.ts b/src/components/StartupScreen.ts index b73bf523..6b38b26e 100644 --- a/src/components/StartupScreen.ts +++ b/src/components/StartupScreen.ts @@ -83,7 +83,7 @@ const LOGO_CLAUDE = [ // ─── Provider detection ─────────────────────────────────────────────────────── -function detectProvider(): { name: string; model: string; baseUrl: string; isLocal: boolean } { +export function detectProvider(): { name: string; model: string; baseUrl: string; isLocal: boolean } { const useGemini = process.env.CLAUDE_CODE_USE_GEMINI === '1' || process.env.CLAUDE_CODE_USE_GEMINI === 'true' const useGithub = process.env.CLAUDE_CODE_USE_GITHUB === '1' || process.env.CLAUDE_CODE_USE_GITHUB === 'true' const useOpenAI = process.env.CLAUDE_CODE_USE_OPENAI === '1' || process.env.CLAUDE_CODE_USE_OPENAI === 'true' @@ -117,30 +117,34 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc const baseUrl = resolvedRequest.baseUrl const isLocal = isLocalProviderUrl(baseUrl) let name = 'OpenAI' - if (/nvidia/i.test(baseUrl) || /nvidia/i.test(rawModel) || process.env.NVIDIA_NIM) - name = 'NVIDIA NIM' - else if (/minimax/i.test(baseUrl) || /minimax/i.test(rawModel) || process.env.MINIMAX_API_KEY) - name = 'MiniMax' - else if (resolvedRequest.transport === 'codex_responses' || baseUrl.includes('chatgpt.com/backend-api/codex')) + // Explicit dedicated-provider env flags win. 
+ if (process.env.NVIDIA_NIM) name = 'NVIDIA NIM' + else if (process.env.MINIMAX_API_KEY) name = 'MiniMax' + else if ( + resolvedRequest.transport === 'codex_responses' || + baseUrl.includes('chatgpt.com/backend-api/codex') + ) name = 'Codex' - else if (/moonshot/i.test(baseUrl) || /kimi/i.test(rawModel)) - name = 'Moonshot (Kimi)' - else if (/deepseek/i.test(baseUrl) || /deepseek/i.test(rawModel)) - name = 'DeepSeek' - else if (/openrouter/i.test(baseUrl)) - name = 'OpenRouter' - else if (/together/i.test(baseUrl)) - name = 'Together AI' - else if (/groq/i.test(baseUrl)) - name = 'Groq' - else if (/mistral/i.test(baseUrl) || /mistral/i.test(rawModel)) - name = 'Mistral' - else if (/azure/i.test(baseUrl)) - name = 'Azure OpenAI' - else if (/llama/i.test(rawModel)) - name = 'Meta Llama' - else if (isLocal) - name = getLocalOpenAICompatibleProviderLabel(baseUrl) + // Base URL is authoritative — must precede rawModel checks so aggregators + // (OpenRouter/Together/Groq) aren't mislabelled as DeepSeek/Kimi/etc. + // when routed to models whose IDs contain a vendor prefix. See issue #855. + else if (/openrouter/i.test(baseUrl)) name = 'OpenRouter' + else if (/together/i.test(baseUrl)) name = 'Together AI' + else if (/groq/i.test(baseUrl)) name = 'Groq' + else if (/azure/i.test(baseUrl)) name = 'Azure OpenAI' + else if (/nvidia/i.test(baseUrl)) name = 'NVIDIA NIM' + else if (/minimax/i.test(baseUrl)) name = 'MiniMax' + else if (/moonshot/i.test(baseUrl)) name = 'Moonshot (Kimi)' + else if (/deepseek/i.test(baseUrl)) name = 'DeepSeek' + else if (/mistral/i.test(baseUrl)) name = 'Mistral' + // rawModel fallback — fires only when base URL is generic/custom. 
+ else if (/nvidia/i.test(rawModel)) name = 'NVIDIA NIM' + else if (/minimax/i.test(rawModel)) name = 'MiniMax' + else if (/kimi/i.test(rawModel)) name = 'Moonshot (Kimi)' + else if (/deepseek/i.test(rawModel)) name = 'DeepSeek' + else if (/mistral/i.test(rawModel)) name = 'Mistral' + else if (/llama/i.test(rawModel)) name = 'Meta Llama' + else if (isLocal) name = getLocalOpenAICompatibleProviderLabel(baseUrl) // Resolve model alias to actual model name + reasoning effort let displayModel = resolvedRequest.resolvedModel