fix(startup): show --model flag override on startup screen (#898)

The startup screen was only reading model from env vars and settings,
ignoring the --model CLI flag since it's parsed by Commander.js after
the banner prints. The startup code now eagerly parses --model from argv
before rendering, so the displayed model matches what the session will actually use.
This commit is contained in:
Rayan Alkhelaiwi
2026-04-26 15:24:44 +03:00
committed by GitHub
parent 6dedffe5ff
commit d45628c413
3 changed files, with 82 additions and 9 deletions

View File

@@ -84,33 +84,33 @@ const LOGO_CLAUDE = [
// ─── Provider detection ───────────────────────────────────────────────────────
export function detectProvider(): { name: string; model: string; baseUrl: string; isLocal: boolean } {
export function detectProvider(modelOverride?: string): { name: string; model: string; baseUrl: string; isLocal: boolean } {
const useGemini = process.env.CLAUDE_CODE_USE_GEMINI === '1' || process.env.CLAUDE_CODE_USE_GEMINI === 'true'
const useGithub = process.env.CLAUDE_CODE_USE_GITHUB === '1' || process.env.CLAUDE_CODE_USE_GITHUB === 'true'
const useOpenAI = process.env.CLAUDE_CODE_USE_OPENAI === '1' || process.env.CLAUDE_CODE_USE_OPENAI === 'true'
const useMistral = process.env.CLAUDE_CODE_USE_MISTRAL === '1' || process.env.CLAUDE_CODE_USE_MISTRAL === 'true'
if (useGemini) {
const model = process.env.GEMINI_MODEL || 'gemini-2.0-flash'
const model = modelOverride || process.env.GEMINI_MODEL || 'gemini-2.0-flash'
const baseUrl = process.env.GEMINI_BASE_URL || 'https://generativelanguage.googleapis.com/v1beta/openai'
return { name: 'Google Gemini', model, baseUrl, isLocal: false }
}
if (useMistral) {
const model = process.env.MISTRAL_MODEL || 'devstral-latest'
const model = modelOverride || process.env.MISTRAL_MODEL || 'devstral-latest'
const baseUrl = process.env.MISTRAL_BASE_URL || 'https://api.mistral.ai/v1'
return { name: 'Mistral', model, baseUrl, isLocal: false }
}
if (useGithub) {
const model = process.env.OPENAI_MODEL || 'github:copilot'
const model = modelOverride || process.env.OPENAI_MODEL || 'github:copilot'
const baseUrl =
process.env.OPENAI_BASE_URL || 'https://api.githubcopilot.com'
return { name: 'GitHub Copilot', model, baseUrl, isLocal: false }
}
if (useOpenAI) {
const rawModel = process.env.OPENAI_MODEL || 'gpt-4o'
const rawModel = modelOverride || process.env.OPENAI_MODEL || 'gpt-4o'
const resolvedRequest = resolveProviderRequest({
model: rawModel,
baseUrl: process.env.OPENAI_BASE_URL,
@@ -166,7 +166,7 @@ export function detectProvider(): { name: string; model: string; baseUrl: string
// Default: Anthropic - check settings.model first, then env vars
const settings = getSettings_DEPRECATED() || {}
const modelSetting = settings.model || process.env.ANTHROPIC_MODEL || process.env.CLAUDE_MODEL || 'claude-sonnet-4-6'
const modelSetting = modelOverride || settings.model || process.env.ANTHROPIC_MODEL || process.env.CLAUDE_MODEL || 'claude-sonnet-4-6'
const resolvedModel = parseUserSpecifiedModel(modelSetting)
const baseUrl = process.env.ANTHROPIC_BASE_URL ?? 'https://api.anthropic.com'
const isLocal = isLocalProviderUrl(baseUrl)
@@ -182,11 +182,11 @@ function boxRow(content: string, width: number, rawLen: number): string {
// ─── Main ─────────────────────────────────────────────────────────────────────
export function printStartupScreen(): void {
export function printStartupScreen(modelOverride?: string): void {
// Skip in non-interactive / CI / print mode
if (process.env.CI || !process.stdout.isTTY) return
const p = detectProvider()
const p = detectProvider(modelOverride)
const W = 62
const out: string[] = []