// NOTE(review): this file has been whitespace-collapsed, and further down the
// JSX markup appears to have been stripped by whatever mangled it. This
// section is still token-complete; it is reformatted here with comments
// added and no code tokens changed. Several imports (TextInput, Dialog,
// LoadingState, Box, Text, deleteProfileFile, sanitizeApiKey, the
// build*ProfileEnv helpers, etc.) look unused below only because the JSX that
// used them was stripped — do not remove them before restoring the markup.
import * as React from 'react'
import type {
  LocalJSXCommandCall,
  LocalJSXCommandOnDone,
} from '../../types/command.js'
import { COMMON_HELP_ARGS, COMMON_INFO_ARGS } from '../../constants/xml.js'
import { ProviderManager } from '../../components/ProviderManager.js'
import TextInput from '../../components/TextInput.js'
import {
  Select,
  type OptionWithDescription,
} from '../../components/CustomSelect/index.js'
import { Dialog } from '../../components/design-system/Dialog.js'
import { LoadingState } from '../../components/design-system/LoadingState.js'
import { useTerminalSize } from '../../hooks/useTerminalSize.js'
import { Box, Text } from '../../ink.js'
import {
  DEFAULT_CODEX_BASE_URL,
  DEFAULT_OPENAI_BASE_URL,
  isLocalProviderUrl,
  resolveCodexApiCredentials,
  resolveProviderRequest,
} from '../../services/api/providerConfig.js'
import {
  buildCodexProfileEnv,
  buildGeminiProfileEnv,
  buildOllamaProfileEnv,
  buildOpenAIProfileEnv,
  createProfileFile,
  DEFAULT_GEMINI_BASE_URL,
  DEFAULT_GEMINI_MODEL,
  deleteProfileFile,
  loadProfileFile,
  maskSecretForDisplay,
  redactSecretValueForDisplay,
  sanitizeApiKey,
  sanitizeProviderConfigValue,
  saveProfileFile,
  type ProfileEnv,
  type ProfileFile,
  type ProviderProfile,
} from '../../utils/providerProfile.js'
import {
  getGeminiProjectIdHint,
  mayHaveGeminiAdcCredentials,
} from '../../utils/geminiAuth.js'
import {
  readGeminiAccessToken,
  saveGeminiAccessToken,
} from '../../utils/geminiCredentials.js'
import {
  getGoalDefaultOpenAIModel,
  normalizeRecommendationGoal,
  rankOllamaModels,
  recommendOllamaModel,
  type RecommendationGoal,
} from '../../utils/providerRecommendation.js'
import {
  getLocalOpenAICompatibleProviderLabel,
  hasLocalOllama,
  listOllamaModels,
} from '../../utils/providerDiscovery.js'

// What the top-level chooser can return: the Auto flow, one of the concrete
// saved-profile providers, or clearing the saved profile file.
type ProviderChoice = 'auto' | ProviderProfile | 'clear'

// Discriminated union describing the wizard's current screen; the main
// component switches on `name` (see the `case 'gemini-key'` etc. further
// down). Payload fields carry values collected on earlier screens forward.
type Step =
  | { name: 'choose' }
  | { name: 'auto-goal' }
  | { name: 'auto-detect'; goal: RecommendationGoal }
  | { name: 'ollama-detect' }
  | { name: 'openai-key'; defaultModel: string }
  | { name: 'openai-base'; apiKey: string; defaultModel: string }
  | {
      name: 'openai-model'
      apiKey: string
      baseUrl: string | null
      defaultModel: string
    }
  | { name: 'gemini-auth-method' }
  | { name: 'gemini-key' }
  | { name: 'gemini-access-token' }
  | {
      name: 'gemini-model'
      apiKey?: string
      authMode: 'api-key' | 'access-token' | 'adc'
    }
  | { name: 'codex-check' }

// Human-readable description of whatever provider the CURRENT process env
// resolves to (not necessarily the saved profile).
type CurrentProviderSummary = {
  providerLabel: string
  modelLabel: string
  endpointLabel: string
  savedProfileLabel: string
}

// Human-readable description of a saved profile; credentialLabel is omitted
// when no credential is configured for the profile.
type SavedProfileSummary = {
  providerLabel: string
  modelLabel: string
  endpointLabel: string
  credentialLabel?: string
}

// Props for the reusable single-field text prompt used by several wizard
// steps. `validate` returns an error message string, or null when the value
// is acceptable.
type TextEntryDialogProps = {
  title: string
  subtitle?: string
  resetStateKey?: string
  description: React.ReactNode
  initialValue: string
  placeholder?: string
  mask?: string
  allowEmpty?: boolean
  validate?: (value: string) => string | null
  onSubmit: (value: string) => void
  onCancel: () => void
}

// Sanitized defaults fed into the OpenAI / Gemini wizard steps.
type ProviderWizardDefaults = {
  openAIModel: string
  openAIBaseUrl: string
  geminiModel: string
}

/**
 * Loose boolean parsing for env flags: missing/empty, '0', 'false' and 'no'
 * (case-insensitive, whitespace-trimmed) are falsy; anything else is truthy.
 */
function isEnvTruthy(value: string | undefined): boolean {
  if (!value) return false
  const normalized = value.trim().toLowerCase()
  return normalized !== '' &&
    normalized !== '0' &&
    normalized !== 'false' &&
    normalized !== 'no'
}

/**
 * Redacts secret material out of a display value, falling back to `fallback`
 * ('(not set)' by default) when the redaction helper returns nothing.
 */
function getSafeDisplayValue(
  value: string | undefined,
  processEnv: NodeJS.ProcessEnv,
  profileEnv?: ProfileEnv,
  fallback = '(not set)',
): string {
  return (
    redactSecretValueForDisplay(value, processEnv, profileEnv) ??
// (continuation of getSafeDisplayValue from above)
      fallback
  )
}

/**
 * Sanitized defaults for the OpenAI/Gemini wizard steps, derived from the
 * given env (process.env by default) with hard-coded fallbacks.
 */
export function getProviderWizardDefaults(
  processEnv: NodeJS.ProcessEnv = process.env,
): ProviderWizardDefaults {
  const safeOpenAIModel =
    sanitizeProviderConfigValue(processEnv.OPENAI_MODEL, processEnv) || 'gpt-4o'
  const safeOpenAIBaseUrl =
    sanitizeProviderConfigValue(processEnv.OPENAI_BASE_URL, processEnv) ||
    DEFAULT_OPENAI_BASE_URL
  const safeGeminiModel =
    sanitizeProviderConfigValue(processEnv.GEMINI_MODEL, processEnv) ||
    DEFAULT_GEMINI_MODEL
  return {
    openAIModel: safeOpenAIModel,
    openAIBaseUrl: safeOpenAIBaseUrl,
    geminiModel: safeGeminiModel,
  }
}

/**
 * Describes the provider the current environment resolves to. Precedence of
 * the override flags: CLAUDE_CODE_USE_GEMINI, then CLAUDE_CODE_USE_GITHUB,
 * then CLAUDE_CODE_USE_OPENAI; otherwise Anthropic defaults are reported.
 */
export function buildCurrentProviderSummary(options?: {
  processEnv?: NodeJS.ProcessEnv
  persisted?: ProfileFile | null
}): CurrentProviderSummary {
  const processEnv = options?.processEnv ?? process.env
  const persisted = options?.persisted ?? loadProfileFile()
  const savedProfileLabel = persisted?.profile ?? 'none'
  if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_GEMINI)) {
    return {
      providerLabel: 'Google Gemini',
      modelLabel: getSafeDisplayValue(
        processEnv.GEMINI_MODEL ?? DEFAULT_GEMINI_MODEL,
        processEnv,
      ),
      endpointLabel: getSafeDisplayValue(
        processEnv.GEMINI_BASE_URL ?? DEFAULT_GEMINI_BASE_URL,
        processEnv,
      ),
      savedProfileLabel,
    }
  }
  if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB)) {
    return {
      providerLabel: 'GitHub Models',
      modelLabel: getSafeDisplayValue(
        processEnv.OPENAI_MODEL ?? 'github:copilot',
        processEnv,
      ),
      endpointLabel: getSafeDisplayValue(
        processEnv.OPENAI_BASE_URL ??
          processEnv.OPENAI_API_BASE ??
          'https://models.github.ai/inference',
        processEnv,
      ),
      savedProfileLabel,
    }
  }
  if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_OPENAI)) {
    // Resolve model/base URL the same way the transport layer does, so the
    // label matches actual behavior (Codex vs local vs generic OpenAI).
    const request = resolveProviderRequest({
      model: processEnv.OPENAI_MODEL,
      baseUrl: processEnv.OPENAI_BASE_URL,
    })
    let providerLabel = 'OpenAI-compatible'
    if (request.transport === 'codex_responses') {
      providerLabel = 'Codex'
    } else if (isLocalProviderUrl(request.baseUrl)) {
      providerLabel = getLocalOpenAICompatibleProviderLabel(request.baseUrl)
    }
    return {
      providerLabel,
      modelLabel: getSafeDisplayValue(request.requestedModel, processEnv),
      endpointLabel: getSafeDisplayValue(request.baseUrl, processEnv),
      savedProfileLabel,
    }
  }
  // No provider-override flag set: report the Anthropic defaults.
  return {
    providerLabel: 'Anthropic',
    modelLabel: getSafeDisplayValue(
      processEnv.ANTHROPIC_MODEL ?? processEnv.CLAUDE_MODEL ?? 'claude-sonnet-4-6',
      processEnv,
    ),
    endpointLabel: getSafeDisplayValue(
      processEnv.ANTHROPIC_BASE_URL ?? 'https://api.anthropic.com',
      processEnv,
    ),
    savedProfileLabel,
  }
}

/**
 * Describes a saved profile's provider, model, endpoint and (when present)
 * credential state, with secrets redacted for display.
 */
function buildSavedProfileSummary(
  profile: ProviderProfile,
  env: ProfileEnv,
): SavedProfileSummary {
  switch (profile) {
    case 'gemini':
      return {
        providerLabel: 'Google Gemini',
        modelLabel: getSafeDisplayValue(
          env.GEMINI_MODEL ?? DEFAULT_GEMINI_MODEL,
          process.env,
          env,
        ),
        endpointLabel: getSafeDisplayValue(
          env.GEMINI_BASE_URL ?? DEFAULT_GEMINI_BASE_URL,
          process.env,
          env,
        ),
        // Explicit auth mode wins over the mere presence of an API key.
        credentialLabel:
          env.GEMINI_AUTH_MODE === 'access-token'
            ? 'access token (stored securely)'
            : env.GEMINI_AUTH_MODE === 'adc'
              ? 'local ADC'
              : maskSecretForDisplay(env.GEMINI_API_KEY) !== undefined
                ? 'configured'
                : undefined,
      }
    case 'codex':
      return {
        providerLabel: 'Codex',
        modelLabel: getSafeDisplayValue(
          env.OPENAI_MODEL ?? 'codexplan',
          process.env,
          env,
        ),
        endpointLabel: getSafeDisplayValue(
          env.OPENAI_BASE_URL ?? DEFAULT_CODEX_BASE_URL,
          process.env,
          env,
        ),
        credentialLabel:
          maskSecretForDisplay(env.CODEX_API_KEY) !== undefined ?
// (continuation of buildSavedProfileSummary's 'codex' credentialLabel ternary)
            'configured'
          : undefined,
      }
    case 'ollama':
      // Local Ollama needs no credential, so no credentialLabel here.
      return {
        providerLabel: 'Ollama',
        modelLabel: getSafeDisplayValue(
          env.OPENAI_MODEL,
          process.env,
          env,
        ),
        endpointLabel: getSafeDisplayValue(
          env.OPENAI_BASE_URL,
          process.env,
          env,
        ),
      }
    case 'openai':
    default: {
      const baseUrl = env.OPENAI_BASE_URL ?? DEFAULT_OPENAI_BASE_URL
      return {
        // Local endpoints (LM Studio etc.) get their specific label.
        providerLabel: isLocalProviderUrl(baseUrl)
          ? getLocalOpenAICompatibleProviderLabel(baseUrl)
          : 'OpenAI-compatible',
        modelLabel: getSafeDisplayValue(
          env.OPENAI_MODEL ?? 'gpt-4o',
          process.env,
          env,
        ),
        endpointLabel: getSafeDisplayValue(
          baseUrl,
          process.env,
          env,
        ),
        credentialLabel:
          maskSecretForDisplay(env.OPENAI_API_KEY) !== undefined
            ? 'configured'
            : undefined,
      }
    }
  }
}

/**
 * Builds the multi-line confirmation message shown after a profile is saved:
 * provider/model/endpoint, optional credential state, the file path and a
 * restart reminder.
 */
export function buildProfileSaveMessage(
  profile: ProviderProfile,
  env: ProfileEnv,
  filePath: string,
): string {
  const summary = buildSavedProfileSummary(profile, env)
  const lines = [
    `Saved ${summary.providerLabel} profile.`,
    `Model: ${summary.modelLabel}`,
    `Endpoint: ${summary.endpointLabel}`,
  ]
  if (summary.credentialLabel) {
    lines.push(`Credentials: ${summary.credentialLabel}`)
  }
  lines.push(`Profile: ${filePath}`)
  lines.push('Restart OpenClaude to use it.')
  return lines.join('\n')
}

// Help text for `/provider`, including the currently-resolved provider.
function buildUsageText(): string {
  const summary = buildCurrentProviderSummary()
  return [
    'Usage: /provider',
    '',
    'Guided setup for saved provider profiles.',
    '',
    `Current provider: ${summary.providerLabel}`,
    `Current model: ${summary.modelLabel}`,
    `Current endpoint: ${summary.endpointLabel}`,
    `Saved profile: ${summary.savedProfileLabel}`,
    '',
    'Choose Auto, Ollama, OpenAI-compatible, Gemini, or Codex, then save a profile for the next OpenClaude restart.',
  ].join('\n')
}

/**
 * Persists a profile file and reports success or failure through `onDone`
 * (always with display: 'system'); never throws.
 */
function finishProfileSave(
  onDone: LocalJSXCommandOnDone,
  profile: ProviderProfile,
  env: ProfileEnv,
): void {
  try {
    const profileFile = createProfileFile(profile, env)
    const filePath = saveProfileFile(profileFile)
    onDone(buildProfileSaveMessage(profile, env, filePath), {
      display: 'system',
    })
  } catch (error) {
    const message =
      error instanceof Error ? error.message : String(error)
    onDone(`Failed to save provider profile: ${message}`, {
      display: 'system',
    })
  }
}

/**
 * Reusable one-field text prompt used by several wizard steps.
 * NOTE(review): the JSX in this component's `return` has been stripped by
 * whatever mangled this file (only the expression children survive), and the
 * `React.useState(null)` below has most likely lost a `<string | null>` type
 * argument the same way (setError is later called with strings). Restore
 * both from version control — do not hand-reconstruct.
 */
export function TextEntryDialog({
  title,
  subtitle,
  resetStateKey,
  description,
  initialValue,
  placeholder,
  mask,
  allowEmpty = false,
  validate,
  onSubmit,
  onCancel,
}: TextEntryDialogProps): React.ReactNode {
  const { columns } = useTerminalSize()
  const [value, setValue] = React.useState(initialValue)
  const [cursorOffset, setCursorOffset] = React.useState(initialValue.length)
  const [error, setError] = React.useState(null)
  // Re-seed the field whenever the caller moves to a new step/value.
  React.useLayoutEffect(() => {
    setValue(initialValue)
    setCursorOffset(initialValue.length)
    setError(null)
  }, [initialValue, resetStateKey])
  const inputColumns = Math.max(30, columns - 6)
  const handleSubmit = React.useCallback(
    (nextValue: string) => {
      // Empty input is rejected unless the step explicitly allows it.
      if (!allowEmpty && nextValue.trim().length === 0) {
        setError('A value is required for this step.')
        return
      }
      const validationError = validate?.(nextValue)
      if (validationError) {
        setError(validationError)
        return
      }
      setError(null)
      onSubmit(nextValue)
    },
    [allowEmpty, onSubmit, validate],
  )
  return (
    {description}
    {error ?
// NOTE(review): from here to the end of the file the JSX markup has been
// stripped (opening/closing tags and most attributes are gone, leaving only
// expression children and event-handler bodies), and at least the Ollama
// model picker, the Codex step, and the main wizard component's header have
// been truncated into one another. The surviving tokens are preserved below,
// reformatted, but this region is NOT compilable as-is and must be restored
// from version control.
    {error} : null}
  )
}

// Top-level provider menu. Options: Auto, Ollama, OpenAI-compatible, Gemini,
// Codex, plus 'Clear saved profile' when a profile file exists.
function ProviderChooser({
  onChoose,
  onCancel,
}: {
  onChoose: (value: ProviderChoice) => void
  onCancel: () => void
}): React.ReactNode {
  const summary = buildCurrentProviderSummary()
  const options: OptionWithDescription[] = [
    {
      label: 'Auto',
      value: 'auto',
      description:
        'Prefer local Ollama when available, otherwise guide you into OpenAI-compatible setup',
    },
    {
      label: 'Ollama',
      value: 'ollama',
      description: 'Use a local Ollama model with no API key',
    },
    {
      label: 'OpenAI-compatible',
      value: 'openai',
      description:
        'GPT-4o, DeepSeek, OpenRouter, Groq, LM Studio, and similar APIs',
    },
    {
      label: 'Gemini',
      value: 'gemini',
      description: 'Use Google Gemini with API key, access token, or local ADC',
    },
    {
      label: 'Codex',
      value: 'codex',
      description: 'Use existing ChatGPT Codex CLI auth or env credentials',
    },
  ]
  if (summary.savedProfileLabel !== 'none') {
    options.push({
      label: 'Clear saved profile',
      value: 'clear',
      description:
        'Remove .openclaude-profile.json and return to normal startup',
    })
  }
  // NOTE(review): stripped JSX — the following were text/expression children
  // of the chooser's dialog markup.
  return (
    Save a provider profile for the next OpenClaude restart without editing environment variables first.
    Current model: {summary.modelLabel}
    Current endpoint: {summary.endpointLabel}
    Saved profile: {summary.savedProfileLabel}
  )
}

// Auto flow: probe for a local Ollama install and recommend a model for the
// chosen goal; fall back to the OpenAI-compatible flow when unavailable.
function AutoRecommendationStep({
  goal,
  onBack,
  onSave,
  onNeedOpenAI,
  onCancel,
}: {
  goal: RecommendationGoal
  onBack: () => void
  onSave: (profile: ProviderProfile, env: ProfileEnv) => void
  onNeedOpenAI: (defaultModel: string) => void
  onCancel: () => void
}): React.ReactNode {
  // Four-state async status machine for the detection probe.
  const [status, setStatus] = React.useState<
    | { state: 'loading' }
    | {
        state: 'ollama'
        model: string
        summary: string
      }
    | {
        state: 'openai'
        defaultModel: string
      }
    | {
        state: 'error'
        message: string
      }
  >({ state: 'loading' })
  React.useEffect(() => {
    // `cancelled` guards against setState after unmount or a goal change.
    let cancelled = false
    void (async () => {
      const defaultModel = getGoalDefaultOpenAIModel(goal)
      try {
        const ollamaAvailable = await hasLocalOllama()
        if (!ollamaAvailable) {
          if (!cancelled) {
            setStatus({ state: 'openai', defaultModel })
          }
          return
        }
        const models = await listOllamaModels()
        const recommended = recommendOllamaModel(models, goal)
        if (!recommended) {
          if (!cancelled) {
            setStatus({ state: 'openai', defaultModel })
          }
          return
        }
        if (!cancelled) {
          setStatus({
            state: 'ollama',
            model: recommended.name,
            summary: recommended.summary,
          })
        }
      } catch (error) {
        if (!cancelled) {
          setStatus({
            state: 'error',
            message: error instanceof Error ? error.message : String(error),
          })
        }
      }
    })()
    return () => {
      cancelled = true
    }
  }, [goal])
  if (status.state === 'loading') {
    // NOTE(review): stripped JSX — presumably returned a loading element.
    return
  }
  if (status.state === 'error') {
    // NOTE(review): stripped JSX — only expression children and the
    // onChange/onCancel handler bodies of a select element survive below.
    return (
      {status.message}
      {
        if (value === 'continue') {
          onNeedOpenAI(status.defaultModel)
        } else if (value === 'back') {
          onBack()
        } else {
          onCancel()
        }
      }}
      onCancel={onCancel}
      />
    )
  }
  return (
    Auto setup recommends a local Ollama profile for {goal} based on the models currently available on this machine.
    Recommended model: {status.model}
    {status.summary ? ` · ${status.summary}` : ''}
    (value === 'back' ? onBack() : onCancel())}
    onCancel={onCancel}
    />
  )
}

// NOTE(review): one or more component headers have been lost here — the
// fragments below ("Pick one of the installed Ollama models…", the
// codexplan/codexspark options referencing `credentials.sourceDescription`,
// and the `case 'gemini-…'` arms of a `switch (step.name)`) belong to the
// Ollama model picker, the Codex step, and the main wizard component, whose
// opening lines were destroyed by the mangling.
return (
  Pick one of the installed Ollama models to save into a local provider profile.
  (value === 'back' ? onBack() : onCancel())}
  onCancel={onCancel}
  />
)
}

const options: OptionWithDescription[] = [
  {
    label: 'codexplan',
    value: 'codexplan',
    description: 'GPT-5.4 with higher reasoning on the Codex backend',
  },
  {
    label: 'codexspark',
    value: 'codexspark',
    description: 'Faster Codex Spark tool loop profile',
  },
]
return (
  Reuse your existing Codex credentials from{' '}
  {credentials.sourceDescription} and save a model alias profile.
  {
    if (value === 'api-key') {
      setStep({ name: 'gemini-key' })
    } else if (value === 'access-token') {
      setStep({ name: 'gemini-access-token' })
    } else {
      setStep({
        name: 'gemini-model',
        authMode: 'adc',
      })
    }
  }}
  onCancel={() => setStep({ name: 'choose' })}
  />
)
}
case 'gemini-key':
  // API-key entry; a blank field falls back to GEMINI_API_KEY /
  // GOOGLE_API_KEY from the environment.
  return (
    {
      const apiKey =
        value.trim() ||
        process.env.GEMINI_API_KEY ||
        process.env.GOOGLE_API_KEY ||
        ''
      setStep({ name: 'gemini-model', apiKey, authMode: 'api-key' })
    }}
    onCancel={() => setStep({ name: 'gemini-auth-method' })}
    />
  )
case 'gemini-access-token': {
  // Pre-fill from the environment or the stored credential file.
  const currentToken =
    process.env.GEMINI_ACCESS_TOKEN || readGeminiAccessToken() || ''
  return (
    {
      const token = value.trim() || currentToken
      return token
        ? null
        : 'Enter a Gemini access token or go back and choose Local ADC.'
    }}
    onSubmit={value => {
      const token = value.trim() || currentToken
      // Persist the token before advancing; surface storage failures.
      const saved = saveGeminiAccessToken(token)
      if (!saved.success) {
        onDone(
          `Failed to save Gemini access token: ${saved.warning ?? 'unknown error'}`,
          {
            display: 'system',
          },
        )
        return
      }
      setStep({
        name: 'gemini-model',
        authMode: 'access-token',
      })
    }}
    onCancel={() => setStep({ name: 'gemini-auth-method' })}
    />
  )
}
case 'gemini-model':
  return (
    {
      // ADC mode requires local Application Default Credentials.
      if (
        step.authMode === 'adc' &&
        !mayHaveGeminiAdcCredentials(process.env)
      ) {
        onDone(
          'Local ADC credentials were not detected. Run `gcloud auth application-default login` first, then save the Gemini ADC profile again.',
          {
            display: 'system',
          },
        )
        return
      }
      const env = buildGeminiProfileEnv({
        apiKey: step.apiKey,
        authMode: step.authMode,
        model: value.trim() || DEFAULT_GEMINI_MODEL,
        processEnv: {},
      })
      if (env) {
        finishProfileSave(onDone, 'gemini', env)
      }
    }}
    onCancel={() =>
      // Go back to whichever screen supplied the credential.
      step.authMode === 'api-key'
        ? setStep({ name: 'gemini-key' })
        : step.authMode === 'access-token'
          ? setStep({ name: 'gemini-access-token' })
          : setStep({ name: 'gemini-auth-method' })
    }
    />
  )
case 'codex-check':
  return (
    finishProfileSave(onDone, profile, env)}
    onBack={() => setStep({ name: 'choose' })}
    onCancel={() => onDone()}
    />
  )
}
}

/**
 * `/provider` command entry point. With help-style args it prints a usage
 * summary; otherwise it renders the provider-manager UI and reports the
 * result through `onDone` when it closes.
 * NOTE(review): the returned JSX element (presumably <ProviderManager …/>)
 * was stripped; only its result-handler body survives below.
 */
export const call: LocalJSXCommandCall = async (onDone, _context, args) => {
  const trimmedArgs = args?.trim().toLowerCase() ?? ''
  if (
    COMMON_HELP_ARGS.includes(trimmedArgs) ||
    COMMON_INFO_ARGS.includes(trimmedArgs) ||
    trimmedArgs === 'help' ||
    trimmedArgs === '--help' ||
    trimmedArgs === '-h'
  ) {
    onDone(
      'Run /provider to add, edit, delete, or activate provider profiles. The active provider controls base URL, model, and API key.',
      { display: 'system' },
    )
    return
  }
  return (
    {
      const message =
        result?.message ??
        (result?.action === 'saved'
          ? 'Provider profile updated'
          : 'Provider manager closed')
      onDone(message, { display: 'system' })
    }}
    />
  )
}