fix: restore Ollama auto-detect in first-run setup (#561)

Co-authored-by: anandh8x <test@example.com>
This commit is contained in:
Anandan
2026-04-10 19:23:30 +05:30
committed by GitHub
parent 9ccaa7a675
commit 68c296833d
4 changed files with 332 additions and 38 deletions

View File

@@ -103,7 +103,7 @@ test('login picker shows the third-party platform option', async () => {
expect(output).toContain('3rd-party platform')
})
test('third-party provider branch opens the provider wizard', async () => {
test('third-party provider branch opens the first-run provider manager', async () => {
const output = await renderFrame(
<ConsoleOAuthFlow
initialStatus={{ state: 'platform_setup' }}
@@ -111,7 +111,9 @@ test('third-party provider branch opens the provider wizard', async () => {
/>,
)
expect(output).toContain('Set up a provider profile')
expect(output).toContain('OpenAI-compatible')
expect(output).toContain('Set up provider')
expect(output).toContain('Anthropic')
expect(output).toContain('OpenAI')
expect(output).toContain('Ollama')
expect(output).toContain('LM Studio')
})

View File

@@ -12,7 +12,7 @@ import { OAuthService } from '../services/oauth/index.js';
import { getOauthAccountInfo, validateForceLoginOrg } from '../utils/auth.js';
import { logError } from '../utils/log.js';
import { getSettings_DEPRECATED } from '../utils/settings/settings.js';
import { ProviderWizard } from '../commands/provider/provider.js';
import { ProviderManager } from './ProviderManager.js';
import { Select } from './CustomSelect/select.js';
import { KeyboardShortcutHint } from './design-system/KeyboardShortcutHint.js';
import { Spinner } from './Spinner.js';
@@ -450,16 +450,17 @@ function OAuthStatusMessage({
case 'platform_setup':
return (
<ProviderWizard
<ProviderManager
mode="first-run"
onDone={result => {
if (!result) {
if (!result || result.action !== 'saved' || !result.message) {
setOAuthStatus({ state: 'idle' })
return
}
setOAuthStatus({
state: 'platform_setup_complete',
message: result,
message: result.message,
})
}}
/>

View File

@@ -6,6 +6,7 @@ import stripAnsi from 'strip-ansi'
import { createRoot } from '../ink.js'
import { AppStateProvider } from '../state/AppState.js'
import { KeybindingSetup } from '../keybindings/KeybindingProviderSetup.js'
const SYNC_START = '\x1B[?2026h'
const SYNC_END = '\x1B[?2026l'
@@ -106,19 +107,30 @@ function createDeferred<T>(): {
return { promise, resolve }
}
function mockProviderProfilesModule(): void {
function mockProviderProfilesModule(options?: {
addProviderProfile?: (...args: unknown[]) => unknown
}): void {
mock.module('../utils/providerProfiles.js', () => ({
addProviderProfile: () => null,
addProviderProfile: options?.addProviderProfile ?? (() => null),
applyActiveProviderProfileFromConfig: () => {},
deleteProviderProfile: () => ({ removed: false, activeProfileId: null }),
getActiveProviderProfile: () => null,
getProviderPresetDefaults: () => ({
provider: 'openai',
name: 'Mock provider',
baseUrl: 'http://localhost:11434/v1',
model: 'mock-model',
apiKey: '',
}),
getProviderPresetDefaults: (preset: string) =>
preset === 'ollama'
? {
provider: 'openai',
name: 'Ollama',
baseUrl: 'http://localhost:11434/v1',
model: 'llama3.1:8b',
apiKey: '',
}
: {
provider: 'openai',
name: 'Mock provider',
baseUrl: 'http://localhost:11434/v1',
model: 'mock-model',
apiKey: '',
},
getProviderProfiles: () => [],
setActiveProviderProfile: () => null,
updateProviderProfile: () => null,
@@ -128,8 +140,27 @@ function mockProviderProfilesModule(): void {
function mockProviderManagerDependencies(
syncRead: () => string | undefined,
asyncRead: () => Promise<string | undefined>,
options?: {
addProviderProfile?: (...args: unknown[]) => unknown
hasLocalOllama?: () => Promise<boolean>
listOllamaModels?: () => Promise<
Array<{
name: string
sizeBytes?: number | null
family?: string | null
families?: string[]
parameterSize?: string | null
quantizationLevel?: string | null
}>
>
},
): void {
mockProviderProfilesModule()
mockProviderProfilesModule({ addProviderProfile: options?.addProviderProfile })
mock.module('../utils/providerDiscovery.js', () => ({
hasLocalOllama: options?.hasLocalOllama ?? (async () => false),
listOllamaModels: options?.listOllamaModels ?? (async () => []),
}))
mock.module('../utils/githubModelsCredentials.js', () => ({
clearGithubModelsToken: () => ({ success: true }),
@@ -162,9 +193,14 @@ async function waitForFrameOutput(
async function mountProviderManager(
ProviderManager: React.ComponentType<{
mode: 'first-run' | 'manage'
onDone: () => void
onDone: (result?: unknown) => void
}>,
options?: {
mode?: 'first-run' | 'manage'
onDone?: (result?: unknown) => void
},
): Promise<{
stdin: PassThrough
getOutput: () => string
dispose: () => Promise<void>
}> {
@@ -177,14 +213,17 @@ async function mountProviderManager(
root.render(
<AppStateProvider>
<ProviderManager
mode="manage"
onDone={() => {}}
/>
<KeybindingSetup>
<ProviderManager
mode={options?.mode ?? 'manage'}
onDone={options?.onDone ?? (() => {})}
/>
</KeybindingSetup>
</AppStateProvider>,
)
return {
stdin,
getOutput,
dispose: async () => {
root.unmount()
@@ -198,14 +237,17 @@ async function mountProviderManager(
async function renderProviderManagerFrame(
ProviderManager: React.ComponentType<{
mode: 'first-run' | 'manage'
onDone: () => void
onDone: (result?: unknown) => void
}>,
options?: {
waitForOutput?: (output: string) => boolean
timeoutMs?: number
mode?: 'first-run' | 'manage'
},
): Promise<string> {
const mounted = await mountProviderManager(ProviderManager)
const mounted = await mountProviderManager(ProviderManager, {
mode: options?.mode,
})
const output = await waitForFrameOutput(
mounted.getOutput,
frame => {
@@ -263,6 +305,96 @@ test('ProviderManager resolves GitHub virtual provider from async storage withou
expect(asyncRead).toHaveBeenCalled()
})
// End-to-end UI test: in first-run mode, choosing the Ollama preset probes
// the local Ollama mock, lists the installed models, and saving the selected
// model creates a provider profile and reports it through onDone.
test('ProviderManager first-run Ollama preset auto-detects installed models', async () => {
// Clear GitHub-related env vars so the GitHub virtual provider path is inert.
delete process.env.CLAUDE_CODE_USE_GITHUB
delete process.env.GITHUB_TOKEN
delete process.env.GH_TOKEN
const onDone = mock(() => {})
// Spy that records the profile payload the manager persists.
const addProviderProfile = mock((payload: {
provider: string
name: string
baseUrl: string
model: string
apiKey?: string
}) => ({
id: 'provider_ollama',
provider: payload.provider,
name: payload.name,
baseUrl: payload.baseUrl,
model: payload.model,
apiKey: payload.apiKey,
}))
// Mock discovery so Ollama appears reachable with two installed models.
mockProviderManagerDependencies(
() => undefined,
async () => undefined,
{
addProviderProfile,
hasLocalOllama: async () => true,
listOllamaModels: async () => [
{
name: 'gemma4:31b-cloud',
family: 'gemma',
parameterSize: '31b',
},
{
name: 'kimi-k2.5:cloud',
family: 'kimi',
parameterSize: '2.5b',
},
],
},
)
// Cache-busting import so this test picks up the freshly mocked modules.
const nonce = `${Date.now()}-${Math.random()}`
const { ProviderManager } = await import(`./ProviderManager.js?ts=${nonce}`)
const mounted = await mountProviderManager(ProviderManager, {
mode: 'first-run',
onDone,
})
// Wait for the first-run preset picker, then navigate to the Ollama entry.
await waitForFrameOutput(
mounted.getOutput,
frame => frame.includes('Set up provider') && frame.includes('Ollama'),
)
mounted.stdin.write('j')
await Bun.sleep(50)
mounted.stdin.write('\r')
// The model-selection screen should list both mocked models.
const modelFrame = await waitForFrameOutput(
mounted.getOutput,
frame =>
frame.includes('Choose an Ollama model') &&
frame.includes('gemma4:31b-cloud') &&
frame.includes('kimi-k2.5:cloud'),
)
expect(modelFrame).toContain('Choose an Ollama model')
expect(modelFrame).toContain('gemma4:31b-cloud')
await Bun.sleep(25)
// Accept the focused (recommended) model and wait for completion.
mounted.stdin.write('\r')
await waitForCondition(() => onDone.mock.calls.length > 0)
// The saved profile should carry the preset defaults plus the chosen model.
expect(addProviderProfile).toHaveBeenCalled()
expect(addProviderProfile.mock.calls[0]?.[0]).toMatchObject({
name: 'Ollama',
baseUrl: 'http://localhost:11434/v1',
model: 'gemma4:31b-cloud',
})
// first-run completion is reported as a 'saved' result with a user message.
expect(onDone).toHaveBeenCalledWith(
expect.objectContaining({
action: 'saved',
message: 'Provider configured: Ollama',
}),
)
await mounted.dispose()
})
test('ProviderManager avoids first-frame false negative while stored-token lookup is pending', async () => {
delete process.env.CLAUDE_CODE_USE_GITHUB
delete process.env.GITHUB_TOKEN

View File

@@ -3,6 +3,7 @@ import * as React from 'react'
import { Box, Text } from '../ink.js'
import { useKeybinding } from '../keybindings/useKeybinding.js'
import type { ProviderProfile } from '../utils/config.js'
import { hasLocalOllama, listOllamaModels } from '../utils/providerDiscovery.js'
import {
addProviderProfile,
applyActiveProviderProfileFromConfig,
@@ -15,6 +16,10 @@ import {
type ProviderProfileInput,
updateProviderProfile,
} from '../utils/providerProfiles.js'
import {
rankOllamaModels,
recommendOllamaModel,
} from '../utils/providerRecommendation.js'
import {
clearGithubModelsToken,
GITHUB_MODELS_HYDRATED_ENV_MARKER,
@@ -24,7 +29,7 @@ import {
} from '../utils/githubModelsCredentials.js'
import { isEnvTruthy } from '../utils/envUtils.js'
import { updateSettingsForSource } from '../utils/settings/settings.js'
import { Select } from './CustomSelect/index.js'
import { type OptionWithDescription, Select } from './CustomSelect/index.js'
import { Pane } from './design-system/Pane.js'
import TextInput from './TextInput.js'
@@ -42,6 +47,7 @@ type Props = {
type Screen =
| 'menu'
| 'select-preset'
| 'select-ollama-model'
| 'form'
| 'select-active'
| 'select-edit'
@@ -51,6 +57,16 @@ type DraftField = 'name' | 'baseUrl' | 'model' | 'apiKey'
type ProviderDraft = Record<DraftField, string>
type OllamaSelectionState =
| { state: 'idle' }
| { state: 'loading' }
| {
state: 'ready'
options: OptionWithDescription<string>[]
defaultValue?: string
}
| { state: 'unavailable'; message: string }
const FORM_STEPS: Array<{
key: DraftField
label: string
@@ -210,6 +226,9 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
const [cursorOffset, setCursorOffset] = React.useState(0)
const [statusMessage, setStatusMessage] = React.useState<string | undefined>()
const [errorMessage, setErrorMessage] = React.useState<string | undefined>()
const [ollamaSelection, setOllamaSelection] = React.useState<OllamaSelectionState>({
state: 'idle',
})
const currentStep = FORM_STEPS[formStepIndex] ?? FORM_STEPS[0]
const currentStepKey = currentStep.key
@@ -364,6 +383,59 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
return null
}
// Ollama auto-detection: whenever the Ollama model-selection screen becomes
// active, probe the configured endpoint and load the installed model list.
// Re-runs if the draft base URL changes while the screen is open.
React.useEffect(() => {
// Only probe while the Ollama model-selection screen is showing.
if (screen !== 'select-ollama-model') {
return
}
// Stale-result guard: set by the cleanup so an in-flight async probe
// cannot update state after the screen changed or the component unmounted.
let cancelled = false
setOllamaSelection({ state: 'loading' })
void (async () => {
const available = await hasLocalOllama(draft.baseUrl)
if (!available) {
if (!cancelled) {
// Endpoint unreachable: fall back to the unavailable screen,
// which offers manual entry instead of a model list.
setOllamaSelection({
state: 'unavailable',
message:
'Could not reach Ollama. Start Ollama first, or enter the endpoint manually.',
})
}
return
}
const models = await listOllamaModels(draft.baseUrl)
if (models.length === 0) {
if (!cancelled) {
// Ollama is up but has no installed models — tell the user to pull one.
setOllamaSelection({
state: 'unavailable',
message:
'Ollama is running, but no installed models were found. Pull a chat model such as qwen2.5-coder:7b or llama3.1:8b first, or enter details manually.',
})
}
return
}
// Rank installed models and preselect the recommended one; fall back to
// the top-ranked entry when no recommendation is produced.
const ranked = rankOllamaModels(models, 'balanced')
const recommended = recommendOllamaModel(models, 'balanced')
if (!cancelled) {
setOllamaSelection({
state: 'ready',
defaultValue: recommended?.name ?? ranked[0]?.name,
options: ranked.map(model => ({
label: model.name,
value: model.name,
description: model.summary,
})),
})
}
})()
// Cleanup: mark any in-flight probe as stale.
return () => {
cancelled = true
}
}, [draft.baseUrl, screen])
function startCreateFromPreset(preset: ProviderPreset): void {
const defaults = getProviderPresetDefaults(preset)
const nextDraft = {
@@ -378,6 +450,13 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
setFormStepIndex(0)
setCursorOffset(nextDraft.name.length)
setErrorMessage(undefined)
if (preset === 'ollama') {
setOllamaSelection({ state: 'loading' })
setScreen('select-ollama-model')
return
}
setScreen('form')
}
@@ -397,13 +476,13 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
setScreen('form')
}
function persistDraft(): void {
function persistDraft(nextDraft: ProviderDraft = draft): void {
const payload: ProviderProfileInput = {
provider: draftProvider,
name: draft.name,
baseUrl: draft.baseUrl,
model: draft.model,
apiKey: draft.apiKey,
name: nextDraft.name,
baseUrl: nextDraft.baseUrl,
model: nextDraft.model,
apiKey: nextDraft.apiKey,
}
const saved = editingProfileId
@@ -446,6 +525,83 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
setScreen('menu')
}
// Renders the Ollama setup screen for each detection state in
// `ollamaSelection`: a loading notice while probing, a fallback menu when
// Ollama is unreachable or empty, or the ranked list of installed models.
function renderOllamaSelection(): React.ReactNode {
// 'idle' renders identically to 'loading': detection has not completed yet.
if (ollamaSelection.state === 'loading' || ollamaSelection.state === 'idle') {
return (
<Box flexDirection="column" gap={1}>
<Text color="remember" bold>
Checking Ollama
</Text>
<Text dimColor>Looking for installed Ollama models...</Text>
</Box>
)
}
if (ollamaSelection.state === 'unavailable') {
// Detection failed (unreachable endpoint or no installed models):
// offer manual form entry or a return to the preset picker.
return (
<Box flexDirection="column" gap={1}>
<Text color="remember" bold>
Ollama setup
</Text>
<Text dimColor>{ollamaSelection.message}</Text>
<Select
options={[
{
value: 'manual',
label: 'Enter manually',
description: 'Fill in the base URL and model yourself',
},
{
value: 'back',
label: 'Back',
description: 'Choose another provider preset',
},
]}
onChange={value => {
if (value === 'manual') {
// Jump into the standard provider form starting at the first step.
setFormStepIndex(0)
setCursorOffset(draft.name.length)
setScreen('form')
return
}
setScreen('select-preset')
}}
onCancel={() => setScreen('select-preset')}
visibleOptionCount={2}
/>
</Box>
)
}
// state === 'ready': show the ranked model list with the recommended
// model focused; choosing a model saves the profile immediately.
return (
<Box flexDirection="column" gap={1}>
<Text color="remember" bold>
Choose an Ollama model
</Text>
<Text dimColor>
Pick one of the installed Ollama models to save into a local provider
profile.
</Text>
<Select
options={ollamaSelection.options}
defaultValue={ollamaSelection.defaultValue}
defaultFocusValue={ollamaSelection.defaultValue}
inlineDescriptions
visibleOptionCount={Math.min(8, ollamaSelection.options.length)}
onChange={value => {
const nextDraft = {
...draft,
model: value,
}
// Persist with the updated draft directly — setDraft is async, so the
// state value would still hold the stale model at this point.
setDraft(nextDraft)
persistDraft(nextDraft)
}}
onCancel={() => setScreen('select-preset')}
/>
</Box>
)
}
function handleFormSubmit(value: string): void {
const trimmed = value.trim()
@@ -470,7 +626,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
return
}
persistDraft()
persistDraft(nextDraft)
}
function handleBackFromForm(): void {
@@ -819,13 +975,16 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
let content: React.ReactNode
switch (screen) {
case 'select-preset':
content = renderPresetSelection()
break
case 'form':
content = renderForm()
break
switch (screen) {
case 'select-preset':
content = renderPresetSelection()
break
case 'select-ollama-model':
content = renderOllamaSelection()
break
case 'form':
content = renderForm()
break
case 'select-active':
content = renderProfileSelection(
'Set active provider',