feat(provider): expose Atomic Chat in /provider picker with autodetect (#810)
Adds Atomic Chat as a first-class preset inside the in-session /provider slash command, mirroring the Ollama auto-detect flow. Picking it probes 127.0.0.1:1337/v1/models, lists loaded models for direct selection, and falls back to "Enter manually" / "Back" when the server is unreachable or no models are loaded. README updated to reflect the new setup path. Made-with: Cursor
This commit is contained in:
@@ -302,6 +302,24 @@ export async function listAtomicChatModels(
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Result of probing a local Atomic Chat server.
 *
 * Discriminated on `state`:
 * - `'unreachable'` — no server responded at the probed base URL.
 * - `'no_models'`   — the server answered but reported zero loaded models.
 * - `'ready'`       — the server is usable; `models` lists the loaded model ids.
 */
export type AtomicChatReadiness =
  | { state: 'unreachable' }
  | { state: 'no_models' }
  | { state: 'ready'; models: string[] }
|
||||
|
||||
export async function probeAtomicChatReadiness(options?: {
|
||||
baseUrl?: string
|
||||
}): Promise<AtomicChatReadiness> {
|
||||
if (!(await hasLocalAtomicChat(options?.baseUrl))) {
|
||||
return { state: 'unreachable' }
|
||||
}
|
||||
const models = await listAtomicChatModels(options?.baseUrl)
|
||||
if (models.length === 0) {
|
||||
return { state: 'no_models' }
|
||||
}
|
||||
return { state: 'ready', models }
|
||||
}
|
||||
|
||||
export async function benchmarkOllamaModel(
|
||||
modelName: string,
|
||||
baseUrl?: string,
|
||||
|
||||
Reference in New Issue
Block a user