Merge pull request #74 from Vect0rM/feature/atomic-chat-integration

feat: add support for Atomic Chat provider
This commit is contained in:
Kevin Codex
2026-04-02 20:13:37 +08:00
committed by GitHub
11 changed files with 552 additions and 11 deletions

View File

@@ -10,6 +10,7 @@ import {
recommendOllamaModel,
} from '../src/utils/providerRecommendation.ts'
import {
buildAtomicChatProfileEnv,
buildCodexProfileEnv,
buildGeminiProfileEnv,
buildOllamaProfileEnv,
@@ -20,8 +21,11 @@ import {
type ProviderProfile,
} from '../src/utils/providerProfile.ts'
import {
getAtomicChatChatBaseUrl,
getOllamaChatBaseUrl,
hasLocalAtomicChat,
hasLocalOllama,
listAtomicChatModels,
listOllamaModels,
} from './provider-discovery.ts'
@@ -34,7 +38,7 @@ function parseArg(name: string): string | null {
/**
 * Parse the `--provider` CLI flag into a known provider profile.
 *
 * @returns The matched provider id ('openai' | 'ollama' | 'codex' |
 *   'gemini' | 'atomic-chat'), or 'auto' when the flag is missing or
 *   names an unknown provider (callers then auto-detect a provider).
 */
function parseProviderArg(): ProviderProfile | 'auto' {
  // Case-insensitive match; `?.` handles an absent flag (parseArg → null).
  const p = parseArg('--provider')?.toLowerCase()
  // Single allow-list check — the pre-'atomic-chat' variant of this
  // condition was a leftover duplicate from the diff and is removed.
  if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini' || p === 'atomic-chat') return p
  return 'auto'
}
@@ -102,6 +106,21 @@ async function main(): Promise<void> {
getOllamaChatBaseUrl,
},
)
} else if (selected === 'atomic-chat') {
const model = argModel || (await listAtomicChatModels(argBaseUrl || undefined))[0]
if (!model) {
if (!(await hasLocalAtomicChat(argBaseUrl || undefined))) {
console.error('Atomic Chat is not running (could not connect to 127.0.0.1:1337).\n Download from https://atomic.chat/ and launch the application.')
} else {
console.error('Atomic Chat is running but no model is loaded. Open Atomic Chat and download or start a model first.')
}
process.exit(1)
}
env = buildAtomicChatProfileEnv(model, {
baseUrl: argBaseUrl,
getAtomicChatChatBaseUrl,
})
} else if (selected === 'codex') {
const builtEnv = buildCodexProfileEnv({
model: argModel,