Merge pull request #74 from Vect0rM/feature/atomic-chat-integration
feat: add support for Atomic Chat provider
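In short: the atomic-chat profile points the OpenAI-compatible launch env at a local Atomic Chat server (defaulting to http://127.0.0.1:1337/v1) and strips hosted credentials before launch. A minimal sketch of the launch path, reusing the profile(...) test helper from the suite below; the option values are illustrative, not prescriptive:

    const env = await buildLaunchEnv({
      profile: 'atomic-chat',
      persisted: profile('atomic-chat', {
        OPENAI_BASE_URL: 'http://127.0.0.1:1337/v1',
        OPENAI_MODEL: 'llama-3.1-8b',
      }),
      goal: 'balanced',
      processEnv: { OPENAI_API_KEY: 'sk-live' }, // stripped for this profile
      getAtomicChatChatBaseUrl: () => 'http://127.0.0.1:1337/v1',
      resolveAtomicChatDefaultModel: async () => 'llama-3.1-8b',
    })
    // env.OPENAI_BASE_URL => 'http://127.0.0.1:1337/v1'
    // env.OPENAI_MODEL    => 'llama-3.1-8b'
    // env.OPENAI_API_KEY  => undefined (never forwarded for this profile)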
@@ -5,6 +5,7 @@ import { join } from 'node:path'
 import test from 'node:test'
 
 import {
+  buildAtomicChatProfileEnv,
   buildCodexProfileEnv,
   buildGeminiProfileEnv,
   buildLaunchEnv,
@@ -381,3 +382,72 @@ test('auto profile falls back to openai when no viable ollama model exists', ()
   assert.equal(selectAutoProfile(null), 'openai')
   assert.equal(selectAutoProfile('qwen2.5-coder:7b'), 'ollama')
 })
+
+// ── Atomic Chat profile tests ────────────────────────────────────────────────
+
+test('atomic-chat profiles never persist openai api keys', () => {
+  const env = buildAtomicChatProfileEnv('some-local-model', {
+    getAtomicChatChatBaseUrl: () => 'http://127.0.0.1:1337/v1',
+  })
+
+  assert.deepEqual(env, {
+    OPENAI_BASE_URL: 'http://127.0.0.1:1337/v1',
+    OPENAI_MODEL: 'some-local-model',
+  })
+  assert.equal('OPENAI_API_KEY' in env, false)
+})
+
+test('atomic-chat profiles respect custom base url', () => {
+  const env = buildAtomicChatProfileEnv('my-model', {
+    baseUrl: 'http://192.168.1.100:1337',
+    getAtomicChatChatBaseUrl: (baseUrl?: string) =>
+      baseUrl ? `${baseUrl}/v1` : 'http://127.0.0.1:1337/v1',
+  })
+
+  assert.equal(env.OPENAI_BASE_URL, 'http://192.168.1.100:1337/v1')
+  assert.equal(env.OPENAI_MODEL, 'my-model')
+})
+
+test('matching persisted atomic-chat env is reused for atomic-chat launch', async () => {
+  const env = await buildLaunchEnv({
+    profile: 'atomic-chat',
+    persisted: profile('atomic-chat', {
+      OPENAI_BASE_URL: 'http://127.0.0.1:1337/v1',
+      OPENAI_MODEL: 'llama-3.1-8b',
+    }),
+    goal: 'balanced',
+    processEnv: {},
+    getAtomicChatChatBaseUrl: () => 'http://127.0.0.1:1337/v1',
+    resolveAtomicChatDefaultModel: async () => 'other-model',
+  })
+
+  assert.equal(env.OPENAI_BASE_URL, 'http://127.0.0.1:1337/v1')
+  assert.equal(env.OPENAI_MODEL, 'llama-3.1-8b')
+  assert.equal(env.OPENAI_API_KEY, undefined)
+  assert.equal(env.CODEX_API_KEY, undefined)
+})
+
+test('atomic-chat launch ignores mismatched persisted openai env', async () => {
+  const env = await buildLaunchEnv({
+    profile: 'atomic-chat',
+    persisted: profile('openai', {
+      OPENAI_BASE_URL: 'https://api.openai.com/v1',
+      OPENAI_MODEL: 'gpt-4o',
+      OPENAI_API_KEY: 'sk-persisted',
+    }),
+    goal: 'balanced',
+    processEnv: {
+      OPENAI_API_KEY: 'sk-live',
+      CODEX_API_KEY: 'codex-live',
+      CHATGPT_ACCOUNT_ID: 'acct_live',
+    },
+    getAtomicChatChatBaseUrl: () => 'http://127.0.0.1:1337/v1',
+    resolveAtomicChatDefaultModel: async () => 'local-model',
+  })
+
+  assert.equal(env.OPENAI_BASE_URL, 'http://127.0.0.1:1337/v1')
+  assert.equal(env.OPENAI_MODEL, 'local-model')
+  assert.equal(env.OPENAI_API_KEY, undefined)
+  assert.equal(env.CODEX_API_KEY, undefined)
+  assert.equal(env.CHATGPT_ACCOUNT_ID, undefined)
+})
@@ -13,7 +13,7 @@ import {
 const DEFAULT_GEMINI_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai'
 const DEFAULT_GEMINI_MODEL = 'gemini-2.0-flash'
 
-export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini'
+export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini' | 'atomic-chat'
 
 export type ProfileEnv = {
   OPENAI_BASE_URL?: string
@@ -53,6 +53,19 @@ export function buildOllamaProfileEnv(
   }
 }
 
+export function buildAtomicChatProfileEnv(
+  model: string,
+  options: {
+    baseUrl?: string | null
+    getAtomicChatChatBaseUrl: (baseUrl?: string) => string
+  },
+): ProfileEnv {
+  return {
+    OPENAI_BASE_URL: options.getAtomicChatChatBaseUrl(options.baseUrl ?? undefined),
+    OPENAI_MODEL: model,
+  }
+}
+
 export function buildGeminiProfileEnv(options: {
   model?: string | null
   baseUrl?: string | null
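For context, buildAtomicChatProfileEnv only ever emits a base URL and a model, never a key. A small sketch with values borrowed from the tests above:

    const env = buildAtomicChatProfileEnv('my-model', {
      baseUrl: 'http://192.168.1.100:1337',
      getAtomicChatChatBaseUrl: (baseUrl?: string) =>
        baseUrl ? `${baseUrl}/v1` : 'http://127.0.0.1:1337/v1',
    })
    // env => { OPENAI_BASE_URL: 'http://192.168.1.100:1337/v1', OPENAI_MODEL: 'my-model' }
    // 'OPENAI_API_KEY' in env => false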
@@ -171,6 +184,8 @@ export async function buildLaunchEnv(options: {
   processEnv?: NodeJS.ProcessEnv
   getOllamaChatBaseUrl?: (baseUrl?: string) => string
   resolveOllamaDefaultModel?: (goal: RecommendationGoal) => Promise<string>
+  getAtomicChatChatBaseUrl?: (baseUrl?: string) => string
+  resolveAtomicChatDefaultModel?: () => Promise<string | null>
 }): Promise<NodeJS.ProcessEnv> {
   const processEnv = options.processEnv ?? process.env
   const persistedEnv =
@@ -248,6 +263,26 @@ export async function buildLaunchEnv(options: {
     return env
   }
 
+  if (options.profile === 'atomic-chat') {
+    const getAtomicChatBaseUrl =
+      options.getAtomicChatChatBaseUrl ?? (() => 'http://127.0.0.1:1337/v1')
+    const resolveModel =
+      options.resolveAtomicChatDefaultModel ?? (async () => null as string | null)
+
+    env.OPENAI_BASE_URL = persistedEnv.OPENAI_BASE_URL || getAtomicChatBaseUrl()
+    env.OPENAI_MODEL =
+      persistedEnv.OPENAI_MODEL ||
+      (await resolveModel()) ||
+      ''
+
+    delete env.OPENAI_API_KEY
+    delete env.CODEX_API_KEY
+    delete env.CHATGPT_ACCOUNT_ID
+    delete env.CODEX_ACCOUNT_ID
+
+    return env
+  }
+
   if (options.profile === 'codex') {
     env.OPENAI_BASE_URL =
       persistedEnv.OPENAI_BASE_URL && isCodexBaseUrl(persistedEnv.OPENAI_BASE_URL)
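The net effect of the atomic-chat branch: a persisted value wins, then the resolver, then an empty string, and hosted credentials (OPENAI_API_KEY, CODEX_API_KEY, CHATGPT_ACCOUNT_ID, CODEX_ACCOUNT_ID) are deleted unconditionally. A standalone restatement of that fallback chain, using hypothetical helper names pickBaseUrl and pickModel rather than anything exported by this PR:

    // Hypothetical helpers restating the branch's precedence rules.
    const pickBaseUrl = (persisted?: string): string =>
      persisted || 'http://127.0.0.1:1337/v1'

    const pickModel = async (
      persisted: string | undefined,
      resolve: () => Promise<string | null>,
    ): Promise<string> => persisted || (await resolve()) || ''

    // pickBaseUrl(undefined)                           => 'http://127.0.0.1:1337/v1'
    // await pickModel(undefined, async () => null)     => ''
    // await pickModel('llama-3.1-8b', async () => 'x') => 'llama-3.1-8b'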