From 3b7b9740f2dd3493a887e1277a7570c129bae3d9 Mon Sep 17 00:00:00 2001 From: Misha Skvortsov Date: Wed, 1 Apr 2026 23:06:25 +0300 Subject: [PATCH] fix: update OPENAI_API_KEY message and add Atomic Chat URL check - Updated the message for the OPENAI_API_KEY check to include Atomic Chat as an allowed local provider. - Introduced a new function to check if the base URL corresponds to Atomic Chat, enhancing the system's ability to identify local providers. - Adjusted the Ollama processor mode check to skip processing when an Atomic Chat local provider is detected. --- scripts/system-check.ts | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/scripts/system-check.ts b/scripts/system-check.ts index e129685a..dfb9db21 100644 --- a/scripts/system-check.ts +++ b/scripts/system-check.ts @@ -186,7 +186,7 @@ function checkOpenAIEnv(): CheckResult[] { } else if (!key && !isLocalBaseUrl(request.baseUrl)) { results.push(fail('OPENAI_API_KEY', 'Missing key for non-local provider URL.')) } else if (!key) { - results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Ollama/LM Studio).')) + results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Atomic Chat/Ollama/LM Studio).')) } else { results.push(pass('OPENAI_API_KEY', 'Configured.')) } @@ -271,6 +271,15 @@ async function checkBaseUrlReachability(): Promise<CheckResult> { } } +function isAtomicChatUrl(baseUrl: string): boolean { + try { + const parsed = new URL(baseUrl) + return parsed.port === '1337' && isLocalBaseUrl(baseUrl) + } catch { + return false + } +} + function checkOllamaProcessorMode(): CheckResult { if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) || isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) { return pass('Ollama processor mode', 'Skipped (OpenAI-compatible 
mode disabled).') @@ -281,6 +290,10 @@ function checkOllamaProcessorMode(): CheckResult { return pass('Ollama processor mode', 'Skipped (provider URL is not local).') } + if (isAtomicChatUrl(baseUrl)) { + return pass('Ollama processor mode', 'Skipped (Atomic Chat local provider detected, not Ollama).') + } + const result = spawnSync('ollama', ['ps'], { cwd: process.cwd(), encoding: 'utf8',