fix: update OPENAI_API_KEY message and add Atomic Chat URL check
- Updated the message for the OPENAI_API_KEY check to include Atomic Chat as an allowed local provider.
- Introduced a new function to check whether the base URL corresponds to Atomic Chat, enhancing the system's ability to identify local providers.
- Adjusted the Ollama processor-mode check to skip processing when an Atomic Chat local provider is detected.
This commit is contained in:
1
hello/world
Normal file
1
hello/world
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Hello, World!
|
||||||
@@ -186,7 +186,7 @@ function checkOpenAIEnv(): CheckResult[] {
|
|||||||
} else if (!key && !isLocalBaseUrl(request.baseUrl)) {
|
} else if (!key && !isLocalBaseUrl(request.baseUrl)) {
|
||||||
results.push(fail('OPENAI_API_KEY', 'Missing key for non-local provider URL.'))
|
results.push(fail('OPENAI_API_KEY', 'Missing key for non-local provider URL.'))
|
||||||
} else if (!key) {
|
} else if (!key) {
|
||||||
results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Ollama/LM Studio).'))
|
results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Atomic Chat/Ollama/LM Studio).'))
|
||||||
} else {
|
} else {
|
||||||
results.push(pass('OPENAI_API_KEY', 'Configured.'))
|
results.push(pass('OPENAI_API_KEY', 'Configured.'))
|
||||||
}
|
}
|
||||||
@@ -271,6 +271,15 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isAtomicChatUrl(baseUrl: string): boolean {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(baseUrl)
|
||||||
|
return parsed.port === '1337' && isLocalBaseUrl(baseUrl)
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function checkOllamaProcessorMode(): CheckResult {
|
function checkOllamaProcessorMode(): CheckResult {
|
||||||
if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) || isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
|
if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) || isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
|
||||||
return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
|
return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
|
||||||
@@ -281,6 +290,10 @@ function checkOllamaProcessorMode(): CheckResult {
|
|||||||
return pass('Ollama processor mode', 'Skipped (provider URL is not local).')
|
return pass('Ollama processor mode', 'Skipped (provider URL is not local).')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (isAtomicChatUrl(baseUrl)) {
|
||||||
|
return pass('Ollama processor mode', 'Skipped (Atomic Chat local provider detected, not Ollama).')
|
||||||
|
}
|
||||||
|
|
||||||
const result = spawnSync('ollama', ['ps'], {
|
const result = spawnSync('ollama', ['ps'], {
|
||||||
cwd: process.cwd(),
|
cwd: process.cwd(),
|
||||||
encoding: 'utf8',
|
encoding: 'utf8',
|
||||||
|
|||||||
Reference in New Issue
Block a user