Compare commits

..

1 Commit

Author SHA1 Message Date
gnanam1990
effa6ef83d fix(errors): show actual host in 404 message instead of Ollama hint (#926)
When an OpenAI-compatible provider returns a 404, the user-facing error
message hardcoded "for Ollama: http://127.0.0.1:11434/v1" as a hint
regardless of the configured base URL. Users on remote providers
(NVIDIA NIM, OpenRouter, etc.) read this as the app ignoring their
custom OPENAI_BASE_URL and routing to localhost.

Plumb the request URL through the classifier and marker so the
user-facing message can name the actual host. Localhost endpoints keep
the existing Ollama-flavored guidance for backward compatibility.

- classifyOpenAIHttpFailure now accepts an optional url and produces a
  host-aware hint for non-localhost 404s
- the [openai_category=...] marker carries an optional host segment
- mapOpenAICompatibilityFailureToAssistantMessage branches on host to
  show "Provider endpoint at <host> returned 404. Verify OPENAI_BASE_URL
  is correct and that the selected model (<model>) is supported by this
  provider." for remote URLs
- backward compatibility preserved when no URL is available
2026-04-28 08:58:04 +05:30
11 changed files with 139 additions and 160 deletions

View File

@@ -421,16 +421,3 @@ ANTHROPIC_API_KEY=sk-ant-your-key-here
# WEB_CUSTOM_ALLOW_HTTP=false — set "true" to allow http:// URLs # WEB_CUSTOM_ALLOW_HTTP=false — set "true" to allow http:// URLs
# WEB_CUSTOM_ALLOW_PRIVATE=false — set "true" to target localhost/private IPs # WEB_CUSTOM_ALLOW_PRIVATE=false — set "true" to target localhost/private IPs
# (needed for self-hosted SearXNG) # (needed for self-hosted SearXNG)
# ── Config directory override ───────────────────────────────────────
#
# By default openclaude stores per-user state under ~/.openclaude
# (and falls back to ~/.claude for installs that pre-date the rename).
# Set this to point openclaude at a different directory — useful for
# isolating profiles or sharing config across machines.
#
# OPENCLAUDE_CONFIG_DIR=/path/to/dir — preferred name
# CLAUDE_CONFIG_DIR=/path/to/dir — legacy alias (still works)
#
# When both are set with different values, OPENCLAUDE_CONFIG_DIR wins
# and a warning is logged once per process.

View File

@@ -28,6 +28,38 @@ test('maps endpoint_not_found category markers to actionable setup guidance', ()
expect(text).toContain('/v1') expect(text).toContain('/v1')
}) })
test('endpoint_not_found from a remote host shows the actual host, not Ollama (issue #926)', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found,host=integrate.api.nvidia.com] Hint: Endpoint at integrate.api.nvidia.com returned 404.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'moonshotai/kimi-k2.5-thinking')
const text = getFirstText(message)
expect(text).toContain('integrate.api.nvidia.com')
expect(text).toContain('moonshotai/kimi-k2.5-thinking')
expect(text).not.toContain('Ollama')
expect(text).not.toContain('11434')
})
test('endpoint_not_found without a host falls back to the Ollama-aware message', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found] Hint: Confirm OPENAI_BASE_URL includes /v1.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'qwen2.5-coder:7b')
const text = getFirstText(message)
expect(text).toContain('Provider endpoint was not found')
expect(text).toContain('Ollama')
})
test('maps tool_call_incompatible category markers to model/tool guidance', () => { test('maps tool_call_incompatible category markers to model/tool guidance', () => {
const error = APIError.generate( const error = APIError.generate(
400, 400,

View File

@@ -51,7 +51,9 @@ import {
import { shouldProcessRateLimits } from '../rateLimitMocking.js' // Used for /mock-limits command import { shouldProcessRateLimits } from '../rateLimitMocking.js' // Used for /mock-limits command
import { extractConnectionErrorDetails, formatAPIError } from './errorUtils.js' import { extractConnectionErrorDetails, formatAPIError } from './errorUtils.js'
import { import {
extractOpenAICategoryHost,
extractOpenAICategoryMarker, extractOpenAICategoryMarker,
isLocalhostLikeHost,
type OpenAICompatibilityFailureCategory, type OpenAICompatibilityFailureCategory,
} from './openaiErrorClassification.js' } from './openaiErrorClassification.js'
@@ -68,25 +70,29 @@ function mapOpenAICompatibilityFailureToAssistantMessage(options: {
category: OpenAICompatibilityFailureCategory category: OpenAICompatibilityFailureCategory
model: string model: string
rawMessage: string rawMessage: string
host?: string
}): AssistantMessage { }): AssistantMessage {
const switchCmd = getIsNonInteractiveSession() ? '--model' : '/model' const switchCmd = getIsNonInteractiveSession() ? '--model' : '/model'
const compactHint = getIsNonInteractiveSession() const compactHint = getIsNonInteractiveSession()
? 'Reduce prompt size or start a new session.' ? 'Reduce prompt size or start a new session.'
: 'Run /compact or start a new session with /new.' : 'Run /compact or start a new session with /new.'
const isLocalhost = options.host === undefined || isLocalhostLikeHost(options.host)
switch (options.category) { switch (options.category) {
case 'localhost_resolution_failed': case 'localhost_resolution_failed':
case 'connection_refused': case 'connection_refused':
return createAssistantAPIErrorMessage({ return createAssistantAPIErrorMessage({
content: content: isLocalhost
'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.', ? 'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.'
: `Could not connect to the provider at ${options.host}. Verify OPENAI_BASE_URL is correct and that the host is reachable.`,
error: 'unknown', error: 'unknown',
}) })
case 'endpoint_not_found': case 'endpoint_not_found':
return createAssistantAPIErrorMessage({ return createAssistantAPIErrorMessage({
content: content: isLocalhost
'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).', ? 'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).'
: `Provider endpoint at ${options.host} returned 404. Verify OPENAI_BASE_URL is correct and that the selected model (${options.model}) is supported by this provider.`,
error: 'invalid_request', error: 'invalid_request',
}) })
@@ -567,6 +573,7 @@ export function getAssistantMessageFromError(
category: openaiCategory, category: openaiCategory,
model, model,
rawMessage: error.message, rawMessage: error.message,
host: extractOpenAICategoryHost(error.message),
}) })
} }
} }

View File

@@ -4,8 +4,10 @@ import {
buildOpenAICompatibilityErrorMessage, buildOpenAICompatibilityErrorMessage,
classifyOpenAIHttpFailure, classifyOpenAIHttpFailure,
classifyOpenAINetworkFailure, classifyOpenAINetworkFailure,
extractOpenAICategoryHost,
extractOpenAICategoryMarker, extractOpenAICategoryMarker,
formatOpenAICategoryMarker, formatOpenAICategoryMarker,
isLocalhostLikeHost,
} from './openaiErrorClassification.js' } from './openaiErrorClassification.js'
test('classifies localhost ECONNREFUSED as connection_refused', () => { test('classifies localhost ECONNREFUSED as connection_refused', () => {
@@ -95,3 +97,58 @@ test('ignores unknown category markers during extraction', () => {
const malformed = 'OpenAI API error 500 [openai_category=totally_fake_category]' const malformed = 'OpenAI API error 500 [openai_category=totally_fake_category]'
expect(extractOpenAICategoryMarker(malformed)).toBeUndefined() expect(extractOpenAICategoryMarker(malformed)).toBeUndefined()
}) })
test('endpoint_not_found 404 from a remote host gets a host-aware hint (issue #926)', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'https://integrate.api.nvidia.com/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.requestUrl).toBe('https://integrate.api.nvidia.com/v1/chat/completions')
expect(failure.hint).toContain('integrate.api.nvidia.com')
expect(failure.hint).not.toContain('local providers')
})
test('endpoint_not_found 404 from localhost keeps the Ollama-flavored hint', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'http://127.0.0.1:11434/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.hint).toContain('local providers')
})
test('marker round-trip preserves host segment', () => {
const formatted = buildOpenAICompatibilityErrorMessage(
'OpenAI API error 404: Not Found',
{
category: 'endpoint_not_found',
hint: 'Endpoint at integrate.api.nvidia.com returned 404.',
requestUrl: 'https://integrate.api.nvidia.com/v1/chat/completions',
},
)
expect(formatted).toContain('[openai_category=endpoint_not_found,host=integrate.api.nvidia.com]')
expect(extractOpenAICategoryMarker(formatted)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(formatted)).toBe('integrate.api.nvidia.com')
})
test('marker without host stays backward-compatible', () => {
const marker = formatOpenAICategoryMarker('endpoint_not_found')
expect(marker).toBe('[openai_category=endpoint_not_found]')
expect(extractOpenAICategoryMarker(marker)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(marker)).toBeUndefined()
})
test('isLocalhostLikeHost matches loopback variants', () => {
expect(isLocalhostLikeHost('localhost')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.1')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.5')).toBe(true)
expect(isLocalhostLikeHost('::1')).toBe(true)
expect(isLocalhostLikeHost('integrate.api.nvidia.com')).toBe(false)
expect(isLocalhostLikeHost(undefined)).toBe(false)
})

View File

@@ -21,6 +21,7 @@ export type OpenAICompatibilityFailure = {
hint?: string hint?: string
code?: string code?: string
status?: number status?: number
requestUrl?: string
} }
const OPENAI_CATEGORY_MARKER_PREFIX = '[openai_category=' const OPENAI_CATEGORY_MARKER_PREFIX = '[openai_category='
@@ -96,6 +97,11 @@ function isLocalhostLikeHostname(hostname: string | null): boolean {
return /^127\./.test(hostname) return /^127\./.test(hostname)
} }
export function isLocalhostLikeHost(host: string | null | undefined): boolean {
if (!host) return false
return isLocalhostLikeHostname(host.toLowerCase())
}
function isContextOverflowMessage(body: string): boolean { function isContextOverflowMessage(body: string): boolean {
const lower = body.toLowerCase() const lower = body.toLowerCase()
return ( return (
@@ -149,14 +155,18 @@ function isModelNotFoundMessage(body: string): boolean {
export function formatOpenAICategoryMarker( export function formatOpenAICategoryMarker(
category: OpenAICompatibilityFailureCategory, category: OpenAICompatibilityFailureCategory,
host?: string,
): string { ): string {
if (host && /^[A-Za-z0-9.\-:]+$/.test(host)) {
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category},host=${host}]`
}
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category}]` return `${OPENAI_CATEGORY_MARKER_PREFIX}${category}]`
} }
export function extractOpenAICategoryMarker( export function extractOpenAICategoryMarker(
message: string, message: string,
): OpenAICompatibilityFailureCategory | undefined { ): OpenAICompatibilityFailureCategory | undefined {
const match = message.match(/\[openai_category=([a-z_]+)]/) const match = message.match(/\[openai_category=([a-z_]+)(?:,host=[^\]]+)?]/)
const category = match?.[1] const category = match?.[1]
if (!category || !isOpenAICompatibilityFailureCategory(category)) { if (!category || !isOpenAICompatibilityFailureCategory(category)) {
@@ -166,11 +176,17 @@ export function extractOpenAICategoryMarker(
return category return category
} }
export function extractOpenAICategoryHost(message: string): string | undefined {
const match = message.match(/\[openai_category=[a-z_]+,host=([A-Za-z0-9.\-:]+)]/)
return match?.[1]
}
export function buildOpenAICompatibilityErrorMessage( export function buildOpenAICompatibilityErrorMessage(
baseMessage: string, baseMessage: string,
failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint'>, failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint' | 'requestUrl'>,
): string { ): string {
const marker = formatOpenAICategoryMarker(failure.category) const host = failure.requestUrl ? getHostname(failure.requestUrl) ?? undefined : undefined
const marker = formatOpenAICategoryMarker(failure.category, host)
const hint = failure.hint ? ` Hint: ${failure.hint}` : '' const hint = failure.hint ? ` Hint: ${failure.hint}` : ''
return `${baseMessage} ${marker}${hint}` return `${baseMessage} ${marker}${hint}`
} }
@@ -247,8 +263,11 @@ export function classifyOpenAINetworkFailure(
export function classifyOpenAIHttpFailure(options: { export function classifyOpenAIHttpFailure(options: {
status: number status: number
body: string body: string
url?: string
}): OpenAICompatibilityFailure { }): OpenAICompatibilityFailure {
const body = options.body ?? '' const body = options.body ?? ''
const hostname = options.url ? getHostname(options.url) : null
const isLocalHost = isLocalhostLikeHostname(hostname)
if (options.status === 401 || options.status === 403) { if (options.status === 401 || options.status === 403) {
return { return {
@@ -284,13 +303,17 @@ export function classifyOpenAIHttpFailure(options: {
} }
if (options.status === 404) { if (options.status === 404) {
const isRemote = hostname !== null && !isLocalHost
return { return {
source: 'http', source: 'http',
category: 'endpoint_not_found', category: 'endpoint_not_found',
retryable: false, retryable: false,
status: options.status, status: options.status,
message: body, message: body,
hint: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.', requestUrl: options.url,
hint: isRemote
? `Endpoint at ${hostname} returned 404. Verify OPENAI_BASE_URL is correct and the requested model is supported by this provider.`
: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
} }
} }

View File

@@ -1935,7 +1935,9 @@ class OpenAIShimMessages {
classifyOpenAIHttpFailure({ classifyOpenAIHttpFailure({
status, status,
body: errorBody, body: errorBody,
url: requestUrl,
}) })
const failureWithUrl = { ...failure, requestUrl: failure.requestUrl ?? requestUrl }
const redactedUrl = redactUrlForDiagnostics(requestUrl) const redactedUrl = redactUrlForDiagnostics(requestUrl)
logForDebugging( logForDebugging(
@@ -1948,7 +1950,7 @@ class OpenAIShimMessages {
parsedBody, parsedBody,
buildOpenAICompatibilityErrorMessage( buildOpenAICompatibilityErrorMessage(
`OpenAI API error ${status}: ${errorBody}${rateHint}`, `OpenAI API error ${status}: ${errorBody}${rateHint}`,
failure, failureWithUrl,
), ),
responseHeaders, responseHeaders,
) )

View File

@@ -3,11 +3,7 @@ import { homedir } from 'os'
import { join } from 'path' import { join } from 'path'
import { fileSuffixForOauthConfig } from '../constants/oauth.js' import { fileSuffixForOauthConfig } from '../constants/oauth.js'
import { isRunningWithBun } from './bundledMode.js' import { isRunningWithBun } from './bundledMode.js'
import { import { getClaudeConfigHomeDir, isEnvTruthy } from './envUtils.js'
getClaudeConfigHomeDir,
isEnvTruthy,
resolveConfigDirEnv,
} from './envUtils.js'
import { findExecutable } from './findExecutable.js' import { findExecutable } from './findExecutable.js'
import { getFsImplementation } from './fsOperations.js' import { getFsImplementation } from './fsOperations.js'
import { which } from './which.js' import { which } from './which.js'
@@ -26,11 +22,7 @@ export const getGlobalClaudeFile = memoize((): string => {
} }
const oauthSuffix = fileSuffixForOauthConfig() const oauthSuffix = fileSuffixForOauthConfig()
const configDir = const configDir = process.env.CLAUDE_CONFIG_DIR || homedir()
resolveConfigDirEnv({
openClaudeConfigDir: process.env.OPENCLAUDE_CONFIG_DIR,
legacyConfigDir: process.env.CLAUDE_CONFIG_DIR,
}) ?? homedir()
// Default to .openclaude.json. Fall back to .claude.json only if the new // Default to .openclaude.json. Fall back to .claude.json only if the new
// file doesn't exist yet and the legacy one does (same migration pattern // file doesn't exist yet and the legacy one does (same migration pattern

View File

@@ -3,39 +3,6 @@ import { existsSync } from 'fs'
import { homedir } from 'os' import { homedir } from 'os'
import { join } from 'path' import { join } from 'path'
/**
* Resolves the override env value for the config home directory.
* `OPENCLAUDE_CONFIG_DIR` is preferred — `CLAUDE_CONFIG_DIR` is the legacy
* Anthropic name kept working for backward compatibility. When both are set
* and disagree, `OPENCLAUDE_CONFIG_DIR` wins and we warn once so the user
* can clean up. Exported for tests.
*/
let warnedAboutConflictingConfigDirEnvs = false
export function resolveConfigDirEnv(options?: {
openClaudeConfigDir?: string
legacyConfigDir?: string
warn?: (message: string) => void
}): string | undefined {
const open = options?.openClaudeConfigDir
const legacy = options?.legacyConfigDir
if (open && legacy && open !== legacy && !warnedAboutConflictingConfigDirEnvs) {
warnedAboutConflictingConfigDirEnvs = true
options?.warn?.(
`Both OPENCLAUDE_CONFIG_DIR and CLAUDE_CONFIG_DIR are set to different values. Using OPENCLAUDE_CONFIG_DIR=${open}; ignoring CLAUDE_CONFIG_DIR=${legacy}.`,
)
}
return open || legacy || undefined
}
/**
* Test-only escape hatch — resets the once-per-process conflict warning so
* unit tests can re-trigger it.
*/
export function __resetConfigDirEnvWarningForTesting(): void {
warnedAboutConflictingConfigDirEnvs = false
}
export function resolveClaudeConfigHomeDir(options?: { export function resolveClaudeConfigHomeDir(options?: {
configDirEnv?: string configDirEnv?: string
homeDir?: string homeDir?: string
@@ -63,21 +30,13 @@ export function resolveClaudeConfigHomeDir(options?: {
return openClaudeDir.normalize('NFC') return openClaudeDir.normalize('NFC')
} }
// Memoized: 150+ callers, many on hot paths. Keyed off both override env // Memoized: 150+ callers, many on hot paths. Keyed off CLAUDE_CONFIG_DIR so
// vars so tests that change either get a fresh value without explicit // tests that change the env var get a fresh value without explicit cache.clear.
// cache.clear.
export const getClaudeConfigHomeDir = memoize( export const getClaudeConfigHomeDir = memoize(
(): string => resolveClaudeConfigHomeDir({ (): string => resolveClaudeConfigHomeDir({
configDirEnv: resolveConfigDirEnv({ configDirEnv: process.env.CLAUDE_CONFIG_DIR,
openClaudeConfigDir: process.env.OPENCLAUDE_CONFIG_DIR,
legacyConfigDir: process.env.CLAUDE_CONFIG_DIR,
warn: message => {
// eslint-disable-next-line no-console
console.warn(`[openclaude] ${message}`)
},
}), }),
}), () => process.env.CLAUDE_CONFIG_DIR,
() => `${process.env.OPENCLAUDE_CONFIG_DIR ?? ''}|${process.env.CLAUDE_CONFIG_DIR ?? ''}`,
) )
export function getTeamsDir(): string { export function getTeamsDir(): string {

View File

@@ -51,8 +51,7 @@ describe('OpenClaude paths', () => {
).toBe(join(homedir(), '.claude')) ).toBe(join(homedir(), '.claude'))
}) })
test('uses CLAUDE_CONFIG_DIR override when provided (legacy)', async () => { test('uses CLAUDE_CONFIG_DIR override when provided', async () => {
delete process.env.OPENCLAUDE_CONFIG_DIR
process.env.CLAUDE_CONFIG_DIR = '/tmp/custom-openclaude' process.env.CLAUDE_CONFIG_DIR = '/tmp/custom-openclaude'
const { getClaudeConfigHomeDir, resolveClaudeConfigHomeDir } = const { getClaudeConfigHomeDir, resolveClaudeConfigHomeDir } =
await importFreshEnvUtils() await importFreshEnvUtils()
@@ -65,83 +64,6 @@ describe('OpenClaude paths', () => {
).toBe('/tmp/custom-openclaude') ).toBe('/tmp/custom-openclaude')
}) })
test('OPENCLAUDE_CONFIG_DIR overrides the default (issue #454)', async () => {
delete process.env.CLAUDE_CONFIG_DIR
process.env.OPENCLAUDE_CONFIG_DIR = '/tmp/oc-config-only'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/oc-config-only')
})
test('OPENCLAUDE_CONFIG_DIR wins when both env vars are set with different values', async () => {
process.env.OPENCLAUDE_CONFIG_DIR = '/tmp/oc-wins'
process.env.CLAUDE_CONFIG_DIR = '/tmp/legacy-loses'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/oc-wins')
})
test('CLAUDE_CONFIG_DIR is still honored when OPENCLAUDE_CONFIG_DIR is unset', async () => {
delete process.env.OPENCLAUDE_CONFIG_DIR
process.env.CLAUDE_CONFIG_DIR = '/tmp/legacy-only'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/legacy-only')
})
test('empty OPENCLAUDE_CONFIG_DIR falls through to CLAUDE_CONFIG_DIR', async () => {
process.env.OPENCLAUDE_CONFIG_DIR = ''
process.env.CLAUDE_CONFIG_DIR = '/tmp/legacy-fallback'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/legacy-fallback')
})
test('resolveConfigDirEnv prefers OPENCLAUDE over CLAUDE and warns on conflict', async () => {
const { resolveConfigDirEnv, __resetConfigDirEnvWarningForTesting } =
await importFreshEnvUtils()
__resetConfigDirEnvWarningForTesting()
const warnings: string[] = []
const result = resolveConfigDirEnv({
openClaudeConfigDir: '/a',
legacyConfigDir: '/b',
warn: m => warnings.push(m),
})
expect(result).toBe('/a')
expect(warnings.length).toBe(1)
expect(warnings[0]).toContain('OPENCLAUDE_CONFIG_DIR=/a')
expect(warnings[0]).toContain('CLAUDE_CONFIG_DIR=/b')
})
test('resolveConfigDirEnv does not warn when both env vars agree', async () => {
const { resolveConfigDirEnv, __resetConfigDirEnvWarningForTesting } =
await importFreshEnvUtils()
__resetConfigDirEnvWarningForTesting()
const warnings: string[] = []
const result = resolveConfigDirEnv({
openClaudeConfigDir: '/same',
legacyConfigDir: '/same',
warn: m => warnings.push(m),
})
expect(result).toBe('/same')
expect(warnings).toEqual([])
})
test('resolveConfigDirEnv returns undefined when neither env var is set', async () => {
const { resolveConfigDirEnv } = await importFreshEnvUtils()
expect(
resolveConfigDirEnv({
openClaudeConfigDir: undefined,
legacyConfigDir: undefined,
}),
).toBeUndefined()
})
test('project and local settings paths use .openclaude', async () => { test('project and local settings paths use .openclaude', async () => {
const { getRelativeSettingsFilePathForSource } = await importFreshSettings() const { getRelativeSettingsFilePathForSource } = await importFreshSettings()

View File

@@ -34,8 +34,7 @@ export function getSecureStorageServiceName(
serviceSuffix: string = '', serviceSuffix: string = '',
): string { ): string {
const configDir = getClaudeConfigHomeDir() const configDir = getClaudeConfigHomeDir()
const isDefaultDir = const isDefaultDir = !process.env.CLAUDE_CONFIG_DIR
!process.env.OPENCLAUDE_CONFIG_DIR && !process.env.CLAUDE_CONFIG_DIR
// Use a hash of the config dir path to create a unique but stable suffix // Use a hash of the config dir path to create a unique but stable suffix
// Only add suffix for non-default directories to maintain backwards compatibility // Only add suffix for non-default directories to maintain backwards compatibility

View File

@@ -117,8 +117,7 @@ const TEAMMATE_ENV_VARS = [
'MISTRAL_BASE_URL', 'MISTRAL_BASE_URL',
// Custom API endpoint // Custom API endpoint
'ANTHROPIC_BASE_URL', 'ANTHROPIC_BASE_URL',
// Config directory override (preferred name + legacy alias) // Config directory override
'OPENCLAUDE_CONFIG_DIR',
'CLAUDE_CONFIG_DIR', 'CLAUDE_CONFIG_DIR',
// CCR marker — teammates need this for CCR-aware code paths. Auth finds // CCR marker — teammates need this for CCR-aware code paths. Auth finds
// its own way via /home/claude/.claude/remote/.oauth_token regardless; // its own way via /home/claude/.claude/remote/.oauth_token regardless;