Compare commits

..

1 Commit

Author SHA1 Message Date
gnanam1990
e43ba9da69 feat(config): add OPENCLAUDE_CONFIG_DIR env var as preferred alias for CLAUDE_CONFIG_DIR (#454)
The legacy CLAUDE_CONFIG_DIR name was the only way to point openclaude
at a non-default config home, which leaked Anthropic branding for a
fork that has otherwise rebranded to OpenClaude. Add OPENCLAUDE_CONFIG_DIR
as the preferred name. CLAUDE_CONFIG_DIR continues to work for
backward compatibility; when both are set with different values,
OPENCLAUDE_CONFIG_DIR wins and a one-time warning is logged.

- src/utils/envUtils.ts: introduce resolveConfigDirEnv() that picks
  OPENCLAUDE_CONFIG_DIR over CLAUDE_CONFIG_DIR and emits a conflict
  warning. Memoize cache key now tracks both env vars so changing
  either invalidates the cached result.
- src/utils/env.ts: getGlobalClaudeFile() previously read
  CLAUDE_CONFIG_DIR directly, missing the new alias. Route through
  resolveConfigDirEnv() so the global config file path follows the
  same precedence.
- src/utils/secureStorage/macOsKeychainHelpers.ts: the "is default
  dir" check used by keychain service-name scoping now considers
  both env vars.
- src/utils/swarm/spawnUtils.ts: forward OPENCLAUDE_CONFIG_DIR to
  teammate processes alongside the legacy var.
- src/utils/openclaudePaths.test.ts: +6 unit tests covering the new
  alias, fallthrough, conflict warning, and resolveConfigDirEnv()
  in isolation.
- .env.example: document both env vars and the precedence rule.

Verified locally on Linux: with only OPENCLAUDE_CONFIG_DIR set, with
only CLAUDE_CONFIG_DIR set (legacy still works), with both set
matching (silent), with both set conflicting (warn once + OPENCLAUDE
wins), with neither set (default ~/.openclaude). Memo cache
invalidates across 4 sequential env transitions. Built dist/cli.mjs
honors the new var and emits the conflict warning to the user.
2026-04-28 11:50:34 +05:30
11 changed files with 160 additions and 139 deletions

View File

@@ -421,3 +421,16 @@ ANTHROPIC_API_KEY=sk-ant-your-key-here
# WEB_CUSTOM_ALLOW_HTTP=false — set "true" to allow http:// URLs
# WEB_CUSTOM_ALLOW_PRIVATE=false — set "true" to target localhost/private IPs
# (needed for self-hosted SearXNG)
# ── Config directory override ───────────────────────────────────────
#
# By default openclaude stores per-user state under ~/.openclaude
# (and falls back to ~/.claude for installs that pre-date the rename).
# Set this to point openclaude at a different directory — useful for
# isolating profiles or sharing config across machines.
#
# OPENCLAUDE_CONFIG_DIR=/path/to/dir — preferred name
# CLAUDE_CONFIG_DIR=/path/to/dir — legacy alias (still works)
#
# When both are set with different values, OPENCLAUDE_CONFIG_DIR wins
# and a warning is logged once per process.

View File

@@ -28,38 +28,6 @@ test('maps endpoint_not_found category markers to actionable setup guidance', ()
expect(text).toContain('/v1')
})
test('endpoint_not_found from a remote host shows the actual host, not Ollama (issue #926)', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found,host=integrate.api.nvidia.com] Hint: Endpoint at integrate.api.nvidia.com returned 404.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'moonshotai/kimi-k2.5-thinking')
const text = getFirstText(message)
expect(text).toContain('integrate.api.nvidia.com')
expect(text).toContain('moonshotai/kimi-k2.5-thinking')
expect(text).not.toContain('Ollama')
expect(text).not.toContain('11434')
})
test('endpoint_not_found without a host falls back to the Ollama-aware message', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found] Hint: Confirm OPENAI_BASE_URL includes /v1.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'qwen2.5-coder:7b')
const text = getFirstText(message)
expect(text).toContain('Provider endpoint was not found')
expect(text).toContain('Ollama')
})
test('maps tool_call_incompatible category markers to model/tool guidance', () => {
const error = APIError.generate(
400,

View File

@@ -51,9 +51,7 @@ import {
import { shouldProcessRateLimits } from '../rateLimitMocking.js' // Used for /mock-limits command
import { extractConnectionErrorDetails, formatAPIError } from './errorUtils.js'
import {
extractOpenAICategoryHost,
extractOpenAICategoryMarker,
isLocalhostLikeHost,
type OpenAICompatibilityFailureCategory,
} from './openaiErrorClassification.js'
@@ -70,29 +68,25 @@ function mapOpenAICompatibilityFailureToAssistantMessage(options: {
category: OpenAICompatibilityFailureCategory
model: string
rawMessage: string
host?: string
}): AssistantMessage {
const switchCmd = getIsNonInteractiveSession() ? '--model' : '/model'
const compactHint = getIsNonInteractiveSession()
? 'Reduce prompt size or start a new session.'
: 'Run /compact or start a new session with /new.'
const isLocalhost = options.host === undefined || isLocalhostLikeHost(options.host)
switch (options.category) {
case 'localhost_resolution_failed':
case 'connection_refused':
return createAssistantAPIErrorMessage({
content: isLocalhost
? 'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.'
: `Could not connect to the provider at ${options.host}. Verify OPENAI_BASE_URL is correct and that the host is reachable.`,
content:
'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.',
error: 'unknown',
})
case 'endpoint_not_found':
return createAssistantAPIErrorMessage({
content: isLocalhost
? 'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).'
: `Provider endpoint at ${options.host} returned 404. Verify OPENAI_BASE_URL is correct and that the selected model (${options.model}) is supported by this provider.`,
content:
'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).',
error: 'invalid_request',
})
@@ -573,7 +567,6 @@ export function getAssistantMessageFromError(
category: openaiCategory,
model,
rawMessage: error.message,
host: extractOpenAICategoryHost(error.message),
})
}
}

View File

@@ -4,10 +4,8 @@ import {
buildOpenAICompatibilityErrorMessage,
classifyOpenAIHttpFailure,
classifyOpenAINetworkFailure,
extractOpenAICategoryHost,
extractOpenAICategoryMarker,
formatOpenAICategoryMarker,
isLocalhostLikeHost,
} from './openaiErrorClassification.js'
test('classifies localhost ECONNREFUSED as connection_refused', () => {
@@ -97,58 +95,3 @@ test('ignores unknown category markers during extraction', () => {
const malformed = 'OpenAI API error 500 [openai_category=totally_fake_category]'
expect(extractOpenAICategoryMarker(malformed)).toBeUndefined()
})
test('endpoint_not_found 404 from a remote host gets a host-aware hint (issue #926)', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'https://integrate.api.nvidia.com/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.requestUrl).toBe('https://integrate.api.nvidia.com/v1/chat/completions')
expect(failure.hint).toContain('integrate.api.nvidia.com')
expect(failure.hint).not.toContain('local providers')
})
test('endpoint_not_found 404 from localhost keeps the Ollama-flavored hint', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'http://127.0.0.1:11434/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.hint).toContain('local providers')
})
test('marker round-trip preserves host segment', () => {
const formatted = buildOpenAICompatibilityErrorMessage(
'OpenAI API error 404: Not Found',
{
category: 'endpoint_not_found',
hint: 'Endpoint at integrate.api.nvidia.com returned 404.',
requestUrl: 'https://integrate.api.nvidia.com/v1/chat/completions',
},
)
expect(formatted).toContain('[openai_category=endpoint_not_found,host=integrate.api.nvidia.com]')
expect(extractOpenAICategoryMarker(formatted)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(formatted)).toBe('integrate.api.nvidia.com')
})
test('marker without host stays backward-compatible', () => {
const marker = formatOpenAICategoryMarker('endpoint_not_found')
expect(marker).toBe('[openai_category=endpoint_not_found]')
expect(extractOpenAICategoryMarker(marker)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(marker)).toBeUndefined()
})
test('isLocalhostLikeHost matches loopback variants', () => {
expect(isLocalhostLikeHost('localhost')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.1')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.5')).toBe(true)
expect(isLocalhostLikeHost('::1')).toBe(true)
expect(isLocalhostLikeHost('integrate.api.nvidia.com')).toBe(false)
expect(isLocalhostLikeHost(undefined)).toBe(false)
})

View File

@@ -21,7 +21,6 @@ export type OpenAICompatibilityFailure = {
hint?: string
code?: string
status?: number
requestUrl?: string
}
const OPENAI_CATEGORY_MARKER_PREFIX = '[openai_category='
@@ -97,11 +96,6 @@ function isLocalhostLikeHostname(hostname: string | null): boolean {
return /^127\./.test(hostname)
}
export function isLocalhostLikeHost(host: string | null | undefined): boolean {
if (!host) return false
return isLocalhostLikeHostname(host.toLowerCase())
}
function isContextOverflowMessage(body: string): boolean {
const lower = body.toLowerCase()
return (
@@ -155,18 +149,14 @@ function isModelNotFoundMessage(body: string): boolean {
export function formatOpenAICategoryMarker(
category: OpenAICompatibilityFailureCategory,
host?: string,
): string {
if (host && /^[A-Za-z0-9.\-:]+$/.test(host)) {
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category},host=${host}]`
}
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category}]`
}
export function extractOpenAICategoryMarker(
message: string,
): OpenAICompatibilityFailureCategory | undefined {
const match = message.match(/\[openai_category=([a-z_]+)(?:,host=[^\]]+)?]/)
const match = message.match(/\[openai_category=([a-z_]+)]/)
const category = match?.[1]
if (!category || !isOpenAICompatibilityFailureCategory(category)) {
@@ -176,17 +166,11 @@ export function extractOpenAICategoryMarker(
return category
}
export function extractOpenAICategoryHost(message: string): string | undefined {
const match = message.match(/\[openai_category=[a-z_]+,host=([A-Za-z0-9.\-:]+)]/)
return match?.[1]
}
export function buildOpenAICompatibilityErrorMessage(
baseMessage: string,
failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint' | 'requestUrl'>,
failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint'>,
): string {
const host = failure.requestUrl ? getHostname(failure.requestUrl) ?? undefined : undefined
const marker = formatOpenAICategoryMarker(failure.category, host)
const marker = formatOpenAICategoryMarker(failure.category)
const hint = failure.hint ? ` Hint: ${failure.hint}` : ''
return `${baseMessage} ${marker}${hint}`
}
@@ -263,11 +247,8 @@ export function classifyOpenAINetworkFailure(
export function classifyOpenAIHttpFailure(options: {
status: number
body: string
url?: string
}): OpenAICompatibilityFailure {
const body = options.body ?? ''
const hostname = options.url ? getHostname(options.url) : null
const isLocalHost = isLocalhostLikeHostname(hostname)
if (options.status === 401 || options.status === 403) {
return {
@@ -303,17 +284,13 @@ export function classifyOpenAIHttpFailure(options: {
}
if (options.status === 404) {
const isRemote = hostname !== null && !isLocalHost
return {
source: 'http',
category: 'endpoint_not_found',
retryable: false,
status: options.status,
message: body,
requestUrl: options.url,
hint: isRemote
? `Endpoint at ${hostname} returned 404. Verify OPENAI_BASE_URL is correct and the requested model is supported by this provider.`
: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
hint: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
}
}

View File

@@ -1935,9 +1935,7 @@ class OpenAIShimMessages {
classifyOpenAIHttpFailure({
status,
body: errorBody,
url: requestUrl,
})
const failureWithUrl = { ...failure, requestUrl: failure.requestUrl ?? requestUrl }
const redactedUrl = redactUrlForDiagnostics(requestUrl)
logForDebugging(
@@ -1950,7 +1948,7 @@ class OpenAIShimMessages {
parsedBody,
buildOpenAICompatibilityErrorMessage(
`OpenAI API error ${status}: ${errorBody}${rateHint}`,
failureWithUrl,
failure,
),
responseHeaders,
)

View File

@@ -3,7 +3,11 @@ import { homedir } from 'os'
import { join } from 'path'
import { fileSuffixForOauthConfig } from '../constants/oauth.js'
import { isRunningWithBun } from './bundledMode.js'
import { getClaudeConfigHomeDir, isEnvTruthy } from './envUtils.js'
import {
getClaudeConfigHomeDir,
isEnvTruthy,
resolveConfigDirEnv,
} from './envUtils.js'
import { findExecutable } from './findExecutable.js'
import { getFsImplementation } from './fsOperations.js'
import { which } from './which.js'
@@ -22,7 +26,11 @@ export const getGlobalClaudeFile = memoize((): string => {
}
const oauthSuffix = fileSuffixForOauthConfig()
const configDir = process.env.CLAUDE_CONFIG_DIR || homedir()
const configDir =
resolveConfigDirEnv({
openClaudeConfigDir: process.env.OPENCLAUDE_CONFIG_DIR,
legacyConfigDir: process.env.CLAUDE_CONFIG_DIR,
}) ?? homedir()
// Default to .openclaude.json. Fall back to .claude.json only if the new
// file doesn't exist yet and the legacy one does (same migration pattern

View File

@@ -3,6 +3,39 @@ import { existsSync } from 'fs'
import { homedir } from 'os'
import { join } from 'path'
/**
 * Resolves the override env value for the config home directory.
 * `OPENCLAUDE_CONFIG_DIR` is preferred — `CLAUDE_CONFIG_DIR` is the legacy
 * Anthropic name kept working for backward compatibility. When both are set
 * and disagree, `OPENCLAUDE_CONFIG_DIR` wins and we warn once so the user
 * can clean up. Exported for tests.
 *
 * @param options.openClaudeConfigDir value of OPENCLAUDE_CONFIG_DIR (preferred)
 * @param options.legacyConfigDir value of CLAUDE_CONFIG_DIR (legacy alias)
 * @param options.warn sink for the one-time conflict warning
 * @returns the winning directory, or `undefined` when neither var is set.
 *   Empty strings are treated as unset and fall through (see `||` below).
 */
let warnedAboutConflictingConfigDirEnvs = false
export function resolveConfigDirEnv(options?: {
  openClaudeConfigDir?: string
  legacyConfigDir?: string
  warn?: (message: string) => void
}): string | undefined {
  const open = options?.openClaudeConfigDir
  const legacy = options?.legacyConfigDir
  const conflicting = Boolean(open && legacy && open !== legacy)
  // Consume the once-per-process flag only when a warning is actually
  // delivered. Previously the flag was set before checking for a `warn`
  // callback, so a warn-less conflicting call silently burned the flag and
  // a later warn-capable caller would never emit the promised warning.
  if (conflicting && options?.warn && !warnedAboutConflictingConfigDirEnvs) {
    warnedAboutConflictingConfigDirEnvs = true
    options.warn(
      `Both OPENCLAUDE_CONFIG_DIR and CLAUDE_CONFIG_DIR are set to different values. Using OPENCLAUDE_CONFIG_DIR=${open}; ignoring CLAUDE_CONFIG_DIR=${legacy}.`,
    )
  }
  // `||` (not `??`) is deliberate: an empty-string env var counts as unset
  // and falls through to the next candidate (covered by unit tests).
  return open || legacy || undefined
}

/**
 * Test-only escape hatch — resets the once-per-process conflict warning so
 * unit tests can re-trigger it.
 */
export function __resetConfigDirEnvWarningForTesting(): void {
  warnedAboutConflictingConfigDirEnvs = false
}
export function resolveClaudeConfigHomeDir(options?: {
configDirEnv?: string
homeDir?: string
@@ -30,13 +63,21 @@ export function resolveClaudeConfigHomeDir(options?: {
return openClaudeDir.normalize('NFC')
}
// Memoized: 150+ callers, many on hot paths. Keyed off CLAUDE_CONFIG_DIR so
// tests that change the env var get a fresh value without explicit cache.clear.
// Memoized: 150+ callers, many on hot paths. Keyed off both override env
// vars so tests that change either get a fresh value without explicit
// cache.clear.
export const getClaudeConfigHomeDir = memoize(
(): string => resolveClaudeConfigHomeDir({
configDirEnv: process.env.CLAUDE_CONFIG_DIR,
configDirEnv: resolveConfigDirEnv({
openClaudeConfigDir: process.env.OPENCLAUDE_CONFIG_DIR,
legacyConfigDir: process.env.CLAUDE_CONFIG_DIR,
warn: message => {
// eslint-disable-next-line no-console
console.warn(`[openclaude] ${message}`)
},
}),
() => process.env.CLAUDE_CONFIG_DIR,
}),
() => `${process.env.OPENCLAUDE_CONFIG_DIR ?? ''}|${process.env.CLAUDE_CONFIG_DIR ?? ''}`,
)
export function getTeamsDir(): string {

View File

@@ -51,7 +51,8 @@ describe('OpenClaude paths', () => {
).toBe(join(homedir(), '.claude'))
})
test('uses CLAUDE_CONFIG_DIR override when provided', async () => {
test('uses CLAUDE_CONFIG_DIR override when provided (legacy)', async () => {
delete process.env.OPENCLAUDE_CONFIG_DIR
process.env.CLAUDE_CONFIG_DIR = '/tmp/custom-openclaude'
const { getClaudeConfigHomeDir, resolveClaudeConfigHomeDir } =
await importFreshEnvUtils()
@@ -64,6 +65,83 @@ describe('OpenClaude paths', () => {
).toBe('/tmp/custom-openclaude')
})
// Issue #454 coverage: the preferred OPENCLAUDE_CONFIG_DIR alias, legacy
// CLAUDE_CONFIG_DIR fallthrough, precedence on conflict, and the one-time
// conflict warning — both through getClaudeConfigHomeDir() and by calling
// resolveConfigDirEnv() in isolation.
// NOTE(review): these tests mutate process.env and several leave
// OPENCLAUDE_CONFIG_DIR / CLAUDE_CONFIG_DIR set on exit. Presumably a
// beforeEach/afterEach outside this hunk snapshots and restores the env —
// confirm, otherwise test order becomes load-bearing.
test('OPENCLAUDE_CONFIG_DIR overrides the default (issue #454)', async () => {
// Clear the legacy var so only the new alias is in play.
delete process.env.CLAUDE_CONFIG_DIR
process.env.OPENCLAUDE_CONFIG_DIR = '/tmp/oc-config-only'
// Fresh import so the memoized getClaudeConfigHomeDir() re-reads the env.
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/oc-config-only')
})
test('OPENCLAUDE_CONFIG_DIR wins when both env vars are set with different values', async () => {
// Precedence rule: the new name beats the legacy one on conflict.
process.env.OPENCLAUDE_CONFIG_DIR = '/tmp/oc-wins'
process.env.CLAUDE_CONFIG_DIR = '/tmp/legacy-loses'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/oc-wins')
})
test('CLAUDE_CONFIG_DIR is still honored when OPENCLAUDE_CONFIG_DIR is unset', async () => {
// Backward compatibility: legacy-only installs keep working unchanged.
delete process.env.OPENCLAUDE_CONFIG_DIR
process.env.CLAUDE_CONFIG_DIR = '/tmp/legacy-only'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/legacy-only')
})
test('empty OPENCLAUDE_CONFIG_DIR falls through to CLAUDE_CONFIG_DIR', async () => {
// Empty string is treated as unset (resolver uses ||, not ??).
process.env.OPENCLAUDE_CONFIG_DIR = ''
process.env.CLAUDE_CONFIG_DIR = '/tmp/legacy-fallback'
const { getClaudeConfigHomeDir } = await importFreshEnvUtils()
expect(getClaudeConfigHomeDir()).toBe('/tmp/legacy-fallback')
})
test('resolveConfigDirEnv prefers OPENCLAUDE over CLAUDE and warns on conflict', async () => {
const { resolveConfigDirEnv, __resetConfigDirEnvWarningForTesting } =
await importFreshEnvUtils()
// Reset the once-per-process flag so this test can observe the warning
// regardless of what earlier tests triggered.
__resetConfigDirEnvWarningForTesting()
const warnings: string[] = []
const result = resolveConfigDirEnv({
openClaudeConfigDir: '/a',
legacyConfigDir: '/b',
warn: m => warnings.push(m),
})
expect(result).toBe('/a')
expect(warnings.length).toBe(1)
// The warning should name both the winner and the ignored value.
expect(warnings[0]).toContain('OPENCLAUDE_CONFIG_DIR=/a')
expect(warnings[0]).toContain('CLAUDE_CONFIG_DIR=/b')
})
test('resolveConfigDirEnv does not warn when both env vars agree', async () => {
const { resolveConfigDirEnv, __resetConfigDirEnvWarningForTesting } =
await importFreshEnvUtils()
__resetConfigDirEnvWarningForTesting()
const warnings: string[] = []
const result = resolveConfigDirEnv({
openClaudeConfigDir: '/same',
legacyConfigDir: '/same',
warn: m => warnings.push(m),
})
expect(result).toBe('/same')
// Identical values are not a conflict — stay silent.
expect(warnings).toEqual([])
})
test('resolveConfigDirEnv returns undefined when neither env var is set', async () => {
const { resolveConfigDirEnv } = await importFreshEnvUtils()
// No override at all → caller falls back to its own default (homedir).
expect(
resolveConfigDirEnv({
openClaudeConfigDir: undefined,
legacyConfigDir: undefined,
}),
).toBeUndefined()
})
test('project and local settings paths use .openclaude', async () => {
const { getRelativeSettingsFilePathForSource } = await importFreshSettings()

View File

@@ -34,7 +34,8 @@ export function getSecureStorageServiceName(
serviceSuffix: string = '',
): string {
const configDir = getClaudeConfigHomeDir()
const isDefaultDir = !process.env.CLAUDE_CONFIG_DIR
const isDefaultDir =
!process.env.OPENCLAUDE_CONFIG_DIR && !process.env.CLAUDE_CONFIG_DIR
// Use a hash of the config dir path to create a unique but stable suffix
// Only add suffix for non-default directories to maintain backwards compatibility

View File

@@ -117,7 +117,8 @@ const TEAMMATE_ENV_VARS = [
'MISTRAL_BASE_URL',
// Custom API endpoint
'ANTHROPIC_BASE_URL',
// Config directory override
// Config directory override (preferred name + legacy alias)
'OPENCLAUDE_CONFIG_DIR',
'CLAUDE_CONFIG_DIR',
// CCR marker — teammates need this for CCR-aware code paths. Auth finds
// its own way via /home/claude/.claude/remote/.oauth_token regardless;