Compare commits

..

1 Commits

Author SHA1 Message Date
gnanam1990
5ae055d30b fix(mcp): allow third-party providers to approve project-scope .mcp.json servers (#696)
When a user runs openclaude with a third-party provider, project-scope
MCP servers added via `openclaude mcp add -s project ...` were silently
dropped from `/mcp` and `openclaude mcp list`. Re-adding the same
server printed "MCP server already exists in .mcp.json" but the server
never actually loaded. Reporter @gbmerrall pinpointed the cause to the
`if (usesAnthropicSetup)` gate around `handleMcpjsonServerApprovals`
in src/interactiveHelpers.tsx.

The MCP approval dialog and the CLAUDE.md external-includes warning
are about workspace trust, not about Anthropic auth. Gating them on
`usesAnthropicSetup` meant 3P-provider users never saw the dialog
that writes `enableAllProjectMcpServers: true` and
`enabledMcpjsonServers: [...]` to settings.local.json — without
those settings, the MCP server isn't loaded for use.

Drop the `usesAnthropicSetup` gate around the approval flow. The
inner logic is unchanged and `handleMcpjsonServerApprovals` already
early-returns when no servers are pending, so users without project
`.mcp.json` see no new behavior.

- src/interactiveHelpers.tsx: drop the gate, add a comment explaining
  why (and cite #696 so future readers can find context).
- src/__tests__/bugfixes.test.ts: +2 regression tests asserting the
  gate is gone and the issue is referenced.

Verified locally on Linux: build passes (v0.7.0), 1634 tests pass,
the 4 remaining failures (StartupScreen.test.ts, thinking.test.ts)
reproduce on main and are unrelated. The bundled dist/cli.mjs shows
`handleMcpjsonServerApprovals(root2)` running directly after
`setSessionTrustAccepted(true)` with no auth gate.
2026-04-28 12:13:33 +05:30
7 changed files with 54 additions and 147 deletions

View File

@@ -288,3 +288,30 @@ describe('Context overflow 500 fix', () => {
expect(content).toContain('automatic compaction has failed') expect(content).toContain('automatic compaction has failed')
}) })
}) })
// ---------------------------------------------------------------------------
// Fix N: Project-scope MCP servers from .mcp.json not detected for 3P providers (issue #696)
// ---------------------------------------------------------------------------
// Regression tests for issue #696: project-scope .mcp.json servers must be approved
// (and thus loaded) for third-party providers, not only for Anthropic auth.
// NOTE(review): these assert on the SOURCE TEXT of interactiveHelpers.tsx via string
// search, so they are brittle against pure formatting changes at the call site —
// acceptable here as a tripwire, but worth knowing before refactoring that file.
describe('Project-scope MCP approval — third-party providers (issue #696)', () => {
test('handleMcpjsonServerApprovals is NOT gated behind usesAnthropicSetup', async () => {
const content = await file('interactiveHelpers.tsx').text()
// The call site for handleMcpjsonServerApprovals must not sit inside an
// `if (usesAnthropicSetup) { ... }` block, or third-party providers will
// never get the dialog and project-scope .mcp.json servers will be silently
// dropped from /mcp listings (issue #696).
const approvalCallIdx = content.indexOf('await handleMcpjsonServerApprovals(root)')
expect(approvalCallIdx).toBeGreaterThan(-1)
// Look at the 800 chars BEFORE the call site for any `if (usesAnthropicSetup)`
// block that would still be open. Pick a window that's definitely inside the
// showSetupScreens function but not in earlier dialogs.
const before = content.slice(Math.max(0, approvalCallIdx - 800), approvalCallIdx)
// The `[^}]*$` tail requires the gate's `{` to still be unclosed at the call
// site — i.e. the call would be inside that block. A closed gate earlier in
// the window does not match.
expect(before).not.toMatch(/if\s*\(\s*usesAnthropicSetup\s*\)\s*{[^}]*$/)
})
test('issue #696 is referenced from the comment so future readers can find context', async () => {
const content = await file('interactiveHelpers.tsx').text()
// Only checks that the string '#696' appears somewhere in the file, not that it
// is adjacent to the approval call — a deliberate low-cost heuristic.
expect(content).toContain('#696')
})
})

View File

@@ -158,12 +158,14 @@ export async function showSetupScreens(root: Root, permissionMode: PermissionMod
// Now that trust is established, prefetch system context if it wasn't already // Now that trust is established, prefetch system context if it wasn't already
void getSystemContext(); void getSystemContext();
// Skip MCP approval dialogs for third-party providers (no interactive auth prompts) // MCP approval and external-includes warnings are about workspace
if (usesAnthropicSetup) { // trust, not about Anthropic auth. They must run for all providers
// If settings are valid, check for any mcp.json servers that need approval // — including third-party — otherwise project-scoped .mcp.json
const { // servers never get the approval that writes
errors: allErrors // enableAllProjectMcpServers / enabledMcpjsonServers into
} = getSettingsWithAllErrors(); // settings.local.json, and the servers are silently dropped from
// /mcp and `mcp list` (issue #696).
const { errors: allErrors } = getSettingsWithAllErrors();
if (allErrors.length === 0) { if (allErrors.length === 0) {
await handleMcpjsonServerApprovals(root); await handleMcpjsonServerApprovals(root);
} }
@@ -177,7 +179,6 @@ export async function showSetupScreens(root: Root, permissionMode: PermissionMod
await showSetupDialog(root, done => <ClaudeMdExternalIncludesDialog onDone={done} isStandaloneDialog externalIncludes={externalIncludes} />); await showSetupDialog(root, done => <ClaudeMdExternalIncludesDialog onDone={done} isStandaloneDialog externalIncludes={externalIncludes} />);
} }
} }
}
// Track current repo path for teleport directory switching (fire-and-forget) // Track current repo path for teleport directory switching (fire-and-forget)
// This must happen AFTER trust to prevent untrusted directories from poisoning the mapping // This must happen AFTER trust to prevent untrusted directories from poisoning the mapping

View File

@@ -28,38 +28,6 @@ test('maps endpoint_not_found category markers to actionable setup guidance', ()
expect(text).toContain('/v1') expect(text).toContain('/v1')
}) })
// Removed by this commit: asserted that a 404 carrying a `host=` marker segment
// surfaces the real remote host and the selected model, and suppresses the
// Ollama/localhost-flavored hint (issue #926 behavior being reverted here).
test('endpoint_not_found from a remote host shows the actual host, not Ollama (issue #926)', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found,host=integrate.api.nvidia.com] Hint: Endpoint at integrate.api.nvidia.com returned 404.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'moonshotai/kimi-k2.5-thinking')
const text = getFirstText(message)
expect(text).toContain('integrate.api.nvidia.com')
expect(text).toContain('moonshotai/kimi-k2.5-thinking')
// Negative assertions: the localhost/Ollama wording (and its default port) must
// not leak into a remote-host diagnostic.
expect(text).not.toContain('Ollama')
expect(text).not.toContain('11434')
})
// Removed by this commit: asserted backward-compatible behavior — a marker with no
// `host=` segment falls back to the generic, Ollama-aware endpoint message.
test('endpoint_not_found without a host falls back to the Ollama-aware message', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found] Hint: Confirm OPENAI_BASE_URL includes /v1.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'qwen2.5-coder:7b')
const text = getFirstText(message)
expect(text).toContain('Provider endpoint was not found')
expect(text).toContain('Ollama')
})
test('maps tool_call_incompatible category markers to model/tool guidance', () => { test('maps tool_call_incompatible category markers to model/tool guidance', () => {
const error = APIError.generate( const error = APIError.generate(
400, 400,

View File

@@ -51,9 +51,7 @@ import {
import { shouldProcessRateLimits } from '../rateLimitMocking.js' // Used for /mock-limits command import { shouldProcessRateLimits } from '../rateLimitMocking.js' // Used for /mock-limits command
import { extractConnectionErrorDetails, formatAPIError } from './errorUtils.js' import { extractConnectionErrorDetails, formatAPIError } from './errorUtils.js'
import { import {
extractOpenAICategoryHost,
extractOpenAICategoryMarker, extractOpenAICategoryMarker,
isLocalhostLikeHost,
type OpenAICompatibilityFailureCategory, type OpenAICompatibilityFailureCategory,
} from './openaiErrorClassification.js' } from './openaiErrorClassification.js'
@@ -70,29 +68,25 @@ function mapOpenAICompatibilityFailureToAssistantMessage(options: {
category: OpenAICompatibilityFailureCategory category: OpenAICompatibilityFailureCategory
model: string model: string
rawMessage: string rawMessage: string
host?: string
}): AssistantMessage { }): AssistantMessage {
const switchCmd = getIsNonInteractiveSession() ? '--model' : '/model' const switchCmd = getIsNonInteractiveSession() ? '--model' : '/model'
const compactHint = getIsNonInteractiveSession() const compactHint = getIsNonInteractiveSession()
? 'Reduce prompt size or start a new session.' ? 'Reduce prompt size or start a new session.'
: 'Run /compact or start a new session with /new.' : 'Run /compact or start a new session with /new.'
const isLocalhost = options.host === undefined || isLocalhostLikeHost(options.host)
switch (options.category) { switch (options.category) {
case 'localhost_resolution_failed': case 'localhost_resolution_failed':
case 'connection_refused': case 'connection_refused':
return createAssistantAPIErrorMessage({ return createAssistantAPIErrorMessage({
content: isLocalhost content:
? 'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.' 'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.',
: `Could not connect to the provider at ${options.host}. Verify OPENAI_BASE_URL is correct and that the host is reachable.`,
error: 'unknown', error: 'unknown',
}) })
case 'endpoint_not_found': case 'endpoint_not_found':
return createAssistantAPIErrorMessage({ return createAssistantAPIErrorMessage({
content: isLocalhost content:
? 'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).' 'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).',
: `Provider endpoint at ${options.host} returned 404. Verify OPENAI_BASE_URL is correct and that the selected model (${options.model}) is supported by this provider.`,
error: 'invalid_request', error: 'invalid_request',
}) })
@@ -573,7 +567,6 @@ export function getAssistantMessageFromError(
category: openaiCategory, category: openaiCategory,
model, model,
rawMessage: error.message, rawMessage: error.message,
host: extractOpenAICategoryHost(error.message),
}) })
} }
} }

View File

@@ -4,10 +4,8 @@ import {
buildOpenAICompatibilityErrorMessage, buildOpenAICompatibilityErrorMessage,
classifyOpenAIHttpFailure, classifyOpenAIHttpFailure,
classifyOpenAINetworkFailure, classifyOpenAINetworkFailure,
extractOpenAICategoryHost,
extractOpenAICategoryMarker, extractOpenAICategoryMarker,
formatOpenAICategoryMarker, formatOpenAICategoryMarker,
isLocalhostLikeHost,
} from './openaiErrorClassification.js' } from './openaiErrorClassification.js'
test('classifies localhost ECONNREFUSED as connection_refused', () => { test('classifies localhost ECONNREFUSED as connection_refused', () => {
@@ -97,58 +95,3 @@ test('ignores unknown category markers during extraction', () => {
const malformed = 'OpenAI API error 500 [openai_category=totally_fake_category]' const malformed = 'OpenAI API error 500 [openai_category=totally_fake_category]'
expect(extractOpenAICategoryMarker(malformed)).toBeUndefined() expect(extractOpenAICategoryMarker(malformed)).toBeUndefined()
}) })
// Removed by this commit: asserted that classifyOpenAIHttpFailure keeps the request
// URL on the failure and produces a host-aware hint for remote 404s (issue #926).
test('endpoint_not_found 404 from a remote host gets a host-aware hint (issue #926)', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'https://integrate.api.nvidia.com/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.requestUrl).toBe('https://integrate.api.nvidia.com/v1/chat/completions')
expect(failure.hint).toContain('integrate.api.nvidia.com')
// 'local providers' is the localhost-flavored wording; it must not appear for
// a remote host.
expect(failure.hint).not.toContain('local providers')
})
// Removed by this commit: asserted that a loopback URL (127.0.0.1) keeps the
// localhost/Ollama-flavored hint text for 404s.
test('endpoint_not_found 404 from localhost keeps the Ollama-flavored hint', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'http://127.0.0.1:11434/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.hint).toContain('local providers')
})
// Removed by this commit: asserted that buildOpenAICompatibilityErrorMessage derives
// the host from requestUrl, embeds it as `,host=...` in the marker, and that both
// category and host survive a parse round-trip.
test('marker round-trip preserves host segment', () => {
const formatted = buildOpenAICompatibilityErrorMessage(
'OpenAI API error 404: Not Found',
{
category: 'endpoint_not_found',
hint: 'Endpoint at integrate.api.nvidia.com returned 404.',
requestUrl: 'https://integrate.api.nvidia.com/v1/chat/completions',
},
)
expect(formatted).toContain('[openai_category=endpoint_not_found,host=integrate.api.nvidia.com]')
expect(extractOpenAICategoryMarker(formatted)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(formatted)).toBe('integrate.api.nvidia.com')
})
// Removed by this commit: asserted the pre-#926 marker format (no host segment)
// still round-trips and yields undefined from extractOpenAICategoryHost.
test('marker without host stays backward-compatible', () => {
const marker = formatOpenAICategoryMarker('endpoint_not_found')
expect(marker).toBe('[openai_category=endpoint_not_found]')
expect(extractOpenAICategoryMarker(marker)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(marker)).toBeUndefined()
})
// Removed by this commit: pinned the loopback set recognized by isLocalhostLikeHost
// ('localhost', any 127.x address, '::1') and the falsy handling of undefined.
test('isLocalhostLikeHost matches loopback variants', () => {
expect(isLocalhostLikeHost('localhost')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.1')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.5')).toBe(true)
expect(isLocalhostLikeHost('::1')).toBe(true)
expect(isLocalhostLikeHost('integrate.api.nvidia.com')).toBe(false)
expect(isLocalhostLikeHost(undefined)).toBe(false)
})

View File

@@ -21,7 +21,6 @@ export type OpenAICompatibilityFailure = {
hint?: string hint?: string
code?: string code?: string
status?: number status?: number
requestUrl?: string
} }
const OPENAI_CATEGORY_MARKER_PREFIX = '[openai_category=' const OPENAI_CATEGORY_MARKER_PREFIX = '[openai_category='
@@ -97,11 +96,6 @@ function isLocalhostLikeHostname(hostname: string | null): boolean {
return /^127\./.test(hostname) return /^127\./.test(hostname)
} }
// Removed by this commit. Null-safe wrapper: lowercases the host and delegates to
// isLocalhostLikeHostname; null/undefined/empty hosts are never localhost-like.
export function isLocalhostLikeHost(host: string | null | undefined): boolean {
if (!host) return false
return isLocalhostLikeHostname(host.toLowerCase())
}
function isContextOverflowMessage(body: string): boolean { function isContextOverflowMessage(body: string): boolean {
const lower = body.toLowerCase() const lower = body.toLowerCase()
return ( return (
@@ -155,18 +149,14 @@ function isModelNotFoundMessage(body: string): boolean {
export function formatOpenAICategoryMarker( export function formatOpenAICategoryMarker(
category: OpenAICompatibilityFailureCategory, category: OpenAICompatibilityFailureCategory,
host?: string,
): string { ): string {
if (host && /^[A-Za-z0-9.\-:]+$/.test(host)) {
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category},host=${host}]`
}
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category}]` return `${OPENAI_CATEGORY_MARKER_PREFIX}${category}]`
} }
export function extractOpenAICategoryMarker( export function extractOpenAICategoryMarker(
message: string, message: string,
): OpenAICompatibilityFailureCategory | undefined { ): OpenAICompatibilityFailureCategory | undefined {
const match = message.match(/\[openai_category=([a-z_]+)(?:,host=[^\]]+)?]/) const match = message.match(/\[openai_category=([a-z_]+)]/)
const category = match?.[1] const category = match?.[1]
if (!category || !isOpenAICompatibilityFailureCategory(category)) { if (!category || !isOpenAICompatibilityFailureCategory(category)) {
@@ -176,17 +166,11 @@ export function extractOpenAICategoryMarker(
return category return category
} }
// Removed by this commit. Parses the `host=` segment out of an
// `[openai_category=<cat>,host=<host>]` marker embedded in an error message;
// returns undefined when the marker has no host segment (or no marker at all).
// Host charset is restricted to [A-Za-z0-9.\-:] — matching the validation in
// formatOpenAICategoryMarker.
export function extractOpenAICategoryHost(message: string): string | undefined {
const match = message.match(/\[openai_category=[a-z_]+,host=([A-Za-z0-9.\-:]+)]/)
return match?.[1]
}
export function buildOpenAICompatibilityErrorMessage( export function buildOpenAICompatibilityErrorMessage(
baseMessage: string, baseMessage: string,
failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint' | 'requestUrl'>, failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint'>,
): string { ): string {
const host = failure.requestUrl ? getHostname(failure.requestUrl) ?? undefined : undefined const marker = formatOpenAICategoryMarker(failure.category)
const marker = formatOpenAICategoryMarker(failure.category, host)
const hint = failure.hint ? ` Hint: ${failure.hint}` : '' const hint = failure.hint ? ` Hint: ${failure.hint}` : ''
return `${baseMessage} ${marker}${hint}` return `${baseMessage} ${marker}${hint}`
} }
@@ -263,11 +247,8 @@ export function classifyOpenAINetworkFailure(
export function classifyOpenAIHttpFailure(options: { export function classifyOpenAIHttpFailure(options: {
status: number status: number
body: string body: string
url?: string
}): OpenAICompatibilityFailure { }): OpenAICompatibilityFailure {
const body = options.body ?? '' const body = options.body ?? ''
const hostname = options.url ? getHostname(options.url) : null
const isLocalHost = isLocalhostLikeHostname(hostname)
if (options.status === 401 || options.status === 403) { if (options.status === 401 || options.status === 403) {
return { return {
@@ -303,17 +284,13 @@ export function classifyOpenAIHttpFailure(options: {
} }
if (options.status === 404) { if (options.status === 404) {
const isRemote = hostname !== null && !isLocalHost
return { return {
source: 'http', source: 'http',
category: 'endpoint_not_found', category: 'endpoint_not_found',
retryable: false, retryable: false,
status: options.status, status: options.status,
message: body, message: body,
requestUrl: options.url, hint: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
hint: isRemote
? `Endpoint at ${hostname} returned 404. Verify OPENAI_BASE_URL is correct and the requested model is supported by this provider.`
: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
} }
} }

View File

@@ -1935,9 +1935,7 @@ class OpenAIShimMessages {
classifyOpenAIHttpFailure({ classifyOpenAIHttpFailure({
status, status,
body: errorBody, body: errorBody,
url: requestUrl,
}) })
const failureWithUrl = { ...failure, requestUrl: failure.requestUrl ?? requestUrl }
const redactedUrl = redactUrlForDiagnostics(requestUrl) const redactedUrl = redactUrlForDiagnostics(requestUrl)
logForDebugging( logForDebugging(
@@ -1950,7 +1948,7 @@ class OpenAIShimMessages {
parsedBody, parsedBody,
buildOpenAICompatibilityErrorMessage( buildOpenAICompatibilityErrorMessage(
`OpenAI API error ${status}: ${errorBody}${rateHint}`, `OpenAI API error ${status}: ${errorBody}${rateHint}`,
failureWithUrl, failure,
), ),
responseHeaders, responseHeaders,
) )