Compare commits


1 Commit

Author: gnanam1990
SHA1: effa6ef83d

fix(errors): show actual host in 404 message instead of Ollama hint (#926)

When an OpenAI-compatible provider returns a 404, the user-facing error
message hardcoded "for Ollama: http://127.0.0.1:11434/v1" as a hint
regardless of the configured base URL. Users on remote providers
(NVIDIA NIM, OpenRouter, etc.) read this as the app ignoring their
custom OPENAI_BASE_URL and routing to localhost.

Plumb the request URL through the classifier and marker so the
user-facing message can name the actual host. Localhost endpoints keep
the existing Ollama-flavored guidance for backward compatibility.

- classifyOpenAIHttpFailure now accepts an optional url and produces a
  host-aware hint for non-localhost 404s
- the [openai_category=...] marker carries an optional host segment
- mapOpenAICompatibilityFailureToAssistantMessage branches on host to
  show "Endpoint at <host> returned 404. Verify OPENAI_BASE_URL is
  correct and the selected model (<model>) is supported by this
  provider." for remote URLs
- backward compatibility preserved when no URL is available
Date: 2026-04-28 08:58:04 +05:30
10 changed files with 159 additions and 115 deletions
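
Before the file-by-file diff, a minimal TypeScript sketch of the marker round-trip this commit introduces, condensed from formatOpenAICategoryMarker and extractOpenAICategoryHost in the diff below. The helper names formatMarker and extractHost are illustrative stand-ins; the marker shape and regexes follow the diff.

// Illustrative sketch only — mirrors the diff's marker format, not the shipped code.
type Category = 'endpoint_not_found' | 'connection_refused'

function formatMarker(category: Category, host?: string): string {
  // Embed the host only when it consists of safe characters, as the diff's guard does.
  return host && /^[A-Za-z0-9.\-:]+$/.test(host)
    ? `[openai_category=${category},host=${host}]`
    : `[openai_category=${category}]`
}

function extractHost(message: string): string | undefined {
  return message.match(/\[openai_category=[a-z_]+,host=([A-Za-z0-9.\-:]+)]/)?.[1]
}

// Remote 404: the marker carries the host, so the user-facing message can name it.
const remote = `OpenAI API error 404: Not Found ${formatMarker('endpoint_not_found', 'integrate.api.nvidia.com')}`
console.log(extractHost(remote)) // "integrate.api.nvidia.com"

// A legacy marker without a host still parses; the localhost/Ollama guidance is used instead.
const legacy = `OpenAI API error 404: Not Found ${formatMarker('endpoint_not_found')}`
console.log(extractHost(legacy)) // undefined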

View File

@@ -28,7 +28,6 @@
"@opentelemetry/sdk-trace-base": "2.6.1",
"@opentelemetry/sdk-trace-node": "2.6.1",
"@opentelemetry/semantic-conventions": "1.40.0",
"@vscode/ripgrep": "^1.17.1",
"ajv": "8.18.0",
"auto-bind": "5.0.1",
"axios": "1.15.0",
@@ -462,8 +461,6 @@
"@types/react": ["@types/react@19.2.14", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w=="],
"@vscode/ripgrep": ["@vscode/ripgrep@1.17.1", "", { "dependencies": { "https-proxy-agent": "^7.0.2", "proxy-from-env": "^1.1.0", "yauzl": "^2.9.2" } }, "sha512-xTs7DGyAO3IsJYOCTBP8LnTvPiYVKEuyv8s0xyJDBXfs8rhBfqnZPvb6xDT+RnwWzcXqW27xLS/aGrkjX7lNWw=="],
"accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="],
"agent-base": ["agent-base@7.1.4", "", {}, "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="],
@@ -494,8 +491,6 @@
"bowser": ["bowser@2.14.1", "", {}, "sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg=="],
"buffer-crc32": ["buffer-crc32@0.2.13", "", {}, "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ=="],
"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
"bun-types": ["bun-types@1.3.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-1KGPpoxQWl9f6wcZh57LvrPIInQMn2TQ7jsgxqpRzg+l0QPOFvJVH7HmvHo/AiPgwXy+/Thf6Ov3EdVn1vOabg=="],
@@ -614,8 +609,6 @@
"fast-xml-parser": ["fast-xml-parser@5.5.8", "", { "dependencies": { "fast-xml-builder": "^1.1.4", "path-expression-matcher": "^1.2.0", "strnum": "^2.2.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ=="],
"fd-slicer": ["fd-slicer@1.1.0", "", { "dependencies": { "pend": "~1.2.0" } }, "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g=="],
"fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="],
"figures": ["figures@6.1.0", "", { "dependencies": { "is-unicode-supported": "^2.0.0" } }, "sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg=="],
@@ -794,8 +787,6 @@
"path-to-regexp": ["path-to-regexp@8.4.1", "", {}, "sha512-fvU78fIjZ+SBM9YwCknCvKOUKkLVqtWDVctl0s7xIqfmfb38t2TT4ZU2gHm+Z8xGwgW+QWEU3oQSAzIbo89Ggw=="],
"pend": ["pend@1.2.0", "", {}, "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="],
"picomatch": ["picomatch@4.0.4", "", {}, "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A=="],
"pkce-challenge": ["pkce-challenge@5.0.1", "", {}, "sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ=="],
@@ -810,7 +801,7 @@
"proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="],
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
"proxy-from-env": ["proxy-from-env@2.1.0", "", {}, "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA=="],
"qrcode": ["qrcode@1.5.4", "", { "dependencies": { "dijkstrajs": "^1.0.1", "pngjs": "^5.0.0", "yargs": "^15.3.1" }, "bin": { "qrcode": "bin/qrcode" } }, "sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg=="],
@@ -962,8 +953,6 @@
"yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
"yauzl": ["yauzl@2.10.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } }, "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g=="],
"yoctocolors": ["yoctocolors@2.1.2", "", {}, "sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug=="],
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
@@ -1380,8 +1369,6 @@
"@smithy/uuid/tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"axios/proxy-from-env": ["proxy-from-env@2.1.0", "", {}, "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA=="],
"cli-highlight/chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="],
"cli-highlight/yargs": ["yargs@16.2.0", "", { "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" } }, "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw=="],
@@ -1442,8 +1429,6 @@
"@aws-sdk/nested-clients/@smithy/util-base64/@smithy/util-buffer-from": ["@smithy/util-buffer-from@4.2.2", "", { "dependencies": { "@smithy/is-array-buffer": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-FDXD7cvUoFWwN6vtQfEta540Y/YBe5JneK3SoZg9bThSoOAC/eGeYEua6RkBgKjGa/sz6Y+DuBZj3+YEY21y4Q=="],
"@mendable/firecrawl-js/axios/proxy-from-env": ["proxy-from-env@2.1.0", "", {}, "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA=="],
"@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/core/@opentelemetry/semantic-conventions": ["@opentelemetry/semantic-conventions@1.28.0", "", {}, "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA=="],
"@opentelemetry/exporter-trace-otlp-grpc/@opentelemetry/otlp-transformer/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.57.2", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-uIX52NnTM0iBh84MShlpouI7UKqkZ7MrUszTmaypHBu4r7NofznSnQRfJ+uUeDtQDj6w8eFGg5KBLDAwAPz1+A=="],
@@ -1524,8 +1509,6 @@
"cliui/wrap-ansi/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
"firecrawl/axios/proxy-from-env": ["proxy-from-env@2.1.0", "", {}, "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA=="],
"form-data/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"qrcode/yargs/cliui": ["cliui@6.0.0", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^6.2.0" } }, "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ=="],

View File

@@ -74,7 +74,6 @@
"@opentelemetry/sdk-trace-base": "2.6.1",
"@opentelemetry/sdk-trace-node": "2.6.1",
"@opentelemetry/semantic-conventions": "1.40.0",
"@vscode/ripgrep": "^1.17.1",
"ajv": "8.18.0",
"auto-bind": "5.0.1",
"axios": "1.15.0",

View File

@@ -472,11 +472,6 @@ ${exports}
'@aws-sdk/credential-providers',
'@azure/identity',
'google-auth-library',
// @vscode/ripgrep ships a platform-specific binary alongside its
// index.js and resolves the path via __dirname at runtime. Bundling
// would freeze the build host's absolute path into dist/cli.mjs, so we
// keep it external and rely on the npm package being installed.
'@vscode/ripgrep',
],
})

View File

@@ -28,6 +28,38 @@ test('maps endpoint_not_found category markers to actionable setup guidance', ()
expect(text).toContain('/v1')
})
test('endpoint_not_found from a remote host shows the actual host, not Ollama (issue #926)', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found,host=integrate.api.nvidia.com] Hint: Endpoint at integrate.api.nvidia.com returned 404.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'moonshotai/kimi-k2.5-thinking')
const text = getFirstText(message)
expect(text).toContain('integrate.api.nvidia.com')
expect(text).toContain('moonshotai/kimi-k2.5-thinking')
expect(text).not.toContain('Ollama')
expect(text).not.toContain('11434')
})
test('endpoint_not_found without a host falls back to the Ollama-aware message', () => {
const error = APIError.generate(
404,
undefined,
'OpenAI API error 404: Not Found [openai_category=endpoint_not_found] Hint: Confirm OPENAI_BASE_URL includes /v1.',
new Headers(),
)
const message = getAssistantMessageFromError(error, 'qwen2.5-coder:7b')
const text = getFirstText(message)
expect(text).toContain('Provider endpoint was not found')
expect(text).toContain('Ollama')
})
test('maps tool_call_incompatible category markers to model/tool guidance', () => {
const error = APIError.generate(
400,

View File

@@ -51,7 +51,9 @@ import {
import { shouldProcessRateLimits } from '../rateLimitMocking.js' // Used for /mock-limits command
import { extractConnectionErrorDetails, formatAPIError } from './errorUtils.js'
import {
extractOpenAICategoryHost,
extractOpenAICategoryMarker,
isLocalhostLikeHost,
type OpenAICompatibilityFailureCategory,
} from './openaiErrorClassification.js'
@@ -68,25 +70,29 @@ function mapOpenAICompatibilityFailureToAssistantMessage(options: {
category: OpenAICompatibilityFailureCategory
model: string
rawMessage: string
host?: string
}): AssistantMessage {
const switchCmd = getIsNonInteractiveSession() ? '--model' : '/model'
const compactHint = getIsNonInteractiveSession()
? 'Reduce prompt size or start a new session.'
: 'Run /compact or start a new session with /new.'
const isLocalhost = options.host === undefined || isLocalhostLikeHost(options.host)
switch (options.category) {
case 'localhost_resolution_failed':
case 'connection_refused':
return createAssistantAPIErrorMessage({
content:
'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.',
content: isLocalhost
? 'Could not connect to the local OpenAI-compatible provider. Ensure the local server is running, then use OPENAI_BASE_URL=http://127.0.0.1:11434/v1 for Ollama.'
: `Could not connect to the provider at ${options.host}. Verify OPENAI_BASE_URL is correct and that the host is reachable.`,
error: 'unknown',
})
case 'endpoint_not_found':
return createAssistantAPIErrorMessage({
content:
'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).',
content: isLocalhost
? 'Provider endpoint was not found. Confirm OPENAI_BASE_URL targets an OpenAI-compatible /v1 endpoint (for Ollama: http://127.0.0.1:11434/v1).'
: `Provider endpoint at ${options.host} returned 404. Verify OPENAI_BASE_URL is correct and that the selected model (${options.model}) is supported by this provider.`,
error: 'invalid_request',
})
@@ -567,6 +573,7 @@ export function getAssistantMessageFromError(
category: openaiCategory,
model,
rawMessage: error.message,
host: extractOpenAICategoryHost(error.message),
})
}
}

View File

@@ -4,8 +4,10 @@ import {
buildOpenAICompatibilityErrorMessage,
classifyOpenAIHttpFailure,
classifyOpenAINetworkFailure,
extractOpenAICategoryHost,
extractOpenAICategoryMarker,
formatOpenAICategoryMarker,
isLocalhostLikeHost,
} from './openaiErrorClassification.js'
test('classifies localhost ECONNREFUSED as connection_refused', () => {
@@ -95,3 +97,58 @@ test('ignores unknown category markers during extraction', () => {
const malformed = 'OpenAI API error 500 [openai_category=totally_fake_category]'
expect(extractOpenAICategoryMarker(malformed)).toBeUndefined()
})
test('endpoint_not_found 404 from a remote host gets a host-aware hint (issue #926)', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'https://integrate.api.nvidia.com/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.requestUrl).toBe('https://integrate.api.nvidia.com/v1/chat/completions')
expect(failure.hint).toContain('integrate.api.nvidia.com')
expect(failure.hint).not.toContain('local providers')
})
test('endpoint_not_found 404 from localhost keeps the Ollama-flavored hint', () => {
const failure = classifyOpenAIHttpFailure({
status: 404,
body: 'Not Found',
url: 'http://127.0.0.1:11434/v1/chat/completions',
})
expect(failure.category).toBe('endpoint_not_found')
expect(failure.hint).toContain('local providers')
})
test('marker round-trip preserves host segment', () => {
const formatted = buildOpenAICompatibilityErrorMessage(
'OpenAI API error 404: Not Found',
{
category: 'endpoint_not_found',
hint: 'Endpoint at integrate.api.nvidia.com returned 404.',
requestUrl: 'https://integrate.api.nvidia.com/v1/chat/completions',
},
)
expect(formatted).toContain('[openai_category=endpoint_not_found,host=integrate.api.nvidia.com]')
expect(extractOpenAICategoryMarker(formatted)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(formatted)).toBe('integrate.api.nvidia.com')
})
test('marker without host stays backward-compatible', () => {
const marker = formatOpenAICategoryMarker('endpoint_not_found')
expect(marker).toBe('[openai_category=endpoint_not_found]')
expect(extractOpenAICategoryMarker(marker)).toBe('endpoint_not_found')
expect(extractOpenAICategoryHost(marker)).toBeUndefined()
})
test('isLocalhostLikeHost matches loopback variants', () => {
expect(isLocalhostLikeHost('localhost')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.1')).toBe(true)
expect(isLocalhostLikeHost('127.0.0.5')).toBe(true)
expect(isLocalhostLikeHost('::1')).toBe(true)
expect(isLocalhostLikeHost('integrate.api.nvidia.com')).toBe(false)
expect(isLocalhostLikeHost(undefined)).toBe(false)
})

View File

@@ -21,6 +21,7 @@ export type OpenAICompatibilityFailure = {
hint?: string
code?: string
status?: number
requestUrl?: string
}
const OPENAI_CATEGORY_MARKER_PREFIX = '[openai_category='
@@ -96,6 +97,11 @@ function isLocalhostLikeHostname(hostname: string | null): boolean {
return /^127\./.test(hostname)
}
export function isLocalhostLikeHost(host: string | null | undefined): boolean {
if (!host) return false
return isLocalhostLikeHostname(host.toLowerCase())
}
function isContextOverflowMessage(body: string): boolean {
const lower = body.toLowerCase()
return (
@@ -149,14 +155,18 @@ function isModelNotFoundMessage(body: string): boolean {
export function formatOpenAICategoryMarker(
category: OpenAICompatibilityFailureCategory,
host?: string,
): string {
if (host && /^[A-Za-z0-9.\-:]+$/.test(host)) {
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category},host=${host}]`
}
return `${OPENAI_CATEGORY_MARKER_PREFIX}${category}]`
}
export function extractOpenAICategoryMarker(
message: string,
): OpenAICompatibilityFailureCategory | undefined {
const match = message.match(/\[openai_category=([a-z_]+)]/)
const match = message.match(/\[openai_category=([a-z_]+)(?:,host=[^\]]+)?]/)
const category = match?.[1]
if (!category || !isOpenAICompatibilityFailureCategory(category)) {
@@ -166,11 +176,17 @@ export function extractOpenAICategoryMarker(
return category
}
export function extractOpenAICategoryHost(message: string): string | undefined {
const match = message.match(/\[openai_category=[a-z_]+,host=([A-Za-z0-9.\-:]+)]/)
return match?.[1]
}
export function buildOpenAICompatibilityErrorMessage(
baseMessage: string,
failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint'>,
failure: Pick<OpenAICompatibilityFailure, 'category' | 'hint' | 'requestUrl'>,
): string {
const marker = formatOpenAICategoryMarker(failure.category)
const host = failure.requestUrl ? getHostname(failure.requestUrl) ?? undefined : undefined
const marker = formatOpenAICategoryMarker(failure.category, host)
const hint = failure.hint ? ` Hint: ${failure.hint}` : ''
return `${baseMessage} ${marker}${hint}`
}
@@ -247,8 +263,11 @@ export function classifyOpenAINetworkFailure(
export function classifyOpenAIHttpFailure(options: {
status: number
body: string
url?: string
}): OpenAICompatibilityFailure {
const body = options.body ?? ''
const hostname = options.url ? getHostname(options.url) : null
const isLocalHost = isLocalhostLikeHostname(hostname)
if (options.status === 401 || options.status === 403) {
return {
@@ -284,13 +303,17 @@ export function classifyOpenAIHttpFailure(options: {
}
if (options.status === 404) {
const isRemote = hostname !== null && !isLocalHost
return {
source: 'http',
category: 'endpoint_not_found',
retryable: false,
status: options.status,
message: body,
hint: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
requestUrl: options.url,
hint: isRemote
? `Endpoint at ${hostname} returned 404. Verify OPENAI_BASE_URL is correct and the requested model is supported by this provider.`
: 'Endpoint was not found. Confirm OPENAI_BASE_URL includes /v1 for OpenAI-compatible local providers.',
}
}

View File

@@ -1935,7 +1935,9 @@ class OpenAIShimMessages {
classifyOpenAIHttpFailure({
status,
body: errorBody,
url: requestUrl,
})
const failureWithUrl = { ...failure, requestUrl: failure.requestUrl ?? requestUrl }
const redactedUrl = redactUrlForDiagnostics(requestUrl)
logForDebugging(
@@ -1948,7 +1950,7 @@ class OpenAIShimMessages {
parsedBody,
buildOpenAICompatibilityErrorMessage(
`OpenAI API error ${status}: ${errorBody}${rateHint}`,
failure,
failureWithUrl,
),
responseHeaders,
)

View File

@@ -5,15 +5,16 @@ import { resolveRipgrepConfig, wrapRipgrepUnavailableError } from './ripgrep.js'
const MOCK_BUILTIN_PATH = path.normalize(
process.platform === 'win32'
? `node_modules/@vscode/ripgrep/bin/rg.exe`
: `node_modules/@vscode/ripgrep/bin/rg`,
? `vendor/ripgrep/${process.arch}-win32/rg.exe`
: `vendor/ripgrep/${process.arch}-${process.platform}/rg`,
)
test('falls back to system rg when @vscode/ripgrep cannot be resolved', () => {
test('ripgrepCommand falls back to system rg when builtin binary is missing', () => {
const config = resolveRipgrepConfig({
userWantsSystemRipgrep: false,
bundledMode: false,
builtinCommand: null,
builtinCommand: MOCK_BUILTIN_PATH,
builtinExists: false,
systemExecutablePath: '/usr/bin/rg',
processExecPath: '/fake/bun',
})
@@ -25,11 +26,12 @@ test('falls back to system rg when @vscode/ripgrep cannot be resolved', () => {
})
})
test('uses builtin @vscode/ripgrep path when the package resolves', () => {
test('ripgrepCommand keeps builtin mode when bundled binary exists', () => {
const config = resolveRipgrepConfig({
userWantsSystemRipgrep: false,
bundledMode: false,
builtinCommand: MOCK_BUILTIN_PATH,
builtinExists: true,
systemExecutablePath: '/usr/bin/rg',
processExecPath: '/fake/bun',
})
@@ -41,59 +43,10 @@ test('uses builtin @vscode/ripgrep path when the package resolves', () => {
})
})
test('honors USE_BUILTIN_RIPGREP=0 by selecting system rg even when builtin is available', () => {
const config = resolveRipgrepConfig({
userWantsSystemRipgrep: true,
bundledMode: false,
builtinCommand: MOCK_BUILTIN_PATH,
systemExecutablePath: '/usr/bin/rg',
processExecPath: '/fake/bun',
})
expect(config).toMatchObject({
mode: 'system',
command: 'rg',
args: [],
})
})
test('keeps embedded mode for Bun-compiled standalone executables', () => {
const config = resolveRipgrepConfig({
userWantsSystemRipgrep: false,
bundledMode: true,
builtinCommand: null,
systemExecutablePath: '/usr/bin/rg',
processExecPath: '/opt/openclaude/bin/openclaude',
})
expect(config).toMatchObject({
mode: 'embedded',
command: '/opt/openclaude/bin/openclaude',
args: ['--no-config'],
argv0: 'rg',
})
})
test('falls through to system rg as a last resort even when not on PATH', () => {
const config = resolveRipgrepConfig({
userWantsSystemRipgrep: false,
bundledMode: false,
builtinCommand: null,
systemExecutablePath: 'rg',
processExecPath: '/fake/bun',
})
expect(config).toMatchObject({
mode: 'system',
command: 'rg',
args: [],
})
})
test('wrapRipgrepUnavailableError explains missing packaged fallback', () => {
const error = wrapRipgrepUnavailableError(
{ code: 'ENOENT', message: 'spawn rg ENOENT' },
{ mode: 'builtin', command: 'C:\\fake\\node_modules\\@vscode\\ripgrep\\bin\\rg.exe', args: [] },
{ mode: 'builtin', command: 'C:\\fake\\vendor\\ripgrep\\rg.exe', args: [] },
'win32',
)

View File

@@ -5,6 +5,7 @@ import memoize from 'lodash-es/memoize.js'
import { homedir } from 'os'
import * as path from 'path'
import { logEvent } from 'src/services/analytics/index.js'
import { fileURLToPath } from 'url'
import { isInBundledMode } from './bundledMode.js'
import { logForDebugging } from './debug.js'
import { isEnvDefinedFalsy } from './envUtils.js'
@@ -14,6 +15,13 @@ import { logError } from './log.js'
import { getPlatform } from './platform.js'
import { countCharInString } from './stringUtils.js'
const __filename = fileURLToPath(import.meta.url)
// we use node:path.join instead of node:url.resolve because the former doesn't encode spaces
const __dirname = path.join(
__filename,
process.env.NODE_ENV === 'test' ? '../../../' : '../',
)
type RipgrepConfig = {
mode: 'system' | 'builtin' | 'embedded'
command: string
@@ -27,31 +35,11 @@ function isErrnoException(error: unknown): error is NodeJS.ErrnoException {
return error instanceof Error
}
/**
* Returns the ripgrep binary path provided by the @vscode/ripgrep package.
* The package downloads a platform/arch-specific binary at npm install time
* (cached under the package's bin/ directory). Returns null when the package
* cannot be resolved — for example when running as a Bun-compiled standalone
* executable that doesn't ship node_modules.
*/
function resolveBuiltinRgPath(): string | null {
try {
// Lazy require so the resolution failure path stays graceful at import
// time. The package only exports `rgPath`, so we do not need the rest.
const mod = require('@vscode/ripgrep') as { rgPath?: string }
if (mod.rgPath && existsSync(mod.rgPath)) {
return mod.rgPath
}
} catch {
// Falls through to null — caller decides the fallback.
}
return null
}
type ResolveRipgrepConfigArgs = {
userWantsSystemRipgrep: boolean
bundledMode: boolean
builtinCommand: string | null
builtinCommand: string
builtinExists: boolean
systemExecutablePath: string
processExecPath?: string
}
@@ -60,6 +48,7 @@ export function resolveRipgrepConfig({
userWantsSystemRipgrep,
bundledMode,
builtinCommand,
builtinExists,
systemExecutablePath,
processExecPath = process.execPath,
}: ResolveRipgrepConfigArgs): RipgrepConfig {
@@ -77,7 +66,7 @@ export function resolveRipgrepConfig({
}
}
if (builtinCommand) {
if (builtinExists) {
return { mode: 'builtin', command: builtinCommand, args: [] }
}
@@ -85,9 +74,7 @@ export function resolveRipgrepConfig({
return { mode: 'system', command: 'rg', args: [] }
}
// Last resort — leaves error reporting to the executor when no binary
// can be located. wrapRipgrepUnavailableError() surfaces an install hint.
return { mode: 'system', command: 'rg', args: [] }
return { mode: 'builtin', command: builtinCommand, args: [] }
}
const getRipgrepConfig = memoize((): RipgrepConfig => {
@@ -95,13 +82,19 @@ const getRipgrepConfig = memoize((): RipgrepConfig => {
process.env.USE_BUILTIN_RIPGREP,
)
const bundledMode = isInBundledMode()
const builtinCommand = resolveBuiltinRgPath()
const rgRoot = path.resolve(__dirname, 'vendor', 'ripgrep')
const builtinCommand =
process.platform === 'win32'
? path.resolve(rgRoot, `${process.arch}-win32`, 'rg.exe')
: path.resolve(rgRoot, `${process.arch}-${process.platform}`, 'rg')
const builtinExists = existsSync(builtinCommand)
const { cmd: systemExecutablePath } = findExecutable('rg', [])
return resolveRipgrepConfig({
userWantsSystemRipgrep,
bundledMode,
builtinCommand,
builtinExists,
systemExecutablePath,
})
})