Compare commits
8 Commits
fix/theme-
...
feat/auto-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ff7eccc36c | ||
|
|
fbc838ce55 | ||
|
|
8c2d56844b | ||
|
|
6041b7f016 | ||
|
|
122f7b83f3 | ||
|
|
68230f3ffb | ||
|
|
832e80e535 | ||
|
|
93dc5a1554 |
90
.env.example
90
.env.example
@@ -248,93 +248,3 @@ ANTHROPIC_API_KEY=sk-ant-your-key-here
|
||||
|
||||
# Enable debug logging
|
||||
# CLAUDE_DEBUG=1
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# WEB SEARCH (OPTIONAL)
|
||||
# =============================================================================
|
||||
# OpenClaude includes a web search tool. By default it uses DuckDuckGo (free)
|
||||
# or the provider's native search (Anthropic firstParty / vertex).
|
||||
#
|
||||
# Set one API key below to enable a provider. That's it.
|
||||
|
||||
# ── Provider API keys — set ONE of these ────────────────────────────
|
||||
|
||||
# Tavily (AI-optimized search, recommended)
|
||||
# TAVILY_API_KEY=tvly-your-key-here
|
||||
|
||||
# Exa (neural/semantic search)
|
||||
# EXA_API_KEY=your-exa-key-here
|
||||
|
||||
# You.com (RAG-ready snippets)
|
||||
# YOU_API_KEY=your-you-key-here
|
||||
|
||||
# Jina (s.jina.ai endpoint)
|
||||
# JINA_API_KEY=your-jina-key-here
|
||||
|
||||
# Bing Web Search
|
||||
# BING_API_KEY=your-bing-key-here
|
||||
|
||||
# Mojeek (privacy-focused)
|
||||
# MOJEEK_API_KEY=your-mojeek-key-here
|
||||
|
||||
# Linkup
|
||||
# LINKUP_API_KEY=your-linkup-key-here
|
||||
|
||||
# Firecrawl (premium, uses @mendable/firecrawl-js)
|
||||
# FIRECRAWL_API_KEY=fc-your-key-here
|
||||
|
||||
# ── Provider selection mode ─────────────────────────────────────────
|
||||
#
|
||||
# WEB_SEARCH_PROVIDER controls fallback behavior:
|
||||
#
|
||||
# "auto" (default) — try all configured providers, fall through on failure
|
||||
# "custom" — custom API only, throw on failure (NOT in auto chain)
|
||||
# "firecrawl" — firecrawl only
|
||||
# "tavily" — tavily only
|
||||
# "exa" — exa only
|
||||
# "you" — you.com only
|
||||
# "jina" — jina only
|
||||
# "bing" — bing only
|
||||
# "mojeek" — mojeek only
|
||||
# "linkup" — linkup only
|
||||
# "ddg" — duckduckgo only
|
||||
# "native" — anthropic native / codex only
|
||||
#
|
||||
# Auto mode priority: firecrawl → tavily → exa → you → jina → bing → mojeek →
|
||||
# linkup → ddg
|
||||
# Note: "custom" is NOT in the auto chain. To use the custom API provider,
|
||||
# you must explicitly set WEB_SEARCH_PROVIDER=custom.
|
||||
#
|
||||
# WEB_SEARCH_PROVIDER=auto
|
||||
|
||||
# ── Built-in custom API presets ─────────────────────────────────────
|
||||
#
|
||||
# Use with WEB_KEY for the API key:
|
||||
# WEB_PROVIDER=searxng|google|brave|serpapi
|
||||
# WEB_KEY=your-api-key-here
|
||||
|
||||
# ── Custom API endpoint (advanced) ──────────────────────────────────
|
||||
#
|
||||
# WEB_SEARCH_API — base URL of your search endpoint
|
||||
# WEB_QUERY_PARAM — query parameter name (default: "q")
|
||||
# WEB_METHOD — GET or POST (default: GET)
|
||||
# WEB_PARAMS — extra static query params as JSON: {"lang":"en","count":"10"}
|
||||
# WEB_URL_TEMPLATE — URL template with {query} for path embedding
|
||||
# WEB_BODY_TEMPLATE — custom POST body with {query} placeholder
|
||||
# WEB_AUTH_HEADER — header name for API key (default: "Authorization")
|
||||
# WEB_AUTH_SCHEME — prefix before key (default: "Bearer")
|
||||
# WEB_HEADERS — extra headers as "Name: value; Name2: value2"
|
||||
# WEB_JSON_PATH — dot-path to results array in response
|
||||
|
||||
# ── Custom API security guardrails ──────────────────────────────────
|
||||
#
|
||||
# The custom provider enforces security guardrails by default.
|
||||
# Override these only if you understand the risks.
|
||||
#
|
||||
# WEB_CUSTOM_TIMEOUT_SEC=15 — request timeout in seconds (default 15)
|
||||
# WEB_CUSTOM_MAX_BODY_KB=300 — max POST body size in KB (default 300)
|
||||
# WEB_CUSTOM_ALLOW_ARBITRARY_HEADERS=false — set "true" to use non-standard headers
|
||||
# WEB_CUSTOM_ALLOW_HTTP=false — set "true" to allow http:// URLs
|
||||
# WEB_CUSTOM_ALLOW_PRIVATE=false — set "true" to target localhost/private IPs
|
||||
# (needed for self-hosted SearXNG)
|
||||
|
||||
@@ -137,9 +137,10 @@ export OPENAI_MODEL=llama-3.3-70b-versatile
|
||||
### Mistral
|
||||
|
||||
```bash
|
||||
export CLAUDE_CODE_USE_MISTRAL=1
|
||||
export MISTRAL_API_KEY=...
|
||||
export MISTRAL_MODEL=mistral-large-latest
|
||||
export CLAUDE_CODE_USE_OPENAI=1
|
||||
export OPENAI_API_KEY=...
|
||||
export OPENAI_BASE_URL=https://api.mistral.ai/v1
|
||||
export OPENAI_MODEL=mistral-large-latest
|
||||
```
|
||||
|
||||
### Azure OpenAI
|
||||
|
||||
@@ -112,14 +112,6 @@ def build_default_providers() -> list[Provider]:
|
||||
big_model=big if "gemini" in big else "gemini-2.5-pro",
|
||||
small_model=small if "gemini" in small else "gemini-2.0-flash",
|
||||
),
|
||||
Provider(
|
||||
name="mistral",
|
||||
ping_url="",
|
||||
api_key_env="MISTRAL_API_KEY",
|
||||
cost_per_1k_tokens=0.0001,
|
||||
big_model=big if "mistral" in big else "devstral-latest",
|
||||
small_model=small if "small" in small else "ministral-3b-latest",
|
||||
),
|
||||
Provider(
|
||||
name="ollama",
|
||||
ping_url=f"{ollama_url}/api/tags",
|
||||
|
||||
@@ -11,7 +11,6 @@ import {
|
||||
buildAtomicChatProfileEnv,
|
||||
buildCodexProfileEnv,
|
||||
buildGeminiProfileEnv,
|
||||
buildMistralProfileEnv,
|
||||
buildOllamaProfileEnv,
|
||||
buildOpenAIProfileEnv,
|
||||
createProfileFile,
|
||||
@@ -38,7 +37,7 @@ function parseArg(name: string): string | null {
|
||||
|
||||
function parseProviderArg(): ProviderProfile | 'auto' {
|
||||
const p = parseArg('--provider')?.toLowerCase()
|
||||
if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini' || p === 'mistral' || p === 'atomic-chat') return p
|
||||
if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini' || p === 'atomic-chat') return p
|
||||
return 'auto'
|
||||
}
|
||||
|
||||
@@ -91,21 +90,6 @@ async function main(): Promise<void> {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
env = builtEnv
|
||||
} else if (selected === 'mistral') {
|
||||
const builtEnv = buildMistralProfileEnv({
|
||||
model: argModel || null,
|
||||
baseUrl: argBaseUrl || null,
|
||||
apiKey: argApiKey || null,
|
||||
processEnv: process.env,
|
||||
})
|
||||
|
||||
if (!builtEnv) {
|
||||
console.error('Mistral profile requires an API key. Use --api-key or set MISTRAL_API_KEY.')
|
||||
console.error('Get a free key at: https://admin.mistral.ai/organization/api-keys')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
env = builtEnv
|
||||
} else if (selected === 'ollama') {
|
||||
resolvedOllamaModel ??= await resolveOllamaModel(argModel, argBaseUrl, goal)
|
||||
@@ -185,7 +169,7 @@ async function main(): Promise<void> {
|
||||
|
||||
console.log(`Saved profile: ${selected}`)
|
||||
console.log(`Goal: ${goal}`)
|
||||
console.log(`Model: ${profile.env.GEMINI_MODEL || profile.env.MISTRAL_MODEL || profile.env.OPENAI_MODEL || getGoalDefaultOpenAIModel(goal)}`)
|
||||
console.log(`Model: ${profile.env.GEMINI_MODEL || profile.env.OPENAI_MODEL || getGoalDefaultOpenAIModel(goal)}`)
|
||||
console.log(`Path: ${outputPath}`)
|
||||
console.log('Next: bun run dev:profile')
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ function parseLaunchOptions(argv: string[]): LaunchOptions {
|
||||
continue
|
||||
}
|
||||
|
||||
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex' || lower === 'gemini' || lower ==='mistral' || lower === 'atomic-chat') && requestedProfile === 'auto') {
|
||||
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex' || lower === 'gemini' || lower === 'atomic-chat') && requestedProfile === 'auto') {
|
||||
requestedProfile = lower as ProviderProfile | 'auto'
|
||||
continue
|
||||
}
|
||||
@@ -124,8 +124,6 @@ function printSummary(profile: ProviderProfile): void {
|
||||
console.log(`Launching profile: ${profile}`)
|
||||
if (profile === 'gemini') {
|
||||
console.log('Using configured Gemini provider settings.')
|
||||
} else if (profile === 'mistral') {
|
||||
console.log('Using configured Mistral provider settings.')
|
||||
} else if (profile === 'codex') {
|
||||
console.log('Using configured Codex/OpenAI-compatible provider settings.')
|
||||
} else if (profile === 'atomic-chat') {
|
||||
@@ -141,7 +139,7 @@ async function main(): Promise<void> {
|
||||
const options = parseLaunchOptions(process.argv.slice(2))
|
||||
const requestedProfile = options.requestedProfile
|
||||
if (!requestedProfile) {
|
||||
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|gemini|mistral|atomic-chat|mistral|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
|
||||
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|gemini|atomic-chat|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
@@ -207,11 +205,6 @@ async function main(): Promise<void> {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (profile === 'mistral' && !env.MISTRAL_API_KEY) {
|
||||
console.error('MISTRAL_API_KEY is required for mistral profile. Run: bun run profile:init -- --provider mistral --api-key <key>')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (profile === 'openai' && (!env.OPENAI_API_KEY || env.OPENAI_API_KEY === 'SUA_CHAVE')) {
|
||||
console.error('OPENAI_API_KEY is required for openai profile and cannot be SUA_CHAVE. Run: bun run profile:init -- --provider openai --api-key <key>')
|
||||
process.exit(1)
|
||||
|
||||
@@ -118,16 +118,12 @@ function isLocalBaseUrl(baseUrl: string): boolean {
|
||||
}
|
||||
|
||||
const GEMINI_DEFAULT_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai'
|
||||
const MISTRAL_DEFAULT_BASE_URL = 'https://api.mistral.ai/v1'
|
||||
const GITHUB_COPILOT_BASE = 'https://api.githubcopilot.com'
|
||||
|
||||
function currentBaseUrl(): string {
|
||||
if (isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
|
||||
return process.env.GEMINI_BASE_URL ?? GEMINI_DEFAULT_BASE_URL
|
||||
}
|
||||
if (isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)) {
|
||||
return process.env.MISTRAL_BASE_URL ?? MISTRAL_DEFAULT_BASE_URL
|
||||
}
|
||||
if (isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)) {
|
||||
return process.env.OPENAI_BASE_URL ?? GITHUB_COPILOT_BASE
|
||||
}
|
||||
@@ -159,31 +155,6 @@ function checkGeminiEnv(): CheckResult[] {
|
||||
return results
|
||||
}
|
||||
|
||||
function checkMistralEnv(): CheckResult[] {
|
||||
const results: CheckResult[] = []
|
||||
const model = process.env.MISTRAL_MODEL
|
||||
const key = process.env.MISTRAL_API_KEY
|
||||
const baseUrl = process.env.MISTRAL_BASE_URL ?? MISTRAL_DEFAULT_BASE_URL
|
||||
|
||||
results.push(pass('Provider mode', 'Mistral provider enabled.'))
|
||||
|
||||
if (!model) {
|
||||
results.push(pass('MISTRAL_MODEL', 'Not set. Default will be used at runtime.'))
|
||||
} else {
|
||||
results.push(pass('MISTRAL_MODEL', model))
|
||||
}
|
||||
|
||||
results.push(pass('MISTRAL_BASE_URL', baseUrl))
|
||||
|
||||
if (!key) {
|
||||
results.push(fail('MISTRAL_API_KEY', 'Missing. Set MISTRAL_API_KEY.'))
|
||||
} else {
|
||||
results.push(pass('MISTRAL_API_KEY', 'Configured.'))
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
function checkGithubEnv(): CheckResult[] {
|
||||
const results: CheckResult[] = []
|
||||
const baseUrl = process.env.OPENAI_BASE_URL ?? GITHUB_COPILOT_BASE
|
||||
@@ -215,17 +186,12 @@ function checkOpenAIEnv(): CheckResult[] {
|
||||
const results: CheckResult[] = []
|
||||
const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
|
||||
const useGithub = isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
const useMistral = isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
|
||||
|
||||
if (useGemini) {
|
||||
return checkGeminiEnv()
|
||||
}
|
||||
|
||||
if (useMistral) {
|
||||
return checkMistralEnv()
|
||||
}
|
||||
|
||||
if (useGithub && !useOpenAI) {
|
||||
return checkGithubEnv()
|
||||
}
|
||||
@@ -302,9 +268,8 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
|
||||
const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
|
||||
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
|
||||
const useGithub = isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
const useMistral = isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
|
||||
if (!useGemini && !useOpenAI && !useGithub && !useMistral) {
|
||||
if (!useGemini && !useOpenAI && !useGithub) {
|
||||
return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
|
||||
}
|
||||
|
||||
@@ -361,8 +326,6 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
|
||||
})
|
||||
} else if (useGemini && (process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY)) {
|
||||
headers.Authorization = `Bearer ${process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY}`
|
||||
} else if (useMistral && process.env.MISTRAL_API_KEY) {
|
||||
headers.Authorization = `Bearer ${process.env.MISTRAL_API_KEY}`
|
||||
} else if (process.env.OPENAI_API_KEY) {
|
||||
headers.Authorization = `Bearer ${process.env.OPENAI_API_KEY}`
|
||||
}
|
||||
@@ -410,8 +373,7 @@ function checkOllamaProcessorMode(): CheckResult {
|
||||
if (
|
||||
!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
|
||||
isTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
|
||||
isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
) {
|
||||
return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
|
||||
}
|
||||
@@ -463,14 +425,6 @@ function serializeSafeEnvSummary(): Record<string, string | boolean> {
|
||||
GEMINI_API_KEY_SET: Boolean(process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY),
|
||||
}
|
||||
}
|
||||
if (isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)) {
|
||||
return {
|
||||
CLAUDE_CODE_USE_MISTRAL: true,
|
||||
MISTRAL_MODEL: process.env.MISTRAL_MODEL ?? '(unset, default: devstral-latest)',
|
||||
MISTRAL_BASE_URL: process.env.MISTRAL_BASE_URL ?? 'https://api.mistral.ai/v1',
|
||||
MISTRAL_API_KEY_SET: Boolean(process.env.MISTRAL_API_KEY),
|
||||
}
|
||||
}
|
||||
if (
|
||||
isTruthy(process.env.CLAUDE_CODE_USE_GITHUB) &&
|
||||
!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
|
||||
|
||||
@@ -400,12 +400,12 @@ export async function update() {
|
||||
if (useLocalUpdate) {
|
||||
process.stderr.write('Try manually updating with:\n')
|
||||
process.stderr.write(
|
||||
` cd ~/.openclaude/local && npm update ${MACRO.PACKAGE_URL}\n`,
|
||||
` cd ~/.claude/local && npm update ${MACRO.PACKAGE_URL}\n`,
|
||||
)
|
||||
} else {
|
||||
process.stderr.write('Try running with sudo or fix npm permissions\n')
|
||||
process.stderr.write(
|
||||
'Or consider using native installation with: openclaude install\n',
|
||||
'Or consider using native installation with: claude install\n',
|
||||
)
|
||||
}
|
||||
await gracefulShutdown(1)
|
||||
@@ -415,11 +415,11 @@ export async function update() {
|
||||
if (useLocalUpdate) {
|
||||
process.stderr.write('Try manually updating with:\n')
|
||||
process.stderr.write(
|
||||
` cd ~/.openclaude/local && npm update ${MACRO.PACKAGE_URL}\n`,
|
||||
` cd ~/.claude/local && npm update ${MACRO.PACKAGE_URL}\n`,
|
||||
)
|
||||
} else {
|
||||
process.stderr.write(
|
||||
'Or consider using native installation with: openclaude install\n',
|
||||
'Or consider using native installation with: claude install\n',
|
||||
)
|
||||
}
|
||||
await gracefulShutdown(1)
|
||||
|
||||
@@ -32,7 +32,6 @@ import logout from './commands/logout/index.js'
|
||||
import installGitHubApp from './commands/install-github-app/index.js'
|
||||
import installSlackApp from './commands/install-slack-app/index.js'
|
||||
import breakCache from './commands/break-cache/index.js'
|
||||
import cacheProbe from './commands/cache-probe/index.js'
|
||||
import mcp from './commands/mcp/index.js'
|
||||
import mobile from './commands/mobile/index.js'
|
||||
import onboarding from './commands/onboarding/index.js'
|
||||
@@ -145,7 +144,6 @@ import heapDump from './commands/heapdump/index.js'
|
||||
import mockLimits from './commands/mock-limits/index.js'
|
||||
import bridgeKick from './commands/bridge-kick.js'
|
||||
import version from './commands/version.js'
|
||||
import wiki from './commands/wiki/index.js'
|
||||
import summary from './commands/summary/index.js'
|
||||
import {
|
||||
resetLimits,
|
||||
@@ -269,7 +267,6 @@ const COMMANDS = memoize((): Command[] => [
|
||||
autoFix,
|
||||
branch,
|
||||
btw,
|
||||
cacheProbe,
|
||||
chrome,
|
||||
clear,
|
||||
color,
|
||||
@@ -329,7 +326,6 @@ const COMMANDS = memoize((): Command[] => [
|
||||
usage,
|
||||
usageReport,
|
||||
vim,
|
||||
wiki,
|
||||
...(webCmd ? [webCmd] : []),
|
||||
...(forkCmd ? [forkCmd] : []),
|
||||
...(buddy ? [buddy] : []),
|
||||
|
||||
@@ -1,413 +0,0 @@
|
||||
import { getSessionId } from '../../bootstrap/state.js'
|
||||
import { resolveProviderRequest } from '../../services/api/providerConfig.js'
|
||||
import type { LocalCommandCall } from '../../types/command.js'
|
||||
import { logForDebugging } from '../../utils/debug.js'
|
||||
import { isEnvTruthy } from '../../utils/envUtils.js'
|
||||
import { hydrateGithubModelsTokenFromSecureStorage } from '../../utils/githubModelsCredentials.js'
|
||||
import { getMainLoopModel } from '../../utils/model/model.js'
|
||||
|
||||
const COPILOT_HEADERS: Record<string, string> = {
|
||||
'User-Agent': 'GitHubCopilotChat/0.26.7',
|
||||
'Editor-Version': 'vscode/1.99.3',
|
||||
'Editor-Plugin-Version': 'copilot-chat/0.26.7',
|
||||
'Copilot-Integration-Id': 'vscode-chat',
|
||||
}
|
||||
|
||||
// Large system prompt (~6000 chars, ~1500 tokens) to cross the 1024-token cache threshold
|
||||
const SYSTEM_PROMPT = [
|
||||
'You are a coding assistant. Answer concisely.',
|
||||
'CONTEXT: User is working on a TypeScript project with Bun runtime.',
|
||||
...Array.from(
|
||||
{ length: 80 },
|
||||
(_, i) =>
|
||||
`Rule ${i + 1}: Follow best practices for TypeScript including strict typing, error handling, testing, and clean code. Prefer explicit types over any. Use const assertions. Await all async operations.`,
|
||||
),
|
||||
].join('\n\n')
|
||||
|
||||
const USER_MESSAGE = 'Say "hello" and nothing else.'
|
||||
const DELAY_MS = 3000
|
||||
|
||||
/**
|
||||
* Extract model family from a versioned model string.
|
||||
* e.g. "gpt-5.4-0626" → "gpt-5.4", "codex-mini-latest" → "codex-mini"
|
||||
*/
|
||||
function getModelFamily(model: string | undefined): string {
|
||||
if (!model) return 'unknown'
|
||||
return model
|
||||
.replace(/-\d{4,}$/, '')
|
||||
.replace(/-latest$/, '')
|
||||
.replace(/-preview$/, '')
|
||||
}
|
||||
|
||||
function getField(obj: unknown, path: string): unknown {
|
||||
return path
|
||||
.split('.')
|
||||
.reduce((o: any, k: string) => (o != null ? o[k] : undefined), obj)
|
||||
}
|
||||
|
||||
interface ProbeResult {
|
||||
label: string
|
||||
status: number
|
||||
elapsed: number
|
||||
headers: Record<string, string>
|
||||
usage: Record<string, unknown> | null
|
||||
responseId: string | null
|
||||
error: string | null
|
||||
}
|
||||
|
||||
async function sendProbe(
|
||||
url: string,
|
||||
headers: Record<string, string>,
|
||||
body: Record<string, unknown>,
|
||||
label: string,
|
||||
): Promise<ProbeResult> {
|
||||
const start = Date.now()
|
||||
let response: Response
|
||||
try {
|
||||
response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify(body),
|
||||
})
|
||||
} catch (err: any) {
|
||||
return {
|
||||
label,
|
||||
status: 0,
|
||||
elapsed: Date.now() - start,
|
||||
headers: {},
|
||||
usage: null,
|
||||
responseId: null,
|
||||
error: err.message,
|
||||
}
|
||||
}
|
||||
const elapsed = Date.now() - start
|
||||
|
||||
const respHeaders: Record<string, string> = {}
|
||||
response.headers.forEach((value, key) => {
|
||||
respHeaders[key] = value
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorBody = await response.text().catch(() => '')
|
||||
return {
|
||||
label,
|
||||
status: response.status,
|
||||
elapsed,
|
||||
headers: respHeaders,
|
||||
usage: null,
|
||||
responseId: null,
|
||||
error: errorBody,
|
||||
}
|
||||
}
|
||||
|
||||
// Parse SSE stream for usage data
|
||||
const text = await response.text()
|
||||
let usage: Record<string, unknown> | null = null
|
||||
let responseId: string | null = null
|
||||
|
||||
const isResponses = url.endsWith('/responses')
|
||||
for (const chunk of text.split('\n\n')) {
|
||||
const lines = chunk
|
||||
.split('\n')
|
||||
.map((l) => l.trim())
|
||||
.filter(Boolean)
|
||||
|
||||
if (isResponses) {
|
||||
const eventLine = lines.find((l) => l.startsWith('event: '))
|
||||
const dataLines = lines.filter((l) => l.startsWith('data: '))
|
||||
if (!eventLine || !dataLines.length) continue
|
||||
const event = eventLine.slice(7).trim()
|
||||
if (
|
||||
event === 'response.completed' ||
|
||||
event === 'response.incomplete'
|
||||
) {
|
||||
try {
|
||||
const data = JSON.parse(
|
||||
dataLines.map((l) => l.slice(6)).join('\n'),
|
||||
)
|
||||
usage = (data?.response?.usage as Record<string, unknown>) ?? null
|
||||
responseId = (data?.response?.id as string) ?? null
|
||||
} catch {}
|
||||
}
|
||||
} else {
|
||||
for (const line of lines) {
|
||||
if (!line.startsWith('data: ')) continue
|
||||
const raw = line.slice(6).trim()
|
||||
if (raw === '[DONE]') continue
|
||||
try {
|
||||
const data = JSON.parse(raw) as Record<string, unknown>
|
||||
if (data.usage) {
|
||||
usage = data.usage as Record<string, unknown>
|
||||
responseId = (data.id as string) ?? null
|
||||
}
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { label, status: response.status, elapsed, headers: respHeaders, usage, responseId, error: null }
|
||||
}
|
||||
|
||||
function formatResult(r: ProbeResult): string {
|
||||
const lines: string[] = [`--- ${r.label} ---`]
|
||||
if (r.error) {
|
||||
lines.push(` ERROR (HTTP ${r.status}): ${r.error.slice(0, 200)}`)
|
||||
return lines.join('\n')
|
||||
}
|
||||
lines.push(` HTTP ${r.status} — ${r.elapsed}ms`)
|
||||
if (r.responseId) lines.push(` response.id: ${r.responseId}`)
|
||||
|
||||
if (r.usage) {
|
||||
lines.push(' Usage:')
|
||||
lines.push(` ${JSON.stringify(r.usage, null, 2).replace(/\n/g, '\n ')}`)
|
||||
} else {
|
||||
lines.push(' Usage: null')
|
||||
}
|
||||
|
||||
// Interesting headers
|
||||
for (const h of [
|
||||
'openai-processing-ms',
|
||||
'x-ratelimit-remaining',
|
||||
'x-ratelimit-limit',
|
||||
'x-ms-region',
|
||||
'x-github-request-id',
|
||||
'x-request-id',
|
||||
]) {
|
||||
if (r.headers[h]) lines.push(` ${h}: ${r.headers[h]}`)
|
||||
}
|
||||
return lines.join('\n')
|
||||
}
|
||||
|
||||
export const call: LocalCommandCall = async (args) => {
|
||||
const parts = (args ?? '').trim().split(/\s+/).filter(Boolean)
|
||||
const noKey = parts.includes('--no-key')
|
||||
const modelOverride = parts.find((p) => !p.startsWith('--')) || undefined
|
||||
const modelStr = modelOverride ?? getMainLoopModel()
|
||||
const request = resolveProviderRequest({ model: modelStr })
|
||||
const isGithub = isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
|
||||
// Resolve API key the same way the OpenAI shim does
|
||||
let apiKey = process.env.OPENAI_API_KEY ?? ''
|
||||
if (!apiKey && isGithub) {
|
||||
hydrateGithubModelsTokenFromSecureStorage()
|
||||
apiKey =
|
||||
process.env.OPENAI_API_KEY ??
|
||||
process.env.GITHUB_TOKEN ??
|
||||
process.env.GH_TOKEN ??
|
||||
''
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
return {
|
||||
type: 'text',
|
||||
value:
|
||||
'No API key found. Make sure you are in an active OpenAI-compatible or GitHub Copilot session.\n' +
|
||||
'For GitHub Copilot: run /onboard-github first.\n' +
|
||||
'For OpenAI-compatible: set OPENAI_API_KEY.',
|
||||
}
|
||||
}
|
||||
|
||||
const useResponses = request.transport === 'codex_responses'
|
||||
const endpoint = useResponses ? '/responses' : '/chat/completions'
|
||||
const url = `${request.baseUrl}${endpoint}`
|
||||
const family = getModelFamily(request.resolvedModel)
|
||||
const cacheKey = `${getSessionId()}:${family}`
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
originator: 'openclaude',
|
||||
}
|
||||
if (isGithub) {
|
||||
Object.assign(headers, COPILOT_HEADERS)
|
||||
}
|
||||
|
||||
let body: Record<string, unknown>
|
||||
if (useResponses) {
|
||||
body = {
|
||||
model: request.resolvedModel,
|
||||
instructions: SYSTEM_PROMPT,
|
||||
input: [
|
||||
{
|
||||
type: 'message',
|
||||
role: 'user',
|
||||
content: [{ type: 'input_text', text: USER_MESSAGE }],
|
||||
},
|
||||
],
|
||||
stream: true,
|
||||
...(noKey ? {} : {
|
||||
store: false,
|
||||
prompt_cache_key: cacheKey,
|
||||
prompt_cache_retention: '24h',
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
body = {
|
||||
model: request.resolvedModel,
|
||||
messages: [
|
||||
{ role: 'system', content: SYSTEM_PROMPT },
|
||||
{ role: 'user', content: USER_MESSAGE },
|
||||
],
|
||||
stream: true,
|
||||
stream_options: { include_usage: true },
|
||||
max_tokens: 20,
|
||||
...(noKey ? {} : {
|
||||
store: false,
|
||||
prompt_cache_key: cacheKey,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
// Log configuration
|
||||
const config = [
|
||||
`[cache-probe] Starting cache probe${noKey ? ' (--no-key: cache params OMITTED)' : ''}`,
|
||||
` model: ${request.resolvedModel} (family: ${family})`,
|
||||
` transport: ${request.transport}`,
|
||||
` endpoint: ${url}`,
|
||||
` prompt_cache_key: ${noKey ? 'NOT SENT' : cacheKey}`,
|
||||
` store: ${noKey ? 'NOT SENT' : 'false'}`,
|
||||
` system prompt: ~${Math.round(SYSTEM_PROMPT.length / 4)} tokens`,
|
||||
` delay between calls: ${DELAY_MS}ms`,
|
||||
].join('\n')
|
||||
logForDebugging(config)
|
||||
|
||||
// Call 1 — Cold
|
||||
const r1 = await sendProbe(url, headers, body, 'CALL 1 — Cold (no cache)')
|
||||
logForDebugging(`[cache-probe]\n${formatResult(r1)}`)
|
||||
|
||||
if (r1.error) {
|
||||
return {
|
||||
type: 'text',
|
||||
value: `Cache probe failed on first call: HTTP ${r1.status}\n${r1.error.slice(0, 300)}\n\nFull details in debug log.`,
|
||||
}
|
||||
}
|
||||
|
||||
// Wait
|
||||
await new Promise((r) => setTimeout(r, DELAY_MS))
|
||||
|
||||
// Call 2 — Warm
|
||||
const r2 = await sendProbe(url, headers, body, 'CALL 2 — Warm (cache expected)')
|
||||
logForDebugging(`[cache-probe]\n${formatResult(r2)}`)
|
||||
|
||||
// --- Comparison ---
|
||||
const fields = [
|
||||
'input_tokens',
|
||||
'output_tokens',
|
||||
'total_tokens',
|
||||
'prompt_tokens',
|
||||
'completion_tokens',
|
||||
'input_tokens_details.cached_tokens',
|
||||
'prompt_tokens_details.cached_tokens',
|
||||
'output_tokens_details.reasoning_tokens',
|
||||
]
|
||||
|
||||
const comparison: string[] = ['[cache-probe] COMPARISON']
|
||||
comparison.push(
|
||||
` ${'Field'.padEnd(42)} ${'Call 1'.padStart(8)} ${'Call 2'.padStart(8)} ${'Delta'.padStart(8)}`,
|
||||
)
|
||||
comparison.push(` ${'-'.repeat(72)}`)
|
||||
|
||||
for (const f of fields) {
|
||||
const v1 = getField(r1.usage, f)
|
||||
const v2 = getField(r2.usage, f)
|
||||
if (v1 === undefined && v2 === undefined) continue
|
||||
const d =
|
||||
typeof v1 === 'number' && typeof v2 === 'number' ? v2 - v1 : ''
|
||||
comparison.push(
|
||||
` ${f.padEnd(42)} ${String(v1 ?? '-').padStart(8)} ${String(v2 ?? '-').padStart(8)} ${String(d).padStart(8)}`,
|
||||
)
|
||||
}
|
||||
|
||||
comparison.push('')
|
||||
comparison.push(
|
||||
` Latency: ${r1.elapsed}ms → ${r2.elapsed}ms (${r2.elapsed - r1.elapsed > 0 ? '+' : ''}${r2.elapsed - r1.elapsed}ms)`,
|
||||
)
|
||||
|
||||
// Header comparison
|
||||
for (const h of ['openai-processing-ms', 'x-ms-region', 'x-ratelimit-remaining']) {
|
||||
const v1 = r1.headers[h]
|
||||
const v2 = r2.headers[h]
|
||||
if (v1 || v2) {
|
||||
comparison.push(` ${h}: ${v1 ?? '-'} → ${v2 ?? '-'}`)
|
||||
}
|
||||
}
|
||||
|
||||
// Verdict
|
||||
const cached2 =
|
||||
(getField(r2.usage, 'input_tokens_details.cached_tokens') as number) ??
|
||||
(getField(r2.usage, 'prompt_tokens_details.cached_tokens') as number) ??
|
||||
0
|
||||
const input1 =
|
||||
((r1.usage?.input_tokens ?? r1.usage?.prompt_tokens) as number) ?? 0
|
||||
const input2 =
|
||||
((r2.usage?.input_tokens ?? r2.usage?.prompt_tokens) as number) ?? 0
|
||||
|
||||
let verdict: string
|
||||
if (cached2 > 0) {
|
||||
const rate = input2 > 0 ? Math.round((cached2 / input2) * 100) : '?'
|
||||
verdict = `CACHE HIT: ${cached2} cached tokens (${rate}% of input)`
|
||||
} else if (input1 === 0 && input2 === 0) {
|
||||
verdict = 'INCONCLUSIVE: Server returns 0 input_tokens — cannot measure'
|
||||
} else if (r2.elapsed < r1.elapsed * 0.6 && input1 > 100) {
|
||||
verdict = `POSSIBLE SILENT CACHING: Call 2 was ${Math.round((1 - r2.elapsed / r1.elapsed) * 100)}% faster but no cached_tokens reported`
|
||||
} else {
|
||||
verdict = 'NO CACHE DETECTED'
|
||||
}
|
||||
|
||||
comparison.push(`\n Verdict: ${verdict}`)
|
||||
|
||||
// --- Simulate what main's shim code does with this usage ---
|
||||
// codexShim.ts makeUsage() — used for Responses API (GPT-5+/Codex)
|
||||
function mainMakeUsage(u: any) {
|
||||
return {
|
||||
input_tokens: u?.input_tokens ?? 0,
|
||||
output_tokens: u?.output_tokens ?? 0,
|
||||
cache_creation_input_tokens: 0,
|
||||
cache_read_input_tokens: 0, // ← main hardcodes this to 0
|
||||
}
|
||||
}
|
||||
// openaiShim.ts convertChunkUsage() — used for Chat Completions
|
||||
function mainConvertChunkUsage(u: any) {
|
||||
return {
|
||||
input_tokens: u?.prompt_tokens ?? 0,
|
||||
output_tokens: u?.completion_tokens ?? 0,
|
||||
cache_creation_input_tokens: 0,
|
||||
cache_read_input_tokens: u?.prompt_tokens_details?.cached_tokens ?? 0,
|
||||
}
|
||||
}
|
||||
|
||||
const shimFn = useResponses ? mainMakeUsage : mainConvertChunkUsage
|
||||
const shim1 = shimFn(r1.usage)
|
||||
const shim2 = shimFn(r2.usage)
|
||||
|
||||
comparison.push('')
|
||||
comparison.push(` --- What main's shim reports (${useResponses ? 'codexShim.makeUsage' : 'openaiShim.convertChunkUsage'}) ---`)
|
||||
comparison.push(` Call 1: cache_read_input_tokens=${shim1.cache_read_input_tokens}`)
|
||||
comparison.push(` Call 2: cache_read_input_tokens=${shim2.cache_read_input_tokens}`)
|
||||
if (useResponses && cached2 > 0) {
|
||||
comparison.push(` BUG: Server returned ${cached2} cached tokens but main's makeUsage() drops it → reports 0`)
|
||||
} else if (!useResponses && shim2.cache_read_input_tokens > 0) {
|
||||
comparison.push(` OK: Chat Completions path on main correctly reads cached_tokens`)
|
||||
}
|
||||
|
||||
logForDebugging(comparison.join('\n'))
|
||||
|
||||
// User-facing summary
|
||||
const mode = noKey ? ' (NO cache key sent)' : ''
|
||||
const shimLabel = useResponses ? 'codexShim.makeUsage()' : 'openaiShim.convertChunkUsage()'
|
||||
const summary = [
|
||||
`Cache Probe — ${request.resolvedModel} via ${useResponses ? 'Responses API' : 'Chat Completions'}${mode}`,
|
||||
'',
|
||||
`Call 1: ${r1.elapsed}ms, input=${input1}, cached=${(getField(r1.usage, 'input_tokens_details.cached_tokens') as number) ?? (getField(r1.usage, 'prompt_tokens_details.cached_tokens') as number) ?? 0}`,
|
||||
`Call 2: ${r2.elapsed}ms, input=${input2}, cached=${cached2}`,
|
||||
'',
|
||||
verdict,
|
||||
'',
|
||||
`What main's ${shimLabel} reports:`,
|
||||
` Call 2 cache_read_input_tokens = ${shim2.cache_read_input_tokens}${useResponses && cached2 > 0 ? ' ← BUG: server sent ' + cached2 + ' but main drops it' : ''}`,
|
||||
'',
|
||||
'Full details written to debug log.',
|
||||
].join('\n')
|
||||
|
||||
return { type: 'text', value: summary }
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
import type { Command } from '../../commands.js'
|
||||
import { isEnvTruthy } from '../../utils/envUtils.js'
|
||||
|
||||
const cacheProbe: Command = {
|
||||
type: 'local',
|
||||
name: 'cache-probe',
|
||||
description:
|
||||
'Send identical requests to test prompt caching (results in debug log)',
|
||||
argumentHint: '[model] [--no-key]',
|
||||
isEnabled: () =>
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB),
|
||||
supportsNonInteractive: false,
|
||||
load: () => import('./cache-probe.js'),
|
||||
}
|
||||
|
||||
export default cacheProbe
|
||||
@@ -39,16 +39,16 @@ type InstallState = {
|
||||
message: string;
|
||||
warnings?: string[];
|
||||
};
|
||||
export function getInstallationPath(): string {
|
||||
function getInstallationPath(): string {
|
||||
const isWindows = env.platform === 'win32';
|
||||
const homeDir = homedir();
|
||||
if (isWindows) {
|
||||
// Convert to Windows-style path
|
||||
const windowsPath = join(homeDir, '.local', 'bin', 'openclaude.exe');
|
||||
const windowsPath = join(homeDir, '.local', 'bin', 'claude.exe');
|
||||
// Replace forward slashes with backslashes for Windows display
|
||||
return windowsPath.replace(/\//g, '\\');
|
||||
}
|
||||
return '~/.local/bin/openclaude';
|
||||
return '~/.local/bin/claude';
|
||||
}
|
||||
function SetupNotes(t0) {
|
||||
const $ = _c(5);
|
||||
|
||||
@@ -1,44 +1,20 @@
|
||||
import { afterEach, expect, mock, test } from 'bun:test'
|
||||
|
||||
import { getAdditionalModelOptionsCacheScope } from '../../services/api/providerConfig.js'
|
||||
import { getAPIProvider } from '../../utils/model/providers.js'
|
||||
|
||||
const originalEnv = {
|
||||
CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
|
||||
CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
|
||||
CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
|
||||
CLAUDE_CODE_USE_MISTRAL: process.env.CLAUDE_CODE_USE_MISTRAL,
|
||||
CLAUDE_CODE_USE_BEDROCK: process.env.CLAUDE_CODE_USE_BEDROCK,
|
||||
CLAUDE_CODE_USE_VERTEX: process.env.CLAUDE_CODE_USE_VERTEX,
|
||||
CLAUDE_CODE_USE_FOUNDRY: process.env.CLAUDE_CODE_USE_FOUNDRY,
|
||||
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
|
||||
OPENAI_API_BASE: process.env.OPENAI_API_BASE,
|
||||
OPENAI_MODEL: process.env.OPENAI_MODEL,
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
mock.restore()
|
||||
process.env.CLAUDE_CODE_USE_OPENAI = originalEnv.CLAUDE_CODE_USE_OPENAI
|
||||
process.env.CLAUDE_CODE_USE_GEMINI = originalEnv.CLAUDE_CODE_USE_GEMINI
|
||||
process.env.CLAUDE_CODE_USE_GITHUB = originalEnv.CLAUDE_CODE_USE_GITHUB
|
||||
process.env.CLAUDE_CODE_USE_MISTRAL = originalEnv.CLAUDE_CODE_USE_MISTRAL
|
||||
process.env.CLAUDE_CODE_USE_BEDROCK = originalEnv.CLAUDE_CODE_USE_BEDROCK
|
||||
process.env.CLAUDE_CODE_USE_VERTEX = originalEnv.CLAUDE_CODE_USE_VERTEX
|
||||
process.env.CLAUDE_CODE_USE_FOUNDRY = originalEnv.CLAUDE_CODE_USE_FOUNDRY
|
||||
process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL
|
||||
process.env.OPENAI_API_BASE = originalEnv.OPENAI_API_BASE
|
||||
process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL
|
||||
})
|
||||
|
||||
test('opens the model picker without awaiting local model discovery refresh', async () => {
|
||||
process.env.CLAUDE_CODE_USE_OPENAI = '1'
|
||||
delete process.env.CLAUDE_CODE_USE_GEMINI
|
||||
delete process.env.CLAUDE_CODE_USE_GITHUB
|
||||
delete process.env.CLAUDE_CODE_USE_MISTRAL
|
||||
delete process.env.CLAUDE_CODE_USE_BEDROCK
|
||||
delete process.env.CLAUDE_CODE_USE_VERTEX
|
||||
delete process.env.CLAUDE_CODE_USE_FOUNDRY
|
||||
delete process.env.OPENAI_API_BASE
|
||||
process.env.OPENAI_BASE_URL = 'http://127.0.0.1:8080/v1'
|
||||
process.env.OPENAI_MODEL = 'qwen2.5-coder-7b-instruct'
|
||||
|
||||
@@ -54,9 +30,7 @@ test('opens the model picker without awaiting local model discovery refresh', as
|
||||
discoverOpenAICompatibleModelOptions,
|
||||
}))
|
||||
|
||||
expect(getAdditionalModelOptionsCacheScope()).toBe('openai:http://127.0.0.1:8080/v1')
|
||||
|
||||
const { call } = await import('./model.js')
|
||||
const { call } = await import(`./model.js?ts=${Date.now()}-${Math.random()}`)
|
||||
const result = await Promise.race([
|
||||
call(() => {}, {} as never, ''),
|
||||
new Promise(resolve => setTimeout(() => resolve('timeout'), 50)),
|
||||
|
||||
@@ -284,7 +284,7 @@ function haveSameModelOptions(left: ModelOption[], right: ModelOption[]): boolea
|
||||
});
|
||||
}
|
||||
async function refreshOpenAIModelOptionsCache(): Promise<void> {
|
||||
if (!getAdditionalModelOptionsCacheScope()?.startsWith('openai:')) {
|
||||
if (getAPIProvider() !== 'openai') {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
|
||||
@@ -22,14 +22,11 @@ import {
|
||||
import {
|
||||
buildCodexProfileEnv,
|
||||
buildGeminiProfileEnv,
|
||||
buildMistralProfileEnv,
|
||||
buildOllamaProfileEnv,
|
||||
buildOpenAIProfileEnv,
|
||||
createProfileFile,
|
||||
DEFAULT_GEMINI_BASE_URL,
|
||||
DEFAULT_GEMINI_MODEL,
|
||||
DEFAULT_MISTRAL_BASE_URL,
|
||||
DEFAULT_MISTRAL_MODEL,
|
||||
deleteProfileFile,
|
||||
loadProfileFile,
|
||||
maskSecretForDisplay,
|
||||
@@ -77,14 +74,6 @@ type Step =
|
||||
baseUrl: string | null
|
||||
defaultModel: string
|
||||
}
|
||||
| { name: 'mistral-key'; defaultModel: string }
|
||||
| { name: 'mistral-base'; apiKey: string; defaultModel: string }
|
||||
| {
|
||||
name: 'mistral-model'
|
||||
apiKey: string
|
||||
baseUrl: string | null
|
||||
defaultModel: string
|
||||
}
|
||||
| { name: 'gemini-auth-method' }
|
||||
| { name: 'gemini-key' }
|
||||
| { name: 'gemini-access-token' }
|
||||
@@ -127,8 +116,6 @@ type ProviderWizardDefaults = {
|
||||
openAIModel: string
|
||||
openAIBaseUrl: string
|
||||
geminiModel: string
|
||||
mistralModel: string
|
||||
mistralBaseUrl: string
|
||||
}
|
||||
|
||||
function isEnvTruthy(value: string | undefined): boolean {
|
||||
@@ -160,19 +147,11 @@ export function getProviderWizardDefaults(
|
||||
const safeGeminiModel =
|
||||
sanitizeProviderConfigValue(processEnv.GEMINI_MODEL, processEnv) ||
|
||||
DEFAULT_GEMINI_MODEL
|
||||
const safeMistralModel =
|
||||
sanitizeProviderConfigValue(processEnv.MISTRAL_MODEL, processEnv) ||
|
||||
DEFAULT_MISTRAL_MODEL
|
||||
const safeMistralBaseUrl =
|
||||
sanitizeProviderConfigValue(processEnv.MISTRAL_BASE_URL, processEnv) ||
|
||||
DEFAULT_MISTRAL_BASE_URL
|
||||
|
||||
return {
|
||||
openAIModel: safeOpenAIModel,
|
||||
openAIBaseUrl: safeOpenAIBaseUrl,
|
||||
geminiModel: safeGeminiModel,
|
||||
mistralModel: safeMistralModel,
|
||||
mistralBaseUrl: safeMistralBaseUrl,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -199,21 +178,6 @@ export function buildCurrentProviderSummary(options?: {
|
||||
}
|
||||
}
|
||||
|
||||
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_MISTRAL)) {
|
||||
return {
|
||||
providerLabel: 'Mistral',
|
||||
modelLabel: getSafeDisplayValue(
|
||||
processEnv.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
|
||||
processEnv
|
||||
),
|
||||
endpointLabel: getSafeDisplayValue(
|
||||
processEnv.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
|
||||
processEnv
|
||||
),
|
||||
savedProfileLabel,
|
||||
}
|
||||
}
|
||||
|
||||
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB)) {
|
||||
return {
|
||||
providerLabel: 'GitHub Models',
|
||||
@@ -295,24 +259,6 @@ function buildSavedProfileSummary(
|
||||
? 'configured'
|
||||
: undefined,
|
||||
}
|
||||
case 'mistral':
|
||||
return {
|
||||
providerLabel: 'Mistral',
|
||||
modelLabel: getSafeDisplayValue(
|
||||
env.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
|
||||
process.env,
|
||||
env,
|
||||
),
|
||||
endpointLabel: getSafeDisplayValue(
|
||||
env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
|
||||
process.env,
|
||||
env,
|
||||
),
|
||||
credentialLabel:
|
||||
maskSecretForDisplay(env.MISTRAL_API_KEY) !== undefined
|
||||
? 'configured'
|
||||
: undefined,
|
||||
}
|
||||
case 'codex':
|
||||
return {
|
||||
providerLabel: 'Codex',
|
||||
@@ -527,11 +473,6 @@ function ProviderChooser({
|
||||
value: 'gemini',
|
||||
description: 'Use Google Gemini with API key, access token, or local ADC',
|
||||
},
|
||||
{
|
||||
label: 'Mistral',
|
||||
value: 'mistral',
|
||||
description: 'Use Mistral with API key'
|
||||
},
|
||||
{
|
||||
label: 'Codex',
|
||||
value: 'codex',
|
||||
@@ -1030,11 +971,6 @@ export function ProviderWizard({
|
||||
})
|
||||
} else if (value === 'gemini') {
|
||||
setStep({ name: 'gemini-auth-method' })
|
||||
} else if (value === 'mistral') {
|
||||
setStep({
|
||||
name: 'mistral-key',
|
||||
defaultModel: defaults.mistralModel,
|
||||
})
|
||||
} else if (value === 'clear') {
|
||||
const filePath = deleteProfileFile()
|
||||
onDone(`Removed saved provider profile at ${filePath}. Restart OpenClaude to go back to normal startup.`, {
|
||||
@@ -1174,101 +1110,6 @@ export function ProviderWizard({
|
||||
/>
|
||||
)
|
||||
|
||||
case 'mistral-key':
|
||||
return (
|
||||
<TextEntryDialog
|
||||
resetStateKey={step.name}
|
||||
title="Mistral setup"
|
||||
subtitle="Step 1 of 3"
|
||||
description={
|
||||
process.env.MISTRAL_API_KEY
|
||||
? 'Enter an API key, or leave this blank to reuse the current MISTRAL_API_KEY from this session.'
|
||||
: 'Enter the API key for your Mistral provider.'
|
||||
}
|
||||
initialValue=""
|
||||
placeholder="..."
|
||||
mask="*"
|
||||
allowEmpty={Boolean(process.env.MISTRAL_API_KEY)}
|
||||
validate={value => {
|
||||
const candidate = value.trim() || process.env.MISTRAL_API_KEY || ''
|
||||
return sanitizeApiKey(candidate)
|
||||
? null
|
||||
: 'Enter a real API key. Placeholder values like SUA_CHAVE are not valid.'
|
||||
}}
|
||||
onSubmit={value => {
|
||||
const apiKey = value.trim() || process.env.MISTRAL_API_KEY || ''
|
||||
setStep({
|
||||
name: 'mistral-base',
|
||||
apiKey,
|
||||
defaultModel: step.defaultModel,
|
||||
})
|
||||
}}
|
||||
onCancel={() => setStep({ name: 'choose' })}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'mistral-base':
|
||||
return (
|
||||
<TextEntryDialog
|
||||
resetStateKey={step.name}
|
||||
title="Mistral setup"
|
||||
subtitle="Step 2 of 3"
|
||||
description={`Optionally enter a base URL. Leave blank for ${DEFAULT_MISTRAL_BASE_URL}.`}
|
||||
initialValue={
|
||||
defaults.mistralBaseUrl === DEFAULT_MISTRAL_BASE_URL
|
||||
? ''
|
||||
: defaults.mistralBaseUrl
|
||||
}
|
||||
placeholder={DEFAULT_MISTRAL_BASE_URL}
|
||||
allowEmpty
|
||||
onSubmit={value => {
|
||||
setStep({
|
||||
name: 'mistral-model',
|
||||
apiKey: step.apiKey,
|
||||
baseUrl: value.trim() || null,
|
||||
defaultModel: step.defaultModel,
|
||||
})
|
||||
}}
|
||||
onCancel={() =>
|
||||
setStep({
|
||||
name: 'mistral-key',
|
||||
defaultModel: step.defaultModel,
|
||||
})
|
||||
}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'mistral-model':
|
||||
return (
|
||||
<TextEntryDialog
|
||||
resetStateKey={step.name}
|
||||
title="Mistral setup"
|
||||
subtitle="Step 3 of 3"
|
||||
description={`Enter a model name. Leave blank for ${step.defaultModel}.`}
|
||||
initialValue={defaults.mistralModel ?? step.defaultModel}
|
||||
placeholder={step.defaultModel}
|
||||
allowEmpty
|
||||
onSubmit={value => {
|
||||
const env = buildMistralProfileEnv({
|
||||
model: value.trim() || step.defaultModel,
|
||||
baseUrl: step.baseUrl,
|
||||
apiKey: step.apiKey,
|
||||
processEnv: process.env,
|
||||
})
|
||||
if (env) {
|
||||
finishProfileSave(onDone, 'mistral', env)
|
||||
}
|
||||
}}
|
||||
onCancel={() =>
|
||||
setStep({
|
||||
name: 'mistral-base',
|
||||
apiKey: step.apiKey,
|
||||
defaultModel: step.defaultModel,
|
||||
})
|
||||
}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'gemini-auth-method': {
|
||||
const hasShellGeminiKey = Boolean(
|
||||
process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY,
|
||||
|
||||
@@ -65,7 +65,7 @@ export async function call(onDone: (result?: string) => void, _context: unknown,
|
||||
|
||||
// Get the local settings path and make it relative to cwd
|
||||
const localSettingsPath = getSettingsFilePathForSource('localSettings');
|
||||
const relativePath = localSettingsPath ? relative(getCwdState(), localSettingsPath) : '.openclaude/settings.local.json';
|
||||
const relativePath = localSettingsPath ? relative(getCwdState(), localSettingsPath) : '.claude/settings.local.json';
|
||||
const message = color('success', themeName)(`Added "${cleanPattern}" to excluded commands in ${relativePath}`);
|
||||
onDone(message);
|
||||
return null;
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
import type { Command } from '../../commands.js'
|
||||
|
||||
const wiki = {
|
||||
type: 'local-jsx',
|
||||
name: 'wiki',
|
||||
description: 'Initialize and inspect the OpenClaude project wiki',
|
||||
argumentHint: '[init|status]',
|
||||
immediate: true,
|
||||
load: () => import('./wiki.js'),
|
||||
} satisfies Command
|
||||
|
||||
export default wiki
|
||||
@@ -1,123 +0,0 @@
|
||||
import React from 'react'
|
||||
import { COMMON_HELP_ARGS, COMMON_INFO_ARGS } from '../../constants/xml.js'
|
||||
import { ingestLocalWikiSource } from '../../services/wiki/ingest.js'
|
||||
import { initializeWiki } from '../../services/wiki/init.js'
|
||||
import { getWikiStatus } from '../../services/wiki/status.js'
|
||||
import type {
|
||||
LocalJSXCommandCall,
|
||||
LocalJSXCommandOnDone,
|
||||
} from '../../types/command.js'
|
||||
import { getCwd } from '../../utils/cwd.js'
|
||||
|
||||
function renderHelp(): string {
|
||||
return `Usage: /wiki [init|status|ingest <path>]
|
||||
|
||||
Manage the OpenClaude project wiki stored in .openclaude/wiki.
|
||||
|
||||
Commands:
|
||||
/wiki init Initialize the wiki structure in the current project
|
||||
/wiki status Show wiki status and page/source counts
|
||||
/wiki ingest Ingest a local file into wiki sources
|
||||
|
||||
Examples:
|
||||
/wiki init
|
||||
/wiki status
|
||||
/wiki ingest README.md`
|
||||
}
|
||||
|
||||
function formatInitResult(result: Awaited<ReturnType<typeof initializeWiki>>): string {
|
||||
const lines = [`Initialized OpenClaude wiki at ${result.root}`]
|
||||
|
||||
if (result.alreadyExisted) {
|
||||
lines.push('', 'Wiki already existed. No new files were created.')
|
||||
return lines.join('\n')
|
||||
}
|
||||
|
||||
if (result.createdFiles.length > 0) {
|
||||
lines.push('', 'Created files:')
|
||||
for (const file of result.createdFiles) {
|
||||
lines.push(`- ${file}`)
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n')
|
||||
}
|
||||
|
||||
function formatStatus(status: Awaited<ReturnType<typeof getWikiStatus>>): string {
|
||||
if (!status.initialized) {
|
||||
return `OpenClaude wiki is not initialized in this project.\n\nRun /wiki init to create ${status.root}.`
|
||||
}
|
||||
|
||||
return [
|
||||
'OpenClaude wiki status',
|
||||
'',
|
||||
`Root: ${status.root}`,
|
||||
`Pages: ${status.pageCount}`,
|
||||
`Sources: ${status.sourceCount}`,
|
||||
`Schema: ${status.hasSchema ? 'present' : 'missing'}`,
|
||||
`Index: ${status.hasIndex ? 'present' : 'missing'}`,
|
||||
`Log: ${status.hasLog ? 'present' : 'missing'}`,
|
||||
`Last updated: ${status.lastUpdatedAt ?? 'unknown'}`,
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
function formatIngestResult(
|
||||
result: Awaited<ReturnType<typeof ingestLocalWikiSource>>,
|
||||
): string {
|
||||
return [
|
||||
`Ingested ${result.sourceFile} into the OpenClaude wiki.`,
|
||||
'',
|
||||
`Title: ${result.title}`,
|
||||
`Source note: ${result.sourceNote}`,
|
||||
`Summary: ${result.summary}`,
|
||||
].join('\n')
|
||||
}
|
||||
|
||||
async function runWikiCommand(
|
||||
onDone: LocalJSXCommandOnDone,
|
||||
args: string,
|
||||
): Promise<void> {
|
||||
const cwd = getCwd()
|
||||
const normalized = args.trim().toLowerCase()
|
||||
|
||||
if (COMMON_HELP_ARGS.includes(normalized) || COMMON_INFO_ARGS.includes(normalized)) {
|
||||
onDone(renderHelp(), { display: 'system' })
|
||||
return
|
||||
}
|
||||
|
||||
if (!normalized || normalized === 'status') {
|
||||
onDone(formatStatus(await getWikiStatus(cwd)), { display: 'system' })
|
||||
return
|
||||
}
|
||||
|
||||
if (normalized === 'init') {
|
||||
onDone(formatInitResult(await initializeWiki(cwd)), { display: 'system' })
|
||||
return
|
||||
}
|
||||
|
||||
if (normalized.startsWith('ingest')) {
|
||||
const pathArg = args.trim().slice('ingest'.length).trim()
|
||||
if (!pathArg) {
|
||||
onDone('Usage: /wiki ingest <local-file-path>', { display: 'system' })
|
||||
return
|
||||
}
|
||||
|
||||
onDone(formatIngestResult(await ingestLocalWikiSource(cwd, pathArg)), {
|
||||
display: 'system',
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
onDone(`Unknown wiki subcommand: ${args.trim()}\n\n${renderHelp()}`, {
|
||||
display: 'system',
|
||||
})
|
||||
}
|
||||
|
||||
export const call: LocalJSXCommandCall = async (
|
||||
onDone,
|
||||
_context,
|
||||
args,
|
||||
): Promise<React.ReactNode> => {
|
||||
await runWikiCommand(onDone, args ?? '')
|
||||
return null
|
||||
}
|
||||
@@ -188,9 +188,9 @@ export function AutoUpdater({
|
||||
✓ Update installed · Restart to apply
|
||||
</Text>}
|
||||
{(autoUpdaterResult?.status === 'install_failed' || autoUpdaterResult?.status === 'no_permissions') && <Text color="error" wrap="truncate">
|
||||
✗ Auto-update failed · Try <Text bold>openclaude doctor</Text> or{' '}
|
||||
✗ Auto-update failed · Try <Text bold>claude doctor</Text> or{' '}
|
||||
<Text bold>
|
||||
{hasLocalInstall ? `cd ~/.openclaude/local && npm update ${MACRO.PACKAGE_URL}` : `npm i -g ${MACRO.PACKAGE_URL}`}
|
||||
{hasLocalInstall ? `cd ~/.claude/local && npm update ${MACRO.PACKAGE_URL}` : `npm i -g ${MACRO.PACKAGE_URL}`}
|
||||
</Text>
|
||||
</Text>}
|
||||
</Box>;
|
||||
|
||||
@@ -31,11 +31,9 @@ export function BaseTextInput(t0) {
|
||||
} = t0;
|
||||
const {
|
||||
onInput,
|
||||
value,
|
||||
renderedValue,
|
||||
cursorLine,
|
||||
cursorColumn,
|
||||
offset,
|
||||
cursorColumn
|
||||
} = inputState;
|
||||
const t1 = Boolean(props.focus && props.showCursor && terminalFocus);
|
||||
let t2;
|
||||
@@ -80,7 +78,7 @@ export function BaseTextInput(t0) {
|
||||
renderedPlaceholder
|
||||
} = renderPlaceholder({
|
||||
placeholder: props.placeholder,
|
||||
value,
|
||||
value: props.value,
|
||||
showCursor: props.showCursor,
|
||||
focus: props.focus,
|
||||
terminalFocus,
|
||||
@@ -90,9 +88,9 @@ export function BaseTextInput(t0) {
|
||||
useInput(wrappedOnInput, {
|
||||
isActive: props.focus
|
||||
});
|
||||
const commandWithoutArgs = value && value.trim().indexOf(" ") === -1 || value && value.endsWith(" ");
|
||||
const showArgumentHint = Boolean(props.argumentHint && value && commandWithoutArgs && value.startsWith("/"));
|
||||
const cursorFiltered = props.showCursor && props.highlights ? props.highlights.filter(h => h.dimColor || offset < h.start || offset >= h.end) : props.highlights;
|
||||
const commandWithoutArgs = props.value && props.value.trim().indexOf(" ") === -1 || props.value && props.value.endsWith(" ");
|
||||
const showArgumentHint = Boolean(props.argumentHint && props.value && commandWithoutArgs && props.value.startsWith("/"));
|
||||
const cursorFiltered = props.showCursor && props.highlights ? props.highlights.filter(h => h.dimColor || props.cursorOffset < h.start || props.cursorOffset >= h.end) : props.highlights;
|
||||
const {
|
||||
viewportCharOffset,
|
||||
viewportCharEnd
|
||||
@@ -104,13 +102,13 @@ export function BaseTextInput(t0) {
|
||||
})) : cursorFiltered;
|
||||
const hasHighlights = filteredHighlights && filteredHighlights.length > 0;
|
||||
if (hasHighlights) {
|
||||
return <Box ref={cursorRef}><HighlightedInput text={renderedValue} highlights={filteredHighlights} />{showArgumentHint && <Text dimColor={true}>{value.endsWith(" ") ? "" : " "}{props.argumentHint}</Text>}{children}</Box>;
|
||||
return <Box ref={cursorRef}><HighlightedInput text={renderedValue} highlights={filteredHighlights} />{showArgumentHint && <Text dimColor={true}>{props.value?.endsWith(" ") ? "" : " "}{props.argumentHint}</Text>}{children}</Box>;
|
||||
}
|
||||
const T0 = Box;
|
||||
const T1 = Text;
|
||||
const t4 = "truncate-end";
|
||||
const t5 = showPlaceholder && props.placeholderElement ? props.placeholderElement : showPlaceholder && renderedPlaceholder ? <Ansi>{renderedPlaceholder}</Ansi> : <Ansi>{renderedValue}</Ansi>;
|
||||
const t6 = showArgumentHint && <Text dimColor={true}>{value.endsWith(" ") ? "" : " "}{props.argumentHint}</Text>;
|
||||
const t6 = showArgumentHint && <Text dimColor={true}>{props.value?.endsWith(" ") ? "" : " "}{props.argumentHint}</Text>;
|
||||
let t7;
|
||||
if ($[4] !== T1 || $[5] !== children || $[6] !== props || $[7] !== t5 || $[8] !== t6) {
|
||||
t7 = <T1 wrap={t4} dimColor={props.dimColor}>{t5}{t6}{children}</T1>;
|
||||
|
||||
@@ -103,7 +103,7 @@ test('login picker shows the third-party platform option', async () => {
|
||||
expect(output).toContain('3rd-party platform')
|
||||
})
|
||||
|
||||
test('third-party provider branch opens the first-run provider manager', async () => {
|
||||
test('third-party provider branch opens the provider wizard', async () => {
|
||||
const output = await renderFrame(
|
||||
<ConsoleOAuthFlow
|
||||
initialStatus={{ state: 'platform_setup' }}
|
||||
@@ -111,9 +111,7 @@ test('third-party provider branch opens the first-run provider manager', async (
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(output).toContain('Set up provider')
|
||||
expect(output).toContain('Anthropic')
|
||||
expect(output).toContain('OpenAI')
|
||||
expect(output).toContain('Set up a provider profile')
|
||||
expect(output).toContain('OpenAI-compatible')
|
||||
expect(output).toContain('Ollama')
|
||||
expect(output).toContain('LM Studio')
|
||||
})
|
||||
|
||||
@@ -12,7 +12,7 @@ import { OAuthService } from '../services/oauth/index.js';
|
||||
import { getOauthAccountInfo, validateForceLoginOrg } from '../utils/auth.js';
|
||||
import { logError } from '../utils/log.js';
|
||||
import { getSettings_DEPRECATED } from '../utils/settings/settings.js';
|
||||
import { ProviderManager } from './ProviderManager.js';
|
||||
import { ProviderWizard } from '../commands/provider/provider.js';
|
||||
import { Select } from './CustomSelect/select.js';
|
||||
import { KeyboardShortcutHint } from './design-system/KeyboardShortcutHint.js';
|
||||
import { Spinner } from './Spinner.js';
|
||||
@@ -450,17 +450,16 @@ function OAuthStatusMessage({
|
||||
|
||||
case 'platform_setup':
|
||||
return (
|
||||
<ProviderManager
|
||||
mode="first-run"
|
||||
<ProviderWizard
|
||||
onDone={result => {
|
||||
if (!result || result.action !== 'saved' || !result.message) {
|
||||
if (!result) {
|
||||
setOAuthStatus({ state: 'idle' })
|
||||
return
|
||||
}
|
||||
|
||||
setOAuthStatus({
|
||||
state: 'platform_setup_complete',
|
||||
message: result.message,
|
||||
message: result,
|
||||
})
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -285,7 +285,7 @@ export function Select(t0) {
|
||||
onChange,
|
||||
onCancel,
|
||||
onFocus,
|
||||
defaultFocusValue,
|
||||
focusValue: defaultFocusValue
|
||||
};
|
||||
$[7] = defaultFocusValue;
|
||||
$[8] = defaultValue;
|
||||
|
||||
@@ -35,11 +35,6 @@ export type UseSelectStateProps<T> = {
|
||||
*/
|
||||
onFocus?: (value: T) => void
|
||||
|
||||
/**
|
||||
* Initial value to focus when the component mounts.
|
||||
*/
|
||||
defaultFocusValue?: T
|
||||
|
||||
/**
|
||||
* Value to focus
|
||||
*/
|
||||
@@ -136,7 +131,6 @@ export function useSelectState<T>({
|
||||
onChange,
|
||||
onCancel,
|
||||
onFocus,
|
||||
defaultFocusValue,
|
||||
focusValue,
|
||||
}: UseSelectStateProps<T>): SelectState<T> {
|
||||
const [value, setValue] = useState<T | undefined>(defaultValue)
|
||||
@@ -144,7 +138,7 @@ export function useSelectState<T>({
|
||||
const navigation = useSelectNavigation<T>({
|
||||
visibleOptionCount,
|
||||
options,
|
||||
initialFocusValue: defaultFocusValue,
|
||||
initialFocusValue: undefined,
|
||||
onFocus,
|
||||
focusValue,
|
||||
})
|
||||
|
||||
@@ -252,24 +252,14 @@ function PromptInput({
|
||||
show: false
|
||||
});
|
||||
const [cursorOffset, setCursorOffset] = useState<number>(input.length);
|
||||
// Track the last input value set via internal handlers so external updates
|
||||
// (for example speech-to-text injection) can still move the cursor to end
|
||||
// without clobbering a pending internal keystroke during render.
|
||||
// Track the last input value set via internal handlers so we can detect
|
||||
// external input changes (e.g. speech-to-text injection) and move cursor to end.
|
||||
const lastInternalInputRef = React.useRef(input);
|
||||
const lastPropInputRef = React.useRef(input);
|
||||
React.useLayoutEffect(() => {
|
||||
if (input === lastPropInputRef.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
lastPropInputRef.current = input;
|
||||
if (input === lastInternalInputRef.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (input !== lastInternalInputRef.current) {
|
||||
// Input changed externally (not through any internal handler) — move cursor to end
|
||||
setCursorOffset(input.length);
|
||||
lastInternalInputRef.current = input;
|
||||
setCursorOffset(prev => prev === input.length ? prev : input.length);
|
||||
}, [input]);
|
||||
}
|
||||
// Wrap onInputChange to track internal changes before they trigger re-render
|
||||
const trackAndSetInput = React.useCallback((value: string) => {
|
||||
lastInternalInputRef.current = value;
|
||||
@@ -2211,7 +2201,7 @@ function PromptInput({
|
||||
multiline: true,
|
||||
onSubmit,
|
||||
onChange,
|
||||
value: isSearchingHistory && historyMatch ? getValueFromInput(typeof historyMatch === 'string' ? historyMatch : historyMatch.display) : input,
|
||||
value: historyMatch ? getValueFromInput(typeof historyMatch === 'string' ? historyMatch : historyMatch.display) : input,
|
||||
// History navigation is handled via TextInput props (onHistoryUp/onHistoryDown),
|
||||
// NOT via useKeybindings. This allows useTextInput's upOrHistoryUp/downOrHistoryDown
|
||||
// to try cursor movement first and only fall through to history navigation when the
|
||||
|
||||
@@ -6,7 +6,6 @@ import stripAnsi from 'strip-ansi'
|
||||
|
||||
import { createRoot } from '../ink.js'
|
||||
import { AppStateProvider } from '../state/AppState.js'
|
||||
import { KeybindingSetup } from '../keybindings/KeybindingProviderSetup.js'
|
||||
|
||||
const SYNC_START = '\x1B[?2026h'
|
||||
const SYNC_END = '\x1B[?2026l'
|
||||
@@ -107,30 +106,19 @@ function createDeferred<T>(): {
|
||||
return { promise, resolve }
|
||||
}
|
||||
|
||||
function mockProviderProfilesModule(options?: {
|
||||
addProviderProfile?: (...args: unknown[]) => unknown
|
||||
}): void {
|
||||
function mockProviderProfilesModule(): void {
|
||||
mock.module('../utils/providerProfiles.js', () => ({
|
||||
addProviderProfile: options?.addProviderProfile ?? (() => null),
|
||||
addProviderProfile: () => null,
|
||||
applyActiveProviderProfileFromConfig: () => {},
|
||||
deleteProviderProfile: () => ({ removed: false, activeProfileId: null }),
|
||||
getActiveProviderProfile: () => null,
|
||||
getProviderPresetDefaults: (preset: string) =>
|
||||
preset === 'ollama'
|
||||
? {
|
||||
provider: 'openai',
|
||||
name: 'Ollama',
|
||||
baseUrl: 'http://localhost:11434/v1',
|
||||
model: 'llama3.1:8b',
|
||||
apiKey: '',
|
||||
}
|
||||
: {
|
||||
provider: 'openai',
|
||||
name: 'Mock provider',
|
||||
baseUrl: 'http://localhost:11434/v1',
|
||||
model: 'mock-model',
|
||||
apiKey: '',
|
||||
},
|
||||
getProviderPresetDefaults: () => ({
|
||||
provider: 'openai',
|
||||
name: 'Mock provider',
|
||||
baseUrl: 'http://localhost:11434/v1',
|
||||
model: 'mock-model',
|
||||
apiKey: '',
|
||||
}),
|
||||
getProviderProfiles: () => [],
|
||||
setActiveProviderProfile: () => null,
|
||||
updateProviderProfile: () => null,
|
||||
@@ -140,27 +128,8 @@ function mockProviderProfilesModule(options?: {
|
||||
function mockProviderManagerDependencies(
|
||||
syncRead: () => string | undefined,
|
||||
asyncRead: () => Promise<string | undefined>,
|
||||
options?: {
|
||||
addProviderProfile?: (...args: unknown[]) => unknown
|
||||
hasLocalOllama?: () => Promise<boolean>
|
||||
listOllamaModels?: () => Promise<
|
||||
Array<{
|
||||
name: string
|
||||
sizeBytes?: number | null
|
||||
family?: string | null
|
||||
families?: string[]
|
||||
parameterSize?: string | null
|
||||
quantizationLevel?: string | null
|
||||
}>
|
||||
>
|
||||
},
|
||||
): void {
|
||||
mockProviderProfilesModule({ addProviderProfile: options?.addProviderProfile })
|
||||
|
||||
mock.module('../utils/providerDiscovery.js', () => ({
|
||||
hasLocalOllama: options?.hasLocalOllama ?? (async () => false),
|
||||
listOllamaModels: options?.listOllamaModels ?? (async () => []),
|
||||
}))
|
||||
mockProviderProfilesModule()
|
||||
|
||||
mock.module('../utils/githubModelsCredentials.js', () => ({
|
||||
clearGithubModelsToken: () => ({ success: true }),
|
||||
@@ -193,14 +162,9 @@ async function waitForFrameOutput(
|
||||
async function mountProviderManager(
|
||||
ProviderManager: React.ComponentType<{
|
||||
mode: 'first-run' | 'manage'
|
||||
onDone: (result?: unknown) => void
|
||||
onDone: () => void
|
||||
}>,
|
||||
options?: {
|
||||
mode?: 'first-run' | 'manage'
|
||||
onDone?: (result?: unknown) => void
|
||||
},
|
||||
): Promise<{
|
||||
stdin: PassThrough
|
||||
getOutput: () => string
|
||||
dispose: () => Promise<void>
|
||||
}> {
|
||||
@@ -213,17 +177,14 @@ async function mountProviderManager(
|
||||
|
||||
root.render(
|
||||
<AppStateProvider>
|
||||
<KeybindingSetup>
|
||||
<ProviderManager
|
||||
mode={options?.mode ?? 'manage'}
|
||||
onDone={options?.onDone ?? (() => {})}
|
||||
/>
|
||||
</KeybindingSetup>
|
||||
<ProviderManager
|
||||
mode="manage"
|
||||
onDone={() => {}}
|
||||
/>
|
||||
</AppStateProvider>,
|
||||
)
|
||||
|
||||
return {
|
||||
stdin,
|
||||
getOutput,
|
||||
dispose: async () => {
|
||||
root.unmount()
|
||||
@@ -237,17 +198,14 @@ async function mountProviderManager(
|
||||
async function renderProviderManagerFrame(
|
||||
ProviderManager: React.ComponentType<{
|
||||
mode: 'first-run' | 'manage'
|
||||
onDone: (result?: unknown) => void
|
||||
onDone: () => void
|
||||
}>,
|
||||
options?: {
|
||||
waitForOutput?: (output: string) => boolean
|
||||
timeoutMs?: number
|
||||
mode?: 'first-run' | 'manage'
|
||||
},
|
||||
): Promise<string> {
|
||||
const mounted = await mountProviderManager(ProviderManager, {
|
||||
mode: options?.mode,
|
||||
})
|
||||
const mounted = await mountProviderManager(ProviderManager)
|
||||
const output = await waitForFrameOutput(
|
||||
mounted.getOutput,
|
||||
frame => {
|
||||
@@ -305,96 +263,6 @@ test('ProviderManager resolves GitHub virtual provider from async storage withou
|
||||
expect(asyncRead).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
test('ProviderManager first-run Ollama preset auto-detects installed models', async () => {
|
||||
delete process.env.CLAUDE_CODE_USE_GITHUB
|
||||
delete process.env.GITHUB_TOKEN
|
||||
delete process.env.GH_TOKEN
|
||||
|
||||
const onDone = mock(() => {})
|
||||
const addProviderProfile = mock((payload: {
|
||||
provider: string
|
||||
name: string
|
||||
baseUrl: string
|
||||
model: string
|
||||
apiKey?: string
|
||||
}) => ({
|
||||
id: 'provider_ollama',
|
||||
provider: payload.provider,
|
||||
name: payload.name,
|
||||
baseUrl: payload.baseUrl,
|
||||
model: payload.model,
|
||||
apiKey: payload.apiKey,
|
||||
}))
|
||||
|
||||
mockProviderManagerDependencies(
|
||||
() => undefined,
|
||||
async () => undefined,
|
||||
{
|
||||
addProviderProfile,
|
||||
hasLocalOllama: async () => true,
|
||||
listOllamaModels: async () => [
|
||||
{
|
||||
name: 'gemma4:31b-cloud',
|
||||
family: 'gemma',
|
||||
parameterSize: '31b',
|
||||
},
|
||||
{
|
||||
name: 'kimi-k2.5:cloud',
|
||||
family: 'kimi',
|
||||
parameterSize: '2.5b',
|
||||
},
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
const nonce = `${Date.now()}-${Math.random()}`
|
||||
const { ProviderManager } = await import(`./ProviderManager.js?ts=${nonce}`)
|
||||
const mounted = await mountProviderManager(ProviderManager, {
|
||||
mode: 'first-run',
|
||||
onDone,
|
||||
})
|
||||
|
||||
await waitForFrameOutput(
|
||||
mounted.getOutput,
|
||||
frame => frame.includes('Set up provider') && frame.includes('Ollama'),
|
||||
)
|
||||
|
||||
mounted.stdin.write('j')
|
||||
await Bun.sleep(50)
|
||||
mounted.stdin.write('\r')
|
||||
|
||||
const modelFrame = await waitForFrameOutput(
|
||||
mounted.getOutput,
|
||||
frame =>
|
||||
frame.includes('Choose an Ollama model') &&
|
||||
frame.includes('gemma4:31b-cloud') &&
|
||||
frame.includes('kimi-k2.5:cloud'),
|
||||
)
|
||||
|
||||
expect(modelFrame).toContain('Choose an Ollama model')
|
||||
expect(modelFrame).toContain('gemma4:31b-cloud')
|
||||
|
||||
await Bun.sleep(25)
|
||||
mounted.stdin.write('\r')
|
||||
|
||||
await waitForCondition(() => onDone.mock.calls.length > 0)
|
||||
|
||||
expect(addProviderProfile).toHaveBeenCalled()
|
||||
expect(addProviderProfile.mock.calls[0]?.[0]).toMatchObject({
|
||||
name: 'Ollama',
|
||||
baseUrl: 'http://localhost:11434/v1',
|
||||
model: 'gemma4:31b-cloud',
|
||||
})
|
||||
expect(onDone).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
action: 'saved',
|
||||
message: 'Provider configured: Ollama',
|
||||
}),
|
||||
)
|
||||
|
||||
await mounted.dispose()
|
||||
})
|
||||
|
||||
test('ProviderManager avoids first-frame false negative while stored-token lookup is pending', async () => {
|
||||
delete process.env.CLAUDE_CODE_USE_GITHUB
|
||||
delete process.env.GITHUB_TOKEN
|
||||
|
||||
@@ -3,7 +3,6 @@ import * as React from 'react'
|
||||
import { Box, Text } from '../ink.js'
|
||||
import { useKeybinding } from '../keybindings/useKeybinding.js'
|
||||
import type { ProviderProfile } from '../utils/config.js'
|
||||
import { hasLocalOllama, listOllamaModels } from '../utils/providerDiscovery.js'
|
||||
import {
|
||||
addProviderProfile,
|
||||
applyActiveProviderProfileFromConfig,
|
||||
@@ -16,10 +15,6 @@ import {
|
||||
type ProviderProfileInput,
|
||||
updateProviderProfile,
|
||||
} from '../utils/providerProfiles.js'
|
||||
import {
|
||||
rankOllamaModels,
|
||||
recommendOllamaModel,
|
||||
} from '../utils/providerRecommendation.js'
|
||||
import {
|
||||
clearGithubModelsToken,
|
||||
GITHUB_MODELS_HYDRATED_ENV_MARKER,
|
||||
@@ -29,7 +24,7 @@ import {
|
||||
} from '../utils/githubModelsCredentials.js'
|
||||
import { isEnvTruthy } from '../utils/envUtils.js'
|
||||
import { updateSettingsForSource } from '../utils/settings/settings.js'
|
||||
import { type OptionWithDescription, Select } from './CustomSelect/index.js'
|
||||
import { Select } from './CustomSelect/index.js'
|
||||
import { Pane } from './design-system/Pane.js'
|
||||
import TextInput from './TextInput.js'
|
||||
|
||||
@@ -47,7 +42,6 @@ type Props = {
|
||||
type Screen =
|
||||
| 'menu'
|
||||
| 'select-preset'
|
||||
| 'select-ollama-model'
|
||||
| 'form'
|
||||
| 'select-active'
|
||||
| 'select-edit'
|
||||
@@ -57,16 +51,6 @@ type DraftField = 'name' | 'baseUrl' | 'model' | 'apiKey'
|
||||
|
||||
type ProviderDraft = Record<DraftField, string>
|
||||
|
||||
type OllamaSelectionState =
|
||||
| { state: 'idle' }
|
||||
| { state: 'loading' }
|
||||
| {
|
||||
state: 'ready'
|
||||
options: OptionWithDescription<string>[]
|
||||
defaultValue?: string
|
||||
}
|
||||
| { state: 'unavailable'; message: string }
|
||||
|
||||
const FORM_STEPS: Array<{
|
||||
key: DraftField
|
||||
label: string
|
||||
@@ -226,9 +210,6 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
const [cursorOffset, setCursorOffset] = React.useState(0)
|
||||
const [statusMessage, setStatusMessage] = React.useState<string | undefined>()
|
||||
const [errorMessage, setErrorMessage] = React.useState<string | undefined>()
|
||||
const [ollamaSelection, setOllamaSelection] = React.useState<OllamaSelectionState>({
|
||||
state: 'idle',
|
||||
})
|
||||
|
||||
const currentStep = FORM_STEPS[formStepIndex] ?? FORM_STEPS[0]
|
||||
const currentStepKey = currentStep.key
|
||||
@@ -383,59 +364,6 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
return null
|
||||
}
|
||||
|
||||
React.useEffect(() => {
|
||||
if (screen !== 'select-ollama-model') {
|
||||
return
|
||||
}
|
||||
|
||||
let cancelled = false
|
||||
setOllamaSelection({ state: 'loading' })
|
||||
|
||||
void (async () => {
|
||||
const available = await hasLocalOllama(draft.baseUrl)
|
||||
if (!available) {
|
||||
if (!cancelled) {
|
||||
setOllamaSelection({
|
||||
state: 'unavailable',
|
||||
message:
|
||||
'Could not reach Ollama. Start Ollama first, or enter the endpoint manually.',
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const models = await listOllamaModels(draft.baseUrl)
|
||||
if (models.length === 0) {
|
||||
if (!cancelled) {
|
||||
setOllamaSelection({
|
||||
state: 'unavailable',
|
||||
message:
|
||||
'Ollama is running, but no installed models were found. Pull a chat model such as qwen2.5-coder:7b or llama3.1:8b first, or enter details manually.',
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const ranked = rankOllamaModels(models, 'balanced')
|
||||
const recommended = recommendOllamaModel(models, 'balanced')
|
||||
if (!cancelled) {
|
||||
setOllamaSelection({
|
||||
state: 'ready',
|
||||
defaultValue: recommended?.name ?? ranked[0]?.name,
|
||||
options: ranked.map(model => ({
|
||||
label: model.name,
|
||||
value: model.name,
|
||||
description: model.summary,
|
||||
})),
|
||||
})
|
||||
}
|
||||
})()
|
||||
|
||||
return () => {
|
||||
cancelled = true
|
||||
}
|
||||
}, [draft.baseUrl, screen])
|
||||
|
||||
function startCreateFromPreset(preset: ProviderPreset): void {
|
||||
const defaults = getProviderPresetDefaults(preset)
|
||||
const nextDraft = {
|
||||
@@ -450,13 +378,6 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
setFormStepIndex(0)
|
||||
setCursorOffset(nextDraft.name.length)
|
||||
setErrorMessage(undefined)
|
||||
|
||||
if (preset === 'ollama') {
|
||||
setOllamaSelection({ state: 'loading' })
|
||||
setScreen('select-ollama-model')
|
||||
return
|
||||
}
|
||||
|
||||
setScreen('form')
|
||||
}
|
||||
|
||||
@@ -476,13 +397,13 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
setScreen('form')
|
||||
}
|
||||
|
||||
function persistDraft(nextDraft: ProviderDraft = draft): void {
|
||||
function persistDraft(): void {
|
||||
const payload: ProviderProfileInput = {
|
||||
provider: draftProvider,
|
||||
name: nextDraft.name,
|
||||
baseUrl: nextDraft.baseUrl,
|
||||
model: nextDraft.model,
|
||||
apiKey: nextDraft.apiKey,
|
||||
name: draft.name,
|
||||
baseUrl: draft.baseUrl,
|
||||
model: draft.model,
|
||||
apiKey: draft.apiKey,
|
||||
}
|
||||
|
||||
const saved = editingProfileId
|
||||
@@ -525,83 +446,6 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
setScreen('menu')
|
||||
}
|
||||
|
||||
function renderOllamaSelection(): React.ReactNode {
|
||||
if (ollamaSelection.state === 'loading' || ollamaSelection.state === 'idle') {
|
||||
return (
|
||||
<Box flexDirection="column" gap={1}>
|
||||
<Text color="remember" bold>
|
||||
Checking Ollama
|
||||
</Text>
|
||||
<Text dimColor>Looking for installed Ollama models...</Text>
|
||||
</Box>
|
||||
)
|
||||
}
|
||||
|
||||
if (ollamaSelection.state === 'unavailable') {
|
||||
return (
|
||||
<Box flexDirection="column" gap={1}>
|
||||
<Text color="remember" bold>
|
||||
Ollama setup
|
||||
</Text>
|
||||
<Text dimColor>{ollamaSelection.message}</Text>
|
||||
<Select
|
||||
options={[
|
||||
{
|
||||
value: 'manual',
|
||||
label: 'Enter manually',
|
||||
description: 'Fill in the base URL and model yourself',
|
||||
},
|
||||
{
|
||||
value: 'back',
|
||||
label: 'Back',
|
||||
description: 'Choose another provider preset',
|
||||
},
|
||||
]}
|
||||
onChange={value => {
|
||||
if (value === 'manual') {
|
||||
setFormStepIndex(0)
|
||||
setCursorOffset(draft.name.length)
|
||||
setScreen('form')
|
||||
return
|
||||
}
|
||||
setScreen('select-preset')
|
||||
}}
|
||||
onCancel={() => setScreen('select-preset')}
|
||||
visibleOptionCount={2}
|
||||
/>
|
||||
</Box>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<Box flexDirection="column" gap={1}>
|
||||
<Text color="remember" bold>
|
||||
Choose an Ollama model
|
||||
</Text>
|
||||
<Text dimColor>
|
||||
Pick one of the installed Ollama models to save into a local provider
|
||||
profile.
|
||||
</Text>
|
||||
<Select
|
||||
options={ollamaSelection.options}
|
||||
defaultValue={ollamaSelection.defaultValue}
|
||||
defaultFocusValue={ollamaSelection.defaultValue}
|
||||
inlineDescriptions
|
||||
visibleOptionCount={Math.min(8, ollamaSelection.options.length)}
|
||||
onChange={value => {
|
||||
const nextDraft = {
|
||||
...draft,
|
||||
model: value,
|
||||
}
|
||||
setDraft(nextDraft)
|
||||
persistDraft(nextDraft)
|
||||
}}
|
||||
onCancel={() => setScreen('select-preset')}
|
||||
/>
|
||||
</Box>
|
||||
)
|
||||
}
|
||||
|
||||
function handleFormSubmit(value: string): void {
|
||||
const trimmed = value.trim()
|
||||
|
||||
@@ -626,7 +470,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
return
|
||||
}
|
||||
|
||||
persistDraft(nextDraft)
|
||||
persistDraft()
|
||||
}
|
||||
|
||||
function handleBackFromForm(): void {
|
||||
@@ -975,16 +819,13 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
|
||||
let content: React.ReactNode
|
||||
|
||||
switch (screen) {
|
||||
case 'select-preset':
|
||||
content = renderPresetSelection()
|
||||
break
|
||||
case 'select-ollama-model':
|
||||
content = renderOllamaSelection()
|
||||
break
|
||||
case 'form':
|
||||
content = renderForm()
|
||||
break
|
||||
switch (screen) {
|
||||
case 'select-preset':
|
||||
content = renderPresetSelection()
|
||||
break
|
||||
case 'form':
|
||||
content = renderForm()
|
||||
break
|
||||
case 'select-active':
|
||||
content = renderProfileSelection(
|
||||
'Set active provider',
|
||||
|
||||
@@ -7,8 +7,6 @@
|
||||
|
||||
import { isLocalProviderUrl } from '../services/api/providerConfig.js'
|
||||
import { getLocalOpenAICompatibleProviderLabel } from '../utils/providerDiscovery.js'
|
||||
import { getSettings_DEPRECATED } from '../utils/settings/settings.js'
|
||||
import { parseUserSpecifiedModel } from '../utils/model/model.js'
|
||||
|
||||
declare const MACRO: { VERSION: string; DISPLAY_VERSION?: string }
|
||||
|
||||
@@ -87,7 +85,6 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc
|
||||
const useGemini = process.env.CLAUDE_CODE_USE_GEMINI === '1' || process.env.CLAUDE_CODE_USE_GEMINI === 'true'
|
||||
const useGithub = process.env.CLAUDE_CODE_USE_GITHUB === '1' || process.env.CLAUDE_CODE_USE_GITHUB === 'true'
|
||||
const useOpenAI = process.env.CLAUDE_CODE_USE_OPENAI === '1' || process.env.CLAUDE_CODE_USE_OPENAI === 'true'
|
||||
const useMistral = process.env.CLAUDE_CODE_USE_MISTRAL === '1' || process.env.CLAUDE_CODE_USE_MISTRAL === 'true'
|
||||
|
||||
if (useGemini) {
|
||||
const model = process.env.GEMINI_MODEL || 'gemini-2.0-flash'
|
||||
@@ -95,12 +92,6 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc
|
||||
return { name: 'Google Gemini', model, baseUrl, isLocal: false }
|
||||
}
|
||||
|
||||
if (useMistral) {
|
||||
const model = process.env.MISTRAL_MODEL || 'devstral-latest'
|
||||
const baseUrl = process.env.MISTRAL_BASE_URL || 'https://api.mistral.ai/v1'
|
||||
return { name: 'Mistral', model, baseUrl, isLocal: false }
|
||||
}
|
||||
|
||||
if (useGithub) {
|
||||
const model = process.env.OPENAI_MODEL || 'github:copilot'
|
||||
const baseUrl =
|
||||
@@ -148,11 +139,9 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc
|
||||
return { name, model: displayModel, baseUrl, isLocal }
|
||||
}
|
||||
|
||||
// Default: Anthropic - check settings.model first, then env vars
|
||||
const settings = getSettings_DEPRECATED() || {}
|
||||
const modelSetting = settings.model || process.env.ANTHROPIC_MODEL || process.env.CLAUDE_MODEL || 'claude-sonnet-4-6'
|
||||
const resolvedModel = parseUserSpecifiedModel(modelSetting)
|
||||
return { name: 'Anthropic', model: resolvedModel, baseUrl: 'https://api.anthropic.com', isLocal: false }
|
||||
// Default: Anthropic
|
||||
const model = process.env.ANTHROPIC_MODEL || process.env.CLAUDE_MODEL || 'claude-sonnet-4-6'
|
||||
return { name: 'Anthropic', model, baseUrl: 'https://api.anthropic.com', isLocal: false }
|
||||
}
|
||||
|
||||
// ─── Box drawing ──────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -1,231 +0,0 @@
|
||||
import { PassThrough } from 'node:stream'
|
||||
|
||||
import { expect, test } from 'bun:test'
|
||||
import React from 'react'
|
||||
import stripAnsi from 'strip-ansi'
|
||||
|
||||
import { createRoot } from '../ink.js'
|
||||
import { AppStateProvider } from '../state/AppState.js'
|
||||
import TextInput from './TextInput.js'
|
||||
import VimTextInput from './VimTextInput.js'
|
||||
|
||||
const SYNC_START = '\x1B[?2026h'
|
||||
const SYNC_END = '\x1B[?2026l'
|
||||
|
||||
function extractLastFrame(output: string): string {
|
||||
let lastFrame: string | null = null
|
||||
let cursor = 0
|
||||
|
||||
while (cursor < output.length) {
|
||||
const start = output.indexOf(SYNC_START, cursor)
|
||||
if (start === -1) {
|
||||
break
|
||||
}
|
||||
|
||||
const contentStart = start + SYNC_START.length
|
||||
const end = output.indexOf(SYNC_END, contentStart)
|
||||
if (end === -1) {
|
||||
break
|
||||
}
|
||||
|
||||
const frame = output.slice(contentStart, end)
|
||||
if (frame.trim().length > 0) {
|
||||
lastFrame = frame
|
||||
}
|
||||
cursor = end + SYNC_END.length
|
||||
}
|
||||
|
||||
return lastFrame ?? output
|
||||
}
|
||||
|
||||
function createTestStreams(): {
|
||||
stdout: PassThrough
|
||||
stdin: PassThrough & {
|
||||
isTTY: boolean
|
||||
setRawMode: (mode: boolean) => void
|
||||
ref: () => void
|
||||
unref: () => void
|
||||
}
|
||||
getOutput: () => string
|
||||
} {
|
||||
let output = ''
|
||||
const stdout = new PassThrough()
|
||||
const stdin = new PassThrough() as PassThrough & {
|
||||
isTTY: boolean
|
||||
setRawMode: (mode: boolean) => void
|
||||
ref: () => void
|
||||
unref: () => void
|
||||
}
|
||||
|
||||
stdin.isTTY = true
|
||||
stdin.setRawMode = () => {}
|
||||
stdin.ref = () => {}
|
||||
stdin.unref = () => {}
|
||||
;(stdout as unknown as { columns: number }).columns = 120
|
||||
stdout.on('data', chunk => {
|
||||
output += chunk.toString()
|
||||
})
|
||||
|
||||
return {
|
||||
stdout,
|
||||
stdin,
|
||||
getOutput: () => output,
|
||||
}
|
||||
}
|
||||
|
||||
function DelayedControlledTextInput(): React.ReactNode {
|
||||
const [value, setValue] = React.useState('')
|
||||
const [cursorOffset, setCursorOffset] = React.useState(0)
|
||||
const valueTimerRef = React.useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
const offsetTimerRef = React.useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
|
||||
React.useEffect(() => {
|
||||
return () => {
|
||||
if (valueTimerRef.current) {
|
||||
clearTimeout(valueTimerRef.current)
|
||||
}
|
||||
if (offsetTimerRef.current) {
|
||||
clearTimeout(offsetTimerRef.current)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<AppStateProvider>
|
||||
<TextInput
|
||||
value={value}
|
||||
onChange={nextValue => {
|
||||
if (valueTimerRef.current) {
|
||||
clearTimeout(valueTimerRef.current)
|
||||
}
|
||||
valueTimerRef.current = setTimeout(() => {
|
||||
setValue(nextValue)
|
||||
}, 200)
|
||||
}}
|
||||
onSubmit={() => {}}
|
||||
placeholder="Type here..."
|
||||
columns={60}
|
||||
cursorOffset={cursorOffset}
|
||||
onChangeCursorOffset={nextOffset => {
|
||||
if (offsetTimerRef.current) {
|
||||
clearTimeout(offsetTimerRef.current)
|
||||
}
|
||||
offsetTimerRef.current = setTimeout(() => {
|
||||
setCursorOffset(nextOffset)
|
||||
}, 200)
|
||||
}}
|
||||
focus
|
||||
showCursor
|
||||
multiline
|
||||
/>
|
||||
</AppStateProvider>
|
||||
)
|
||||
}
|
||||
|
||||
function DelayedControlledVimTextInput(): React.ReactNode {
|
||||
const [value, setValue] = React.useState('')
|
||||
const [cursorOffset, setCursorOffset] = React.useState(0)
|
||||
const valueTimerRef = React.useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
const offsetTimerRef = React.useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
|
||||
React.useEffect(() => {
|
||||
return () => {
|
||||
if (valueTimerRef.current) {
|
||||
clearTimeout(valueTimerRef.current)
|
||||
}
|
||||
if (offsetTimerRef.current) {
|
||||
clearTimeout(offsetTimerRef.current)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<AppStateProvider>
|
||||
<VimTextInput
|
||||
value={value}
|
||||
onChange={nextValue => {
|
||||
if (valueTimerRef.current) {
|
||||
clearTimeout(valueTimerRef.current)
|
||||
}
|
||||
valueTimerRef.current = setTimeout(() => {
|
||||
setValue(nextValue)
|
||||
}, 200)
|
||||
}}
|
||||
onSubmit={() => {}}
|
||||
placeholder="Type here..."
|
||||
columns={60}
|
||||
cursorOffset={cursorOffset}
|
||||
onChangeCursorOffset={nextOffset => {
|
||||
if (offsetTimerRef.current) {
|
||||
clearTimeout(offsetTimerRef.current)
|
||||
}
|
||||
offsetTimerRef.current = setTimeout(() => {
|
||||
setCursorOffset(nextOffset)
|
||||
}, 200)
|
||||
}}
|
||||
initialMode="INSERT"
|
||||
focus
|
||||
showCursor
|
||||
multiline
|
||||
/>
|
||||
</AppStateProvider>
|
||||
)
|
||||
}
|
||||
|
||||
test('TextInput renders typed characters before delayed parent value commits', async () => {
|
||||
const { stdout, stdin, getOutput } = createTestStreams()
|
||||
const root = await createRoot({
|
||||
stdout: stdout as unknown as NodeJS.WriteStream,
|
||||
stdin: stdin as unknown as NodeJS.ReadStream,
|
||||
patchConsole: false,
|
||||
})
|
||||
|
||||
root.render(<DelayedControlledTextInput />)
|
||||
|
||||
await Bun.sleep(50)
|
||||
stdin.write('a')
|
||||
await Bun.sleep(25)
|
||||
stdin.write('b')
|
||||
await Bun.sleep(25)
|
||||
|
||||
const output = stripAnsi(extractLastFrame(getOutput()))
|
||||
|
||||
root.unmount()
|
||||
stdin.end()
|
||||
stdout.end()
|
||||
await Bun.sleep(25)
|
||||
|
||||
expect(output).toContain('ab')
|
||||
expect(output).not.toContain('Type here...')
|
||||
})
|
||||
|
||||
test('VimTextInput preserves rapid typed characters before delayed parent value commits', async () => {
|
||||
const { stdout, stdin, getOutput } = createTestStreams()
|
||||
const root = await createRoot({
|
||||
stdout: stdout as unknown as NodeJS.WriteStream,
|
||||
stdin: stdin as unknown as NodeJS.ReadStream,
|
||||
patchConsole: false,
|
||||
})
|
||||
|
||||
root.render(<DelayedControlledVimTextInput />)
|
||||
|
||||
await Bun.sleep(50)
|
||||
stdin.write('a')
|
||||
await Bun.sleep(25)
|
||||
stdin.write('s')
|
||||
await Bun.sleep(25)
|
||||
stdin.write('d')
|
||||
await Bun.sleep(25)
|
||||
stdin.write('f')
|
||||
await Bun.sleep(25)
|
||||
|
||||
const output = stripAnsi(extractLastFrame(getOutput()))
|
||||
|
||||
root.unmount()
|
||||
stdin.end()
|
||||
stdout.end()
|
||||
await Bun.sleep(25)
|
||||
|
||||
expect(output).toContain('asdf')
|
||||
expect(output).not.toContain('Type here...')
|
||||
})
|
||||
@@ -1,161 +1,113 @@
|
||||
import { PassThrough } from 'node:stream'
|
||||
import { describe, expect, it, mock } from 'bun:test'
|
||||
|
||||
import { afterEach, expect, mock, test } from 'bun:test'
|
||||
import React from 'react'
|
||||
import stripAnsi from 'strip-ansi'
|
||||
// We can't fully render ThemePicker due to complex dependencies
|
||||
// But we can test the theme options generation logic
|
||||
describe('ThemePicker', () => {
|
||||
describe('theme options', () => {
|
||||
it('generates correct theme options without AUTO_THEME feature flag', () => {
|
||||
// Since we can't easily mock bun:bundle, test the options structure
|
||||
// The real test would require integration testing
|
||||
const expectedOptions = [
|
||||
{ label: "Dark mode", value: "dark" },
|
||||
{ label: "Light mode", value: "light" },
|
||||
{ label: "Dark mode (colorblind-friendly)", value: "dark-daltonized" },
|
||||
{ label: "Light mode (colorblind-friendly)", value: "light-daltonized" },
|
||||
{ label: "Dark mode (ANSI colors only)", value: "dark-ansi" },
|
||||
{ label: "Light mode (ANSI colors only)", value: "light-ansi" },
|
||||
]
|
||||
expect(expectedOptions.length).toBe(6)
|
||||
})
|
||||
|
||||
import { createRoot, Text, useTheme } from '../ink.js'
|
||||
import { KeybindingSetup } from '../keybindings/KeybindingProviderSetup.js'
|
||||
import { AppStateProvider } from '../state/AppState.js'
|
||||
import { ThemeProvider } from './design-system/ThemeProvider.js'
|
||||
|
||||
mock.module('./StructuredDiff.js', () => ({
|
||||
StructuredDiff: function StructuredDiffPreview(): React.ReactNode {
|
||||
const [theme] = useTheme()
|
||||
return <Text>{`Preview theme: ${theme}`}</Text>
|
||||
},
|
||||
}))
|
||||
|
||||
mock.module('./StructuredDiff/colorDiff.js', () => ({
|
||||
getColorModuleUnavailableReason: () => 'env',
|
||||
getSyntaxTheme: () => null,
|
||||
}))
|
||||
|
||||
const SYNC_START = '\x1B[?2026h'
|
||||
const SYNC_END = '\x1B[?2026l'
|
||||
|
||||
function extractLastFrame(output: string): string {
|
||||
let lastFrame: string | null = null
|
||||
let cursor = 0
|
||||
|
||||
while (cursor < output.length) {
|
||||
const start = output.indexOf(SYNC_START, cursor)
|
||||
if (start === -1) {
|
||||
break
|
||||
}
|
||||
|
||||
const contentStart = start + SYNC_START.length
|
||||
const end = output.indexOf(SYNC_END, contentStart)
|
||||
if (end === -1) {
|
||||
break
|
||||
}
|
||||
|
||||
const frame = output.slice(contentStart, end)
|
||||
if (frame.trim().length > 0) {
|
||||
lastFrame = frame
|
||||
}
|
||||
cursor = end + SYNC_END.length
|
||||
}
|
||||
|
||||
return lastFrame ?? output
|
||||
}
|
||||
|
||||
function createTestStreams(): {
|
||||
stdout: PassThrough
|
||||
stdin: PassThrough & {
|
||||
isTTY: boolean
|
||||
setRawMode: (mode: boolean) => void
|
||||
ref: () => void
|
||||
unref: () => void
|
||||
}
|
||||
getOutput: () => string
|
||||
} {
|
||||
let output = ''
|
||||
const stdout = new PassThrough()
|
||||
const stdin = new PassThrough() as PassThrough & {
|
||||
isTTY: boolean
|
||||
setRawMode: (mode: boolean) => void
|
||||
ref: () => void
|
||||
unref: () => void
|
||||
}
|
||||
|
||||
stdin.isTTY = true
|
||||
stdin.setRawMode = () => {}
|
||||
stdin.ref = () => {}
|
||||
stdin.unref = () => {}
|
||||
;(stdout as unknown as { columns: number }).columns = 120
|
||||
stdout.on('data', chunk => {
|
||||
output += chunk.toString()
|
||||
it('includes auto theme when AUTO_THEME feature is enabled', () => {
|
||||
// Test the structure when auto is present
|
||||
const optionsWithAuto = [
|
||||
{ label: "Auto (match terminal)", value: "auto" },
|
||||
{ label: "Dark mode", value: "dark" },
|
||||
]
|
||||
expect(optionsWithAuto[0].value).toBe('auto')
|
||||
})
|
||||
})
|
||||
|
||||
return {
|
||||
stdout,
|
||||
stdin,
|
||||
getOutput: () => output,
|
||||
}
|
||||
}
|
||||
describe('handleRowFocus callback', () => {
|
||||
it('setPreviewTheme is called with theme setting', () => {
|
||||
const setPreviewTheme = mock()
|
||||
const handleRowFocus = (setting: string) => setPreviewTheme(setting)
|
||||
|
||||
async function waitForCondition(
|
||||
predicate: () => boolean,
|
||||
timeoutMs = 2000,
|
||||
): Promise<void> {
|
||||
const startedAt = Date.now()
|
||||
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
if (predicate()) {
|
||||
return
|
||||
}
|
||||
await Bun.sleep(10)
|
||||
}
|
||||
|
||||
throw new Error('Timed out waiting for ThemePicker test condition')
|
||||
}
|
||||
|
||||
async function waitForFrame(
|
||||
getOutput: () => string,
|
||||
predicate: (frame: string) => boolean,
|
||||
): Promise<string> {
|
||||
let frame = ''
|
||||
|
||||
await waitForCondition(() => {
|
||||
frame = stripAnsi(extractLastFrame(getOutput()))
|
||||
return predicate(frame)
|
||||
handleRowFocus('dark')
|
||||
expect(setPreviewTheme).toHaveBeenCalledWith('dark')
|
||||
})
|
||||
})
|
||||
|
||||
return frame
|
||||
}
|
||||
describe('handleSelect callback', () => {
|
||||
it('calls savePreview and onThemeSelect', () => {
|
||||
const savePreview = mock()
|
||||
const onThemeSelect = mock()
|
||||
const handleSelect = (setting: string) => {
|
||||
savePreview()
|
||||
onThemeSelect(setting)
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
mock.restore()
|
||||
})
|
||||
|
||||
test('updates the preview when keyboard focus moves to another theme', async () => {
|
||||
const { ThemePicker } = await import('./ThemePicker.js')
|
||||
const { stdout, stdin, getOutput } = createTestStreams()
|
||||
const root = await createRoot({
|
||||
stdout: stdout as unknown as NodeJS.WriteStream,
|
||||
stdin: stdin as unknown as NodeJS.ReadStream,
|
||||
patchConsole: false,
|
||||
})
|
||||
|
||||
root.render(
|
||||
<AppStateProvider>
|
||||
<KeybindingSetup>
|
||||
<ThemeProvider initialState="dark">
|
||||
<ThemePicker onThemeSelect={() => {}} />
|
||||
</ThemeProvider>
|
||||
</KeybindingSetup>
|
||||
</AppStateProvider>,
|
||||
)
|
||||
|
||||
try {
|
||||
const initialFrame = await waitForFrame(
|
||||
getOutput,
|
||||
frame => frame.includes('Preview theme: dark'),
|
||||
)
|
||||
expect(initialFrame).toContain('Preview theme: dark')
|
||||
|
||||
stdin.write('j')
|
||||
|
||||
const updatedFrame = await waitForFrame(
|
||||
getOutput,
|
||||
frame => frame.includes('Preview theme: light'),
|
||||
)
|
||||
expect(updatedFrame).toContain('Preview theme: light')
|
||||
} finally {
|
||||
root.unmount()
|
||||
stdin.end()
|
||||
stdout.end()
|
||||
await Bun.sleep(0)
|
||||
}
|
||||
handleSelect('light')
|
||||
expect(savePreview).toHaveBeenCalled()
|
||||
expect(onThemeSelect).toHaveBeenCalledWith('light')
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleCancel callback', () => {
|
||||
it('calls cancelPreview and gracefulShutdown when not skipExitHandling', () => {
|
||||
const cancelPreview = mock()
|
||||
const gracefulShutdown = mock()
|
||||
const handleCancel = (skipExitHandling: boolean, onCancelProp?: () => void) => {
|
||||
cancelPreview()
|
||||
if (skipExitHandling) {
|
||||
onCancelProp?.()
|
||||
} else {
|
||||
gracefulShutdown(0)
|
||||
}
|
||||
}
|
||||
|
||||
handleCancel(false)
|
||||
expect(cancelPreview).toHaveBeenCalled()
|
||||
expect(gracefulShutdown).toHaveBeenCalledWith(0)
|
||||
})
|
||||
|
||||
it('calls onCancelProp when skipExitHandling is true', () => {
|
||||
const cancelPreview = mock()
|
||||
const onCancelProp = mock()
|
||||
const handleCancel = (skipExitHandling: boolean, onCancelProp?: () => void) => {
|
||||
cancelPreview()
|
||||
if (skipExitHandling) {
|
||||
onCancelProp?.()
|
||||
}
|
||||
}
|
||||
|
||||
handleCancel(true, onCancelProp)
|
||||
expect(cancelPreview).toHaveBeenCalled()
|
||||
expect(onCancelProp).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('syntax hint logic', () => {
|
||||
it('shows disabled hint when syntax highlighting is disabled', () => {
|
||||
const syntaxHighlightingDisabled = true
|
||||
const syntaxToggleShortcut = 'Ctrl+T'
|
||||
|
||||
const hint = syntaxHighlightingDisabled
|
||||
? `Syntax highlighting disabled (${syntaxToggleShortcut} to enable)`
|
||||
: `Syntax highlighting enabled (${syntaxToggleShortcut} to disable)`
|
||||
|
||||
expect(hint).toContain('disabled')
|
||||
})
|
||||
|
||||
it('shows enabled hint when syntax highlighting is active', () => {
|
||||
const syntaxHighlightingDisabled = false
|
||||
const syntaxToggleShortcut = 'Ctrl+T'
|
||||
|
||||
const hint = !syntaxHighlightingDisabled
|
||||
? `Syntax highlighting enabled (${syntaxToggleShortcut} to disable)`
|
||||
: `Syntax highlighting disabled (${syntaxToggleShortcut} to enable)`
|
||||
|
||||
expect(hint).toContain('enabled')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { c as _c } from "react-compiler-runtime";
|
||||
import { feature } from 'bun:bundle';
|
||||
import React, { createContext, useContext, useEffect, useMemo, useState } from 'react';
|
||||
import useStdin from '../../ink/hooks/use-stdin.js';
|
||||
@@ -119,8 +120,21 @@ export function ThemeProvider({
|
||||
* accepts any ThemeSetting (including 'auto').
|
||||
*/
|
||||
export function useTheme() {
|
||||
const { currentTheme, setThemeSetting } = useContext(ThemeContext);
|
||||
return [currentTheme, setThemeSetting] as const;
|
||||
const $ = _c(3);
|
||||
const {
|
||||
currentTheme,
|
||||
setThemeSetting
|
||||
} = useContext(ThemeContext);
|
||||
let t0;
|
||||
if ($[0] !== currentTheme || $[1] !== setThemeSetting) {
|
||||
t0 = [currentTheme, setThemeSetting];
|
||||
$[0] = currentTheme;
|
||||
$[1] = setThemeSetting;
|
||||
$[2] = t0;
|
||||
} else {
|
||||
t0 = $[2];
|
||||
}
|
||||
return t0;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -131,10 +145,25 @@ export function useThemeSetting() {
|
||||
return useContext(ThemeContext).themeSetting;
|
||||
}
|
||||
export function usePreviewTheme() {
|
||||
const { setPreviewTheme, savePreview, cancelPreview } = useContext(ThemeContext);
|
||||
return {
|
||||
const $ = _c(4);
|
||||
const {
|
||||
setPreviewTheme,
|
||||
savePreview,
|
||||
cancelPreview,
|
||||
};
|
||||
cancelPreview
|
||||
} = useContext(ThemeContext);
|
||||
let t0;
|
||||
if ($[0] !== cancelPreview || $[1] !== savePreview || $[2] !== setPreviewTheme) {
|
||||
t0 = {
|
||||
setPreviewTheme,
|
||||
savePreview,
|
||||
cancelPreview
|
||||
};
|
||||
$[0] = cancelPreview;
|
||||
$[1] = savePreview;
|
||||
$[2] = setPreviewTheme;
|
||||
$[3] = t0;
|
||||
} else {
|
||||
t0 = $[3];
|
||||
}
|
||||
return t0;
|
||||
}
|
||||
|
||||
@@ -32,7 +32,7 @@ export function optionForPermissionSaveDestination(saveDestination: EditableSett
|
||||
case 'userSettings':
|
||||
return {
|
||||
label: 'User settings',
|
||||
description: `Saved in ~/.openclaude/settings.json`,
|
||||
description: `Saved in at ~/.claude/settings.json`,
|
||||
value: saveDestination
|
||||
};
|
||||
}
|
||||
|
||||
@@ -33,14 +33,14 @@ export const IMAGE_TARGET_RAW_SIZE = (API_IMAGE_MAX_BASE64_SIZE * 3) / 4 // 3.75
|
||||
*
|
||||
* Note: The API internally resizes images larger than 1568px (source:
|
||||
* encoding/full_encoding.py), but this is handled server-side and doesn't
|
||||
* cause errors. These client-side limits (1568px) are slightly larger to
|
||||
* cause errors. These client-side limits (2000px) are slightly larger to
|
||||
* preserve quality when beneficial.
|
||||
*
|
||||
* The API_IMAGE_MAX_BASE64_SIZE (5MB) is the actual hard limit that causes
|
||||
* API errors if exceeded.
|
||||
*/
|
||||
export const IMAGE_MAX_WIDTH = 1568
|
||||
export const IMAGE_MAX_HEIGHT = 1568
|
||||
export const IMAGE_MAX_WIDTH = 2000
|
||||
export const IMAGE_MAX_HEIGHT = 2000
|
||||
|
||||
// =============================================================================
|
||||
// PDF LIMITS
|
||||
|
||||
@@ -2,11 +2,8 @@ import { afterEach, expect, test } from 'bun:test'
|
||||
|
||||
import { getSystemPrompt, DEFAULT_AGENT_PROMPT } from './prompts.js'
|
||||
import { CLI_SYSPROMPT_PREFIXES, getCLISyspromptPrefix } from './system.js'
|
||||
import { CLAUDE_CODE_GUIDE_AGENT } from '../tools/AgentTool/built-in/claudeCodeGuideAgent.js'
|
||||
import { GENERAL_PURPOSE_AGENT } from '../tools/AgentTool/built-in/generalPurposeAgent.js'
|
||||
import { EXPLORE_AGENT } from '../tools/AgentTool/built-in/exploreAgent.js'
|
||||
import { PLAN_AGENT } from '../tools/AgentTool/built-in/planAgent.js'
|
||||
import { STATUSLINE_SETUP_AGENT } from '../tools/AgentTool/built-in/statuslineSetup.js'
|
||||
|
||||
const originalSimpleEnv = process.env.CLAUDE_CODE_SIMPLE
|
||||
|
||||
@@ -16,12 +13,10 @@ afterEach(() => {
|
||||
|
||||
test('CLI identity prefixes describe OpenClaude instead of Claude Code', () => {
|
||||
expect(getCLISyspromptPrefix()).toContain('OpenClaude')
|
||||
expect(getCLISyspromptPrefix()).not.toContain('Claude Code')
|
||||
expect(getCLISyspromptPrefix()).not.toContain("Anthropic's official CLI for Claude")
|
||||
|
||||
for (const prefix of CLI_SYSPROMPT_PREFIXES) {
|
||||
expect(prefix).toContain('OpenClaude')
|
||||
expect(prefix).not.toContain('Claude Code')
|
||||
expect(prefix).not.toContain("Anthropic's official CLI for Claude")
|
||||
}
|
||||
})
|
||||
@@ -32,53 +27,22 @@ test('simple mode identity describes OpenClaude instead of Claude Code', async (
|
||||
const prompt = await getSystemPrompt([], 'gpt-4o')
|
||||
|
||||
expect(prompt[0]).toContain('OpenClaude')
|
||||
expect(prompt[0]).not.toContain('Claude Code')
|
||||
expect(prompt[0]).not.toContain("Anthropic's official CLI for Claude")
|
||||
})
|
||||
|
||||
test('built-in agent prompts describe OpenClaude instead of Claude Code', () => {
|
||||
expect(DEFAULT_AGENT_PROMPT).toContain('OpenClaude')
|
||||
expect(DEFAULT_AGENT_PROMPT).not.toContain('Claude Code')
|
||||
expect(DEFAULT_AGENT_PROMPT).not.toContain("Anthropic's official CLI for Claude")
|
||||
|
||||
const generalPrompt = GENERAL_PURPOSE_AGENT.getSystemPrompt({
|
||||
toolUseContext: { options: {} as never },
|
||||
})
|
||||
expect(generalPrompt).toContain('OpenClaude')
|
||||
expect(generalPrompt).not.toContain('Claude Code')
|
||||
expect(generalPrompt).not.toContain("Anthropic's official CLI for Claude")
|
||||
|
||||
const explorePrompt = EXPLORE_AGENT.getSystemPrompt({
|
||||
toolUseContext: { options: {} as never },
|
||||
})
|
||||
expect(explorePrompt).toContain('OpenClaude')
|
||||
expect(explorePrompt).not.toContain('Claude Code')
|
||||
expect(explorePrompt).not.toContain("Anthropic's official CLI for Claude")
|
||||
|
||||
const planPrompt = PLAN_AGENT.getSystemPrompt({
|
||||
toolUseContext: { options: {} as never },
|
||||
})
|
||||
expect(planPrompt).toContain('OpenClaude')
|
||||
expect(planPrompt).not.toContain('Claude Code')
|
||||
|
||||
const statuslinePrompt = STATUSLINE_SETUP_AGENT.getSystemPrompt({
|
||||
toolUseContext: { options: {} as never },
|
||||
})
|
||||
expect(statuslinePrompt).toContain('OpenClaude')
|
||||
expect(statuslinePrompt).not.toContain('Claude Code')
|
||||
|
||||
const guidePrompt = CLAUDE_CODE_GUIDE_AGENT.getSystemPrompt({
|
||||
toolUseContext: {
|
||||
options: {
|
||||
commands: [],
|
||||
agentDefinitions: { activeAgents: [] },
|
||||
mcpClients: [],
|
||||
} as never,
|
||||
},
|
||||
})
|
||||
expect(guidePrompt).toContain('OpenClaude')
|
||||
expect(guidePrompt).toContain('You are the OpenClaude guide agent.')
|
||||
expect(guidePrompt).toContain('**OpenClaude** (the CLI tool)')
|
||||
expect(guidePrompt).not.toContain('You are the Claude guide agent.')
|
||||
expect(guidePrompt).not.toContain('**Claude Code** (the CLI tool)')
|
||||
})
|
||||
|
||||
@@ -214,7 +214,7 @@ function getSimpleDoingTasksSection(): string {
|
||||
]
|
||||
|
||||
const userHelpSubitems = [
|
||||
`/help: Get help with using OpenClaude`,
|
||||
`/help: Get help with using Claude Code`,
|
||||
`To give feedback, users should ${MACRO.ISSUES_EXPLAINER}`,
|
||||
]
|
||||
|
||||
@@ -242,7 +242,7 @@ function getSimpleDoingTasksSection(): string {
|
||||
: []),
|
||||
...(process.env.USER_TYPE === 'ant'
|
||||
? [
|
||||
`If the user reports a bug, slowness, or unexpected behavior with OpenClaude itself (as opposed to asking you to fix their own code), recommend the appropriate slash command: /issue for model-related problems (odd outputs, wrong tool choices, hallucinations, refusals), or /share to upload the full session transcript for product bugs, crashes, slowness, or general issues. Only recommend these when the user is describing a problem with OpenClaude.`,
|
||||
`If the user reports a bug, slowness, or unexpected behavior with Claude Code itself (as opposed to asking you to fix their own code), recommend the appropriate slash command: /issue for model-related problems (odd outputs, wrong tool choices, hallucinations, refusals), or /share to upload the full session transcript for product bugs, crashes, slowness, or general issues. Only recommend these when the user is describing a problem with Claude Code.`,
|
||||
]
|
||||
: []),
|
||||
`If the user asks for help or wants to give feedback inform them of the following:`,
|
||||
@@ -449,7 +449,7 @@ export async function getSystemPrompt(
|
||||
): Promise<string[]> {
|
||||
if (isEnvTruthy(process.env.CLAUDE_CODE_SIMPLE)) {
|
||||
return [
|
||||
`You are OpenClaude, an open-source coding agent and CLI.\n\nCWD: ${getCwd()}\nDate: ${getSessionStartDate()}`,
|
||||
`You are OpenClaude, an open-source fork of Claude Code.\n\nCWD: ${getCwd()}\nDate: ${getSessionStartDate()}`,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -696,10 +696,10 @@ export async function computeSimpleEnvInfo(
|
||||
: `The most recent Claude model family is Claude 4.5/4.6. Model IDs — Opus 4.6: '${CLAUDE_4_5_OR_4_6_MODEL_IDS.opus}', Sonnet 4.6: '${CLAUDE_4_5_OR_4_6_MODEL_IDS.sonnet}', Haiku 4.5: '${CLAUDE_4_5_OR_4_6_MODEL_IDS.haiku}'. When building AI applications, default to the latest and most capable Claude models.`,
|
||||
process.env.USER_TYPE === 'ant' && isUndercover()
|
||||
? null
|
||||
: `OpenClaude is available as a CLI in the terminal and can be used across local development environments and IDE workflows.`,
|
||||
: `Claude Code is available as a CLI in the terminal, desktop app (Mac/Windows), web app (claude.ai/code), and IDE extensions (VS Code, JetBrains).`,
|
||||
process.env.USER_TYPE === 'ant' && isUndercover()
|
||||
? null
|
||||
: `Fast mode for OpenClaude uses the same ${FRONTIER_MODEL_NAME} model with faster output. It does NOT switch to a different model. It can be toggled with /fast.`,
|
||||
: `Fast mode for Claude Code uses the same ${FRONTIER_MODEL_NAME} model with faster output. It does NOT switch to a different model. It can be toggled with /fast.`,
|
||||
].filter(item => item !== null)
|
||||
|
||||
return [
|
||||
@@ -755,7 +755,7 @@ export function getUnameSR(): string {
|
||||
return `${osType()} ${osRelease()}`
|
||||
}
|
||||
|
||||
export const DEFAULT_AGENT_PROMPT = `You are an agent for OpenClaude, an open-source coding agent and CLI. Given the user's message, you should use the tools available to complete the task. Complete the task fully—don't gold-plate, but don't leave it half-done. When you complete the task, respond with a concise report covering what was done and any key findings — the caller will relay this to the user, so it only needs the essentials.`
|
||||
export const DEFAULT_AGENT_PROMPT = `You are an agent for OpenClaude, an open-source fork of Claude Code. Given the user's message, you should use the tools available to complete the task. Complete the task fully—don't gold-plate, but don't leave it half-done. When you complete the task, respond with a concise report covering what was done and any key findings — the caller will relay this to the user, so it only needs the essentials.`
|
||||
|
||||
export async function enhanceSystemPromptWithEnvDetails(
|
||||
existingSystemPrompt: string[],
|
||||
|
||||
@@ -8,11 +8,11 @@ import { getAPIProvider } from '../utils/model/providers.js'
|
||||
import { getWorkload } from '../utils/workloadContext.js'
|
||||
|
||||
const DEFAULT_PREFIX =
|
||||
`You are OpenClaude, an open-source coding agent and CLI.`
|
||||
`You are OpenClaude, an open-source fork of Claude Code.`
|
||||
const AGENT_SDK_CLAUDE_CODE_PRESET_PREFIX =
|
||||
`You are OpenClaude, an open-source coding agent and CLI running within the Claude Agent SDK.`
|
||||
`You are OpenClaude, an open-source fork of Claude Code, running within the Claude Agent SDK.`
|
||||
const AGENT_SDK_PREFIX =
|
||||
`You are OpenClaude, built on the Claude Agent SDK.`
|
||||
`You are a Claude agent running in OpenClaude, built on the Claude Agent SDK.`
|
||||
|
||||
const CLI_SYSPROMPT_PREFIX_VALUES = [
|
||||
DEFAULT_PREFIX,
|
||||
|
||||
@@ -181,7 +181,7 @@ function formatCost(cost: number, maxDecimalPlaces: number = 4): string {
|
||||
function formatModelUsage(): string {
|
||||
const modelUsageMap = getModelUsage()
|
||||
if (Object.keys(modelUsageMap).length === 0) {
|
||||
return 'Usage: 0 input, 0 output'
|
||||
return 'Usage: 0 input, 0 output, 0 cache read, 0 cache write'
|
||||
}
|
||||
|
||||
// Accumulate usage by short name
|
||||
@@ -211,19 +211,15 @@ function formatModelUsage(): string {
|
||||
|
||||
let result = 'Usage by model:'
|
||||
for (const [shortName, usage] of Object.entries(usageByShortName)) {
|
||||
let usageString =
|
||||
const usageString =
|
||||
` ${formatNumber(usage.inputTokens)} input, ` +
|
||||
`${formatNumber(usage.outputTokens)} output`
|
||||
if (usage.cacheReadInputTokens > 0) {
|
||||
usageString += `, ${formatNumber(usage.cacheReadInputTokens)} cache read`
|
||||
}
|
||||
if (usage.cacheCreationInputTokens > 0) {
|
||||
usageString += `, ${formatNumber(usage.cacheCreationInputTokens)} cache write`
|
||||
}
|
||||
if (usage.webSearchRequests > 0) {
|
||||
usageString += `, ${formatNumber(usage.webSearchRequests)} web search`
|
||||
}
|
||||
usageString += ` (${formatCost(usage.costUSD)})`
|
||||
`${formatNumber(usage.outputTokens)} output, ` +
|
||||
`${formatNumber(usage.cacheReadInputTokens)} cache read, ` +
|
||||
`${formatNumber(usage.cacheCreationInputTokens)} cache write` +
|
||||
(usage.webSearchRequests > 0
|
||||
? `, ${formatNumber(usage.webSearchRequests)} web search`
|
||||
: '') +
|
||||
` (${formatCost(usage.costUSD)})`
|
||||
result += `\n` + `${shortName}:`.padStart(21) + usageString
|
||||
}
|
||||
return result
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useEffect, useSyncExternalStore } from 'react'
|
||||
import { useCallback, useEffect } from 'react'
|
||||
import type { Command } from '../commands.js'
|
||||
import { useNotifications } from '../context/notifications.js'
|
||||
import {
|
||||
@@ -7,11 +7,6 @@ import {
|
||||
} from '../services/analytics/index.js'
|
||||
import { reinitializeLspServerManager } from '../services/lsp/manager.js'
|
||||
import { useAppState, useSetAppState } from '../state/AppState.js'
|
||||
import {
|
||||
getPluginCommandsState,
|
||||
setPluginCommandsState,
|
||||
subscribePluginCommands,
|
||||
} from '../state/pluginCommandsStore.js'
|
||||
import type { AgentDefinition } from '../tools/AgentTool/loadAgentsDir.js'
|
||||
import { count } from '../utils/array.js'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
@@ -44,11 +39,6 @@ export function useManagePlugins({
|
||||
}: {
|
||||
enabled?: boolean
|
||||
} = {}) {
|
||||
const pluginCommands = useSyncExternalStore(
|
||||
subscribePluginCommands,
|
||||
getPluginCommandsState,
|
||||
getPluginCommandsState,
|
||||
)
|
||||
const setAppState = useSetAppState()
|
||||
const needsRefresh = useAppState(s => s.plugins.needsRefresh)
|
||||
const { addNotification } = useNotifications()
|
||||
@@ -84,7 +74,6 @@ export function useManagePlugins({
|
||||
|
||||
try {
|
||||
commands = await getPluginCommands()
|
||||
setPluginCommandsState(commands)
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : String(error)
|
||||
@@ -93,7 +82,6 @@ export function useManagePlugins({
|
||||
source: 'plugin-commands',
|
||||
error: `Failed to load plugin commands: ${errorMessage}`,
|
||||
})
|
||||
setPluginCommandsState([])
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -185,7 +173,7 @@ export function useManagePlugins({
|
||||
...prevState.plugins,
|
||||
enabled,
|
||||
disabled,
|
||||
commands: [],
|
||||
commands,
|
||||
errors: mergedErrors,
|
||||
},
|
||||
}
|
||||
@@ -238,7 +226,6 @@ export function useManagePlugins({
|
||||
logError(errorObj)
|
||||
logForDebugging(`Error loading plugins: ${error}`)
|
||||
// Set empty state on error, but preserve LSP errors and add the new error
|
||||
setPluginCommandsState([])
|
||||
setAppState(prevState => {
|
||||
// Keep existing LSP/non-plugin-loading errors
|
||||
const existingLspErrors = prevState.plugins.errors.filter(
|
||||
@@ -297,11 +284,6 @@ export function useManagePlugins({
|
||||
})
|
||||
}, [initialPluginLoad, enabled])
|
||||
|
||||
useEffect(() => {
|
||||
if (enabled) return
|
||||
setPluginCommandsState([])
|
||||
}, [enabled])
|
||||
|
||||
// Plugin state changed on disk (background reconcile, /plugin menu,
|
||||
// external settings edit). Show a notification; user runs /reload-plugins
|
||||
// to apply. The previous auto-refresh here had a stale-cache bug (only
|
||||
@@ -319,6 +301,4 @@ export function useManagePlugins({
|
||||
// Do NOT auto-refresh. Do NOT reset needsRefresh — /reload-plugins
|
||||
// consumes it via refreshActivePlugins().
|
||||
}, [enabled, needsRefresh, addNotification])
|
||||
|
||||
return enabled ? pluginCommands : []
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { useLayoutEffect, useRef, useState } from 'react'
|
||||
import { isInputModeCharacter } from 'src/components/PromptInput/inputModes.js'
|
||||
import { useNotifications } from 'src/context/notifications.js'
|
||||
import stripAnsi from 'strip-ansi'
|
||||
@@ -101,74 +100,9 @@ export function useTextInput({
|
||||
prewarmModifiers()
|
||||
}
|
||||
|
||||
// Keep a local text/cursor mirror so consecutive keystrokes can advance
|
||||
// immediately even if the controlled parent value hasn't committed yet.
|
||||
const [renderState, setRenderState] = useState(() => ({
|
||||
value: originalValue,
|
||||
offset: externalOffset,
|
||||
}))
|
||||
const liveValueRef = useRef(originalValue)
|
||||
const liveOffsetRef = useRef(externalOffset)
|
||||
const lastSeenPropsRef = useRef({
|
||||
value: originalValue,
|
||||
offset: externalOffset,
|
||||
})
|
||||
const updateRenderedInput = (nextValue: string, nextOffset: number): void => {
|
||||
liveValueRef.current = nextValue
|
||||
liveOffsetRef.current = nextOffset
|
||||
setRenderState(prev =>
|
||||
prev.value === nextValue && prev.offset === nextOffset
|
||||
? prev
|
||||
: { value: nextValue, offset: nextOffset },
|
||||
)
|
||||
}
|
||||
useLayoutEffect(() => {
|
||||
if (
|
||||
lastSeenPropsRef.current.value === originalValue &&
|
||||
lastSeenPropsRef.current.offset === externalOffset
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
lastSeenPropsRef.current = {
|
||||
value: originalValue,
|
||||
offset: externalOffset,
|
||||
}
|
||||
updateRenderedInput(originalValue, externalOffset)
|
||||
}, [originalValue, externalOffset])
|
||||
|
||||
const value = renderState.value
|
||||
const offset = renderState.offset
|
||||
const getLiveValue = (): string => liveValueRef.current
|
||||
const getLiveCursor = (): Cursor =>
|
||||
Cursor.fromText(liveValueRef.current, columns, liveOffsetRef.current)
|
||||
const setValue = (nextValue: string, nextOffset = liveOffsetRef.current): void => {
|
||||
const previousValue = liveValueRef.current
|
||||
const previousOffset = liveOffsetRef.current
|
||||
|
||||
if (previousValue === nextValue && previousOffset === nextOffset) {
|
||||
return
|
||||
}
|
||||
|
||||
updateRenderedInput(nextValue, nextOffset)
|
||||
|
||||
if (previousValue !== nextValue) {
|
||||
onChange(nextValue)
|
||||
}
|
||||
|
||||
if (previousOffset !== nextOffset) {
|
||||
onOffsetChange(nextOffset)
|
||||
}
|
||||
}
|
||||
const setOffset = (nextOffset: number): void => {
|
||||
if (nextOffset === liveOffsetRef.current) {
|
||||
return
|
||||
}
|
||||
|
||||
updateRenderedInput(liveValueRef.current, nextOffset)
|
||||
onOffsetChange(nextOffset)
|
||||
}
|
||||
const cursor = Cursor.fromText(value, columns, offset)
|
||||
const offset = externalOffset
|
||||
const setOffset = onOffsetChange
|
||||
const cursor = Cursor.fromText(originalValue, columns, offset)
|
||||
const { addNotification, removeNotification } = useNotifications()
|
||||
|
||||
const handleCtrlC = useDoublePress(
|
||||
@@ -177,11 +111,9 @@ export function useTextInput({
|
||||
},
|
||||
() => onExit?.(),
|
||||
() => {
|
||||
const currentValue = getLiveValue()
|
||||
if (currentValue) {
|
||||
updateRenderedInput('', 0)
|
||||
if (originalValue) {
|
||||
onChange('')
|
||||
onOffsetChange(0)
|
||||
setOffset(0)
|
||||
onHistoryReset?.()
|
||||
}
|
||||
},
|
||||
@@ -193,8 +125,7 @@ export function useTextInput({
|
||||
// not dialog dismissal, and needs the double-press safety mechanism.
|
||||
const handleEscape = useDoublePress(
|
||||
(show: boolean) => {
|
||||
const currentValue = getLiveValue()
|
||||
if (!currentValue || !show) {
|
||||
if (!originalValue || !show) {
|
||||
return
|
||||
}
|
||||
addNotification({
|
||||
@@ -205,19 +136,17 @@ export function useTextInput({
|
||||
})
|
||||
},
|
||||
() => {
|
||||
const currentValue = getLiveValue()
|
||||
// Remove the "Esc again to clear" notification immediately
|
||||
removeNotification('escape-again-to-clear')
|
||||
onClearInput?.()
|
||||
if (currentValue) {
|
||||
if (originalValue) {
|
||||
// Track double-escape usage for feature discovery
|
||||
// Save to history before clearing
|
||||
if (currentValue.trim() !== '') {
|
||||
addToHistory(currentValue)
|
||||
if (originalValue.trim() !== '') {
|
||||
addToHistory(originalValue)
|
||||
}
|
||||
updateRenderedInput('', 0)
|
||||
onChange('')
|
||||
onOffsetChange(0)
|
||||
setOffset(0)
|
||||
onHistoryReset?.()
|
||||
}
|
||||
},
|
||||
@@ -225,13 +154,13 @@ export function useTextInput({
|
||||
|
||||
const handleEmptyCtrlD = useDoublePress(
|
||||
show => {
|
||||
if (getLiveValue() !== '') {
|
||||
if (originalValue !== '') {
|
||||
return
|
||||
}
|
||||
onExitMessage?.(show, 'Ctrl-D')
|
||||
},
|
||||
() => {
|
||||
if (getLiveValue() !== '') {
|
||||
if (originalValue !== '') {
|
||||
return
|
||||
}
|
||||
onExit?.()
|
||||
@@ -239,7 +168,6 @@ export function useTextInput({
|
||||
)
|
||||
|
||||
function handleCtrlD(): MaybeCursor {
|
||||
const cursor = getLiveCursor()
|
||||
if (cursor.text === '') {
|
||||
// When input is empty, handle double-press
|
||||
handleEmptyCtrlD()
|
||||
@@ -250,28 +178,24 @@ export function useTextInput({
|
||||
}
|
||||
|
||||
function killToLineEnd(): Cursor {
|
||||
const cursor = getLiveCursor()
|
||||
const { cursor: newCursor, killed } = cursor.deleteToLineEnd()
|
||||
pushToKillRing(killed, 'append')
|
||||
return newCursor
|
||||
}
|
||||
|
||||
function killToLineStart(): Cursor {
|
||||
const cursor = getLiveCursor()
|
||||
const { cursor: newCursor, killed } = cursor.deleteToLineStart()
|
||||
pushToKillRing(killed, 'prepend')
|
||||
return newCursor
|
||||
}
|
||||
|
||||
function killWordBefore(): Cursor {
|
||||
const cursor = getLiveCursor()
|
||||
const { cursor: newCursor, killed } = cursor.deleteWordBefore()
|
||||
pushToKillRing(killed, 'prepend')
|
||||
return newCursor
|
||||
}
|
||||
|
||||
function yank(): Cursor {
|
||||
const cursor = getLiveCursor()
|
||||
const text = getLastKill()
|
||||
if (text.length > 0) {
|
||||
const startOffset = cursor.offset
|
||||
@@ -283,7 +207,6 @@ export function useTextInput({
|
||||
}
|
||||
|
||||
function handleYankPop(): Cursor {
|
||||
const cursor = getLiveCursor()
|
||||
const popResult = yankPop()
|
||||
if (!popResult) {
|
||||
return cursor
|
||||
@@ -299,16 +222,13 @@ export function useTextInput({
|
||||
}
|
||||
|
||||
const handleCtrl = mapInput([
|
||||
['a', () => getLiveCursor().startOfLine()],
|
||||
['b', () => getLiveCursor().left()],
|
||||
['a', () => cursor.startOfLine()],
|
||||
['b', () => cursor.left()],
|
||||
['c', handleCtrlC],
|
||||
['d', handleCtrlD],
|
||||
['e', () => getLiveCursor().endOfLine()],
|
||||
['f', () => getLiveCursor().right()],
|
||||
['h', () => {
|
||||
const cursor = getLiveCursor()
|
||||
return cursor.deleteTokenBefore() ?? cursor.backspace()
|
||||
}],
|
||||
['e', () => cursor.endOfLine()],
|
||||
['f', () => cursor.right()],
|
||||
['h', () => cursor.deleteTokenBefore() ?? cursor.backspace()],
|
||||
['k', killToLineEnd],
|
||||
['n', () => downOrHistoryDown()],
|
||||
['p', () => upOrHistoryUp()],
|
||||
@@ -318,15 +238,13 @@ export function useTextInput({
|
||||
])
|
||||
|
||||
const handleMeta = mapInput([
|
||||
['b', () => getLiveCursor().prevWord()],
|
||||
['f', () => getLiveCursor().nextWord()],
|
||||
['d', () => getLiveCursor().deleteWordAfter()],
|
||||
['b', () => cursor.prevWord()],
|
||||
['f', () => cursor.nextWord()],
|
||||
['d', () => cursor.deleteWordAfter()],
|
||||
['y', handleYankPop],
|
||||
])
|
||||
|
||||
function handleEnter(key: Key) {
|
||||
const cursor = getLiveCursor()
|
||||
const currentValue = getLiveValue()
|
||||
if (
|
||||
multiline &&
|
||||
cursor.offset > 0 &&
|
||||
@@ -345,11 +263,10 @@ export function useTextInput({
|
||||
if (env.terminal === 'Apple_Terminal' && isModifierPressed('shift')) {
|
||||
return cursor.insert('\n')
|
||||
}
|
||||
onSubmit?.(currentValue)
|
||||
onSubmit?.(originalValue)
|
||||
}
|
||||
|
||||
function upOrHistoryUp() {
|
||||
const cursor = getLiveCursor()
|
||||
if (disableCursorMovementForUpDownKeys) {
|
||||
onHistoryUp?.()
|
||||
return cursor
|
||||
@@ -374,7 +291,6 @@ export function useTextInput({
|
||||
return cursor
|
||||
}
|
||||
function downOrHistoryDown() {
|
||||
const cursor = getLiveCursor()
|
||||
if (disableCursorMovementForUpDownKeys) {
|
||||
onHistoryDown?.()
|
||||
return cursor
|
||||
@@ -399,7 +315,7 @@ export function useTextInput({
|
||||
return cursor
|
||||
}
|
||||
|
||||
function mapKey(key: Key, cursor: Cursor): InputMapper {
|
||||
function mapKey(key: Key): InputMapper {
|
||||
switch (true) {
|
||||
case key.escape:
|
||||
return () => {
|
||||
@@ -513,7 +429,6 @@ export function useTextInput({
|
||||
}
|
||||
|
||||
function onInput(input: string, key: Key): void {
|
||||
const currentCursor = getLiveCursor()
|
||||
// Note: Image paste shortcut (chat:imagePaste) is handled via useKeybindings in PromptInput
|
||||
|
||||
// Apply filter if provided
|
||||
@@ -531,15 +446,18 @@ export function useTextInput({
|
||||
|
||||
// Apply all DEL characters as backspace operations synchronously
|
||||
// Try to delete tokens first, fall back to character backspace
|
||||
let nextCursor = currentCursor
|
||||
let currentCursor = cursor
|
||||
for (let i = 0; i < delCount; i++) {
|
||||
nextCursor =
|
||||
nextCursor.deleteTokenBefore() ?? nextCursor.backspace()
|
||||
currentCursor =
|
||||
currentCursor.deleteTokenBefore() ?? currentCursor.backspace()
|
||||
}
|
||||
|
||||
// Update state once with the final result
|
||||
if (!currentCursor.equals(nextCursor)) {
|
||||
setValue(nextCursor.text, nextCursor.offset)
|
||||
if (!cursor.equals(currentCursor)) {
|
||||
if (cursor.text !== currentCursor.text) {
|
||||
onChange(currentCursor.text)
|
||||
}
|
||||
setOffset(currentCursor.offset)
|
||||
}
|
||||
resetKillAccumulation()
|
||||
resetYankState()
|
||||
@@ -556,10 +474,13 @@ export function useTextInput({
|
||||
resetYankState()
|
||||
}
|
||||
|
||||
const nextCursor = mapKey(key, currentCursor)(filteredInput)
|
||||
const nextCursor = mapKey(key)(filteredInput)
|
||||
if (nextCursor) {
|
||||
if (!currentCursor.equals(nextCursor)) {
|
||||
setValue(nextCursor.text, nextCursor.offset)
|
||||
if (!cursor.equals(nextCursor)) {
|
||||
if (cursor.text !== nextCursor.text) {
|
||||
onChange(nextCursor.text)
|
||||
}
|
||||
setOffset(nextCursor.offset)
|
||||
}
|
||||
// SSH-coalesced Enter: on slow links, "o" + Enter can arrive as one
|
||||
// chunk "o\r". parseKeypress only matches s === '\r', so it hit the
|
||||
@@ -591,7 +512,6 @@ export function useTextInput({
|
||||
|
||||
return {
|
||||
onInput,
|
||||
value,
|
||||
renderedValue: cursor.render(
|
||||
cursorChar,
|
||||
mask,
|
||||
@@ -600,7 +520,6 @@ export function useTextInput({
|
||||
maxVisibleLines,
|
||||
),
|
||||
offset,
|
||||
setValue,
|
||||
setOffset,
|
||||
cursorLine: cursorPos.line - cursor.getViewportStartLine(maxVisibleLines),
|
||||
cursorColumn: cursorPos.column,
|
||||
|
||||
@@ -70,14 +70,14 @@ export function useVimInput(props: UseVimInputProps): VimInputState {
|
||||
// Vim behavior: move cursor left by 1 when exiting insert mode
|
||||
// (unless at beginning of line or at offset 0)
|
||||
const offset = textInput.offset
|
||||
if (offset > 0 && textInput.value[offset - 1] !== '\n') {
|
||||
if (offset > 0 && props.value[offset - 1] !== '\n') {
|
||||
textInput.setOffset(offset - 1)
|
||||
}
|
||||
|
||||
vimStateRef.current = { mode: 'NORMAL', command: { type: 'idle' } }
|
||||
setMode('NORMAL')
|
||||
onModeChange?.('NORMAL')
|
||||
}, [onModeChange, textInput])
|
||||
}, [onModeChange, textInput, props.value])
|
||||
|
||||
function createOperatorContext(
|
||||
cursor: Cursor,
|
||||
@@ -85,8 +85,8 @@ export function useVimInput(props: UseVimInputProps): VimInputState {
|
||||
): OperatorContext {
|
||||
return {
|
||||
cursor,
|
||||
text: textInput.value,
|
||||
setText: (newText: string) => textInput.setValue(newText),
|
||||
text: props.value,
|
||||
setText: (newText: string) => props.onChange(newText),
|
||||
setOffset: (offset: number) => textInput.setOffset(offset),
|
||||
enterInsert: (offset: number) => switchToInsertMode(offset),
|
||||
getRegister: () => persistentRef.current.register,
|
||||
@@ -110,18 +110,15 @@ export function useVimInput(props: UseVimInputProps): VimInputState {
|
||||
const change = persistentRef.current.lastChange
|
||||
if (!change) return
|
||||
|
||||
const cursor = Cursor.fromText(
|
||||
textInput.value,
|
||||
props.columns,
|
||||
textInput.offset,
|
||||
)
|
||||
const cursor = Cursor.fromText(props.value, props.columns, textInput.offset)
|
||||
const ctx = createOperatorContext(cursor, true)
|
||||
|
||||
switch (change.type) {
|
||||
case 'insert':
|
||||
if (change.text) {
|
||||
const newCursor = cursor.insert(change.text)
|
||||
textInput.setValue(newCursor.text, newCursor.offset)
|
||||
props.onChange(newCursor.text)
|
||||
textInput.setOffset(newCursor.offset)
|
||||
}
|
||||
break
|
||||
|
||||
@@ -182,11 +179,7 @@ export function useVimInput(props: UseVimInputProps): VimInputState {
|
||||
// lookups expect single chars and a prepended space would break them.
|
||||
const filtered = inputFilter ? inputFilter(rawInput, key) : rawInput
|
||||
const input = state.mode === 'INSERT' ? filtered : rawInput
|
||||
const cursor = Cursor.fromText(
|
||||
textInput.value,
|
||||
props.columns,
|
||||
textInput.offset,
|
||||
)
|
||||
const cursor = Cursor.fromText(props.value, props.columns, textInput.offset)
|
||||
|
||||
if (key.ctrl) {
|
||||
textInput.onInput(input, key)
|
||||
|
||||
@@ -115,10 +115,7 @@ export default class App extends PureComponent<Props, State> {
|
||||
keyParseState = INITIAL_STATE;
|
||||
// Timer for flushing incomplete escape sequences
|
||||
incompleteEscapeTimer: NodeJS.Timeout | null = null;
|
||||
// Default to readable-mode stdin (legacy Ink behavior). The data-mode path
|
||||
// is kept as an explicit opt-in because some terminals can enter a state
|
||||
// where startup input appears frozen when data mode is the default.
|
||||
stdinMode: 'readable' | 'data' = process.env.OPENCLAUDE_USE_DATA_STDIN === '1' || process.env.OPENCLAUDE_USE_READABLE_STDIN === '0' ? 'data' : 'readable';
|
||||
stdinMode: 'readable' | 'data' = process.env.OPENCLAUDE_USE_READABLE_STDIN === '1' ? 'readable' : 'data';
|
||||
// Timeout durations for incomplete sequences (ms)
|
||||
readonly NORMAL_TIMEOUT = 50; // Short timeout for regular esc sequences
|
||||
readonly PASTE_TIMEOUT = 500; // Longer timeout for paste operations
|
||||
|
||||
@@ -33,7 +33,7 @@ import createRenderer, { type Renderer } from './renderer.js';
|
||||
import { CellWidth, CharPool, cellAt, createScreen, HyperlinkPool, isEmptyCellAt, migrateScreenPools, StylePool } from './screen.js';
|
||||
import { applySearchHighlight } from './searchHighlight.js';
|
||||
import { applySelectionOverlay, captureScrolledRows, clearSelection, createSelectionState, extendSelection, type FocusMove, findPlainTextUrlAt, getSelectedText, hasSelection, moveFocus, type SelectionState, selectLineAt, selectWordAt, shiftAnchor, shiftSelection, shiftSelectionForFollow, startSelection, updateSelection } from './selection.js';
|
||||
import { shouldSkipMainScreenSyncMarkers, shouldUseMainScreenRewrite, SYNC_OUTPUT_SUPPORTED, supportsExtendedKeys, type Terminal, writeDiffToTerminal } from './terminal.js';
|
||||
import { SYNC_OUTPUT_SUPPORTED, supportsExtendedKeys, type Terminal, writeDiffToTerminal } from './terminal.js';
|
||||
import { CURSOR_HOME, cursorMove, cursorPosition, DISABLE_KITTY_KEYBOARD, DISABLE_MODIFY_OTHER_KEYS, ENABLE_KITTY_KEYBOARD, ENABLE_MODIFY_OTHER_KEYS, ERASE_SCREEN } from './termio/csi.js';
|
||||
import { DBP, DFE, DISABLE_MOUSE_TRACKING, ENABLE_MOUSE_TRACKING, ENTER_ALT_SCREEN, EXIT_ALT_SCREEN, SHOW_CURSOR } from './termio/dec.js';
|
||||
import { CLEAR_ITERM2_PROGRESS, CLEAR_TAB_STATUS, setClipboard, supportsTabStatus, wrapForMultiplexer } from './termio/osc.js';
|
||||
@@ -609,13 +609,12 @@ export default class Ink {
|
||||
};
|
||||
}
|
||||
const tDiff = performance.now();
|
||||
const rewriteMainScreen = !this.altScreenActive && shouldUseMainScreenRewrite();
|
||||
const diff = this.log.render(prevFrame, frame, this.altScreenActive,
|
||||
// DECSTBM needs BSU/ESU atomicity — without it the outer terminal
|
||||
// renders the scrolled-but-not-yet-repainted intermediate state.
|
||||
// tmux is the main case (re-emits DECSTBM with its own timing and
|
||||
// doesn't implement DEC 2026, so SYNC_OUTPUT_SUPPORTED is false).
|
||||
SYNC_OUTPUT_SUPPORTED, rewriteMainScreen);
|
||||
SYNC_OUTPUT_SUPPORTED);
|
||||
const diffMs = performance.now() - tDiff;
|
||||
// Swap buffers
|
||||
this.backFrame = this.frontFrame;
|
||||
@@ -760,8 +759,7 @@ export default class Ink {
|
||||
}
|
||||
}
|
||||
const tWrite = performance.now();
|
||||
const skipSyncMarkers = this.altScreenActive ? !SYNC_OUTPUT_SUPPORTED : rewriteMainScreen || shouldSkipMainScreenSyncMarkers();
|
||||
writeDiffToTerminal(this.terminal, optimized, skipSyncMarkers);
|
||||
writeDiffToTerminal(this.terminal, optimized, this.altScreenActive && !SYNC_OUTPUT_SUPPORTED);
|
||||
const writeMs = performance.now() - tWrite;
|
||||
|
||||
// Update blit safety for the NEXT frame. The frame just rendered
|
||||
|
||||
@@ -1,125 +0,0 @@
|
||||
import { expect, test } from 'bun:test'
|
||||
|
||||
import type { Frame } from './frame.ts'
|
||||
import { LogUpdate } from './log-update.ts'
|
||||
import {
|
||||
CellWidth,
|
||||
CharPool,
|
||||
createScreen,
|
||||
HyperlinkPool,
|
||||
setCellAt,
|
||||
StylePool,
|
||||
} from './screen.ts'
|
||||
|
||||
function collectStdout(diff: ReturnType<LogUpdate['render']>): string {
|
||||
return diff
|
||||
.filter((patch): patch is Extract<(typeof diff)[number], { type: 'stdout' }> => patch.type === 'stdout')
|
||||
.map(patch => patch.content)
|
||||
.join('')
|
||||
}
|
||||
|
||||
function createHarness() {
|
||||
const stylePool = new StylePool()
|
||||
const charPool = new CharPool()
|
||||
const hyperlinkPool = new HyperlinkPool()
|
||||
|
||||
return {
|
||||
stylePool,
|
||||
charPool,
|
||||
hyperlinkPool,
|
||||
log: new LogUpdate({ isTTY: true, stylePool }),
|
||||
}
|
||||
}
|
||||
|
||||
function frameFromLines(
|
||||
stylePool: StylePool,
|
||||
charPool: CharPool,
|
||||
hyperlinkPool: HyperlinkPool,
|
||||
lines: string[],
|
||||
cursor = { x: 0, y: lines.length, visible: true },
|
||||
): Frame {
|
||||
const width = lines.reduce((max, line) => Math.max(max, line.length), 0)
|
||||
const screen = createScreen(width, lines.length, stylePool, charPool, hyperlinkPool)
|
||||
|
||||
for (const [y, line] of lines.entries()) {
|
||||
for (const [x, char] of [...line].entries()) {
|
||||
setCellAt(screen, x, y, {
|
||||
char,
|
||||
styleId: stylePool.none,
|
||||
width: CellWidth.Narrow,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
screen,
|
||||
viewport: {
|
||||
width: Math.max(width, 1),
|
||||
height: 10,
|
||||
},
|
||||
cursor,
|
||||
}
|
||||
}
|
||||
|
||||
test('ghostty main-screen rewrite paints prompt content without full terminal reset when width is stable', () => {
|
||||
const { stylePool, charPool, hyperlinkPool, log } = createHarness()
|
||||
const prev = frameFromLines(stylePool, charPool, hyperlinkPool, [' '])
|
||||
const next = frameFromLines(stylePool, charPool, hyperlinkPool, ['prompt'])
|
||||
|
||||
const diff = log.render(prev, next, false, true, true)
|
||||
const stdout = collectStdout(diff)
|
||||
|
||||
expect(diff.some(patch => patch.type === 'clearTerminal')).toBe(false)
|
||||
expect(diff.some(patch => patch.type === 'clear' && patch.count === 1)).toBe(
|
||||
true,
|
||||
)
|
||||
expect(stdout).toContain('prompt')
|
||||
})
|
||||
|
||||
test('ghostty main-screen rewrite clears only the changed prompt tail before repainting', () => {
|
||||
const { stylePool, charPool, hyperlinkPool, log } = createHarness()
|
||||
const prev = frameFromLines(
|
||||
stylePool,
|
||||
charPool,
|
||||
hyperlinkPool,
|
||||
['status', '> abc'],
|
||||
)
|
||||
const next = frameFromLines(
|
||||
stylePool,
|
||||
charPool,
|
||||
hyperlinkPool,
|
||||
['status', '> abcd'],
|
||||
)
|
||||
|
||||
const diff = log.render(prev, next, false, true, true)
|
||||
const stdout = collectStdout(diff)
|
||||
|
||||
expect(diff.some(patch => patch.type === 'clearTerminal')).toBe(false)
|
||||
expect(diff.some(patch => patch.type === 'clear' && patch.count === 1)).toBe(
|
||||
true,
|
||||
)
|
||||
expect(stdout).toContain('abcd')
|
||||
})
|
||||
|
||||
test('ghostty main-screen rewrite falls back to incremental diff for larger changes', () => {
|
||||
const { stylePool, charPool, hyperlinkPool, log } = createHarness()
|
||||
const prev = frameFromLines(
|
||||
stylePool,
|
||||
charPool,
|
||||
hyperlinkPool,
|
||||
['row 0', 'row 1', 'row 2', 'row 3', 'row 4', '> abc'],
|
||||
)
|
||||
const next = frameFromLines(
|
||||
stylePool,
|
||||
charPool,
|
||||
hyperlinkPool,
|
||||
['row 0 updated', 'row 1', 'row 2', 'row 3', 'row 4', '> abcd'],
|
||||
)
|
||||
|
||||
const diff = log.render(prev, next, false, true, true)
|
||||
const stdout = collectStdout(diff)
|
||||
|
||||
expect(diff.some(patch => patch.type === 'clear')).toBe(false)
|
||||
expect(stdout).toContain('updated')
|
||||
expect(stdout).toContain('abcd')
|
||||
})
|
||||
@@ -125,7 +125,6 @@ export class LogUpdate {
|
||||
next: Frame,
|
||||
altScreen = false,
|
||||
decstbmSafe = true,
|
||||
rewriteMainScreen = false,
|
||||
): Diff {
|
||||
if (!this.options.isTTY) {
|
||||
return this.renderFullFrame(next)
|
||||
@@ -147,13 +146,6 @@ export class LogUpdate {
|
||||
return fullResetSequence_CAUSES_FLICKER(next, 'resize', stylePool)
|
||||
}
|
||||
|
||||
if (!altScreen && rewriteMainScreen) {
|
||||
const rewriteStartY = findMainScreenRewriteStart(prev.screen, next.screen)
|
||||
if (rewriteStartY !== null) {
|
||||
return rewriteMainScreenFrame(prev, next, stylePool, rewriteStartY)
|
||||
}
|
||||
}
|
||||
|
||||
// DECSTBM scroll optimization: when a ScrollBox's scrollTop changed,
|
||||
// shift content with a hardware scroll (CSI top;bot r + CSI n S/T)
|
||||
// instead of rewriting the whole scroll region. The shiftRows on
|
||||
@@ -428,8 +420,34 @@ export class LogUpdate {
|
||||
// Main screen: if cursor needs to be past the last line of content
|
||||
// (typical: cursor.y = screen.height), emit \n to create that line
|
||||
// since cursor movement can't create new lines.
|
||||
if (!altScreen) {
|
||||
restoreMainScreenCursor(screen, next)
|
||||
if (altScreen) {
|
||||
// no-op; next frame's CSI H anchors cursor
|
||||
} else if (next.cursor.y >= next.screen.height) {
|
||||
// Move to column 0 of current line, then emit newlines to reach target row
|
||||
screen.txn(prev => {
|
||||
const rowsToCreate = next.cursor.y - prev.y
|
||||
if (rowsToCreate > 0) {
|
||||
// Use CR to resolve pending wrap (if any) without advancing
|
||||
// to the next line, then LF to create each new row.
|
||||
const patches: Diff = new Array<Diff[number]>(1 + rowsToCreate)
|
||||
patches[0] = CARRIAGE_RETURN
|
||||
for (let i = 0; i < rowsToCreate; i++) {
|
||||
patches[1 + i] = NEWLINE
|
||||
}
|
||||
return [patches, { dx: -prev.x, dy: rowsToCreate }]
|
||||
}
|
||||
// At or past target row - need to move cursor to correct position
|
||||
const dy = next.cursor.y - prev.y
|
||||
if (dy !== 0 || prev.x !== next.cursor.x) {
|
||||
// Use CR to clear pending wrap (if any), then cursor move
|
||||
const patches: Diff = [CARRIAGE_RETURN]
|
||||
patches.push({ type: 'cursorMove', x: next.cursor.x, y: dy })
|
||||
return [patches, { dx: next.cursor.x - prev.x, dy }]
|
||||
}
|
||||
return [[], { dx: 0, dy: 0 }]
|
||||
})
|
||||
} else {
|
||||
moveCursorTo(screen, next.cursor.x, next.cursor.y)
|
||||
}
|
||||
|
||||
const elapsed = performance.now() - startTime
|
||||
@@ -449,77 +467,6 @@ export class LogUpdate {
|
||||
}
|
||||
}
|
||||
|
||||
function rewriteMainScreenFrame(
|
||||
prev: Frame,
|
||||
next: Frame,
|
||||
stylePool: StylePool,
|
||||
startY: number,
|
||||
): Diff {
|
||||
const diff: Diff = []
|
||||
const clearCount = prev.screen.height - startY
|
||||
|
||||
if (clearCount > 0) {
|
||||
const clearStartY = prev.screen.height - 1
|
||||
const clearCursor = new VirtualScreen(prev.cursor, next.viewport.width)
|
||||
moveCursorTo(clearCursor, 0, clearStartY)
|
||||
diff.push(...clearCursor.diff)
|
||||
diff.push({ type: 'clear', count: clearCount })
|
||||
}
|
||||
|
||||
const screen = new VirtualScreen(
|
||||
clearCount > 0 ? { x: 0, y: startY } : prev.cursor,
|
||||
next.viewport.width,
|
||||
)
|
||||
renderFrameSlice(screen, next, startY, next.screen.height, stylePool)
|
||||
restoreMainScreenCursor(screen, next)
|
||||
|
||||
return [...diff, ...screen.diff]
|
||||
}
|
||||
|
||||
const MAX_MAIN_SCREEN_REWRITE_ROWS = 6
|
||||
|
||||
function findMainScreenRewriteStart(prev: Screen, next: Screen): number | null {
|
||||
const commonHeight = Math.min(prev.height, next.height)
|
||||
let firstChangedY = commonHeight
|
||||
|
||||
for (let y = 0; y < commonHeight; y += 1) {
|
||||
if (!rowsEqual(prev, next, y)) {
|
||||
firstChangedY = y
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
const rewriteRows = Math.max(prev.height, next.height) - firstChangedY
|
||||
if (rewriteRows <= 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
return rewriteRows <= MAX_MAIN_SCREEN_REWRITE_ROWS ? firstChangedY : null
|
||||
}
|
||||
|
||||
function rowsEqual(prev: Screen, next: Screen, y: number): boolean {
|
||||
if (prev.width !== next.width) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (prev.softWrap[y] !== next.softWrap[y]) {
|
||||
return false
|
||||
}
|
||||
|
||||
const rowStart = y * prev.width
|
||||
const rowEnd = rowStart + prev.width
|
||||
for (let index = rowStart; index < rowEnd; index += 1) {
|
||||
if (
|
||||
prev.cells64[index] !== next.cells64[index] ||
|
||||
prev.noSelect[index] !== next.noSelect[index]
|
||||
) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
function transitionHyperlink(
|
||||
diff: Diff,
|
||||
current: Hyperlink,
|
||||
@@ -675,37 +622,6 @@ function renderFrameSlice(
|
||||
return screen
|
||||
}
|
||||
|
||||
function restoreMainScreenCursor(screen: VirtualScreen, next: Frame): void {
|
||||
if (next.cursor.y >= next.screen.height) {
|
||||
// Move to column 0 of current line, then emit newlines to reach target row
|
||||
screen.txn(prev => {
|
||||
const rowsToCreate = next.cursor.y - prev.y
|
||||
if (rowsToCreate > 0) {
|
||||
// Use CR to resolve pending wrap (if any) without advancing
|
||||
// to the next line, then LF to create each new row.
|
||||
const patches: Diff = new Array<Diff[number]>(1 + rowsToCreate)
|
||||
patches[0] = CARRIAGE_RETURN
|
||||
for (let i = 0; i < rowsToCreate; i++) {
|
||||
patches[1 + i] = NEWLINE
|
||||
}
|
||||
return [patches, { dx: -prev.x, dy: rowsToCreate }]
|
||||
}
|
||||
// At or past target row - need to move cursor to correct position
|
||||
const dy = next.cursor.y - prev.y
|
||||
if (dy !== 0 || prev.x !== next.cursor.x) {
|
||||
// Use CR to clear pending wrap (if any), then cursor move
|
||||
const patches: Diff = [CARRIAGE_RETURN]
|
||||
patches.push({ type: 'cursorMove', x: next.cursor.x, y: dy })
|
||||
return [patches, { dx: next.cursor.x - prev.x, dy }]
|
||||
}
|
||||
return [[], { dx: 0, dy: 0 }]
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
moveCursorTo(screen, next.cursor.x, next.cursor.y)
|
||||
}
|
||||
|
||||
type Delta = { dx: number; dy: number }
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,369 +0,0 @@
|
||||
import { PassThrough } from 'node:stream'
|
||||
|
||||
import { expect, test } from 'bun:test'
|
||||
import React from 'react'
|
||||
|
||||
import type { DOMElement, ElementNames } from './dom.ts'
|
||||
import instances from './instances.ts'
|
||||
import { LayoutEdge } from './layout/node.ts'
|
||||
import type { ParsedKey } from './parse-keypress.ts'
|
||||
import { createRoot } from './root.ts'
|
||||
|
||||
type TestStdin = PassThrough & {
|
||||
isTTY: boolean
|
||||
setRawMode: (mode: boolean) => void
|
||||
ref: () => void
|
||||
unref: () => void
|
||||
}
|
||||
|
||||
const RAW_TEXT_STYLE = {
|
||||
flexDirection: 'row',
|
||||
flexGrow: 0,
|
||||
flexShrink: 1,
|
||||
textWrap: 'wrap',
|
||||
} as const
|
||||
|
||||
function createTestStreams(): {
|
||||
stdout: PassThrough
|
||||
stdin: TestStdin
|
||||
} {
|
||||
const stdout = new PassThrough()
|
||||
const stdin = new PassThrough() as TestStdin
|
||||
|
||||
stdin.isTTY = true
|
||||
stdin.setRawMode = () => {}
|
||||
stdin.ref = () => {}
|
||||
stdin.unref = () => {}
|
||||
|
||||
;(stdout as unknown as { columns: number }).columns = 120
|
||||
;(stdout as unknown as { rows: number }).rows = 24
|
||||
;(stdout as unknown as { isTTY: boolean }).isTTY = true
|
||||
|
||||
return { stdout, stdin }
|
||||
}
|
||||
|
||||
async function waitForCondition(
|
||||
predicate: () => boolean,
|
||||
errorMessage: string,
|
||||
timeoutMs = 2000,
|
||||
): Promise<void> {
|
||||
const startedAt = Date.now()
|
||||
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
if (predicate()) {
|
||||
return
|
||||
}
|
||||
|
||||
await Bun.sleep(10)
|
||||
}
|
||||
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
function getRootNode(stdout: PassThrough): DOMElement {
|
||||
const instance = getInkInstance(stdout)
|
||||
|
||||
if (!instance.rootNode) {
|
||||
throw new Error('Ink instance root node not found')
|
||||
}
|
||||
|
||||
return instance.rootNode
|
||||
}
|
||||
|
||||
function getInkInstance(stdout: PassThrough): {
|
||||
rootNode?: DOMElement
|
||||
dispatchKeyboardEvent: (parsedKey: ParsedKey) => void
|
||||
} {
|
||||
const instance = instances.get(
|
||||
stdout as unknown as NodeJS.WriteStream,
|
||||
) as
|
||||
| {
|
||||
rootNode?: DOMElement
|
||||
dispatchKeyboardEvent: (parsedKey: ParsedKey) => void
|
||||
}
|
||||
| undefined
|
||||
|
||||
if (!instance) {
|
||||
throw new Error('Ink instance not found')
|
||||
}
|
||||
|
||||
return instance
|
||||
}
|
||||
|
||||
function findElement(
|
||||
node: DOMElement,
|
||||
nodeName: ElementNames,
|
||||
): DOMElement | undefined {
|
||||
if (node.nodeName === nodeName) {
|
||||
return node
|
||||
}
|
||||
|
||||
for (const child of node.childNodes) {
|
||||
if (child.nodeName === '#text') {
|
||||
continue
|
||||
}
|
||||
|
||||
const found = findElement(child, nodeName)
|
||||
if (found) {
|
||||
return found
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
function requireElement(stdout: PassThrough, nodeName: ElementNames): DOMElement {
|
||||
const found = findElement(getRootNode(stdout), nodeName)
|
||||
|
||||
if (!found) {
|
||||
throw new Error(`Expected to find ${nodeName} in Ink root tree`)
|
||||
}
|
||||
|
||||
return found
|
||||
}
|
||||
|
||||
async function createHarness(): Promise<{
|
||||
stdout: PassThrough
|
||||
stdin: TestStdin
|
||||
root: Awaited<ReturnType<typeof createRoot>>
|
||||
dispose: () => Promise<void>
|
||||
}> {
|
||||
const { stdout, stdin } = createTestStreams()
|
||||
const root = await createRoot({
|
||||
stdout: stdout as unknown as NodeJS.WriteStream,
|
||||
stdin: stdin as unknown as NodeJS.ReadStream,
|
||||
patchConsole: false,
|
||||
})
|
||||
|
||||
return {
|
||||
stdout,
|
||||
stdin,
|
||||
root,
|
||||
dispose: async () => {
|
||||
root.unmount()
|
||||
stdin.end()
|
||||
stdout.end()
|
||||
await Bun.sleep(25)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
test('raw ink-box updates keyboard handlers and attributes in place across rerenders', async () => {
|
||||
const calls: string[] = []
|
||||
const firstHandler = () => calls.push('first')
|
||||
const secondHandler = () => calls.push('second')
|
||||
const harness = await createHarness()
|
||||
|
||||
try {
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-box',
|
||||
{
|
||||
autoFocus: true,
|
||||
onKeyDown: firstHandler,
|
||||
tabIndex: 0,
|
||||
},
|
||||
'first render',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const firstBox = requireElement(harness.stdout, 'ink-box')
|
||||
expect(firstBox.attributes.tabIndex).toBe(0)
|
||||
expect(firstBox._eventHandlers?.onKeyDown).toBe(firstHandler)
|
||||
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-box',
|
||||
{
|
||||
autoFocus: true,
|
||||
onKeyDown: secondHandler,
|
||||
tabIndex: 1,
|
||||
},
|
||||
'second render',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const secondBox = requireElement(harness.stdout, 'ink-box')
|
||||
expect(secondBox).toBe(firstBox)
|
||||
expect(secondBox.attributes.tabIndex).toBe(1)
|
||||
expect(secondBox._eventHandlers?.onKeyDown).toBe(secondHandler)
|
||||
|
||||
getInkInstance(harness.stdout).dispatchKeyboardEvent({
|
||||
kind: 'key',
|
||||
name: 'a',
|
||||
fn: false,
|
||||
ctrl: false,
|
||||
meta: false,
|
||||
shift: false,
|
||||
option: false,
|
||||
super: false,
|
||||
sequence: 'a',
|
||||
raw: 'a',
|
||||
isPasted: false,
|
||||
})
|
||||
|
||||
await waitForCondition(
|
||||
() => calls.length === 1,
|
||||
'Timed out waiting for rerendered onKeyDown handler to fire',
|
||||
)
|
||||
|
||||
expect(calls).toEqual(['second'])
|
||||
} finally {
|
||||
await harness.dispose()
|
||||
}
|
||||
})
|
||||
|
||||
test('raw ink-text updates textStyles in place across rerenders', async () => {
|
||||
const harness = await createHarness()
|
||||
|
||||
try {
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-text',
|
||||
{
|
||||
style: RAW_TEXT_STYLE,
|
||||
textStyles: { color: 'ansi:red' },
|
||||
},
|
||||
'host text',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const firstText = requireElement(harness.stdout, 'ink-text')
|
||||
expect(firstText.textStyles).toEqual({ color: 'ansi:red' })
|
||||
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-text',
|
||||
{
|
||||
style: RAW_TEXT_STYLE,
|
||||
textStyles: { color: 'ansi:blue' },
|
||||
},
|
||||
'host text',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const secondText = requireElement(harness.stdout, 'ink-text')
|
||||
expect(secondText).toBe(firstText)
|
||||
expect(secondText.textStyles).toEqual({ color: 'ansi:blue' })
|
||||
} finally {
|
||||
await harness.dispose()
|
||||
}
|
||||
})
|
||||
|
||||
test('raw ink-box removes event handler when set to undefined', async () => {
|
||||
const calls: string[] = []
|
||||
const handler = () => calls.push('fired')
|
||||
const harness = await createHarness()
|
||||
|
||||
try {
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-box',
|
||||
{
|
||||
autoFocus: true,
|
||||
onKeyDown: handler,
|
||||
tabIndex: 0,
|
||||
},
|
||||
'with handler',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const box = requireElement(harness.stdout, 'ink-box')
|
||||
expect(box._eventHandlers?.onKeyDown).toBe(handler)
|
||||
|
||||
// Remove the handler
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-box',
|
||||
{
|
||||
autoFocus: true,
|
||||
tabIndex: 0,
|
||||
},
|
||||
'without handler',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const sameBox = requireElement(harness.stdout, 'ink-box')
|
||||
expect(sameBox).toBe(box)
|
||||
expect(sameBox._eventHandlers?.onKeyDown).toBeUndefined()
|
||||
|
||||
// Dispatch a key event and verify the removed handler is NOT called
|
||||
getInkInstance(harness.stdout).dispatchKeyboardEvent({
|
||||
kind: 'key',
|
||||
name: 'a',
|
||||
fn: false,
|
||||
ctrl: false,
|
||||
meta: false,
|
||||
shift: false,
|
||||
option: false,
|
||||
super: false,
|
||||
sequence: 'a',
|
||||
raw: 'a',
|
||||
isPasted: false,
|
||||
})
|
||||
|
||||
await Bun.sleep(50)
|
||||
expect(calls).toEqual([])
|
||||
} finally {
|
||||
await harness.dispose()
|
||||
}
|
||||
})
|
||||
|
||||
test('raw ink-box updates layout style in place across rerenders', async () => {
|
||||
const harness = await createHarness()
|
||||
|
||||
try {
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-box',
|
||||
{
|
||||
style: { flexDirection: 'row', paddingLeft: 1 },
|
||||
},
|
||||
'styled box',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const box = requireElement(harness.stdout, 'ink-box')
|
||||
expect(box.style.flexDirection).toBe('row')
|
||||
expect(box.style.paddingLeft).toBe(1)
|
||||
|
||||
harness.root.render(
|
||||
React.createElement(
|
||||
'ink-box',
|
||||
{
|
||||
style: { flexDirection: 'column', paddingLeft: 2 },
|
||||
},
|
||||
'styled box',
|
||||
),
|
||||
)
|
||||
|
||||
await Bun.sleep(25)
|
||||
|
||||
const sameBox = requireElement(harness.stdout, 'ink-box')
|
||||
expect(sameBox).toBe(box)
|
||||
expect(sameBox.style.flexDirection).toBe('column')
|
||||
expect(sameBox.style.paddingLeft).toBe(2)
|
||||
|
||||
// Verify the update reached the layout engine, not just the style object
|
||||
const yogaNode = sameBox.yogaNode!
|
||||
expect(yogaNode).toBeDefined()
|
||||
yogaNode.calculateLayout(120)
|
||||
expect(yogaNode.getComputedPadding(LayoutEdge.Left)).toBe(2)
|
||||
} finally {
|
||||
await harness.dispose()
|
||||
}
|
||||
})
|
||||
@@ -449,25 +449,17 @@ const reconciler = createReconciler<
|
||||
},
|
||||
commitUpdate(
|
||||
node: DOMElement,
|
||||
updatePayload: UpdatePayload | null,
|
||||
_type: ElementNames,
|
||||
oldProps: Props,
|
||||
newProps: Props,
|
||||
_oldProps: Props,
|
||||
_newProps: Props,
|
||||
): void {
|
||||
// React 19 mutation mode calls commitUpdate as
|
||||
// (instance, type, oldProps, newProps, fiber) and does not pass the
|
||||
// prepareUpdate() payload here. This renderer used to treat the second
|
||||
// argument as updatePayload, which left mounted ink-* nodes with stale
|
||||
// attributes, event handlers, and textStyles until something forced a
|
||||
// remount. Recompute the prop/style diff here so host nodes update
|
||||
// correctly in place on rerender.
|
||||
const props = diff(oldProps, newProps)
|
||||
const style = diff(oldProps['style'] as Styles, newProps['style'] as Styles)
|
||||
const nextStyle = newProps['style'] as Styles | undefined
|
||||
|
||||
if (!props && !style) {
|
||||
if (!updatePayload) {
|
||||
return
|
||||
}
|
||||
|
||||
const { props, style, nextStyle } = updatePayload
|
||||
|
||||
if (props) {
|
||||
for (const [key, value] of Object.entries(props)) {
|
||||
if (key === 'style') {
|
||||
|
||||
@@ -135,13 +135,6 @@ export function setXtversionName(name: string): void {
|
||||
if (xtversionName === undefined) xtversionName = name
|
||||
}
|
||||
|
||||
export function isGhosttyTerminal(): boolean {
|
||||
if (process.env.NODE_ENV === 'test') return false
|
||||
if (process.env.TERM_PROGRAM === 'ghostty') return true
|
||||
if (process.env.TERM === 'xterm-ghostty') return true
|
||||
return xtversionName?.toLowerCase().startsWith('ghostty') ?? false
|
||||
}
|
||||
|
||||
/** True if running in an xterm.js-based terminal (VS Code, Cursor, Windsurf
|
||||
* integrated terminals). Combines TERM_PROGRAM env check (fast, sync, but
|
||||
* not forwarded over SSH) with the XTVERSION probe result (async, survives
|
||||
@@ -152,20 +145,6 @@ export function isXtermJs(): boolean {
|
||||
return xtversionName?.startsWith('xterm.js') ?? false
|
||||
}
|
||||
|
||||
/** Ghostty currently repaints main-screen prompt updates more reliably
|
||||
* without DEC 2026 synchronized output. Prefer explicit terminal identity
|
||||
* (TERM_PROGRAM/TERM or XTVERSION) in real sessions, but keep tests
|
||||
* deterministic by disabling the env-based detection under NODE_ENV=test. */
|
||||
export function shouldSkipMainScreenSyncMarkers(): boolean {
|
||||
return isGhosttyTerminal()
|
||||
}
|
||||
|
||||
/** Ghostty's main-screen prompt updates are currently more reliable when we
|
||||
* bypass the incremental diff path and rewrite the visible prompt block. */
|
||||
export function shouldUseMainScreenRewrite(): boolean {
|
||||
return isGhosttyTerminal()
|
||||
}
|
||||
|
||||
// Terminals known to correctly implement the Kitty keyboard protocol
|
||||
// (CSI >1u) and/or xterm modifyOtherKeys (CSI >4;2m) for ctrl+shift+<letter>
|
||||
// disambiguation. We previously enabled unconditionally (#23350), assuming
|
||||
|
||||
@@ -13,7 +13,6 @@ const execFileNoThrowMock = mock(
|
||||
|
||||
mock.module('../../utils/execFileNoThrow.js', () => ({
|
||||
execFileNoThrow: execFileNoThrowMock,
|
||||
execFileNoThrowWithCwd: execFileNoThrowMock,
|
||||
}))
|
||||
|
||||
mock.module('../../utils/tempfile.js', () => ({
|
||||
|
||||
@@ -617,6 +617,7 @@ export function REPL({
|
||||
const toolPermissionContext = useAppState(s => s.toolPermissionContext);
|
||||
const verbose = useAppState(s => s.verbose);
|
||||
const mcp = useAppState(s => s.mcp);
|
||||
const plugins = useAppState(s => s.plugins);
|
||||
const agentDefinitions = useAppState(s => s.agentDefinitions);
|
||||
const fileHistory = useAppState(s => s.fileHistory);
|
||||
const initialMessage = useAppState(s => s.initialMessage);
|
||||
@@ -779,7 +780,7 @@ export function REPL({
|
||||
}, [localTools, initialTools]);
|
||||
|
||||
// Initialize plugin management
|
||||
const pluginCommands = useManagePlugins({
|
||||
useManagePlugins({
|
||||
enabled: !isRemoteSession
|
||||
});
|
||||
const tasksV2 = useTasksV2WithCollapseEffect();
|
||||
@@ -825,16 +826,10 @@ export function REPL({
|
||||
}, [mainThreadAgentDefinition, mergedTools]);
|
||||
|
||||
// Merge commands from local state, plugins, and MCP
|
||||
const commandsWithPlugins = useMergedCommands(localCommands, pluginCommands as Command[]);
|
||||
const commandsWithPlugins = useMergedCommands(localCommands, plugins.commands as Command[]);
|
||||
const mergedCommands = useMergedCommands(commandsWithPlugins, mcp.commands as Command[]);
|
||||
// Keep plugin commands out of render-time command props. Feeding the full
|
||||
// execution set into PromptInput/Messages reintroduced the startup repaint
|
||||
// freeze, while transcript rendering still round-trips plugin skills via the
|
||||
// SkillTool's `skill` payload without needing plugin command objects here.
|
||||
const renderMergedCommands = useMergedCommands(localCommands, mcp.commands as Command[]);
|
||||
// Filter out all commands if disableSlashCommands is true
|
||||
const commands = useMemo(() => disableSlashCommands ? [] : mergedCommands, [disableSlashCommands, mergedCommands]);
|
||||
const renderCommands = useMemo(() => disableSlashCommands ? [] : renderMergedCommands, [disableSlashCommands, renderMergedCommands]);
|
||||
useIdeLogging(isRemoteSession ? EMPTY_MCP_CLIENTS : mcp.clients);
|
||||
useIdeSelection(isRemoteSession ? EMPTY_MCP_CLIENTS : mcp.clients, setIDESelection);
|
||||
const [streamMode, setStreamMode] = useState<SpinnerMode>('responding');
|
||||
@@ -4432,7 +4427,7 @@ export function REPL({
|
||||
// and transcript-mode are mutually exclusive (this early return), so
|
||||
// only one ScrollBox is ever mounted at a time.
|
||||
const transcriptScrollRef = isFullscreenEnvEnabled() && !disableVirtualScroll && !dumpMode ? scrollRef : undefined;
|
||||
const transcriptMessagesElement = <Messages messages={transcriptMessages} tools={tools} commands={renderCommands} verbose={true} toolJSX={null} toolUseConfirmQueue={[]} inProgressToolUseIDs={inProgressToolUseIDs} isMessageSelectorVisible={false} conversationId={conversationId} screen={screen} agentDefinitions={agentDefinitions} streamingToolUses={transcriptStreamingToolUses} showAllInTranscript={showAllInTranscript} onOpenRateLimitOptions={handleOpenRateLimitOptions} isLoading={isLoading} hidePastThinking={true} streamingThinking={streamingThinking} scrollRef={transcriptScrollRef} jumpRef={jumpRef} onSearchMatchesChange={onSearchMatchesChange} scanElement={scanElement} setPositions={setPositions} disableRenderCap={dumpMode} />;
|
||||
const transcriptMessagesElement = <Messages messages={transcriptMessages} tools={tools} commands={commands} verbose={true} toolJSX={null} toolUseConfirmQueue={[]} inProgressToolUseIDs={inProgressToolUseIDs} isMessageSelectorVisible={false} conversationId={conversationId} screen={screen} agentDefinitions={agentDefinitions} streamingToolUses={transcriptStreamingToolUses} showAllInTranscript={showAllInTranscript} onOpenRateLimitOptions={handleOpenRateLimitOptions} isLoading={isLoading} hidePastThinking={true} streamingThinking={streamingThinking} scrollRef={transcriptScrollRef} jumpRef={jumpRef} onSearchMatchesChange={onSearchMatchesChange} scanElement={scanElement} setPositions={setPositions} disableRenderCap={dumpMode} />;
|
||||
const transcriptToolJSX = toolJSX && <Box flexDirection="column" width="100%">
|
||||
{toolJSX.jsx}
|
||||
</Box>;
|
||||
@@ -4600,7 +4595,7 @@ export function REPL({
|
||||
jumpToNew(scrollRef.current);
|
||||
}} scrollable={<>
|
||||
<TeammateViewHeader />
|
||||
<Messages messages={displayedMessages} tools={tools} commands={renderCommands} verbose={verbose} toolJSX={toolJSX} toolUseConfirmQueue={toolUseConfirmQueue} inProgressToolUseIDs={viewedTeammateTask ? viewedTeammateTask.inProgressToolUseIDs ?? new Set() : inProgressToolUseIDs} isMessageSelectorVisible={isMessageSelectorVisible} conversationId={conversationId} screen={screen} streamingToolUses={streamingToolUses} showAllInTranscript={showAllInTranscript} agentDefinitions={agentDefinitions} onOpenRateLimitOptions={handleOpenRateLimitOptions} isLoading={isLoading} streamingText={isLoading && !viewedAgentTask ? visibleStreamingText : null} isBriefOnly={viewedAgentTask ? false : isBriefOnly} unseenDivider={viewedAgentTask ? undefined : unseenDivider} scrollRef={isFullscreenEnvEnabled() ? scrollRef : undefined} trackStickyPrompt={isFullscreenEnvEnabled() ? true : undefined} cursor={cursor} setCursor={setCursor} cursorNavRef={cursorNavRef} />
|
||||
<Messages messages={displayedMessages} tools={tools} commands={commands} verbose={verbose} toolJSX={toolJSX} toolUseConfirmQueue={toolUseConfirmQueue} inProgressToolUseIDs={viewedTeammateTask ? viewedTeammateTask.inProgressToolUseIDs ?? new Set() : inProgressToolUseIDs} isMessageSelectorVisible={isMessageSelectorVisible} conversationId={conversationId} screen={screen} streamingToolUses={streamingToolUses} showAllInTranscript={showAllInTranscript} agentDefinitions={agentDefinitions} onOpenRateLimitOptions={handleOpenRateLimitOptions} isLoading={isLoading} streamingText={isLoading && !viewedAgentTask ? visibleStreamingText : null} isBriefOnly={viewedAgentTask ? false : isBriefOnly} unseenDivider={viewedAgentTask ? undefined : unseenDivider} scrollRef={isFullscreenEnvEnabled() ? scrollRef : undefined} trackStickyPrompt={isFullscreenEnvEnabled() ? true : undefined} cursor={cursor} setCursor={setCursor} cursorNavRef={cursorNavRef} />
|
||||
<AwsAuthStatusBox />
|
||||
{/* Hide the processing placeholder while a modal is showing —
|
||||
it would sit at the last visible transcript row right above
|
||||
@@ -4933,7 +4928,7 @@ export function REPL({
|
||||
{"external" === 'ant' && skillImprovementSurvey.suggestion && <SkillImprovementSurvey isOpen={skillImprovementSurvey.isOpen} skillName={skillImprovementSurvey.suggestion.skillName} updates={skillImprovementSurvey.suggestion.updates} handleSelect={skillImprovementSurvey.handleSelect} inputValue={inputValue} setInputValue={setInputValue} />}
|
||||
{showIssueFlagBanner && <IssueFlagBanner />}
|
||||
{ }
|
||||
<PromptInput debug={debug} ideSelection={ideSelection} hasSuppressedDialogs={!!hasSuppressedDialogs} isLocalJSXCommandActive={isShowingLocalJSXCommand} getToolUseContext={getToolUseContext} toolPermissionContext={toolPermissionContext} setToolPermissionContext={setToolPermissionContext} apiKeyStatus={apiKeyStatus} commands={renderCommands} agents={agentDefinitions.activeAgents} isLoading={isLoading} onExit={handleExit} verbose={verbose} messages={messages} onAutoUpdaterResult={setAutoUpdaterResult} autoUpdaterResult={autoUpdaterResult} input={inputValue} onInputChange={setInputValue} mode={inputMode} onModeChange={setInputMode} stashedPrompt={stashedPrompt} setStashedPrompt={setStashedPrompt} submitCount={submitCount} onShowMessageSelector={handleShowMessageSelector} onMessageActionsEnter={
|
||||
<PromptInput debug={debug} ideSelection={ideSelection} hasSuppressedDialogs={!!hasSuppressedDialogs} isLocalJSXCommandActive={isShowingLocalJSXCommand} getToolUseContext={getToolUseContext} toolPermissionContext={toolPermissionContext} setToolPermissionContext={setToolPermissionContext} apiKeyStatus={apiKeyStatus} commands={commands} agents={agentDefinitions.activeAgents} isLoading={isLoading} onExit={handleExit} verbose={verbose} messages={messages} onAutoUpdaterResult={setAutoUpdaterResult} autoUpdaterResult={autoUpdaterResult} input={inputValue} onInputChange={setInputValue} mode={inputMode} onModeChange={setInputMode} stashedPrompt={stashedPrompt} setStashedPrompt={setStashedPrompt} submitCount={submitCount} onShowMessageSelector={handleShowMessageSelector} onMessageActionsEnter={
|
||||
// Works during isLoading — edit cancels first; uuid selection survives appends.
|
||||
feature('MESSAGE_ACTIONS') && isFullscreenEnvEnabled() && !disableMessageActions ? enterMessageActions : undefined} mcpClients={mcpClients} pastedContents={pastedContents} setPastedContents={setPastedContents} vimMode={vimMode} setVimMode={setVimMode} showBashesDialog={showBashesDialog} setShowBashesDialog={setShowBashesDialog} onSubmit={onSubmit} onAgentSubmit={onAgentSubmit} isSearchingHistory={isSearchingHistory} setIsSearchingHistory={setIsSearchingHistory} helpOpen={isHelpOpen} setHelpOpen={setIsHelpOpen} insertTextRef={feature('VOICE_MODE') ? insertTextRef : undefined} voiceInterimRange={voice.interimRange} />
|
||||
<SessionBackgroundHint onBackgroundSession={handleBackgroundSession} isLoading={isLoading} />
|
||||
|
||||
@@ -14,7 +14,6 @@ type ShimClient = {
|
||||
const originalFetch = globalThis.fetch
|
||||
const originalMacro = (globalThis as Record<string, unknown>).MACRO
|
||||
const originalEnv = {
|
||||
CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
|
||||
CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GEMINI_MODEL: process.env.GEMINI_MODEL,
|
||||
@@ -26,15 +25,6 @@ const originalEnv = {
|
||||
OPENAI_MODEL: process.env.OPENAI_MODEL,
|
||||
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
|
||||
ANTHROPIC_AUTH_TOKEN: process.env.ANTHROPIC_AUTH_TOKEN,
|
||||
ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS,
|
||||
}
|
||||
|
||||
function restoreEnv(key: string, value: string | undefined): void {
|
||||
if (value === undefined) {
|
||||
delete process.env[key]
|
||||
} else {
|
||||
process.env[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -45,31 +35,27 @@ beforeEach(() => {
|
||||
process.env.GEMINI_BASE_URL = 'https://gemini.example/v1beta/openai'
|
||||
process.env.GEMINI_AUTH_MODE = 'api-key'
|
||||
|
||||
delete process.env.CLAUDE_CODE_USE_OPENAI
|
||||
delete process.env.GOOGLE_API_KEY
|
||||
delete process.env.OPENAI_API_KEY
|
||||
delete process.env.OPENAI_BASE_URL
|
||||
delete process.env.OPENAI_MODEL
|
||||
delete process.env.ANTHROPIC_API_KEY
|
||||
delete process.env.ANTHROPIC_AUTH_TOKEN
|
||||
delete process.env.ANTHROPIC_CUSTOM_HEADERS
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
;(globalThis as Record<string, unknown>).MACRO = originalMacro
|
||||
restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI)
|
||||
restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI)
|
||||
restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY)
|
||||
restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL)
|
||||
restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL)
|
||||
restoreEnv('GEMINI_AUTH_MODE', originalEnv.GEMINI_AUTH_MODE)
|
||||
restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY)
|
||||
restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY)
|
||||
restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL)
|
||||
restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL)
|
||||
restoreEnv('ANTHROPIC_API_KEY', originalEnv.ANTHROPIC_API_KEY)
|
||||
restoreEnv('ANTHROPIC_AUTH_TOKEN', originalEnv.ANTHROPIC_AUTH_TOKEN)
|
||||
restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS)
|
||||
process.env.CLAUDE_CODE_USE_GEMINI = originalEnv.CLAUDE_CODE_USE_GEMINI
|
||||
process.env.GEMINI_API_KEY = originalEnv.GEMINI_API_KEY
|
||||
process.env.GEMINI_MODEL = originalEnv.GEMINI_MODEL
|
||||
process.env.GEMINI_BASE_URL = originalEnv.GEMINI_BASE_URL
|
||||
process.env.GEMINI_AUTH_MODE = originalEnv.GEMINI_AUTH_MODE
|
||||
process.env.GOOGLE_API_KEY = originalEnv.GOOGLE_API_KEY
|
||||
process.env.OPENAI_API_KEY = originalEnv.OPENAI_API_KEY
|
||||
process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL
|
||||
process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL
|
||||
process.env.ANTHROPIC_API_KEY = originalEnv.ANTHROPIC_API_KEY
|
||||
process.env.ANTHROPIC_AUTH_TOKEN = originalEnv.ANTHROPIC_AUTH_TOKEN
|
||||
globalThis.fetch = originalFetch
|
||||
})
|
||||
|
||||
@@ -136,135 +122,3 @@ test('routes Gemini provider requests through the OpenAI-compatible shim', async
|
||||
model: 'gemini-2.0-flash',
|
||||
})
|
||||
})
|
||||
|
||||
test('strips Anthropic-specific custom headers before sending OpenAI-compatible shim requests', async () => {
|
||||
let capturedHeaders: Headers | undefined
|
||||
|
||||
process.env.CLAUDE_CODE_USE_OPENAI = '1'
|
||||
process.env.OPENAI_API_KEY = 'openai-test-key'
|
||||
process.env.OPENAI_BASE_URL = 'http://example.test/v1'
|
||||
process.env.OPENAI_MODEL = 'gpt-4o'
|
||||
process.env.ANTHROPIC_CUSTOM_HEADERS = [
|
||||
'anthropic-version: 2023-06-01',
|
||||
'anthropic-beta: prompt-caching-2024-07-31',
|
||||
'x-anthropic-additional-protection: true',
|
||||
'x-claude-remote-session-id: remote-123',
|
||||
'x-app: cli',
|
||||
'x-safe-header: keep-me',
|
||||
].join('\n')
|
||||
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
capturedHeaders = new Headers(init?.headers)
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
id: 'chatcmpl-openai',
|
||||
model: 'gpt-4o',
|
||||
choices: [
|
||||
{
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: 'ok',
|
||||
},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
usage: {
|
||||
prompt_tokens: 8,
|
||||
completion_tokens: 3,
|
||||
total_tokens: 11,
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
)
|
||||
}) as FetchType
|
||||
|
||||
const client = (await getAnthropicClient({
|
||||
maxRetries: 0,
|
||||
model: 'gpt-4o',
|
||||
})) as unknown as ShimClient
|
||||
|
||||
await client.beta.messages.create({
|
||||
model: 'gpt-4o',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
max_tokens: 64,
|
||||
stream: false,
|
||||
})
|
||||
|
||||
expect(capturedHeaders?.get('anthropic-version')).toBeNull()
|
||||
expect(capturedHeaders?.get('anthropic-beta')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-app')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me')
|
||||
expect(capturedHeaders?.get('authorization')).toBe('Bearer openai-test-key')
|
||||
})
|
||||
|
||||
test('strips Anthropic-specific custom headers on providerOverride shim requests too', async () => {
|
||||
let capturedHeaders: Headers | undefined
|
||||
|
||||
process.env.ANTHROPIC_CUSTOM_HEADERS = [
|
||||
'anthropic-version: 2023-06-01',
|
||||
'anthropic-beta: prompt-caching-2024-07-31',
|
||||
'x-claude-remote-session-id: remote-123',
|
||||
'x-safe-header: keep-me',
|
||||
].join('\n')
|
||||
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
capturedHeaders = new Headers(init?.headers)
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
id: 'chatcmpl-provider-override',
|
||||
model: 'gpt-4o',
|
||||
choices: [
|
||||
{
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: 'ok',
|
||||
},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
usage: {
|
||||
prompt_tokens: 8,
|
||||
completion_tokens: 3,
|
||||
total_tokens: 11,
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
)
|
||||
}) as FetchType
|
||||
|
||||
const client = (await getAnthropicClient({
|
||||
maxRetries: 0,
|
||||
providerOverride: {
|
||||
model: 'gpt-4o',
|
||||
baseURL: 'http://example.test/v1',
|
||||
apiKey: 'provider-test-key',
|
||||
},
|
||||
})) as unknown as ShimClient
|
||||
|
||||
await client.beta.messages.create({
|
||||
model: 'unused',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
max_tokens: 64,
|
||||
stream: false,
|
||||
})
|
||||
|
||||
expect(capturedHeaders?.get('anthropic-version')).toBeNull()
|
||||
expect(capturedHeaders?.get('anthropic-beta')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me')
|
||||
expect(capturedHeaders?.get('authorization')).toBe('Bearer provider-test-key')
|
||||
})
|
||||
|
||||
@@ -177,8 +177,7 @@ export async function getAnthropicClient({
|
||||
if (
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
|
||||
) {
|
||||
const { createOpenAIShimClient } = await import('./openaiShim.js')
|
||||
return createOpenAIShimClient({
|
||||
|
||||
@@ -465,37 +465,6 @@ describe('Codex request translation', () => {
|
||||
])
|
||||
})
|
||||
|
||||
test('strips leaked reasoning preamble from completed Codex text responses', () => {
|
||||
const message = convertCodexResponseToAnthropicMessage(
|
||||
{
|
||||
id: 'resp_1',
|
||||
model: 'gpt-5.4',
|
||||
output: [
|
||||
{
|
||||
type: 'message',
|
||||
role: 'assistant',
|
||||
content: [
|
||||
{
|
||||
type: 'output_text',
|
||||
text:
|
||||
'The user just said "hey" - a simple greeting. I should respond briefly and friendly.\n\nHey! How can I help you today?',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
usage: { input_tokens: 12, output_tokens: 4 },
|
||||
},
|
||||
'gpt-5.4',
|
||||
)
|
||||
|
||||
expect(message.content).toEqual([
|
||||
{
|
||||
type: 'text',
|
||||
text: 'Hey! How can I help you today?',
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
test('translates Codex SSE text stream into Anthropic events', async () => {
|
||||
const responseText = [
|
||||
'event: response.output_item.added',
|
||||
@@ -526,44 +495,4 @@ describe('Codex request translation', () => {
|
||||
'message_stop',
|
||||
])
|
||||
})
|
||||
|
||||
test('strips leaked reasoning preamble from Codex SSE text stream', async () => {
|
||||
const responseText = [
|
||||
'event: response.output_item.added',
|
||||
'data: {"type":"response.output_item.added","item":{"id":"msg_1","type":"message","status":"in_progress","content":[],"role":"assistant"},"output_index":0,"sequence_number":0}',
|
||||
'',
|
||||
'event: response.content_part.added',
|
||||
'data: {"type":"response.content_part.added","content_index":0,"item_id":"msg_1","output_index":0,"part":{"type":"output_text","text":""},"sequence_number":1}',
|
||||
'',
|
||||
'event: response.output_text.delta',
|
||||
'data: {"type":"response.output_text.delta","content_index":0,"delta":"The user just said \\"hey\\" - a simple greeting. I should respond briefly and friendly.\\n\\nHey! How can I help you today?","item_id":"msg_1","output_index":0,"sequence_number":2}',
|
||||
'',
|
||||
'event: response.output_item.done',
|
||||
'data: {"type":"response.output_item.done","item":{"id":"msg_1","type":"message","status":"completed","content":[{"type":"output_text","text":"The user just said \\"hey\\" - a simple greeting. I should respond briefly and friendly.\\n\\nHey! How can I help you today?"}],"role":"assistant"},"output_index":0,"sequence_number":3}',
|
||||
'',
|
||||
'event: response.completed',
|
||||
'data: {"type":"response.completed","response":{"id":"resp_1","status":"completed","model":"gpt-5.4","output":[{"type":"message","role":"assistant","content":[{"type":"output_text","text":"The user just said \\"hey\\" - a simple greeting. I should respond briefly and friendly.\\n\\nHey! How can I help you today?"}]}],"usage":{"input_tokens":2,"output_tokens":1}},"sequence_number":4}',
|
||||
'',
|
||||
].join('\n')
|
||||
|
||||
const stream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode(responseText))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const textDeltas: string[] = []
|
||||
for await (const event of codexStreamToAnthropic(
|
||||
new Response(stream),
|
||||
'gpt-5.4',
|
||||
)) {
|
||||
const delta = (event as { delta?: { type?: string; text?: string } }).delta
|
||||
if (delta?.type === 'text_delta' && typeof delta.text === 'string') {
|
||||
textDeltas.push(delta.text)
|
||||
}
|
||||
}
|
||||
|
||||
expect(textDeltas).toEqual(['Hey! How can I help you today?'])
|
||||
})
|
||||
})
|
||||
|
||||
@@ -4,11 +4,6 @@ import type {
|
||||
ResolvedProviderRequest,
|
||||
} from './providerConfig.js'
|
||||
import { sanitizeSchemaForOpenAICompat } from './openaiSchemaSanitizer.js'
|
||||
import {
|
||||
looksLikeLeakedReasoningPrefix,
|
||||
shouldBufferPotentialReasoningPrefix,
|
||||
stripLeakedReasoningPreamble,
|
||||
} from './reasoningLeakSanitizer.js'
|
||||
|
||||
export interface AnthropicUsage {
|
||||
input_tokens: number
|
||||
@@ -80,17 +75,12 @@ type CodexSseEvent = {
|
||||
function makeUsage(usage?: {
|
||||
input_tokens?: number
|
||||
output_tokens?: number
|
||||
input_tokens_details?: { cached_tokens?: number }
|
||||
prompt_tokens_details?: { cached_tokens?: number }
|
||||
}): AnthropicUsage {
|
||||
return {
|
||||
input_tokens: usage?.input_tokens ?? 0,
|
||||
output_tokens: usage?.output_tokens ?? 0,
|
||||
cache_creation_input_tokens: 0,
|
||||
cache_read_input_tokens:
|
||||
usage?.input_tokens_details?.cached_tokens ??
|
||||
usage?.prompt_tokens_details?.cached_tokens ??
|
||||
0,
|
||||
cache_read_input_tokens: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -688,34 +678,17 @@ export async function* codexStreamToAnthropic(
|
||||
{ index: number; toolUseId: string }
|
||||
>()
|
||||
let activeTextBlockIndex: number | null = null
|
||||
let activeTextBuffer = ''
|
||||
let textBufferMode: 'none' | 'pending' | 'strip' = 'none'
|
||||
let nextContentBlockIndex = 0
|
||||
let sawToolUse = false
|
||||
let finalResponse: Record<string, any> | undefined
|
||||
|
||||
const closeActiveTextBlock = async function* () {
|
||||
if (activeTextBlockIndex === null) return
|
||||
if (textBufferMode !== 'none') {
|
||||
const sanitized = stripLeakedReasoningPreamble(activeTextBuffer)
|
||||
if (sanitized) {
|
||||
yield {
|
||||
type: 'content_block_delta',
|
||||
index: activeTextBlockIndex,
|
||||
delta: {
|
||||
type: 'text_delta',
|
||||
text: sanitized,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
yield {
|
||||
type: 'content_block_stop',
|
||||
index: activeTextBlockIndex,
|
||||
}
|
||||
activeTextBlockIndex = null
|
||||
activeTextBuffer = ''
|
||||
textBufferMode = 'none'
|
||||
}
|
||||
|
||||
const startTextBlockIfNeeded = async function* () {
|
||||
@@ -791,36 +764,7 @@ export async function* codexStreamToAnthropic(
|
||||
|
||||
if (event.event === 'response.output_text.delta') {
|
||||
yield* startTextBlockIfNeeded()
|
||||
activeTextBuffer += payload.delta ?? ''
|
||||
if (activeTextBlockIndex !== null) {
|
||||
if (
|
||||
textBufferMode === 'strip' ||
|
||||
looksLikeLeakedReasoningPrefix(activeTextBuffer)
|
||||
) {
|
||||
textBufferMode = 'strip'
|
||||
continue
|
||||
}
|
||||
|
||||
if (textBufferMode === 'pending') {
|
||||
if (shouldBufferPotentialReasoningPrefix(activeTextBuffer)) {
|
||||
continue
|
||||
}
|
||||
yield {
|
||||
type: 'content_block_delta',
|
||||
index: activeTextBlockIndex,
|
||||
delta: {
|
||||
type: 'text_delta',
|
||||
text: activeTextBuffer,
|
||||
},
|
||||
}
|
||||
textBufferMode = 'none'
|
||||
continue
|
||||
}
|
||||
|
||||
if (shouldBufferPotentialReasoningPrefix(activeTextBuffer)) {
|
||||
textBufferMode = 'pending'
|
||||
continue
|
||||
}
|
||||
yield {
|
||||
type: 'content_block_delta',
|
||||
index: activeTextBlockIndex,
|
||||
@@ -895,16 +839,8 @@ export async function* codexStreamToAnthropic(
|
||||
stop_sequence: null,
|
||||
},
|
||||
usage: {
|
||||
// Subtract cached tokens: OpenAI includes them in input_tokens,
|
||||
// but Anthropic convention treats input_tokens as non-cached only.
|
||||
input_tokens: (finalResponse?.usage?.input_tokens ?? 0) -
|
||||
(finalResponse?.usage?.input_tokens_details?.cached_tokens ??
|
||||
finalResponse?.usage?.prompt_tokens_details?.cached_tokens ?? 0),
|
||||
input_tokens: finalResponse?.usage?.input_tokens ?? 0,
|
||||
output_tokens: finalResponse?.usage?.output_tokens ?? 0,
|
||||
cache_read_input_tokens:
|
||||
finalResponse?.usage?.input_tokens_details?.cached_tokens ??
|
||||
finalResponse?.usage?.prompt_tokens_details?.cached_tokens ??
|
||||
0,
|
||||
},
|
||||
}
|
||||
yield { type: 'message_stop' }
|
||||
@@ -923,7 +859,7 @@ export function convertCodexResponseToAnthropicMessage(
|
||||
if (part?.type === 'output_text') {
|
||||
content.push({
|
||||
type: 'text',
|
||||
text: stripLeakedReasoningPreamble(part.text ?? ''),
|
||||
text: part.text ?? '',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,10 +7,6 @@ const originalEnv = {
|
||||
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
|
||||
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
|
||||
OPENAI_MODEL: process.env.OPENAI_MODEL,
|
||||
CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
|
||||
GITHUB_TOKEN: process.env.GITHUB_TOKEN,
|
||||
GH_TOKEN: process.env.GH_TOKEN,
|
||||
CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
|
||||
CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_API_KEY: process.env.GOOGLE_API_KEY,
|
||||
@@ -19,7 +15,6 @@ const originalEnv = {
|
||||
GEMINI_BASE_URL: process.env.GEMINI_BASE_URL,
|
||||
GEMINI_MODEL: process.env.GEMINI_MODEL,
|
||||
GOOGLE_CLOUD_PROJECT: process.env.GOOGLE_CLOUD_PROJECT,
|
||||
ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS,
|
||||
}
|
||||
|
||||
const originalFetch = globalThis.fetch
|
||||
@@ -75,10 +70,6 @@ beforeEach(() => {
|
||||
process.env.OPENAI_BASE_URL = 'http://example.test/v1'
|
||||
process.env.OPENAI_API_KEY = 'test-key'
|
||||
delete process.env.OPENAI_MODEL
|
||||
delete process.env.CLAUDE_CODE_USE_GITHUB
|
||||
delete process.env.GITHUB_TOKEN
|
||||
delete process.env.GH_TOKEN
|
||||
delete process.env.CLAUDE_CODE_USE_OPENAI
|
||||
delete process.env.CLAUDE_CODE_USE_GEMINI
|
||||
delete process.env.GEMINI_API_KEY
|
||||
delete process.env.GOOGLE_API_KEY
|
||||
@@ -87,17 +78,12 @@ beforeEach(() => {
|
||||
delete process.env.GEMINI_BASE_URL
|
||||
delete process.env.GEMINI_MODEL
|
||||
delete process.env.GOOGLE_CLOUD_PROJECT
|
||||
delete process.env.ANTHROPIC_CUSTOM_HEADERS
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL)
|
||||
restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY)
|
||||
restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL)
|
||||
restoreEnv('CLAUDE_CODE_USE_GITHUB', originalEnv.CLAUDE_CODE_USE_GITHUB)
|
||||
restoreEnv('GITHUB_TOKEN', originalEnv.GITHUB_TOKEN)
|
||||
restoreEnv('GH_TOKEN', originalEnv.GH_TOKEN)
|
||||
restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI)
|
||||
restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI)
|
||||
restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY)
|
||||
restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY)
|
||||
@@ -106,227 +92,9 @@ afterEach(() => {
|
||||
restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL)
|
||||
restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL)
|
||||
restoreEnv('GOOGLE_CLOUD_PROJECT', originalEnv.GOOGLE_CLOUD_PROJECT)
|
||||
restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS)
|
||||
globalThis.fetch = originalFetch
|
||||
})
|
||||
|
||||
test('strips canonical Anthropic headers from direct shim defaultHeaders', async () => {
|
||||
let capturedHeaders: Headers | undefined
|
||||
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
capturedHeaders = new Headers(init?.headers)
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
id: 'chatcmpl-1',
|
||||
model: 'gpt-4o',
|
||||
choices: [
|
||||
{
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: 'ok',
|
||||
},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
usage: {
|
||||
prompt_tokens: 8,
|
||||
completion_tokens: 3,
|
||||
total_tokens: 11,
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
)
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({
|
||||
defaultHeaders: {
|
||||
'anthropic-version': '2023-06-01',
|
||||
'anthropic-beta': 'prompt-caching-2024-07-31',
|
||||
'x-anthropic-additional-protection': 'true',
|
||||
'x-claude-remote-session-id': 'remote-123',
|
||||
'x-app': 'cli',
|
||||
'x-client-app': 'sdk',
|
||||
'x-safe-header': 'keep-me',
|
||||
},
|
||||
}) as OpenAIShimClient
|
||||
|
||||
await client.beta.messages.create({
|
||||
model: 'gpt-4o',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
max_tokens: 64,
|
||||
stream: false,
|
||||
})
|
||||
|
||||
expect(capturedHeaders?.get('anthropic-version')).toBeNull()
|
||||
expect(capturedHeaders?.get('anthropic-beta')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-app')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-client-app')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me')
|
||||
})
|
||||
|
||||
test('strips canonical Anthropic headers from per-request shim headers too', async () => {
|
||||
let capturedHeaders: Headers | undefined
|
||||
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
capturedHeaders = new Headers(init?.headers)
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
id: 'chatcmpl-1',
|
||||
model: 'gpt-4o',
|
||||
choices: [
|
||||
{
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: 'ok',
|
||||
},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
usage: {
|
||||
prompt_tokens: 8,
|
||||
completion_tokens: 3,
|
||||
total_tokens: 11,
|
||||
},
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
)
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
||||
|
||||
await client.beta.messages.create(
|
||||
{
|
||||
model: 'gpt-4o',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
max_tokens: 64,
|
||||
stream: false,
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'anthropic-version': '2023-06-01',
|
||||
'anthropic-beta': 'prompt-caching-2024-07-31',
|
||||
'x-safe-header': 'keep-me',
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
expect(capturedHeaders?.get('anthropic-version')).toBeNull()
|
||||
expect(capturedHeaders?.get('anthropic-beta')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me')
|
||||
})
|
||||
|
||||
test('strips Anthropic-specific headers on GitHub Codex transport requests', async () => {
|
||||
let capturedHeaders: Headers | undefined
|
||||
|
||||
process.env.CLAUDE_CODE_USE_GITHUB = '1'
|
||||
process.env.OPENAI_API_KEY = 'github-test-key'
|
||||
delete process.env.OPENAI_BASE_URL
|
||||
delete process.env.OPENAI_MODEL
|
||||
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
capturedHeaders = new Headers(init?.headers)
|
||||
|
||||
return new Response('', {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
},
|
||||
})
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
||||
|
||||
await client.beta.messages.create(
|
||||
{
|
||||
model: 'github:gpt-5-codex',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
max_tokens: 64,
|
||||
stream: true,
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'anthropic-version': '2023-06-01',
|
||||
'anthropic-beta': 'prompt-caching-2024-07-31',
|
||||
'x-anthropic-additional-protection': 'true',
|
||||
'x-safe-header': 'keep-me',
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
expect(capturedHeaders?.get('anthropic-version')).toBeNull()
|
||||
expect(capturedHeaders?.get('anthropic-beta')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me')
|
||||
expect(capturedHeaders?.get('authorization')).toBe('Bearer github-test-key')
|
||||
expect(capturedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7')
|
||||
})
|
||||
|
||||
test('strips Anthropic-specific headers on GitHub Codex transport with providerOverride API key', async () => {
|
||||
let capturedHeaders: Headers | undefined
|
||||
|
||||
process.env.CLAUDE_CODE_USE_GITHUB = '1'
|
||||
process.env.OPENAI_API_KEY = 'env-should-not-win'
|
||||
delete process.env.OPENAI_BASE_URL
|
||||
delete process.env.OPENAI_MODEL
|
||||
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
capturedHeaders = new Headers(init?.headers)
|
||||
|
||||
return new Response('', {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
},
|
||||
})
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({
|
||||
providerOverride: {
|
||||
model: 'github:gpt-5-codex',
|
||||
baseURL: 'https://api.githubcopilot.com',
|
||||
apiKey: 'provider-override-key',
|
||||
},
|
||||
}) as OpenAIShimClient
|
||||
|
||||
await client.beta.messages.create(
|
||||
{
|
||||
model: 'ignored',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hello' }],
|
||||
max_tokens: 64,
|
||||
stream: true,
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'anthropic-version': '2023-06-01',
|
||||
'x-claude-remote-session-id': 'remote-123',
|
||||
'x-safe-header': 'keep-me',
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
expect(capturedHeaders?.get('anthropic-version')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull()
|
||||
expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me')
|
||||
expect(capturedHeaders?.get('authorization')).toBe('Bearer provider-override-key')
|
||||
expect(capturedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7')
|
||||
})
|
||||
|
||||
test('preserves usage from final OpenAI stream chunk with empty choices', async () => {
|
||||
globalThis.fetch = (async (_input, init) => {
|
||||
const url = typeof _input === 'string' ? _input : _input.url
|
||||
@@ -2178,7 +1946,7 @@ test('coalesces consecutive assistant messages preserving tool_calls (issue #202
|
||||
expect(assistantMsgs?.[0]?.tool_calls?.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test('non-streaming: reasoning_content emitted as thinking block only when content is null', async () => {
|
||||
test('non-streaming: reasoning_content emitted as thinking block, used as text when content is null', async () => {
|
||||
globalThis.fetch = (async (_input, _init) => {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
@@ -2220,6 +1988,7 @@ test('non-streaming: reasoning_content emitted as thinking block only when conte
|
||||
|
||||
expect(result.content).toEqual([
|
||||
{ type: 'thinking', thinking: 'Let me think about this step by step.' },
|
||||
{ type: 'text', text: 'Let me think about this step by step.' },
|
||||
])
|
||||
})
|
||||
|
||||
@@ -2265,6 +2034,7 @@ test('non-streaming: empty string content does not fall through to reasoning_con
|
||||
|
||||
expect(result.content).toEqual([
|
||||
{ type: 'thinking', thinking: 'Chain of thought here.' },
|
||||
{ type: 'text', text: 'Chain of thought here.' },
|
||||
])
|
||||
})
|
||||
|
||||
@@ -2314,46 +2084,6 @@ test('non-streaming: real content takes precedence over reasoning_content', asyn
|
||||
])
|
||||
})
|
||||
|
||||
test('non-streaming: strips leaked reasoning preamble from assistant content', async () => {
|
||||
globalThis.fetch = (async () => {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
id: 'chatcmpl-1',
|
||||
model: 'gpt-5-mini',
|
||||
choices: [
|
||||
{
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content:
|
||||
'The user just said "hey" - a simple greeting. I should respond briefly and friendly.\n\nHey! How can I help you today?',
|
||||
},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
usage: {
|
||||
prompt_tokens: 10,
|
||||
completion_tokens: 20,
|
||||
total_tokens: 30,
|
||||
},
|
||||
}),
|
||||
{ headers: { 'Content-Type': 'application/json' } },
|
||||
)
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
||||
const result = (await client.beta.messages.create({
|
||||
model: 'gpt-5-mini',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hey' }],
|
||||
max_tokens: 64,
|
||||
stream: false,
|
||||
})) as { content: Array<Record<string, unknown>> }
|
||||
|
||||
expect(result.content).toEqual([
|
||||
{ type: 'text', text: 'Hey! How can I help you today?' },
|
||||
])
|
||||
})
|
||||
|
||||
test('streaming: thinking block closed before tool call', async () => {
|
||||
globalThis.fetch = (async (_input, _init) => {
|
||||
const chunks = makeStreamChunks([
|
||||
@@ -2445,134 +2175,3 @@ test('streaming: thinking block closed before tool call', async () => {
|
||||
}
|
||||
expect(thinkingStart?.content_block?.type).toBe('thinking')
|
||||
})
|
||||
|
||||
test('streaming: strips leaked reasoning preamble from assistant content deltas', async () => {
|
||||
globalThis.fetch = (async () => {
|
||||
const chunks = makeStreamChunks([
|
||||
{
|
||||
id: 'chatcmpl-1',
|
||||
object: 'chat.completion.chunk',
|
||||
model: 'gpt-5-mini',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
role: 'assistant',
|
||||
content:
|
||||
'The user just said "hey" - a simple greeting. I should respond briefly and friendly.\n\nHey! How can I help you today?',
|
||||
},
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'chatcmpl-1',
|
||||
object: 'chat.completion.chunk',
|
||||
model: 'gpt-5-mini',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
|
||||
return makeSseResponse(chunks)
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
||||
const result = await client.beta.messages
|
||||
.create({
|
||||
model: 'gpt-5-mini',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hey' }],
|
||||
max_tokens: 64,
|
||||
stream: true,
|
||||
})
|
||||
.withResponse()
|
||||
|
||||
const textDeltas: string[] = []
|
||||
for await (const event of result.data) {
|
||||
const delta = (event as { delta?: { type?: string; text?: string } }).delta
|
||||
if (delta?.type === 'text_delta' && typeof delta.text === 'string') {
|
||||
textDeltas.push(delta.text)
|
||||
}
|
||||
}
|
||||
|
||||
expect(textDeltas).toEqual(['Hey! How can I help you today?'])
|
||||
})
|
||||
|
||||
test('streaming: strips leaked reasoning preamble when split across multiple content chunks', async () => {
|
||||
globalThis.fetch = (async () => {
|
||||
const chunks = makeStreamChunks([
|
||||
{
|
||||
id: 'chatcmpl-1',
|
||||
object: 'chat.completion.chunk',
|
||||
model: 'gpt-5-mini',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
role: 'assistant',
|
||||
content: 'The user said "hey" - this is a simple greeting. ',
|
||||
},
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'chatcmpl-1',
|
||||
object: 'chat.completion.chunk',
|
||||
model: 'gpt-5-mini',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
content:
|
||||
'I should respond in a friendly, concise way.\n\nHey! How can I help you today?',
|
||||
},
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'chatcmpl-1',
|
||||
object: 'chat.completion.chunk',
|
||||
model: 'gpt-5-mini',
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {},
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
|
||||
return makeSseResponse(chunks)
|
||||
}) as FetchType
|
||||
|
||||
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
||||
|
||||
const result = await client.beta.messages
|
||||
.create({
|
||||
model: 'gpt-5-mini',
|
||||
system: 'test system',
|
||||
messages: [{ role: 'user', content: 'hey' }],
|
||||
max_tokens: 64,
|
||||
stream: true,
|
||||
})
|
||||
.withResponse()
|
||||
|
||||
const textDeltas: string[] = []
|
||||
for await (const event of result.data) {
|
||||
const delta = (event as { delta?: { type?: string; text?: string } }).delta
|
||||
if (delta?.type === 'text_delta' && typeof delta.text === 'string') {
|
||||
textDeltas.push(delta.text)
|
||||
}
|
||||
}
|
||||
|
||||
expect(textDeltas).toEqual(['Hey! How can I help you today?'])
|
||||
})
|
||||
|
||||
@@ -26,11 +26,6 @@ import { isEnvTruthy } from '../../utils/envUtils.js'
|
||||
import { resolveGeminiCredential } from '../../utils/geminiAuth.js'
|
||||
import { hydrateGeminiAccessTokenFromSecureStorage } from '../../utils/geminiCredentials.js'
|
||||
import { hydrateGithubModelsTokenFromSecureStorage } from '../../utils/githubModelsCredentials.js'
|
||||
import {
|
||||
looksLikeLeakedReasoningPrefix,
|
||||
shouldBufferPotentialReasoningPrefix,
|
||||
stripLeakedReasoningPreamble,
|
||||
} from './reasoningLeakSanitizer.js'
|
||||
import {
|
||||
codexStreamToAnthropic,
|
||||
collectCodexCompletedResponse,
|
||||
@@ -61,7 +56,6 @@ type SecretValueSource = Partial<{
|
||||
GEMINI_API_KEY: string
|
||||
GOOGLE_API_KEY: string
|
||||
GEMINI_ACCESS_TOKEN: string
|
||||
MISTRAL_API_KEY: string
|
||||
}>
|
||||
|
||||
const GITHUB_COPILOT_BASE = 'https://api.githubcopilot.com'
|
||||
@@ -81,36 +75,6 @@ function isGithubModelsMode(): boolean {
|
||||
return isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
}
|
||||
|
||||
function isMistralMode(): boolean {
|
||||
return isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
}
|
||||
|
||||
function filterAnthropicHeaders(
|
||||
headers: Record<string, string> | undefined,
|
||||
): Record<string, string> {
|
||||
if (!headers) return {}
|
||||
|
||||
const filtered: Record<string, string> = {}
|
||||
for (const [key, value] of Object.entries(headers)) {
|
||||
const lower = key.toLowerCase()
|
||||
if (
|
||||
lower.startsWith('x-anthropic') ||
|
||||
lower.startsWith('anthropic-') ||
|
||||
lower.startsWith('x-claude') ||
|
||||
lower === 'x-app' ||
|
||||
lower === 'x-client-app' ||
|
||||
lower === 'authorization' ||
|
||||
lower === 'x-api-key' ||
|
||||
lower === 'api-key'
|
||||
) {
|
||||
continue
|
||||
}
|
||||
filtered[key] = value
|
||||
}
|
||||
|
||||
return filtered
|
||||
}
|
||||
|
||||
function hasGeminiApiHost(baseUrl: string | undefined): boolean {
|
||||
if (!baseUrl) return false
|
||||
|
||||
@@ -569,14 +533,11 @@ function convertChunkUsage(
|
||||
): Partial<AnthropicUsage> | undefined {
|
||||
if (!usage) return undefined
|
||||
|
||||
const cached = usage.prompt_tokens_details?.cached_tokens ?? 0
|
||||
return {
|
||||
// Subtract cached tokens: OpenAI includes them in prompt_tokens,
|
||||
// but Anthropic convention treats input_tokens as non-cached only.
|
||||
input_tokens: (usage.prompt_tokens ?? 0) - cached,
|
||||
input_tokens: usage.prompt_tokens ?? 0,
|
||||
output_tokens: usage.completion_tokens ?? 0,
|
||||
cache_creation_input_tokens: 0,
|
||||
cache_read_input_tokens: cached,
|
||||
cache_read_input_tokens: usage.prompt_tokens_details?.cached_tokens ?? 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -627,8 +588,6 @@ async function* openaiStreamToAnthropic(
|
||||
let hasEmittedContentStart = false
|
||||
let hasEmittedThinkingStart = false
|
||||
let hasClosedThinking = false
|
||||
let activeTextBuffer = ''
|
||||
let textBufferMode: 'none' | 'pending' | 'strip' = 'none'
|
||||
let lastStopReason: 'tool_use' | 'max_tokens' | 'end_turn' | null = null
|
||||
let hasEmittedFinalUsage = false
|
||||
let hasProcessedFinishReason = false
|
||||
@@ -659,30 +618,6 @@ async function* openaiStreamToAnthropic(
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
|
||||
const closeActiveContentBlock = async function* () {
|
||||
if (!hasEmittedContentStart) return
|
||||
|
||||
if (textBufferMode !== 'none') {
|
||||
const sanitized = stripLeakedReasoningPreamble(activeTextBuffer)
|
||||
if (sanitized) {
|
||||
yield {
|
||||
type: 'content_block_delta',
|
||||
index: contentBlockIndex,
|
||||
delta: { type: 'text_delta', text: sanitized },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
yield {
|
||||
type: 'content_block_stop',
|
||||
index: contentBlockIndex,
|
||||
}
|
||||
contentBlockIndex++
|
||||
hasEmittedContentStart = false
|
||||
activeTextBuffer = ''
|
||||
textBufferMode = 'none'
|
||||
}
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
@@ -737,7 +672,6 @@ async function* openaiStreamToAnthropic(
|
||||
contentBlockIndex++
|
||||
hasClosedThinking = true
|
||||
}
|
||||
activeTextBuffer += delta.content
|
||||
if (!hasEmittedContentStart) {
|
||||
yield {
|
||||
type: 'content_block_start',
|
||||
@@ -746,35 +680,6 @@ async function* openaiStreamToAnthropic(
|
||||
}
|
||||
hasEmittedContentStart = true
|
||||
}
|
||||
|
||||
if (
|
||||
textBufferMode === 'strip' ||
|
||||
looksLikeLeakedReasoningPrefix(activeTextBuffer)
|
||||
) {
|
||||
textBufferMode = 'strip'
|
||||
continue
|
||||
}
|
||||
|
||||
if (textBufferMode === 'pending') {
|
||||
if (shouldBufferPotentialReasoningPrefix(activeTextBuffer)) {
|
||||
continue
|
||||
}
|
||||
yield {
|
||||
type: 'content_block_delta',
|
||||
index: contentBlockIndex,
|
||||
delta: {
|
||||
type: 'text_delta',
|
||||
text: activeTextBuffer,
|
||||
},
|
||||
}
|
||||
textBufferMode = 'none'
|
||||
continue
|
||||
}
|
||||
|
||||
if (shouldBufferPotentialReasoningPrefix(activeTextBuffer)) {
|
||||
textBufferMode = 'pending'
|
||||
continue
|
||||
}
|
||||
yield {
|
||||
type: 'content_block_delta',
|
||||
index: contentBlockIndex,
|
||||
@@ -793,7 +698,12 @@ async function* openaiStreamToAnthropic(
|
||||
hasClosedThinking = true
|
||||
}
|
||||
if (hasEmittedContentStart) {
|
||||
yield* closeActiveContentBlock()
|
||||
yield {
|
||||
type: 'content_block_stop',
|
||||
index: contentBlockIndex,
|
||||
}
|
||||
contentBlockIndex++
|
||||
hasEmittedContentStart = false
|
||||
}
|
||||
|
||||
const toolBlockIndex = contentBlockIndex
|
||||
@@ -876,7 +786,10 @@ async function* openaiStreamToAnthropic(
|
||||
}
|
||||
// Close any open content blocks
|
||||
if (hasEmittedContentStart) {
|
||||
yield* closeActiveContentBlock()
|
||||
yield {
|
||||
type: 'content_block_stop',
|
||||
index: contentBlockIndex,
|
||||
}
|
||||
}
|
||||
// Close active tool calls
|
||||
for (const [, tc] of activeToolCalls) {
|
||||
@@ -1023,7 +936,7 @@ class OpenAIShimMessages {
|
||||
private providerOverride?: { model: string; baseURL: string; apiKey: string }
|
||||
|
||||
constructor(defaultHeaders: Record<string, string>, reasoningEffort?: 'low' | 'medium' | 'high' | 'xhigh', providerOverride?: { model: string; baseURL: string; apiKey: string }) {
|
||||
this.defaultHeaders = filterAnthropicHeaders(defaultHeaders)
|
||||
this.defaultHeaders = defaultHeaders
|
||||
this.reasoningEffort = reasoningEffort
|
||||
this.providerOverride = providerOverride
|
||||
}
|
||||
@@ -1133,7 +1046,7 @@ class OpenAIShimMessages {
|
||||
params,
|
||||
defaultHeaders: {
|
||||
...this.defaultHeaders,
|
||||
...filterAnthropicHeaders(options?.headers),
|
||||
...(options?.headers ?? {}),
|
||||
...COPILOT_HEADERS,
|
||||
},
|
||||
signal: options?.signal,
|
||||
@@ -1165,7 +1078,7 @@ class OpenAIShimMessages {
|
||||
params,
|
||||
defaultHeaders: {
|
||||
...this.defaultHeaders,
|
||||
...filterAnthropicHeaders(options?.headers),
|
||||
...(options?.headers ?? {}),
|
||||
},
|
||||
signal: options?.signal,
|
||||
})
|
||||
@@ -1192,7 +1105,6 @@ class OpenAIShimMessages {
|
||||
model: request.resolvedModel,
|
||||
messages: openaiMessages,
|
||||
stream: params.stream ?? false,
|
||||
store: false,
|
||||
}
|
||||
// Convert max_tokens to max_completion_tokens for OpenAI API compatibility.
|
||||
// Azure OpenAI requires max_completion_tokens and does not accept max_tokens.
|
||||
@@ -1215,22 +1127,15 @@ class OpenAIShimMessages {
|
||||
}
|
||||
|
||||
const isGithub = isGithubModelsMode()
|
||||
const isMistral = isMistralMode()
|
||||
|
||||
const githubEndpointType = getGithubEndpointType(request.baseUrl)
|
||||
const isGithubCopilot = isGithub && githubEndpointType === 'copilot'
|
||||
const isGithubModels = isGithub && (githubEndpointType === 'models' || githubEndpointType === 'custom')
|
||||
|
||||
if ((isGithub || isMistral) && body.max_completion_tokens !== undefined) {
|
||||
if (isGithub && body.max_completion_tokens !== undefined) {
|
||||
body.max_tokens = body.max_completion_tokens
|
||||
delete body.max_completion_tokens
|
||||
}
|
||||
|
||||
// mistral also doesn't recognize body.store
|
||||
if (isMistral) {
|
||||
delete body.store
|
||||
}
|
||||
|
||||
if (params.temperature !== undefined) body.temperature = params.temperature
|
||||
if (params.top_p !== undefined) body.top_p = params.top_p
|
||||
|
||||
@@ -1265,11 +1170,12 @@ class OpenAIShimMessages {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
...this.defaultHeaders,
|
||||
...filterAnthropicHeaders(options?.headers),
|
||||
...(options?.headers ?? {}),
|
||||
}
|
||||
|
||||
const isGemini = isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
|
||||
const apiKey = this.providerOverride?.apiKey ?? process.env.OPENAI_API_KEY ?? ''
|
||||
const isGemini = isGeminiMode()
|
||||
const apiKey =
|
||||
this.providerOverride?.apiKey ?? process.env.OPENAI_API_KEY ?? ''
|
||||
// Detect Azure endpoints by hostname (not raw URL) to prevent bypass via
|
||||
// path segments like https://evil.com/cognitiveservices.azure.com/
|
||||
let isAzure = false
|
||||
@@ -1373,7 +1279,6 @@ class OpenAIShimMessages {
|
||||
}>,
|
||||
),
|
||||
stream: params.stream ?? false,
|
||||
store: false,
|
||||
}
|
||||
|
||||
if (!Array.isArray(responsesBody.input) || responsesBody.input.length === 0) {
|
||||
@@ -1478,9 +1383,9 @@ class OpenAIShimMessages {
|
||||
const choice = data.choices?.[0]
|
||||
const content: Array<Record<string, unknown>> = []
|
||||
|
||||
// Some reasoning models (e.g. GLM-5) put their chain-of-thought in
|
||||
// reasoning_content while content stays null. Preserve it as a thinking
|
||||
// block, but do not surface it as visible assistant text.
|
||||
// Some reasoning models (e.g. GLM-5) put their reply in reasoning_content
|
||||
// while content stays null — emit reasoning as a thinking block, then
|
||||
// fall back to it for visible text if content is empty.
|
||||
const reasoningText = choice?.message?.reasoning_content
|
||||
if (typeof reasoningText === 'string' && reasoningText) {
|
||||
content.push({ type: 'thinking', thinking: reasoningText })
|
||||
@@ -1488,12 +1393,9 @@ class OpenAIShimMessages {
|
||||
const rawContent =
|
||||
choice?.message?.content !== '' && choice?.message?.content != null
|
||||
? choice?.message?.content
|
||||
: null
|
||||
: choice?.message?.reasoning_content
|
||||
if (typeof rawContent === 'string' && rawContent) {
|
||||
content.push({
|
||||
type: 'text',
|
||||
text: stripLeakedReasoningPreamble(rawContent),
|
||||
})
|
||||
content.push({ type: 'text', text: rawContent })
|
||||
} else if (Array.isArray(rawContent) && rawContent.length > 0) {
|
||||
const parts: string[] = []
|
||||
for (const part of rawContent) {
|
||||
@@ -1508,10 +1410,7 @@ class OpenAIShimMessages {
|
||||
}
|
||||
const joined = parts.join('\n')
|
||||
if (joined) {
|
||||
content.push({
|
||||
type: 'text',
|
||||
text: stripLeakedReasoningPreamble(joined),
|
||||
})
|
||||
content.push({ type: 'text', text: joined })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1601,13 +1500,6 @@ export function createOpenAIShimClient(options: {
|
||||
if (process.env.GEMINI_MODEL && !process.env.OPENAI_MODEL) {
|
||||
process.env.OPENAI_MODEL = process.env.GEMINI_MODEL
|
||||
}
|
||||
} else if (isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)) {
|
||||
process.env.OPENAI_BASE_URL =
|
||||
process.env.MISTRAL_BASE_URL ?? 'https://api.mistral.ai/v1'
|
||||
process.env.OPENAI_API_KEY = process.env.MISTRAL_API_KEY
|
||||
if (process.env.MISTRAL_MODEL) {
|
||||
process.env.OPENAI_MODEL = process.env.MISTRAL_MODEL
|
||||
}
|
||||
} else if (isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)) {
|
||||
process.env.OPENAI_BASE_URL ??= GITHUB_COPILOT_BASE
|
||||
process.env.OPENAI_API_KEY ??=
|
||||
|
||||
@@ -7,7 +7,6 @@ import { isEnvTruthy } from '../../utils/envUtils.js'
|
||||
|
||||
export const DEFAULT_OPENAI_BASE_URL = 'https://api.openai.com/v1'
|
||||
export const DEFAULT_CODEX_BASE_URL = 'https://chatgpt.com/backend-api/codex'
|
||||
export const DEFAULT_MISTRAL_BASE_URL = 'https://api.mistral.ai/v1'
|
||||
/** Default GitHub Copilot API model when user selects copilot / github:copilot */
|
||||
export const DEFAULT_GITHUB_MODELS_API_MODEL = 'gpt-4o'
|
||||
|
||||
@@ -358,20 +357,15 @@ export function resolveProviderRequest(options?: {
|
||||
reasoningEffortOverride?: ReasoningEffort
|
||||
}): ResolvedProviderRequest {
|
||||
const isGithubMode = isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
const isMistralMode = isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
const requestedModel =
|
||||
options?.model?.trim() ||
|
||||
(isMistralMode
|
||||
? process.env.MISTRAL_MODEL?.trim()
|
||||
: process.env.OPENAI_MODEL?.trim()) ||
|
||||
process.env.OPENAI_MODEL?.trim() ||
|
||||
options?.fallbackModel?.trim() ||
|
||||
(isGithubMode ? 'github:copilot' : 'gpt-4o')
|
||||
const descriptor = parseModelDescriptor(requestedModel)
|
||||
const rawBaseUrl =
|
||||
asEnvUrl(options?.baseUrl) ??
|
||||
asEnvUrl(
|
||||
isMistralMode ? (process.env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL) : process.env.OPENAI_BASE_URL,
|
||||
) ??
|
||||
asEnvUrl(process.env.OPENAI_BASE_URL) ??
|
||||
asEnvUrl(process.env.OPENAI_API_BASE)
|
||||
|
||||
const githubEndpointType = isGithubMode
|
||||
@@ -424,7 +418,6 @@ export function resolveProviderRequest(options?: {
|
||||
export function getAdditionalModelOptionsCacheScope(): string | null {
|
||||
if (!isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
|
||||
if (!isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) &&
|
||||
!isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) &&
|
||||
!isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) &&
|
||||
!isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK) &&
|
||||
!isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX) &&
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
import { describe, expect, test } from 'bun:test'
|
||||
|
||||
import {
|
||||
looksLikeLeakedReasoningPrefix,
|
||||
shouldBufferPotentialReasoningPrefix,
|
||||
stripLeakedReasoningPreamble,
|
||||
} from './reasoningLeakSanitizer.ts'
|
||||
|
||||
describe('reasoning leak sanitizer', () => {
|
||||
test('strips explicit internal reasoning preambles', () => {
|
||||
const text =
|
||||
'The user just said "hey" - a simple greeting. I should respond briefly and friendly.\n\nHey! How can I help you today?'
|
||||
|
||||
expect(looksLikeLeakedReasoningPrefix(text)).toBe(true)
|
||||
expect(stripLeakedReasoningPreamble(text)).toBe(
|
||||
'Hey! How can I help you today?',
|
||||
)
|
||||
})
|
||||
|
||||
test('does not strip normal user-facing advice that mentions "the user should"', () => {
|
||||
const text =
|
||||
'The user should reset their password immediately.\n\nHere are the steps...'
|
||||
|
||||
expect(looksLikeLeakedReasoningPrefix(text)).toBe(false)
|
||||
expect(shouldBufferPotentialReasoningPrefix(text)).toBe(false)
|
||||
expect(stripLeakedReasoningPreamble(text)).toBe(text)
|
||||
})
|
||||
|
||||
test('does not strip legitimate first-person advice about responding to an incident', () => {
|
||||
const text =
|
||||
'I need to respond to this security incident immediately. The system is compromised.\n\nHere are the remediation steps...'
|
||||
|
||||
expect(looksLikeLeakedReasoningPrefix(text)).toBe(false)
|
||||
expect(shouldBufferPotentialReasoningPrefix(text)).toBe(false)
|
||||
expect(stripLeakedReasoningPreamble(text)).toBe(text)
|
||||
})
|
||||
|
||||
test('does not strip legitimate first-person advice about answering a support ticket', () => {
|
||||
const text =
|
||||
'I need to answer the support ticket before end of day. The customer is waiting.\n\nHere is the response I drafted...'
|
||||
|
||||
expect(looksLikeLeakedReasoningPrefix(text)).toBe(false)
|
||||
expect(shouldBufferPotentialReasoningPrefix(text)).toBe(false)
|
||||
expect(stripLeakedReasoningPreamble(text)).toBe(text)
|
||||
})
|
||||
})
|
||||
@@ -1,54 +0,0 @@
|
||||
const EXPLICIT_REASONING_START_RE =
|
||||
/^\s*(i should\b|i need to\b|let me think\b|the task\b|the request\b)/i
|
||||
|
||||
const EXPLICIT_REASONING_META_RE =
|
||||
/\b(user|request|question|prompt|message|task|greeting|small talk|briefly|friendly|concise)\b/i
|
||||
|
||||
const USER_META_START_RE =
|
||||
/^\s*the user\s+(just\s+)?(said|asked|is asking|wants|wanted|mentioned|seems|appears)\b/i
|
||||
|
||||
const USER_REASONING_RE =
|
||||
/^\s*the user\s+(just\s+)?(said|asked|is asking|wants|wanted|mentioned|seems|appears)\b[\s\S]*\b(i should|i need to|let me think|respond|reply|answer|greeting|small talk|briefly|friendly|concise)\b/i
|
||||
|
||||
export function shouldBufferPotentialReasoningPrefix(text: string): boolean {
|
||||
const normalized = text.trim()
|
||||
if (!normalized) return false
|
||||
|
||||
if (looksLikeLeakedReasoningPrefix(normalized)) {
|
||||
return true
|
||||
}
|
||||
|
||||
const hasParagraphBoundary = /\n\s*\n/.test(normalized)
|
||||
if (hasParagraphBoundary) {
|
||||
return false
|
||||
}
|
||||
|
||||
return (
|
||||
EXPLICIT_REASONING_START_RE.test(normalized) ||
|
||||
USER_META_START_RE.test(normalized)
|
||||
)
|
||||
}
|
||||
|
||||
export function looksLikeLeakedReasoningPrefix(text: string): boolean {
|
||||
const normalized = text.trim()
|
||||
if (!normalized) return false
|
||||
return (
|
||||
(EXPLICIT_REASONING_START_RE.test(normalized) &&
|
||||
EXPLICIT_REASONING_META_RE.test(normalized)) ||
|
||||
USER_REASONING_RE.test(normalized)
|
||||
)
|
||||
}
|
||||
|
||||
export function stripLeakedReasoningPreamble(text: string): string {
|
||||
const normalized = text.replace(/\r\n/g, '\n')
|
||||
const parts = normalized.split(/\n\s*\n/)
|
||||
if (parts.length < 2) return text
|
||||
|
||||
const first = parts[0]?.trim() ?? ''
|
||||
if (!looksLikeLeakedReasoningPrefix(first)) {
|
||||
return text
|
||||
}
|
||||
|
||||
const remainder = parts.slice(1).join('\n\n').trim()
|
||||
return remainder || text
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
import { readdir, readFile, writeFile } from 'fs/promises'
|
||||
import { basename, relative } from 'path'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
|
||||
async function listMarkdownFiles(dir: string): Promise<string[]> {
|
||||
const entries = await readdir(dir, { withFileTypes: true })
|
||||
const files: string[] = []
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = `${dir}/${entry.name}`
|
||||
if (entry.isDirectory()) {
|
||||
files.push(...(await listMarkdownFiles(fullPath)))
|
||||
} else if (entry.isFile() && entry.name.endsWith('.md')) {
|
||||
files.push(fullPath)
|
||||
}
|
||||
}
|
||||
|
||||
return files.sort()
|
||||
}
|
||||
|
||||
async function getPageTitle(path: string): Promise<string> {
|
||||
const content = await readFile(path, 'utf8')
|
||||
const titleLine = content
|
||||
.split('\n')
|
||||
.map(line => line.trim())
|
||||
.find(line => line.startsWith('# '))
|
||||
|
||||
return titleLine ? titleLine.replace(/^#\s+/, '') : basename(path, '.md')
|
||||
}
|
||||
|
||||
export async function rebuildWikiIndex(cwd: string): Promise<void> {
|
||||
const paths = getWikiPaths(cwd)
|
||||
const pageFiles = await listMarkdownFiles(paths.pagesDir)
|
||||
const sourceFiles = await listMarkdownFiles(paths.sourcesDir)
|
||||
|
||||
const pageLinks = await Promise.all(
|
||||
pageFiles.map(async file => {
|
||||
const rel = relative(paths.root, file)
|
||||
const title = await getPageTitle(file)
|
||||
return `- [${title}](./${rel.replace(/\\/g, '/')})`
|
||||
}),
|
||||
)
|
||||
|
||||
const sourceLinks = sourceFiles.map(file => {
|
||||
const rel = relative(paths.root, file).replace(/\\/g, '/')
|
||||
const title = basename(file, '.md')
|
||||
return `- [${title}](./${rel})`
|
||||
})
|
||||
|
||||
const content = `# ${basename(cwd)} Wiki
|
||||
|
||||
This wiki is maintained by OpenClaude as a durable project knowledge layer.
|
||||
|
||||
## Core Pages
|
||||
|
||||
${pageLinks.length > 0 ? pageLinks.join('\n') : '- No pages yet'}
|
||||
|
||||
## Sources
|
||||
|
||||
${sourceLinks.length > 0 ? sourceLinks.join('\n') : '- No sources yet'}
|
||||
|
||||
## Recent Updates
|
||||
|
||||
- See [log.md](./log.md)
|
||||
`
|
||||
|
||||
await writeFile(paths.indexFile, content, 'utf8')
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
import { afterEach, expect, test } from 'bun:test'
|
||||
import { mkdtemp, readFile, rm, writeFile } from 'fs/promises'
|
||||
import { tmpdir } from 'os'
|
||||
import { join } from 'path'
|
||||
import { ingestLocalWikiSource } from './ingest.js'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
|
||||
const tempDirs: string[] = []
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(
|
||||
tempDirs.splice(0).map(dir => rm(dir, { recursive: true, force: true })),
|
||||
)
|
||||
})
|
||||
|
||||
async function makeProjectDir(): Promise<string> {
|
||||
const dir = await mkdtemp(join(tmpdir(), 'openclaude-wiki-ingest-'))
|
||||
tempDirs.push(dir)
|
||||
return dir
|
||||
}
|
||||
|
||||
test('ingestLocalWikiSource creates a source note and updates log/index', async () => {
|
||||
const cwd = await makeProjectDir()
|
||||
const sourcePath = join(cwd, 'notes.md')
|
||||
await writeFile(
|
||||
sourcePath,
|
||||
'# Design Notes\n\nThis subsystem coordinates provider routing and session state.\nIt should be documented for future contributors.\n',
|
||||
'utf8',
|
||||
)
|
||||
|
||||
const result = await ingestLocalWikiSource(cwd, 'notes.md')
|
||||
const paths = getWikiPaths(cwd)
|
||||
|
||||
expect(result.sourceFile).toBe('notes.md')
|
||||
expect(result.title).toBe('Design Notes')
|
||||
expect(result.sourceNote.startsWith('.openclaude/wiki/sources/')).toBe(true)
|
||||
|
||||
const sourceNote = await readFile(join(cwd, result.sourceNote), 'utf8')
|
||||
expect(sourceNote).toContain('# Design Notes')
|
||||
expect(sourceNote).toContain('Path: `notes.md`')
|
||||
|
||||
const log = await readFile(paths.logFile, 'utf8')
|
||||
expect(log).toContain('Ingested `notes.md`')
|
||||
|
||||
const index = await readFile(paths.indexFile, 'utf8')
|
||||
expect(index).toContain('./sources/')
|
||||
expect(index).toContain(result.sourceNote.replace('.openclaude/wiki/', './'))
|
||||
})
|
||||
@@ -1,93 +0,0 @@
|
||||
import { appendFile, readFile, stat, writeFile } from 'fs/promises'
|
||||
import { basename, extname, isAbsolute, relative, resolve } from 'path'
|
||||
import { initializeWiki } from './init.js'
|
||||
import { rebuildWikiIndex } from './indexBuilder.js'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
import type { WikiIngestResult } from './types.js'
|
||||
import {
|
||||
extractTitleFromText,
|
||||
sanitizeWikiSlug,
|
||||
summarizeText,
|
||||
} from './utils.js'
|
||||
|
||||
function buildSourceNote(params: {
|
||||
title: string
|
||||
sourcePath: string
|
||||
ingestedAt: string
|
||||
summary: string
|
||||
excerpt: string
|
||||
}): string {
|
||||
const { title, sourcePath, ingestedAt, summary, excerpt } = params
|
||||
|
||||
return `# ${title}
|
||||
|
||||
## Source
|
||||
|
||||
- Path: \`${sourcePath}\`
|
||||
- Ingested at: ${ingestedAt}
|
||||
|
||||
## Summary
|
||||
|
||||
${summary}
|
||||
|
||||
## Excerpt
|
||||
|
||||
\`\`\`
|
||||
${excerpt}
|
||||
\`\`\`
|
||||
|
||||
## Linked Pages
|
||||
|
||||
- [Architecture](../pages/architecture.md)
|
||||
`
|
||||
}
|
||||
|
||||
function buildLogEntry(sourcePath: string, title: string, ingestedAt: string): string {
|
||||
return `- ${ingestedAt}: Ingested \`${sourcePath}\` into source note "${title}"`
|
||||
}
|
||||
|
||||
export async function ingestLocalWikiSource(
|
||||
cwd: string,
|
||||
rawPath: string,
|
||||
): Promise<WikiIngestResult> {
|
||||
await initializeWiki(cwd)
|
||||
|
||||
const resolvedPath = isAbsolute(rawPath) ? rawPath : resolve(cwd, rawPath)
|
||||
const fileInfo = await stat(resolvedPath)
|
||||
if (!fileInfo.isFile()) {
|
||||
throw new Error(`Not a file: ${resolvedPath}`)
|
||||
}
|
||||
|
||||
const content = await readFile(resolvedPath, 'utf8')
|
||||
const relSourcePath = relative(cwd, resolvedPath).replace(/\\/g, '/')
|
||||
const ingestedAt = new Date().toISOString()
|
||||
const baseName = basename(resolvedPath, extname(resolvedPath))
|
||||
const title = extractTitleFromText(baseName, content)
|
||||
const summary = summarizeText(content)
|
||||
const excerpt = content.split('\n').slice(0, 20).join('\n').trim()
|
||||
const slug = sanitizeWikiSlug(`${baseName}-${Date.now()}`) || `source-${Date.now()}`
|
||||
|
||||
const paths = getWikiPaths(cwd)
|
||||
const sourceNotePath = `${paths.sourcesDir}/${slug}.md`
|
||||
|
||||
await writeFile(
|
||||
sourceNotePath,
|
||||
buildSourceNote({
|
||||
title,
|
||||
sourcePath: relSourcePath,
|
||||
ingestedAt,
|
||||
summary,
|
||||
excerpt,
|
||||
}),
|
||||
'utf8',
|
||||
)
|
||||
await appendFile(paths.logFile, `${buildLogEntry(relSourcePath, title, ingestedAt)}\n`, 'utf8')
|
||||
await rebuildWikiIndex(cwd)
|
||||
|
||||
return {
|
||||
sourceFile: relSourcePath,
|
||||
sourceNote: relative(cwd, sourceNotePath).replace(/\\/g, '/'),
|
||||
summary,
|
||||
title,
|
||||
}
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
import { afterEach, expect, test } from 'bun:test'
|
||||
import { mkdtemp, readFile, rm } from 'fs/promises'
|
||||
import { tmpdir } from 'os'
|
||||
import { join } from 'path'
|
||||
import { initializeWiki } from './init.js'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
|
||||
const tempDirs: string[] = []
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(
|
||||
tempDirs.splice(0).map(dir => rm(dir, { recursive: true, force: true })),
|
||||
)
|
||||
})
|
||||
|
||||
async function makeProjectDir(): Promise<string> {
|
||||
const dir = await mkdtemp(join(tmpdir(), 'openclaude-wiki-init-'))
|
||||
tempDirs.push(dir)
|
||||
return dir
|
||||
}
|
||||
|
||||
test('initializeWiki creates the expected wiki scaffold', async () => {
|
||||
const cwd = await makeProjectDir()
|
||||
const result = await initializeWiki(cwd)
|
||||
const paths = getWikiPaths(cwd)
|
||||
|
||||
expect(result.alreadyExisted).toBe(false)
|
||||
expect(result.createdFiles).toEqual([
|
||||
'.openclaude/wiki/schema.md',
|
||||
'.openclaude/wiki/index.md',
|
||||
'.openclaude/wiki/log.md',
|
||||
'.openclaude/wiki/pages/architecture.md',
|
||||
])
|
||||
expect(await readFile(paths.schemaFile, 'utf8')).toContain(
|
||||
'# OpenClaude Wiki Schema',
|
||||
)
|
||||
expect(await readFile(paths.indexFile, 'utf8')).toContain('Wiki')
|
||||
expect(await readFile(paths.logFile, 'utf8')).toContain(
|
||||
'Wiki initialized by OpenClaude',
|
||||
)
|
||||
expect(await readFile(join(paths.pagesDir, 'architecture.md'), 'utf8')).toContain(
|
||||
'# Architecture',
|
||||
)
|
||||
})
|
||||
|
||||
test('initializeWiki is idempotent and preserves existing files', async () => {
|
||||
const cwd = await makeProjectDir()
|
||||
|
||||
await initializeWiki(cwd)
|
||||
const second = await initializeWiki(cwd)
|
||||
|
||||
expect(second.alreadyExisted).toBe(true)
|
||||
expect(second.createdFiles).toEqual([])
|
||||
})
|
||||
@@ -1,140 +0,0 @@
|
||||
import { mkdir, writeFile } from 'fs/promises'
|
||||
import { basename, relative } from 'path'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
import type { WikiInitResult } from './types.js'
|
||||
|
||||
function buildSchemaTemplate(projectName: string): string {
|
||||
return `# OpenClaude Wiki Schema
|
||||
|
||||
This wiki stores durable, human-readable project knowledge for ${projectName}.
|
||||
|
||||
## Goals
|
||||
|
||||
- Keep useful project knowledge in markdown, not only in chat history
|
||||
- Prefer synthesized facts over raw copy-paste
|
||||
- Keep source attribution explicit
|
||||
- Make pages easy for both humans and agents to update
|
||||
|
||||
## Structure
|
||||
|
||||
- \`index.md\`: top-level navigation and major topics
|
||||
- \`log.md\`: append-only update log
|
||||
- \`pages/\`: durable topic and architecture pages
|
||||
- \`sources/\`: source ingestion notes and summaries
|
||||
|
||||
## Page Rules
|
||||
|
||||
- Keep pages focused on one topic
|
||||
- Use stable headings such as:
|
||||
- \`## Summary\`
|
||||
- \`## Key Facts\`
|
||||
- \`## Relationships\`
|
||||
- \`## Open Questions\`
|
||||
- \`## Sources\`
|
||||
- Add or update facts only when they are grounded in project files or explicit source notes
|
||||
- Prefer editing an existing page over creating duplicates
|
||||
`
|
||||
}
|
||||
|
||||
function buildIndexTemplate(projectName: string): string {
|
||||
return `# ${projectName} Wiki
|
||||
|
||||
This wiki is maintained by OpenClaude as a durable project knowledge layer.
|
||||
|
||||
## Core Pages
|
||||
|
||||
- [Architecture](./pages/architecture.md)
|
||||
|
||||
## Sources
|
||||
|
||||
- Source notes live in [sources/](./sources/)
|
||||
|
||||
## Recent Updates
|
||||
|
||||
- See [log.md](./log.md)
|
||||
`
|
||||
}
|
||||
|
||||
function buildLogTemplate(timestamp: string): string {
|
||||
return `# Wiki Update Log
|
||||
|
||||
- ${timestamp}: Wiki initialized by OpenClaude
|
||||
`
|
||||
}
|
||||
|
||||
function buildArchitectureTemplate(projectName: string): string {
|
||||
return `# Architecture
|
||||
|
||||
## Summary
|
||||
|
||||
High-level architecture notes for ${projectName}.
|
||||
|
||||
## Key Facts
|
||||
|
||||
- This page is the starting point for durable architecture knowledge.
|
||||
|
||||
## Relationships
|
||||
|
||||
- Link this page to major subsystems as the wiki grows.
|
||||
|
||||
## Open Questions
|
||||
|
||||
- What are the most important runtime subsystems?
|
||||
- Which files best represent the system architecture?
|
||||
|
||||
## Sources
|
||||
|
||||
- Wiki bootstrap
|
||||
`
|
||||
}
|
||||
|
||||
async function ensureFile(
|
||||
filePath: string,
|
||||
content: string,
|
||||
createdFiles: string[],
|
||||
): Promise<void> {
|
||||
try {
|
||||
await writeFile(filePath, content, { encoding: 'utf8', flag: 'wx' })
|
||||
createdFiles.push(filePath)
|
||||
} catch (error: unknown) {
|
||||
if (
|
||||
typeof error === 'object' &&
|
||||
error !== null &&
|
||||
'code' in error &&
|
||||
error.code === 'EEXIST'
|
||||
) {
|
||||
return
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
export async function initializeWiki(cwd: string): Promise<WikiInitResult> {
|
||||
const paths = getWikiPaths(cwd)
|
||||
const createdDirectories: string[] = []
|
||||
const createdFiles: string[] = []
|
||||
|
||||
for (const dir of [paths.root, paths.pagesDir, paths.sourcesDir]) {
|
||||
await mkdir(dir, { recursive: true })
|
||||
createdDirectories.push(dir)
|
||||
}
|
||||
|
||||
const projectName = basename(cwd)
|
||||
const timestamp = new Date().toISOString()
|
||||
|
||||
await ensureFile(paths.schemaFile, buildSchemaTemplate(projectName), createdFiles)
|
||||
await ensureFile(paths.indexFile, buildIndexTemplate(projectName), createdFiles)
|
||||
await ensureFile(paths.logFile, buildLogTemplate(timestamp), createdFiles)
|
||||
await ensureFile(
|
||||
`${paths.pagesDir}/architecture.md`,
|
||||
buildArchitectureTemplate(projectName),
|
||||
createdFiles,
|
||||
)
|
||||
|
||||
return {
|
||||
root: paths.root,
|
||||
createdFiles: createdFiles.map(file => relative(cwd, file)),
|
||||
createdDirectories: createdDirectories.map(dir => relative(cwd, dir)),
|
||||
alreadyExisted: createdFiles.length === 0,
|
||||
}
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import { join } from 'path'
|
||||
import type { WikiPaths } from './types.js'
|
||||
|
||||
export const OPENCLAUDE_DIRNAME = '.openclaude'
|
||||
export const WIKI_DIRNAME = 'wiki'
|
||||
|
||||
export function getWikiPaths(cwd: string): WikiPaths {
|
||||
const root = join(cwd, OPENCLAUDE_DIRNAME, WIKI_DIRNAME)
|
||||
|
||||
return {
|
||||
root,
|
||||
pagesDir: join(root, 'pages'),
|
||||
sourcesDir: join(root, 'sources'),
|
||||
schemaFile: join(root, 'schema.md'),
|
||||
indexFile: join(root, 'index.md'),
|
||||
logFile: join(root, 'log.md'),
|
||||
}
|
||||
}
|
||||
@@ -1,55 +0,0 @@
|
||||
import { afterEach, expect, test } from 'bun:test'
|
||||
import { mkdtemp, mkdir, rm, writeFile } from 'fs/promises'
|
||||
import { tmpdir } from 'os'
|
||||
import { join } from 'path'
|
||||
import { initializeWiki } from './init.js'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
import { getWikiStatus } from './status.js'
|
||||
|
||||
// Temp project directories created during this test file; each test
// registers its directory here and afterEach removes them.
const tempDirs: string[] = []

afterEach(async () => {
  // splice(0) drains the array so each directory is removed exactly once.
  await Promise.all(
    tempDirs.splice(0).map(dir => rm(dir, { recursive: true, force: true })),
  )
})
|
||||
|
||||
async function makeProjectDir(): Promise<string> {
|
||||
const dir = await mkdtemp(join(tmpdir(), 'openclaude-wiki-status-'))
|
||||
tempDirs.push(dir)
|
||||
return dir
|
||||
}
|
||||
|
||||
// A directory with no .openclaude/wiki should report a fully-empty,
// uninitialized status.
test('getWikiStatus reports uninitialized wiki state', async () => {
  const cwd = await makeProjectDir()
  const status = await getWikiStatus(cwd)

  expect(status.initialized).toBe(false)
  expect(status.pageCount).toBe(0)
  expect(status.sourceCount).toBe(0)
  expect(status.lastUpdatedAt).toBeNull()
})
|
||||
|
||||
// After initializeWiki, added pages and nested source files should be
// counted, the core files detected, and a last-updated timestamp surfaced.
test('getWikiStatus counts pages and sources for initialized wiki', async () => {
  const cwd = await makeProjectDir()
  await initializeWiki(cwd)
  const paths = getWikiPaths(cwd)

  // One extra page on top of the seeded pages/architecture.md -> pageCount 2.
  await writeFile(join(paths.pagesDir, 'commands.md'), '# Commands\n', 'utf8')
  // Sources are discovered recursively, so nest one a level down.
  await mkdir(join(paths.sourcesDir, 'external'), { recursive: true })
  await writeFile(
    join(paths.sourcesDir, 'external', 'spec.md'),
    '# Spec\n',
    'utf8',
  )

  const status = await getWikiStatus(cwd)

  expect(status.initialized).toBe(true)
  expect(status.pageCount).toBe(2)
  expect(status.sourceCount).toBe(1)
  expect(status.hasSchema).toBe(true)
  expect(status.hasIndex).toBe(true)
  expect(status.hasLog).toBe(true)
  expect(status.lastUpdatedAt).not.toBeNull()
})
|
||||
@@ -1,82 +0,0 @@
|
||||
import { readdir, stat } from 'fs/promises'
|
||||
import { getWikiPaths } from './paths.js'
|
||||
import type { WikiStatus } from './types.js'
|
||||
|
||||
async function pathExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await stat(path)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async function listMarkdownFiles(dir: string): Promise<string[]> {
|
||||
if (!(await pathExists(dir))) {
|
||||
return []
|
||||
}
|
||||
|
||||
const entries = await readdir(dir, { withFileTypes: true })
|
||||
const files: string[] = []
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = `${dir}/${entry.name}`
|
||||
if (entry.isDirectory()) {
|
||||
files.push(...(await listMarkdownFiles(fullPath)))
|
||||
} else if (entry.isFile() && entry.name.endsWith('.md')) {
|
||||
files.push(fullPath)
|
||||
}
|
||||
}
|
||||
|
||||
return files
|
||||
}
|
||||
|
||||
async function getLastUpdatedAt(pathsToCheck: string[]): Promise<string | null> {
|
||||
const mtimes: number[] = []
|
||||
|
||||
for (const path of pathsToCheck) {
|
||||
try {
|
||||
const info = await stat(path)
|
||||
mtimes.push(info.mtimeMs)
|
||||
} catch {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (mtimes.length === 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
return new Date(Math.max(...mtimes)).toISOString()
|
||||
}
|
||||
|
||||
export async function getWikiStatus(cwd: string): Promise<WikiStatus> {
|
||||
const paths = getWikiPaths(cwd)
|
||||
|
||||
const [hasRoot, hasSchema, hasIndex, hasLog, pages, sources] =
|
||||
await Promise.all([
|
||||
pathExists(paths.root),
|
||||
pathExists(paths.schemaFile),
|
||||
pathExists(paths.indexFile),
|
||||
pathExists(paths.logFile),
|
||||
listMarkdownFiles(paths.pagesDir),
|
||||
listMarkdownFiles(paths.sourcesDir),
|
||||
])
|
||||
|
||||
return {
|
||||
initialized: hasRoot && hasSchema && hasIndex && hasLog,
|
||||
root: paths.root,
|
||||
pageCount: pages.length,
|
||||
sourceCount: sources.length,
|
||||
hasSchema,
|
||||
hasIndex,
|
||||
hasLog,
|
||||
lastUpdatedAt: await getLastUpdatedAt([
|
||||
paths.schemaFile,
|
||||
paths.indexFile,
|
||||
paths.logFile,
|
||||
...pages,
|
||||
...sources,
|
||||
]),
|
||||
}
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
/** Resolved filesystem locations for a project wiki (built by getWikiPaths). */
export type WikiPaths = {
  root: string
  pagesDir: string
  sourcesDir: string
  schemaFile: string
  indexFile: string
  logFile: string
}

/** Result of initializeWiki: what was created and whether the wiki pre-existed. */
export type WikiInitResult = {
  root: string
  // File/directory paths relative to the project cwd passed to initializeWiki.
  createdFiles: string[]
  createdDirectories: string[]
  // True when no seed files had to be written.
  alreadyExisted: boolean
}

/** Snapshot of wiki state as reported by getWikiStatus. */
export type WikiStatus = {
  initialized: boolean
  root: string
  pageCount: number
  sourceCount: number
  hasSchema: boolean
  hasIndex: boolean
  hasLog: boolean
  // ISO timestamp of the most recent tracked-file mtime; null when none exist.
  lastUpdatedAt: string | null
}

/** Result of ingesting an external document into the wiki sources. */
export type WikiIngestResult = {
  sourceFile: string
  sourceNote: string
  summary: string
  title: string
}
|
||||
@@ -1,36 +0,0 @@
|
||||
export function sanitizeWikiSlug(value: string): string {
|
||||
return value
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9]+/g, '-')
|
||||
.replace(/^-+|-+$/g, '')
|
||||
.replace(/-{2,}/g, '-')
|
||||
}
|
||||
|
||||
export function summarizeText(input: string, maxLength = 280): string {
|
||||
const normalized = input.replace(/\s+/g, ' ').trim()
|
||||
if (!normalized) {
|
||||
return 'No summary available.'
|
||||
}
|
||||
|
||||
if (normalized.length <= maxLength) {
|
||||
return normalized
|
||||
}
|
||||
|
||||
return `${normalized.slice(0, maxLength - 1).trimEnd()}…`
|
||||
}
|
||||
|
||||
export function extractTitleFromText(
|
||||
fallbackName: string,
|
||||
content: string,
|
||||
): string {
|
||||
const firstNonEmptyLine = content
|
||||
.split('\n')
|
||||
.map(line => line.trim())
|
||||
.find(Boolean)
|
||||
|
||||
if (!firstNonEmptyLine) {
|
||||
return fallbackName
|
||||
}
|
||||
|
||||
return firstNonEmptyLine.replace(/^#+\s*/, '') || fallbackName
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
import type { Command } from '../commands.js'
|
||||
import { createStore } from './store.js'
|
||||
|
||||
const pluginCommandsStore = createStore<Command[]>([])
|
||||
|
||||
export const getPluginCommandsState = (): Command[] =>
|
||||
pluginCommandsStore.getState()
|
||||
|
||||
export const subscribePluginCommands = pluginCommandsStore.subscribe
|
||||
|
||||
export function setPluginCommandsState(commands: Command[]): void {
|
||||
pluginCommandsStore.setState(() => [...commands])
|
||||
}
|
||||
@@ -27,19 +27,19 @@ function getClaudeCodeGuideBasePrompt(): string {
|
||||
? `${FILE_READ_TOOL_NAME}, \`find\`, and \`grep\``
|
||||
: `${FILE_READ_TOOL_NAME}, ${GLOB_TOOL_NAME}, and ${GREP_TOOL_NAME}`
|
||||
|
||||
return `You are the OpenClaude guide agent. Your primary responsibility is helping users understand and use OpenClaude, the Claude Agent SDK, and the Claude API (formerly the Anthropic API) effectively.
|
||||
return `You are the Claude guide agent. Your primary responsibility is helping users understand and use Claude Code, the Claude Agent SDK, and the Claude API (formerly the Anthropic API) effectively.
|
||||
|
||||
**Your expertise spans three domains:**
|
||||
|
||||
1. **OpenClaude** (the CLI tool): Installation, configuration, hooks, skills, MCP servers, keyboard shortcuts, IDE integrations, settings, and workflows.
|
||||
1. **Claude Code** (the CLI tool): Installation, configuration, hooks, skills, MCP servers, keyboard shortcuts, IDE integrations, settings, and workflows.
|
||||
|
||||
2. **Claude Agent SDK**: A framework for building custom AI agents. Available for Node.js/TypeScript and Python.
|
||||
2. **Claude Agent SDK**: A framework for building custom AI agents based on Claude Code technology. Available for Node.js/TypeScript and Python.
|
||||
|
||||
3. **Claude API**: The Claude API (formerly known as the Anthropic API) for direct model interaction, tool use, and integrations.
|
||||
|
||||
**Documentation sources:**
|
||||
|
||||
- **Claude Code docs** (${CLAUDE_CODE_DOCS_MAP_URL}): Use these as the compatibility reference for questions about the OpenClaude CLI tool, including:
|
||||
- **Claude Code docs** (${CLAUDE_CODE_DOCS_MAP_URL}): Fetch this for questions about the Claude Code CLI tool, including:
|
||||
- Installation, setup, and getting started
|
||||
- Hooks (pre/post command execution)
|
||||
- Custom skills
|
||||
@@ -97,7 +97,7 @@ function getFeedbackGuideline(): string {
|
||||
|
||||
export const CLAUDE_CODE_GUIDE_AGENT: BuiltInAgentDefinition = {
|
||||
agentType: CLAUDE_CODE_GUIDE_AGENT_TYPE,
|
||||
whenToUse: `Use this agent when the user asks questions ("Can OpenClaude...", "Does OpenClaude...", "How do I...") about: (1) OpenClaude (the CLI tool) - features, hooks, slash commands, MCP servers, settings, IDE integrations, keyboard shortcuts; (2) Claude Agent SDK - building custom agents; (3) Claude API (formerly Anthropic API) - API usage, tool use, Anthropic SDK usage. **IMPORTANT:** Before spawning a new agent, check if there is already a running or recently completed claude-code-guide agent that you can continue via ${SEND_MESSAGE_TOOL_NAME}.`,
|
||||
whenToUse: `Use this agent when the user asks questions ("Can Claude...", "Does Claude...", "How do I...") about: (1) Claude Code (the CLI tool) - features, hooks, slash commands, MCP servers, settings, IDE integrations, keyboard shortcuts; (2) Claude Agent SDK - building custom agents; (3) Claude API (formerly Anthropic API) - API usage, tool use, Anthropic SDK usage. **IMPORTANT:** Before spawning a new agent, check if there is already a running or recently completed claude-code-guide agent that you can continue via ${SEND_MESSAGE_TOOL_NAME}.`,
|
||||
// Ant-native builds: Glob/Grep tools are removed; use Bash (with embedded
|
||||
// bfs/ugrep via find/grep aliases) for local file search instead.
|
||||
tools: hasEmbeddedSearchTools()
|
||||
|
||||
@@ -21,7 +21,7 @@ function getExploreSystemPrompt(): string {
|
||||
? `- Use \`grep\` via ${BASH_TOOL_NAME} for searching file contents with regex`
|
||||
: `- Use ${GREP_TOOL_NAME} for searching file contents with regex`
|
||||
|
||||
return `You are a file search specialist for OpenClaude. You excel at thoroughly navigating and exploring codebases.
|
||||
return `You are a file search specialist for OpenClaude, an open-source fork of Claude Code. You excel at thoroughly navigating and exploring codebases.
|
||||
|
||||
=== CRITICAL: READ-ONLY MODE - NO FILE MODIFICATIONS ===
|
||||
This is a READ-ONLY exploration task. You are STRICTLY PROHIBITED from:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { BuiltInAgentDefinition } from '../loadAgentsDir.js'
|
||||
|
||||
const SHARED_PREFIX = `You are an agent for OpenClaude, an open-source coding agent and CLI. Given the user's message, you should use the tools available to complete the task. Complete the task fully—don't gold-plate, but don't leave it half-done.`
|
||||
const SHARED_PREFIX = `You are an agent for OpenClaude, an open-source fork of Claude Code. Given the user's message, you should use the tools available to complete the task. Complete the task fully—don't gold-plate, but don't leave it half-done.`
|
||||
|
||||
const SHARED_GUIDELINES = `Your strengths:
|
||||
- Searching for code, configurations, and patterns across large codebases
|
||||
|
||||
@@ -18,7 +18,7 @@ function getPlanV2SystemPrompt(): string {
|
||||
? `\`find\`, \`grep\`, and ${FILE_READ_TOOL_NAME}`
|
||||
: `${GLOB_TOOL_NAME}, ${GREP_TOOL_NAME}, and ${FILE_READ_TOOL_NAME}`
|
||||
|
||||
return `You are a software architect and planning specialist for OpenClaude. Your role is to explore the codebase and design implementation plans.
|
||||
return `You are a software architect and planning specialist for Claude Code. Your role is to explore the codebase and design implementation plans.
|
||||
|
||||
=== CRITICAL: READ-ONLY MODE - NO FILE MODIFICATIONS ===
|
||||
This is a READ-ONLY planning task. You are STRICTLY PROHIBITED from:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { BuiltInAgentDefinition } from '../loadAgentsDir.js'
|
||||
|
||||
const STATUSLINE_SYSTEM_PROMPT = `You are a status line setup agent for OpenClaude. Your job is to create or update the statusLine command in the user's OpenClaude settings.
|
||||
const STATUSLINE_SYSTEM_PROMPT = `You are a status line setup agent for Claude Code. Your job is to create or update the statusLine command in the user's Claude Code settings.
|
||||
|
||||
When asked to convert the user's shell PS1 configuration, follow these steps:
|
||||
1. Read the user's shell configuration files in this order of preference:
|
||||
@@ -47,7 +47,7 @@ How to use the statusLine command:
|
||||
"project_dir": "string", // Project root directory path
|
||||
"added_dirs": ["string"] // Directories added via /add-dir
|
||||
},
|
||||
"version": "string", // OpenClaude app version (e.g., "1.0.71")
|
||||
"version": "string", // Claude Code app version (e.g., "1.0.71")
|
||||
"output_style": {
|
||||
"name": "string", // Output style name (e.g., "default", "Explanatory", "Learning")
|
||||
},
|
||||
@@ -110,11 +110,10 @@ How to use the statusLine command:
|
||||
To display both 5-hour and 7-day limits when available:
|
||||
- input=$(cat); five=$(echo "$input" | jq -r '.rate_limits.five_hour.used_percentage // empty'); week=$(echo "$input" | jq -r '.rate_limits.seven_day.used_percentage // empty'); out=""; [ -n "$five" ] && out="5h:$(printf '%.0f' "$five")%"; [ -n "$week" ] && out="$out 7d:$(printf '%.0f' "$week")%"; echo "$out"
|
||||
|
||||
2. For longer commands, save a new file alongside the user's existing settings home instead of creating a new config directory.
|
||||
- Prefer the currently used config home if one already exists.
|
||||
- In most existing setups this will be ~/.claude/statusline-command.sh.
|
||||
2. For longer commands, you can save a new file in the user's ~/.claude directory, e.g.:
|
||||
- ~/.claude/statusline-command.sh and reference that file in the settings.
|
||||
|
||||
3. Update the user's resolved settings file with:
|
||||
3. Update the user's ~/.claude/settings.json with:
|
||||
{
|
||||
"statusLine": {
|
||||
"type": "command",
|
||||
@@ -122,23 +121,20 @@ How to use the statusLine command:
|
||||
}
|
||||
}
|
||||
|
||||
4. Use the existing config home already in use by the installation.
|
||||
- Prefer the settings file that already exists.
|
||||
- If the settings file is a symlink, update the target file instead.
|
||||
- Do not create a second parallel settings home just because another directory name also exists.
|
||||
4. If ~/.claude/settings.json is a symlink, update the target file instead.
|
||||
|
||||
Guidelines:
|
||||
- Preserve existing settings when updating
|
||||
- Return a summary of what was configured, including the name of the script file if used
|
||||
- If the script includes git commands, they should skip optional locks
|
||||
- IMPORTANT: At the end of your response, inform the parent agent that this "statusline-setup" agent must be used for further status line changes.
|
||||
Also ensure that the user is informed that they can ask OpenClaude to continue to make changes to the status line.
|
||||
Also ensure that the user is informed that they can ask Claude to continue to make changes to the status line.
|
||||
`
|
||||
|
||||
export const STATUSLINE_SETUP_AGENT: BuiltInAgentDefinition = {
|
||||
agentType: 'statusline-setup',
|
||||
whenToUse:
|
||||
"Use this agent to configure the user's OpenClaude status line setting.",
|
||||
"Use this agent to configure the user's Claude Code status line setting.",
|
||||
tools: ['Read', 'Edit'],
|
||||
source: 'built-in',
|
||||
baseDir: 'built-in',
|
||||
|
||||
@@ -14,21 +14,8 @@ import {
|
||||
export const inputSchema = lazySchema(() => z.object({}).passthrough())
|
||||
type InputSchema = ReturnType<typeof inputSchema>
|
||||
|
||||
// MCP tools can return either a plain string or an array of content blocks
|
||||
// (text, images, etc.). The outputSchema must reflect both shapes so the model
|
||||
// knows rich content is possible.
|
||||
export const outputSchema = lazySchema(() =>
|
||||
z.union([
|
||||
z.string().describe('MCP tool execution result as text'),
|
||||
z
|
||||
.array(
|
||||
z.object({
|
||||
type: z.string(),
|
||||
text: z.string().optional(),
|
||||
}),
|
||||
)
|
||||
.describe('MCP tool execution result as content blocks'),
|
||||
]),
|
||||
z.string().describe('MCP tool execution result'),
|
||||
)
|
||||
type OutputSchema = ReturnType<typeof outputSchema>
|
||||
|
||||
@@ -78,19 +65,7 @@ export const MCPTool = buildTool({
|
||||
renderToolUseProgressMessage,
|
||||
renderToolResultMessage,
|
||||
isResultTruncated(output: Output): boolean {
|
||||
if (typeof output === 'string') {
|
||||
return isOutputLineTruncated(output)
|
||||
}
|
||||
// Array of content blocks — check if any text block exceeds the display limit
|
||||
if (Array.isArray(output)) {
|
||||
return output.some(
|
||||
block =>
|
||||
block?.type === 'text' &&
|
||||
typeof block.text === 'string' &&
|
||||
isOutputLineTruncated(block.text),
|
||||
)
|
||||
}
|
||||
return false
|
||||
return isOutputLineTruncated(output)
|
||||
},
|
||||
mapToolResultToToolResultBlockParam(content, toolUseID) {
|
||||
return {
|
||||
|
||||
@@ -1,29 +1,6 @@
|
||||
import { describe, expect, test } from 'bun:test'
|
||||
|
||||
import type { Command } from '../../commands.js'
|
||||
import { SkillTool } from './SkillTool.js'
|
||||
import { renderToolUseMessage } from './UI.js'
|
||||
|
||||
function createPromptCommand(
|
||||
name: string,
|
||||
options: {
|
||||
source?: 'builtin' | 'plugin' | 'mcp' | 'bundled'
|
||||
loadedFrom?: Command['loadedFrom']
|
||||
} = {},
|
||||
): Command {
|
||||
return {
|
||||
type: 'prompt',
|
||||
name,
|
||||
description: `${name} description`,
|
||||
progressMessage: `${name} progress`,
|
||||
contentLength: 0,
|
||||
source: options.source ?? 'builtin',
|
||||
loadedFrom: options.loadedFrom,
|
||||
async getPromptForCommand() {
|
||||
return []
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
describe('SkillTool missing parameter handling', () => {
|
||||
test('missing skill stays required at the schema level', async () => {
|
||||
@@ -52,47 +29,3 @@ describe('SkillTool missing parameter handling', () => {
|
||||
expect(parsed.success).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('SkillTool renderToolUseMessage', () => {
|
||||
test('plugin skills render correctly without plugin command metadata', () => {
|
||||
const pluginSkillName = 'plugin:review-pr'
|
||||
|
||||
expect(
|
||||
renderToolUseMessage(
|
||||
{ skill: pluginSkillName },
|
||||
{
|
||||
commands: [],
|
||||
},
|
||||
),
|
||||
).toBe(pluginSkillName)
|
||||
|
||||
expect(
|
||||
renderToolUseMessage(
|
||||
{ skill: pluginSkillName },
|
||||
{
|
||||
commands: [
|
||||
createPromptCommand(pluginSkillName, {
|
||||
source: 'plugin',
|
||||
loadedFrom: 'plugin',
|
||||
}),
|
||||
],
|
||||
},
|
||||
),
|
||||
).toBe(pluginSkillName)
|
||||
})
|
||||
|
||||
test('legacy commands still render with a slash prefix when metadata is present', () => {
|
||||
expect(
|
||||
renderToolUseMessage(
|
||||
{ skill: 'legacy-command' },
|
||||
{
|
||||
commands: [
|
||||
createPromptCommand('legacy-command', {
|
||||
loadedFrom: 'commands_DEPRECATED',
|
||||
}),
|
||||
],
|
||||
},
|
||||
),
|
||||
).toBe('/legacy-command')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -54,10 +54,7 @@ export function renderToolUseMessage({
|
||||
if (!skill) {
|
||||
return null;
|
||||
}
|
||||
// Only legacy /commands_DEPRECATED entries need the command lookup so we can
|
||||
// preserve the slash-prefixed display. Plugin skills already carry the
|
||||
// invoked skill name in `skill`, so transcript/history rendering does not
|
||||
// need plugin command metadata.
|
||||
// Look up the command to check if it came from the legacy /commands folder
|
||||
const command = commands?.find(c => c.name === skill);
|
||||
const displayName = command?.loadedFrom === 'commands_DEPRECATED' ? `/${skill}` : skill;
|
||||
return displayName;
|
||||
|
||||
@@ -1,518 +0,0 @@
|
||||
# Web Search Providers
|
||||
|
||||
OpenClaude supports multiple search backends through a provider adapter system.
|
||||
|
||||
## Supported Providers
|
||||
|
||||
| Provider | Env Var | Auth Header | Method |
|
||||
|---|---|---|---|
|
||||
| Custom API | `WEB_SEARCH_API` | Configurable | GET/POST |
|
||||
| SearXNG | `WEB_PROVIDER=searxng` | — | GET |
|
||||
| Google | `WEB_PROVIDER=google` | `Authorization: Bearer` | GET |
|
||||
| Brave | `WEB_PROVIDER=brave` | `X-Subscription-Token` | GET |
|
||||
| SerpAPI | `WEB_PROVIDER=serpapi` | `Authorization: Bearer` | GET |
|
||||
| Firecrawl | `FIRECRAWL_API_KEY` | Internal | SDK |
|
||||
| Tavily | `TAVILY_API_KEY` | `Authorization: Bearer` | POST |
|
||||
| Exa | `EXA_API_KEY` | `x-api-key` | POST |
|
||||
| You.com | `YOU_API_KEY` | `X-API-Key` | GET |
|
||||
| Jina | `JINA_API_KEY` | `Authorization: Bearer` | GET |
|
||||
| Bing | `BING_API_KEY` | `Ocp-Apim-Subscription-Key` | GET |
|
||||
| Mojeek | `MOJEEK_API_KEY` | `Authorization: Bearer` | GET |
|
||||
| Linkup | `LINKUP_API_KEY` | `Authorization: Bearer` | POST |
|
||||
| DuckDuckGo | *(default)* | — | SDK |
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Tavily (recommended for AI — fast, RAG-ready)
|
||||
export TAVILY_API_KEY=tvly-your-key
|
||||
|
||||
# Exa (neural search, semantic queries)
|
||||
export EXA_API_KEY=your-exa-key
|
||||
|
||||
# Brave (traditional web search, good coverage)
|
||||
export WEB_PROVIDER=brave
|
||||
export WEB_KEY=your-brave-key
|
||||
|
||||
# Bing
|
||||
export BING_API_KEY=your-bing-key
|
||||
|
||||
# Self-hosted SearXNG (free, private)
|
||||
export WEB_PROVIDER=searxng
|
||||
export WEB_SEARCH_API=https://search.example.com/search
|
||||
```
|
||||
|
||||
## Provider Selection Mode
|
||||
|
||||
`WEB_SEARCH_PROVIDER` controls fallback behavior:
|
||||
|
||||
| Mode | Behavior |
|
||||
|---|---|
|
||||
| `auto` (default) | Try all configured providers in order, fall through on failure |
|
||||
| `tavily` | Tavily only — throws on failure |
|
||||
| `exa` | Exa only — throws on failure |
|
||||
| `custom` | Custom API only — throws on failure. **Not in the auto chain** — must be explicitly selected |
|
||||
| `firecrawl` | Firecrawl only — throws on failure |
|
||||
| `ddg` | DuckDuckGo only — throws on failure |
|
||||
| `native` | Anthropic native / Codex only |
|
||||
|
||||
**Auto mode priority:** firecrawl → tavily → exa → you → jina → bing → mojeek → linkup → ddg
|
||||
|
||||
> **Note:** The `custom` provider is excluded from the `auto` chain. It is only used when `WEB_SEARCH_PROVIDER=custom` is explicitly set. This prevents the generic outbound provider from silently becoming the default backend.
|
||||
|
||||
```bash
|
||||
# Fail loudly if Tavily is down (don't silently switch backends)
|
||||
export WEB_SEARCH_PROVIDER=tavily
|
||||
|
||||
# Try everything, fall through gracefully
|
||||
export WEB_SEARCH_PROVIDER=auto
|
||||
```
|
||||
|
||||
## Provider Request & Response Formats
|
||||
|
||||
### Tavily
|
||||
|
||||
```bash
|
||||
export TAVILY_API_KEY=tvly-your-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
POST https://api.tavily.com/search
|
||||
Authorization: Bearer tvly-your-key
|
||||
Content-Type: application/json
|
||||
|
||||
{"query": "search terms", "max_results": 10, "include_answer": false}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"content": "Full text snippet from the page...",
|
||||
"score": 0.95
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Exa
|
||||
|
||||
```bash
|
||||
export EXA_API_KEY=your-exa-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
POST https://api.exa.ai/search
|
||||
x-api-key: your-exa-key
|
||||
Content-Type: application/json
|
||||
|
||||
{"query": "search terms", "numResults": 10, "type": "auto"}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"snippet": "A short summary of the page content...",
|
||||
"score": 0.89
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### You.com
|
||||
|
||||
```bash
|
||||
export YOU_API_KEY=your-you-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://api.ydc-index.io/v1/search?query=search+terms
|
||||
X-API-Key: your-you-key
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"results": {
|
||||
"web": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"snippets": ["First snippet from the page...", "Second snippet..."],
|
||||
"description": "Page description"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Jina
|
||||
|
||||
```bash
|
||||
export JINA_API_KEY=your-jina-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://s.jina.ai/?q=search+terms
|
||||
Authorization: Bearer your-jina-key
|
||||
Accept: application/json
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"data": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"description": "Snippet from the page..."
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Bing
|
||||
|
||||
```bash
|
||||
export BING_API_KEY=your-bing-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://api.bing.microsoft.com/v7.0/search?q=search+terms&count=10
|
||||
Ocp-Apim-Subscription-Key: your-bing-key
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"webPages": {
|
||||
"value": [
|
||||
{
|
||||
"name": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"snippet": "A short excerpt from the page...",
|
||||
"displayUrl": "example.com/page"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Mojeek
|
||||
|
||||
```bash
|
||||
export MOJEEK_API_KEY=your-mojeek-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://www.mojeek.com/search?q=search+terms&fmt=json
|
||||
Authorization: Bearer your-mojeek-key
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"response": {
|
||||
"results": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"snippet": "Excerpt from the page..."
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Linkup
|
||||
|
||||
```bash
|
||||
export LINKUP_API_KEY=your-linkup-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
POST https://api.linkup.so/v1/search
|
||||
Authorization: Bearer your-linkup-key
|
||||
Content-Type: application/json
|
||||
|
||||
{"q": "search terms", "search_type": "standard"}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"name": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"snippet": "A short description of the result..."
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### SearXNG (Built-in Preset)
|
||||
|
||||
```bash
|
||||
export WEB_PROVIDER=searxng
|
||||
export WEB_SEARCH_API=https://search.example.com/search
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://search.example.com/search?q=search+terms
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"content": "Snippet from the page...",
|
||||
"engine": "google"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Google Custom Search (Built-in Preset)
|
||||
|
||||
```bash
|
||||
export WEB_PROVIDER=google
|
||||
export WEB_KEY=your-google-api-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://www.googleapis.com/customsearch/v1?q=search+terms
|
||||
Authorization: Bearer your-google-api-key
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"link": "https://example.com/page",
|
||||
"snippet": "A short excerpt...",
|
||||
"displayLink": "example.com"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Brave (Built-in Preset)
|
||||
|
||||
```bash
|
||||
export WEB_PROVIDER=brave
|
||||
export WEB_KEY=your-brave-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://api.search.brave.com/res/v1/web/search?q=search+terms
|
||||
X-Subscription-Token: your-brave-key
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"web": {
|
||||
"results": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"url": "https://example.com/page",
|
||||
"description": "Page description..."
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### SerpAPI (Built-in Preset)
|
||||
|
||||
```bash
|
||||
export WEB_PROVIDER=serpapi
|
||||
export WEB_KEY=your-serpapi-key
|
||||
```
|
||||
|
||||
**Request:**
|
||||
```
|
||||
GET https://serpapi.com/search.json?q=search+terms
|
||||
Authorization: Bearer your-serpapi-key
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"organic_results": [
|
||||
{
|
||||
"title": "Result Title",
|
||||
"link": "https://example.com/page",
|
||||
"snippet": "A short excerpt...",
|
||||
"displayed_link": "example.com"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### DuckDuckGo (Default Fallback)
|
||||
|
||||
No configuration needed. Uses the `duck-duck-scrape` npm package.
|
||||
|
||||
```bash
|
||||
# Set as explicit-only backend
|
||||
export WEB_SEARCH_PROVIDER=ddg
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Custom API Configuration
|
||||
|
||||
### Standard GET
|
||||
|
||||
```
|
||||
GET https://api.example.com/search?q=hello
|
||||
```
|
||||
|
||||
```bash
|
||||
export WEB_SEARCH_API=https://api.example.com/search
|
||||
export WEB_QUERY_PARAM=q
|
||||
```
|
||||
|
||||
### Query in URL Path
|
||||
|
||||
```
|
||||
GET https://api.example.com/v2/search/hello
|
||||
```
|
||||
|
||||
```bash
|
||||
export WEB_URL_TEMPLATE=https://api.example.com/v2/search/{query}
|
||||
```
|
||||
|
||||
### POST with Custom Body
|
||||
|
||||
```
|
||||
POST https://api.example.com/v1/query
|
||||
Content-Type: application/json
|
||||
|
||||
{"input": {"text": "hello"}}
|
||||
```
|
||||
|
||||
```bash
|
||||
export WEB_SEARCH_API=https://api.example.com/v1/query
|
||||
export WEB_METHOD=POST
|
||||
export WEB_BODY_TEMPLATE='{"input":{"text":"{query}"}}'
|
||||
```
|
||||
|
||||
### Extra Static Params
|
||||
|
||||
```bash
|
||||
export WEB_PARAMS='{"lang":"en","count":"10"}'
|
||||
```
|
||||
|
||||
## Auth
|
||||
|
||||
API keys are sent in HTTP headers, **never** in query strings.
|
||||
|
||||
```bash
|
||||
# Default: Authorization: Bearer <key>
|
||||
export WEB_KEY=your-key
|
||||
|
||||
# Custom header
|
||||
export WEB_AUTH_HEADER=X-Api-Key
|
||||
export WEB_AUTH_SCHEME=""
|
||||
|
||||
# Extra headers
|
||||
export WEB_HEADERS="X-Tenant: acme; Accept: application/json"
|
||||
```
|
||||
|
||||
## Response Parsing
|
||||
|
||||
The tool auto-detects many response formats:
|
||||
|
||||
```jsonc
|
||||
{ "results": [{ "title": "...", "url": "..." }] } // flat array
|
||||
{ "items": [{ "title": "...", "link": "..." }] } // Google-style
|
||||
{ "results": { "engine": [{ "title": "...", "url": "..." }] } } // nested map
|
||||
[{ "title": "...", "url": "..." }] // bare array
|
||||
```
|
||||
|
||||
Field name aliases: `title`/`headline`/`name`, `url`/`link`/`href`, `description`/`snippet`/`content`
|
||||
|
||||
For deeply nested responses:
|
||||
```bash
|
||||
export WEB_JSON_PATH=response.payload.results
|
||||
```
|
||||
|
||||
## Retry
|
||||
|
||||
Failed requests (network errors, 5xx) are retried once after 500ms. Client errors (4xx) are not retried. Custom requests have a default 120s timeout.
|
||||
|
||||
## Custom Provider Security Guardrails
|
||||
|
||||
The custom provider enforces the following guardrails by default:
|
||||
|
||||
| Guardrail | Default | Override |
|
||||
|-----------|---------|----------|
|
||||
| HTTPS-only | ✅ | `WEB_CUSTOM_ALLOW_HTTP=true` |
|
||||
| Block private IPs / localhost | ✅ | `WEB_CUSTOM_ALLOW_PRIVATE=true` |
|
||||
| Header allowlist | ✅ | `WEB_CUSTOM_ALLOW_ARBITRARY_HEADERS=true` |
|
||||
| Max POST body | 300 KB | `WEB_CUSTOM_MAX_BODY_KB=<kb>` |
|
||||
| Request timeout | 120s | `WEB_CUSTOM_TIMEOUT_SEC=<seconds>` |
|
||||
| Audit log (one-time warning) | ✅ | — |
|
||||
|
||||
### Self-hosted SearXNG example
|
||||
|
||||
```bash
|
||||
export WEB_PROVIDER=searxng
|
||||
export WEB_SEARCH_API=https://search.mydomain.com/search
|
||||
export WEB_CUSTOM_ALLOW_PRIVATE=true # needed if SearXNG is on a private IP
|
||||
```
|
||||
|
||||
### Header allowlist
|
||||
|
||||
By default only these headers are permitted:
|
||||
`accept`, `accept-encoding`, `accept-language`, `authorization`, `cache-control`, `content-type`, `if-modified-since`, `if-none-match`, `ocp-apim-subscription-key`, `user-agent`, `x-api-key`, `x-subscription-token`, `x-tenant-id`
|
||||
|
||||
## Adding a Provider
|
||||
|
||||
1. Create `providers/myprovider.ts`:
|
||||
|
||||
```typescript
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, type ProviderOutput } from './types.js'
|
||||
|
||||
export const myProvider: SearchProvider = {
|
||||
name: 'myprovider',
|
||||
isConfigured() { return Boolean(process.env.MYPROVIDER_API_KEY) },
|
||||
async search(input: SearchInput): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
// ... call API, map to SearchHit[] ...
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'myprovider',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
2. Register in `providers/index.ts` — add import and push to `ALL_PROVIDERS`.
|
||||
@@ -28,13 +28,6 @@ import {
|
||||
renderToolUseProgressMessage,
|
||||
} from './UI.js'
|
||||
|
||||
import {
|
||||
runSearch,
|
||||
getProviderMode,
|
||||
getAvailableProviders,
|
||||
type ProviderOutput,
|
||||
} from './providers/index.js'
|
||||
|
||||
const inputSchema = lazySchema(() =>
|
||||
z.strictObject({
|
||||
query: z.string().min(2).describe('The search query to use'),
|
||||
@@ -86,39 +79,6 @@ export type { WebSearchProgress } from '../../types/tools.js'
|
||||
|
||||
import type { WebSearchProgress } from '../../types/tools.js'
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Shared formatting: ProviderOutput → Output
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function formatProviderOutput(po: ProviderOutput, query: string): Output {
|
||||
const results: (SearchResult | string)[] = []
|
||||
|
||||
const snippets = po.hits
|
||||
.filter(h => h.description)
|
||||
.map(h => `**${h.title}** — ${h.description} (${h.url})`)
|
||||
.join('\n')
|
||||
if (snippets) results.push(snippets)
|
||||
|
||||
if (po.hits.length > 0) {
|
||||
results.push({
|
||||
tool_use_id: `${po.providerName}-search`,
|
||||
content: po.hits.map(h => ({ title: h.title, url: h.url })),
|
||||
})
|
||||
}
|
||||
|
||||
if (results.length === 0) results.push('No results found.')
|
||||
|
||||
return {
|
||||
query,
|
||||
results,
|
||||
durationSeconds: po.durationSeconds,
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Native Anthropic + Codex paths (unchanged, tightly coupled to SDK)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function makeToolSchema(input: Input): BetaWebSearchTool20250305 {
|
||||
return {
|
||||
type: 'web_search_20250305',
|
||||
@@ -129,10 +89,161 @@ function makeToolSchema(input: Input): BetaWebSearchTool20250305 {
|
||||
}
|
||||
}
|
||||
|
||||
function isFirecrawlEnabled(): boolean {
|
||||
return Boolean(process.env.FIRECRAWL_API_KEY)
|
||||
}
|
||||
|
||||
function shouldUseFirecrawl(): boolean {
|
||||
if (!isFirecrawlEnabled()) return false
|
||||
// Don't override native search on providers that already have it
|
||||
if (isCodexResponsesWebSearchEnabled()) return false
|
||||
const provider = getAPIProvider()
|
||||
if (provider === 'firstParty' || provider === 'vertex' || provider === 'foundry') return false
|
||||
return true
|
||||
}
|
||||
|
||||
function isClaudeModel(model: string): boolean {
|
||||
return /claude/i.test(model)
|
||||
}
|
||||
|
||||
function shouldUseDuckDuckGo(): boolean {
|
||||
if (isCodexResponsesWebSearchEnabled()) return false
|
||||
|
||||
const provider = getAPIProvider()
|
||||
// Don't override providers/models that have native web search support.
|
||||
if (provider === 'firstParty' || provider === 'vertex' || provider === 'foundry') {
|
||||
return false
|
||||
}
|
||||
|
||||
// Use free DDG search for non-Claude models by default.
|
||||
return !isClaudeModel(getMainLoopModel())
|
||||
}
|
||||
|
||||
/**
 * Free web search via DuckDuckGo scraping (`duck-duck-scrape`, no API key).
 *
 * Applies blocked_domains / allowed_domains as hostname-suffix filters,
 * formats a snippet blob plus a structured link list into the tool Output,
 * and — when the error looks like rate limiting and a Firecrawl key is
 * configured — falls back to Firecrawl instead of failing outright.
 */
async function runDuckDuckGoSearch(input: Input): Promise<Output> {
  const startTime = performance.now()

  try {
    // Lazy dynamic import keeps the scraper dependency off the startup path.
    const { search } = await import('duck-duck-scrape')

    const response = await search(input.query, {
      safeSearch: 0,
    })

    // Normalize DDG results; fall back to the URL when the title is empty.
    let hits = response.results.map(r => ({
      title: r.title || r.url,
      url: r.url,
      snippet: r.description,
    }))

    // Drop hits whose hostname ends with any blocked domain. Hits with
    // unparseable URLs are also dropped (conservative `return false`).
    if (input.blocked_domains?.length) {
      hits = hits.filter(h => {
        try {
          const host = new URL(h.url).hostname
          return !input.blocked_domains!.some(d => host.endsWith(d))
        } catch {
          return false
        }
      })
    }

    // Keep only hits whose hostname ends with an allowed domain.
    if (input.allowed_domains?.length) {
      hits = hits.filter(h => {
        try {
          const host = new URL(h.url).hostname
          return input.allowed_domains!.some(d => host.endsWith(d))
        } catch {
          return false
        }
      })
    }

    // One markdown blob containing every snippet, joined by newlines.
    const snippets = hits
      .filter(h => h.snippet)
      .map(h => `**${h.title}** — ${h.snippet} (${h.url})`)
      .join('\n')

    const results: Output['results'] = []
    if (snippets) results.push(snippets)
    // Structured title/url list, always pushed (may be empty).
    results.push({
      tool_use_id: 'duckduckgo-search',
      content: hits.map(({ title, url }) => ({ title, url })),
    })

    return {
      query: input.query,
      results,
      durationSeconds: (performance.now() - startTime) / 1000,
    }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    // Heuristic: scraping failures caused by throttling/blocking usually
    // mention one of these markers in the error message.
    const isRateLimited =
      message.includes('429') ||
      message.includes('rate') ||
      message.includes('CAPTCHA') ||
      message.includes('blocked')

    // Rate-limited and Firecrawl is configured → retry there instead.
    if (isRateLimited && isFirecrawlEnabled()) {
      return runFirecrawlSearch(input)
    }

    // Otherwise degrade gracefully with an explanatory message.
    return {
      query: input.query,
      results: [
        'Web search temporarily unavailable — try again or add a Firecrawl API key for reliable results.',
      ],
      durationSeconds: (performance.now() - startTime) / 1000,
    }
  }
}
|
||||
|
||||
async function runFirecrawlSearch(input: Input): Promise<Output> {
|
||||
const startTime = performance.now()
|
||||
const { FirecrawlClient } = await import('@mendable/firecrawl-js')
|
||||
const app = new FirecrawlClient({ apiKey: process.env.FIRECRAWL_API_KEY! })
|
||||
|
||||
let query = input.query
|
||||
if (input.blocked_domains?.length) {
|
||||
const exclusions = input.blocked_domains.map(d => `-site:${d}`).join(' ')
|
||||
query = `${query} ${exclusions}`
|
||||
}
|
||||
|
||||
const data = await app.search(query, { limit: 10 })
|
||||
|
||||
let hits = (data.web ?? []).map((r: { url: string; title?: string }) => ({
|
||||
title: r.title ?? r.url,
|
||||
url: r.url,
|
||||
}))
|
||||
|
||||
if (input.allowed_domains?.length) {
|
||||
hits = hits.filter(h =>
|
||||
input.allowed_domains!.some(d => {
|
||||
try {
|
||||
return new URL(h.url).hostname.endsWith(d)
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
const snippets = (data.web ?? [])
|
||||
.filter((r: { description?: string }) => r.description)
|
||||
.map((r: { url: string; title?: string; description?: string }) =>
|
||||
`**${r.title ?? r.url}** — ${r.description} (${r.url})`,
|
||||
)
|
||||
.join('\n')
|
||||
|
||||
const results: Output['results'] = []
|
||||
if (snippets) results.push(snippets)
|
||||
results.push({ tool_use_id: 'firecrawl-search', content: hits })
|
||||
|
||||
return {
|
||||
query: input.query,
|
||||
results,
|
||||
durationSeconds: (performance.now() - startTime) / 1000,
|
||||
}
|
||||
}
|
||||
|
||||
function isCodexResponsesWebSearchEnabled(): boolean {
|
||||
if (getAPIProvider() !== 'openai') {
|
||||
return false
|
||||
@@ -406,60 +517,6 @@ function makeOutputFromSearchResponse(
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helper: should we use adapter-based providers?
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns true for transient errors that are safe to fall through on in auto mode
|
||||
* (network failures, timeouts, HTTP 5xx). Config and guardrail errors return false.
|
||||
*/
|
||||
function isTransientError(err: unknown): boolean {
|
||||
if (!(err instanceof Error)) return true
|
||||
const msg = err.message.toLowerCase()
|
||||
// Guardrail / config errors — must surface
|
||||
if (msg.includes('must use https')) return false
|
||||
if (msg.includes('private/reserved address')) return false
|
||||
if (msg.includes('not in the safe allowlist')) return false
|
||||
if (msg.includes('exceeds') && msg.includes('bytes')) return false
|
||||
if (msg.includes('not a valid url')) return false
|
||||
if (msg.includes('is not configured')) return false
|
||||
// Transient errors — safe to fall through
|
||||
if (err.name === 'AbortError') return true
|
||||
if (msg.includes('timed out')) return true
|
||||
if (msg.includes('fetch failed') || msg.includes('econnrefused') || msg.includes('enotfound')) return true
|
||||
if (msg.includes('returned 5')) return true // HTTP 5xx
|
||||
// Unknown — treat as transient to preserve auto-mode fallback semantics
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true when we should use the adapter-based provider system.
|
||||
*
|
||||
* In auto mode: native/first-party/Codex paths take precedence.
|
||||
* → Only falls back to adapter if no native path is available.
|
||||
* In explicit adapter modes (tavily, ddg, custom, etc.): always true.
|
||||
* In native mode: never true.
|
||||
*/
|
||||
function shouldUseAdapterProvider(): boolean {
|
||||
const mode = getProviderMode()
|
||||
if (mode === 'native') return false
|
||||
if (mode !== 'auto') return true // explicit adapter mode (tavily, ddg, custom, etc.)
|
||||
|
||||
// Auto mode: native/first-party/Codex take precedence over adapter
|
||||
if (isCodexResponsesWebSearchEnabled()) return false
|
||||
const provider = getAPIProvider()
|
||||
if (provider === 'firstParty' || provider === 'vertex' || provider === 'foundry') {
|
||||
return false
|
||||
}
|
||||
// No native path available — fall back to adapter
|
||||
return getAvailableProviders().length > 0
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tool export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export const WebSearchTool = buildTool({
|
||||
name: WEB_SEARCH_TOOL_NAME,
|
||||
searchHint: 'search the web for current information',
|
||||
@@ -477,20 +534,21 @@ export const WebSearchTool = buildTool({
|
||||
return summary ? `Searching for ${summary}` : 'Searching the web'
|
||||
},
|
||||
isEnabled() {
|
||||
const mode = getProviderMode()
|
||||
|
||||
// Specific provider mode: enabled if any adapter is configured
|
||||
if (mode !== 'auto' && mode !== 'native') {
|
||||
return getAvailableProviders().length > 0
|
||||
if (shouldUseFirecrawl()) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Auto/native mode: check all paths
|
||||
if (getAvailableProviders().length > 0) return true
|
||||
if (isCodexResponsesWebSearchEnabled()) return true
|
||||
if (shouldUseDuckDuckGo()) {
|
||||
return true
|
||||
}
|
||||
|
||||
const provider = getAPIProvider()
|
||||
const model = getMainLoopModel()
|
||||
|
||||
if (isCodexResponsesWebSearchEnabled()) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Enable for firstParty
|
||||
if (provider === 'firstParty') {
|
||||
return true
|
||||
@@ -543,8 +601,11 @@ export const WebSearchTool = buildTool({
|
||||
}
|
||||
},
|
||||
async prompt() {
|
||||
// Strip "US only" when using non-native backends
|
||||
if (shouldUseAdapterProvider() || isCodexResponsesWebSearchEnabled()) {
|
||||
if (
|
||||
shouldUseDuckDuckGo() ||
|
||||
shouldUseFirecrawl() ||
|
||||
isCodexResponsesWebSearchEnabled()
|
||||
) {
|
||||
return getWebSearchPrompt().replace(
|
||||
/\n\s*-\s*Web search is only available in the US/,
|
||||
'',
|
||||
@@ -581,47 +642,20 @@ export const WebSearchTool = buildTool({
|
||||
return { result: true }
|
||||
},
|
||||
async call(input, context, _canUseTool, _parentMessage, onProgress) {
|
||||
// --- Adapter-based providers (custom, firecrawl, ddg) ---
|
||||
// runSearch handles fallback semantics based on WEB_SEARCH_PROVIDER mode:
|
||||
// - "auto": tries each provider, falls through on failure
|
||||
// - specific mode: runs one provider, throws on failure
|
||||
if (shouldUseAdapterProvider()) {
|
||||
const mode = getProviderMode()
|
||||
const isExplicitAdapter = mode !== 'auto'
|
||||
try {
|
||||
const providerOutput = await runSearch(
|
||||
{
|
||||
query: input.query,
|
||||
allowed_domains: input.allowed_domains,
|
||||
blocked_domains: input.blocked_domains,
|
||||
},
|
||||
context.abortController.signal,
|
||||
)
|
||||
// Explicit adapter: return even 0 hits (no silent native fallback)
|
||||
if (isExplicitAdapter || providerOutput.hits.length > 0) {
|
||||
return { data: formatProviderOutput(providerOutput, input.query) }
|
||||
}
|
||||
// Auto mode with 0 hits: fall through to native
|
||||
} catch (err) {
|
||||
// Explicit adapter: throw the real error (no silent native fallback)
|
||||
if (isExplicitAdapter) throw err
|
||||
// Auto mode: only fall through on transient errors (network, timeout, 5xx).
|
||||
// Config / guardrail errors (SSRF, HTTPS, bad URL, etc.) must surface.
|
||||
if (!isTransientError(err)) throw err
|
||||
console.error(
|
||||
`[web-search] Adapter failed, falling through to native: ${err}`,
|
||||
)
|
||||
}
|
||||
if (shouldUseFirecrawl()) {
|
||||
return { data: await runFirecrawlSearch(input) }
|
||||
}
|
||||
|
||||
if (shouldUseDuckDuckGo()) {
|
||||
return { data: await runDuckDuckGoSearch(input) }
|
||||
}
|
||||
|
||||
// --- Codex / OpenAI Responses path ---
|
||||
if (isCodexResponsesWebSearchEnabled()) {
|
||||
return {
|
||||
data: await runCodexWebSearch(input, context.abortController.signal),
|
||||
}
|
||||
}
|
||||
|
||||
// --- Native Anthropic path (firstParty / vertex / foundry) ---
|
||||
const startTime = performance.now()
|
||||
const { query } = input
|
||||
const userMessage = createUserMessage({
|
||||
@@ -681,6 +715,8 @@ export const WebSearchTool = buildTool({
|
||||
if (contentBlock && contentBlock.type === 'server_tool_use') {
|
||||
currentToolUseId = contentBlock.id
|
||||
currentToolUseJson = ''
|
||||
// Note: The ServerToolUseBlock doesn't contain input.query
|
||||
// The actual query comes through input_json_delta events
|
||||
continue
|
||||
}
|
||||
}
|
||||
@@ -697,10 +733,12 @@ export const WebSearchTool = buildTool({
|
||||
|
||||
// Try to extract query from partial JSON for progress updates
|
||||
try {
|
||||
// Look for a complete query field
|
||||
const queryMatch = currentToolUseJson.match(
|
||||
/"query"\s*:\s*"((?:[^"\\]|\\.)*)"/,
|
||||
)
|
||||
if (queryMatch && queryMatch[1]) {
|
||||
// The regex properly handles escaped characters
|
||||
const query = jsonParse('"' + queryMatch[1] + '"')
|
||||
|
||||
if (
|
||||
@@ -733,6 +771,7 @@ export const WebSearchTool = buildTool({
|
||||
) {
|
||||
const contentBlock = event.event.content_block
|
||||
if (contentBlock && contentBlock.type === 'web_search_tool_result') {
|
||||
// Get the actual query that was used for this search
|
||||
const toolUseId = contentBlock.tool_use_id
|
||||
const actualQuery = toolUseQueries.get(toolUseId) || query
|
||||
const content = contentBlock.content
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
/**
|
||||
* Bing Web Search API adapter.
|
||||
* GET https://api.bing.microsoft.com/v7.0/search?q=...
|
||||
* Auth: Ocp-Apim-Subscription-Key: <key>
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, type ProviderOutput } from './types.js'
|
||||
|
||||
export const bingProvider: SearchProvider = {
|
||||
name: 'bing',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.BING_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const url = new URL('https://api.bing.microsoft.com/v7.0/search')
|
||||
url.searchParams.set('q', input.query)
|
||||
url.searchParams.set('count', '10')
|
||||
|
||||
const res = await fetch(url.toString(), {
|
||||
headers: { 'Ocp-Apim-Subscription-Key': process.env.BING_API_KEY! },
|
||||
signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Bing search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
const hits = (data.webPages?.value ?? []).map((r: any) => ({
|
||||
title: r.name ?? '',
|
||||
url: r.url ?? '',
|
||||
description: r.snippet,
|
||||
source: r.displayUrl,
|
||||
}))
|
||||
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'bing',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,177 +0,0 @@
|
||||
import { describe, expect, test, beforeEach, afterEach } from 'bun:test'
|
||||
import { extractHits, customProvider } from './custom.js'
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// extractHits — flexible response parsing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Exercises the flexible response parser across every auto-detected shape
// (results/items/data arrays, bare arrays, nested maps) plus the explicit
// jsonPath override and the degenerate inputs.
describe('extractHits', () => {
  test('extracts from results array', () => {
    const data = { results: [{ title: 'T', url: 'https://ex.com' }] }
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
    expect(hits[0].title).toBe('T')
  })

  test('extracts from items array (Google-style)', () => {
    const data = { items: [{ title: 'T', link: 'https://ex.com' }] }
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
    // `link` is normalized to the `url` field.
    expect(hits[0].url).toBe('https://ex.com')
  })

  test('extracts from data array', () => {
    const data = { data: [{ title: 'T', url: 'https://ex.com' }] }
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
  })

  test('extracts from bare array', () => {
    const data = [{ title: 'T', url: 'https://ex.com' }]
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
  })

  test('extracts from nested map (e.g. web.results)', () => {
    const data = {
      web: {
        results: [{ title: 'T', url: 'https://ex.com' }],
      },
    }
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
  })

  test('extracts with explicit jsonPath', () => {
    const data = {
      response: {
        payload: [{ title: 'T', url: 'https://ex.com' }],
      },
    }
    // Second argument mirrors the WEB_JSON_PATH env override.
    const hits = extractHits(data, 'response.payload')
    expect(hits).toHaveLength(1)
  })

  test('returns empty for empty object', () => {
    expect(extractHits({})).toHaveLength(0)
  })

  test('returns empty for null', () => {
    expect(extractHits(null)).toHaveLength(0)
  })

  test('returns empty for no array keys', () => {
    expect(extractHits({ status: 'ok', count: 5 })).toHaveLength(0)
  })

  test('filters out hits with no title and no url', () => {
    const data = {
      results: [
        { title: 'Valid', url: 'https://ex.com' },
        { description: 'no title or url' },
      ],
    }
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
  })

  test('extracts from organic_results (SerpAPI-style)', () => {
    const data = {
      organic_results: [{ title: 'T', link: 'https://ex.com' }],
    }
    const hits = extractHits(data)
    expect(hits).toHaveLength(1)
  })
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// buildAuthHeadersForPreset — tested indirectly via env vars
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Indirect auth-header coverage via the provider's public surface.
describe('buildAuthHeadersForPreset auth header behavior', () => {
  const savedEnv: Record<string, string | undefined> = {}

  // Snapshot the auth-related env vars so each test may mutate them freely.
  beforeEach(() => {
    for (const k of ['WEB_KEY', 'WEB_AUTH_HEADER', 'WEB_AUTH_SCHEME']) {
      savedEnv[k] = process.env[k]
    }
  })

  afterEach(() => {
    for (const [k, v] of Object.entries(savedEnv)) {
      if (v === undefined) delete process.env[k]
      else process.env[k] = v
    }
  })

  // We test isConfigured() which depends on WEB_SEARCH_API/WEB_PROVIDER/WEB_URL_TEMPLATE
  // and the auth behavior through the public search() interface
  test('custom provider is configured when WEB_URL_TEMPLATE is set', () => {
    process.env.WEB_URL_TEMPLATE = 'https://example.com/search?q={query}'
    // NOTE(review): re-required so isConfigured() is evaluated with the env
    // var set — presumably relies on module-level laziness; confirm.
    const { customProvider } = require('./custom.js')
    expect(customProvider.isConfigured()).toBe(true)
    // NOTE(review): WEB_URL_TEMPLATE is not in the saved/restored list above,
    // so a pre-existing value would be clobbered by this delete — confirm.
    delete process.env.WEB_URL_TEMPLATE
  })

  test('custom provider is NOT configured when no env vars are set', () => {
    delete process.env.WEB_URL_TEMPLATE
    delete process.env.WEB_SEARCH_API
    delete process.env.WEB_PROVIDER
    expect(customProvider.isConfigured()).toBe(false)
  })
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// buildAuthHeadersForPreset — direct tests for WEB_AUTH_HEADER / WEB_AUTH_SCHEME
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Direct unit tests for the WEB_AUTH_HEADER / WEB_AUTH_SCHEME env overrides.
describe('buildAuthHeadersForPreset direct assertions', () => {
  const savedEnv: Record<string, string | undefined> = {}

  // Snapshot the auth-related env vars so each test may mutate them freely.
  beforeEach(() => {
    for (const k of ['WEB_KEY', 'WEB_AUTH_HEADER', 'WEB_AUTH_SCHEME']) {
      savedEnv[k] = process.env[k]
    }
  })

  afterEach(() => {
    for (const [k, v] of Object.entries(savedEnv)) {
      if (v === undefined) delete process.env[k]
      else process.env[k] = v
    }
  })

  // Empty string is distinct from unset: it disables the auth header entirely.
  test('WEB_AUTH_HEADER="" is an explicit opt-out — returns empty headers even with WEB_KEY set', () => {
    process.env.WEB_KEY = 'sk-test-123'
    process.env.WEB_AUTH_HEADER = ''
    const { buildAuthHeadersForPreset } = require('./custom.js')
    expect(buildAuthHeadersForPreset({ urlTemplate: '', queryParam: 'q', authHeader: 'Authorization' })).toEqual({})
  })

  test('WEB_AUTH_SCHEME="" strips the scheme prefix (bare key only)', () => {
    process.env.WEB_KEY = 'sk-test-123'
    process.env.WEB_AUTH_SCHEME = ''
    delete process.env.WEB_AUTH_HEADER
    const { buildAuthHeadersForPreset } = require('./custom.js')
    const result = buildAuthHeadersForPreset({ urlTemplate: '', queryParam: 'q', authHeader: 'X-Api-Key' })
    // scheme is '' so the header value should be just the key (trimmed)
    expect(result).toEqual({ 'X-Api-Key': 'sk-test-123' })
  })

  test('uses preset authHeader and authScheme when no env overrides', () => {
    process.env.WEB_KEY = 'tok-abc'
    delete process.env.WEB_AUTH_HEADER
    delete process.env.WEB_AUTH_SCHEME
    const { buildAuthHeadersForPreset } = require('./custom.js')
    const result = buildAuthHeadersForPreset({ urlTemplate: '', queryParam: 'q', authHeader: 'Authorization', authScheme: 'Bearer' })
    expect(result).toEqual({ 'Authorization': 'Bearer tok-abc' })
  })

  // No key → no auth headers at all.
  test('returns empty when WEB_KEY is not set', () => {
    delete process.env.WEB_KEY
    delete process.env.WEB_AUTH_HEADER
    delete process.env.WEB_AUTH_SCHEME
    const { buildAuthHeadersForPreset } = require('./custom.js')
    expect(buildAuthHeadersForPreset({ urlTemplate: '', queryParam: 'q', authHeader: 'Authorization' })).toEqual({})
  })
})
|
||||
@@ -1,483 +0,0 @@
|
||||
/**
|
||||
* Custom API provider adapter.
|
||||
*
|
||||
* Supports:
|
||||
* - Any HTTP endpoint via WEB_SEARCH_API
|
||||
* - Built-in presets via WEB_PROVIDER (searxng, google, brave, serpapi)
|
||||
* - GET or POST (WEB_METHOD)
|
||||
* - Query in path via WEB_URL_TEMPLATE with {query}
|
||||
* - Custom POST body via WEB_BODY_TEMPLATE with {query}
|
||||
* - Extra static params via WEB_PARAMS (JSON)
|
||||
* - Flexible response parsing (auto-detects common shapes)
|
||||
* - One automatic retry on failure
|
||||
*
|
||||
 * ## Security Guardrails
|
||||
*
|
||||
* This adapter creates a generic outbound HTTP client. The following
|
||||
* guardrails are enforced to reduce SSRF and data-exfiltration risk:
|
||||
*
|
||||
* 1. HTTPS-only by default (opt-out: WEB_CUSTOM_ALLOW_HTTP=true)
|
||||
* 2. Private / loopback / link-local IPs are blocked by default
|
||||
* (opt-out: WEB_CUSTOM_ALLOW_PRIVATE=true)
|
||||
* 3. Built-in allowlist of header names — arbitrary headers require
|
||||
* WEB_CUSTOM_ALLOW_ARBITRARY_HEADERS=true
|
||||
* 4. Max body size guard (300 KB for POST)
|
||||
* 5. Request timeout (default 120s, configurable via WEB_CUSTOM_TIMEOUT_SEC)
|
||||
* 6. Audit log on first custom search (one-time warning)
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import {
|
||||
applyDomainFilters,
|
||||
normalizeHit,
|
||||
safeHostname,
|
||||
type ProviderOutput,
|
||||
type SearchHit,
|
||||
} from './types.js'
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Built-in provider presets
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Declarative description of one built-in search endpoint preset. */
interface ProviderPreset {
  // Endpoint URL; may contain a {query} placeholder for path-based queries.
  urlTemplate: string
  // Name of the query-string parameter carrying the search terms.
  queryParam: string
  // HTTP method override — presumably GET when omitted; confirm against the
  // request builder.
  method?: string
  // Header that carries the API key (e.g. 'Authorization', 'X-Subscription-Token').
  authHeader?: string
  // Value prefix for the auth header (e.g. 'Bearer').
  authScheme?: string
  // Dot-separated path to the results array in the response JSON.
  jsonPath?: string
  // Provider-specific mapping from raw response JSON to normalized hits.
  responseAdapter?: (data: any) => SearchHit[]
}
|
||||
|
||||
// Built-in endpoint presets selectable via WEB_PROVIDER. Each entry knows
// its URL, query parameter, auth-header convention, and how to map the raw
// response JSON into normalized hits.
const BUILT_IN_PROVIDERS: Record<string, ProviderPreset> = {
  // Self-hosted SearXNG instance.
  searxng: {
    // NOTE: default uses https://localhost — users must override WEB_SEARCH_API
    // for their actual instance. The http:// default was intentionally removed
    // to comply with the HTTPS-only guardrail.
    urlTemplate: 'https://localhost:8080/search',
    queryParam: 'q',
    jsonPath: 'results',
    responseAdapter(data: any) {
      return (data.results ?? []).map((r: any) => ({
        title: r.title ?? r.url,
        url: r.url,
        description: r.content,
        source: r.engine ?? r.source,
      }))
    },
  },
  // Google Custom Search JSON API (customsearch/v1); results arrive in `items`.
  google: {
    urlTemplate: 'https://www.googleapis.com/customsearch/v1',
    queryParam: 'q',
    authHeader: 'Authorization',
    authScheme: 'Bearer',
    responseAdapter(data: any) {
      return (data.items ?? []).map((r: any) => ({
        title: r.title ?? '',
        url: r.link ?? '',
        description: r.snippet,
        source: r.displayLink,
      }))
    },
  },
  // Brave Search API — bare key in X-Subscription-Token (no scheme prefix).
  brave: {
    urlTemplate: 'https://api.search.brave.com/res/v1/web/search',
    queryParam: 'q',
    authHeader: 'X-Subscription-Token',
    responseAdapter(data: any) {
      return (data.web?.results ?? []).map((r: any) => ({
        title: r.title ?? '',
        url: r.url ?? '',
        description: r.description,
        source: safeHostname(r.url),
      }))
    },
  },
  // SerpAPI — results arrive in `organic_results`.
  serpapi: {
    urlTemplate: 'https://serpapi.com/search.json',
    queryParam: 'q',
    authHeader: 'Authorization',
    authScheme: 'Bearer',
    responseAdapter(data: any) {
      return (data.organic_results ?? []).map((r: any) => ({
        title: r.title ?? '',
        url: r.link ?? '',
        description: r.snippet,
        source: r.displayed_link,
      }))
    },
  },
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Security guardrails
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Maximum POST body size in bytes (300 KB default, configurable via WEB_CUSTOM_MAX_BODY_KB). */
|
||||
const DEFAULT_MAX_BODY_KB = 300
|
||||
|
||||
/** Default request timeout in seconds. */
|
||||
const DEFAULT_TIMEOUT_SECONDS = 120
|
||||
|
||||
/** Header names that are always allowed (case-insensitive). */
|
||||
const SAFE_HEADER_NAMES = new Set([
|
||||
'accept',
|
||||
'accept-encoding',
|
||||
'accept-language',
|
||||
'authorization',
|
||||
'cache-control',
|
||||
'content-type',
|
||||
'if-modified-since',
|
||||
'if-none-match',
|
||||
'ocp-apim-subscription-key',
|
||||
'user-agent',
|
||||
'x-api-key',
|
||||
'x-subscription-token',
|
||||
'x-tenant-id',
|
||||
])
|
||||
|
||||
/**
|
||||
* Private / reserved IP ranges that should not be reachable from a
|
||||
* search adapter (SSRF mitigation).
|
||||
*
|
||||
* This is a hostname-level check. DNS resolution to private IPs is
|
||||
* NOT blocked here (that would require resolving before fetch, which
|
||||
* Node fetch does not expose). This guard blocks obvious cases.
|
||||
*/
|
||||
const BLOCKED_HOSTNAME_PATTERNS = [
|
||||
/^localhost$/i,
|
||||
/^127\.\d+\.\d+\.\d+$/,
|
||||
/^10\.\d+\.\d+\.\d+$/,
|
||||
/^172\.(1[6-9]|2\d|3[01])\.\d+\.\d+$/,
|
||||
/^192\.168\.\d+\.\d+$/,
|
||||
/^0\.0\.0\.0$/,
|
||||
/^\[::1?\]$/i, // [::1] or [::]
|
||||
/^0x[0-9a-f]+$/i, // hex-encoded IPs
|
||||
]
|
||||
|
||||
function isPrivateHostname(hostname: string): boolean {
|
||||
return BLOCKED_HOSTNAME_PATTERNS.some(re => re.test(hostname))
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate the target URL against security guardrails.
|
||||
* Throws on violation.
|
||||
*/
|
||||
function validateUrl(urlString: string): void {
|
||||
let parsed: URL
|
||||
try {
|
||||
parsed = new URL(urlString)
|
||||
} catch {
|
||||
throw new Error(`Custom search URL is not a valid URL: ${urlString.slice(0, 100)}`)
|
||||
}
|
||||
|
||||
// 2. HTTPS-only (unless explicitly opted out)
|
||||
const allowHttp = process.env.WEB_CUSTOM_ALLOW_HTTP === 'true'
|
||||
if (!allowHttp && parsed.protocol !== 'https:') {
|
||||
throw new Error(
|
||||
`Custom search URL must use https:// (got ${parsed.protocol}). ` +
|
||||
`Set WEB_CUSTOM_ALLOW_HTTP=true to override (not recommended).`,
|
||||
)
|
||||
}
|
||||
|
||||
// 3. Private network check (unless explicitly opted out)
|
||||
const allowPrivate = process.env.WEB_CUSTOM_ALLOW_PRIVATE === 'true'
|
||||
if (!allowPrivate && isPrivateHostname(parsed.hostname)) {
|
||||
throw new Error(
|
||||
`Custom search URL targets a private/reserved address (${parsed.hostname}). ` +
|
||||
`This is blocked by default to prevent SSRF. ` +
|
||||
`Set WEB_CUSTOM_ALLOW_PRIVATE=true to override (e.g. for local SearXNG).`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that user-supplied headers are in the safe allowlist,
|
||||
* unless WEB_CUSTOM_ALLOW_ARBITRARY_HEADERS=true.
|
||||
*/
|
||||
function validateHeaderName(name: string): boolean {
|
||||
const allowArbitrary = process.env.WEB_CUSTOM_ALLOW_ARBITRARY_HEADERS === 'true'
|
||||
if (allowArbitrary) return true
|
||||
return SAFE_HEADER_NAMES.has(name.toLowerCase())
|
||||
}
|
||||
|
||||
/**
 * Log a one-time audit warning that custom outbound search is active.
 * Prevents silent data exfiltration: operators see exactly which host
 * outbound queries are being sent to. Subsequent calls are no-ops for
 * the lifetime of the process.
 */
let auditLogged = false
function auditLogCustomSearch(url: string): void {
  if (auditLogged) return
  auditLogged = true
  // safeHostname may return undefined for unparseable URLs — fall back
  // to the raw URL string in that case.
  console.warn(
    `[web-search] ⚠️ Custom search provider is active. ` +
    `Outbound requests go to: ${safeHostname(url) ?? url}. ` +
    `Ensure this endpoint is trusted. ` +
    `See: https://github.com/Gitlawb/openclaude/pull/512#security`,
  )
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Auth — preset overrides for built-in providers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export function buildAuthHeadersForPreset(preset?: ProviderPreset): Record<string, string> {
|
||||
const apiKey = process.env.WEB_KEY
|
||||
if (!apiKey) return {}
|
||||
|
||||
// WEB_AUTH_HEADER="" is an explicit opt-out of auth headers entirely
|
||||
const explicitHeader = process.env.WEB_AUTH_HEADER
|
||||
if (explicitHeader === '') return {}
|
||||
|
||||
const headerName = explicitHeader ?? preset?.authHeader ?? 'Authorization'
|
||||
const scheme = process.env.WEB_AUTH_SCHEME !== undefined
|
||||
? process.env.WEB_AUTH_SCHEME
|
||||
: (preset?.authScheme ?? 'Bearer')
|
||||
return { [headerName]: `${scheme} ${apiKey}`.trim() }
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Request construction
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function resolveConfig(): {
|
||||
urlTemplate: string
|
||||
queryParam: string
|
||||
method: string
|
||||
jsonPath?: string
|
||||
responseAdapter?: (data: any) => SearchHit[]
|
||||
preset?: ProviderPreset
|
||||
} {
|
||||
const providerName = process.env.WEB_PROVIDER
|
||||
const preset = providerName ? BUILT_IN_PROVIDERS[providerName] : undefined
|
||||
|
||||
return {
|
||||
urlTemplate: process.env.WEB_URL_TEMPLATE
|
||||
?? process.env.WEB_SEARCH_API
|
||||
?? preset?.urlTemplate
|
||||
?? '',
|
||||
queryParam: process.env.WEB_QUERY_PARAM ?? preset?.queryParam ?? 'q',
|
||||
method: process.env.WEB_METHOD ?? preset?.method ?? 'GET',
|
||||
jsonPath: process.env.WEB_JSON_PATH ?? preset?.jsonPath,
|
||||
responseAdapter: preset?.responseAdapter,
|
||||
preset,
|
||||
}
|
||||
}
|
||||
|
||||
function parseExtraParams(): Record<string, string> {
|
||||
const raw = process.env.WEB_PARAMS
|
||||
if (!raw) return {}
|
||||
try {
|
||||
const obj = JSON.parse(raw)
|
||||
if (obj && typeof obj === 'object' && !Array.isArray(obj)) return obj
|
||||
} catch { /* ignore */ }
|
||||
return {}
|
||||
}
|
||||
|
||||
/**
 * Build the outbound fetch request (URL string + RequestInit) for a
 * query, applying all security guardrails before anything can be sent.
 *
 * URL construction: "{query}" placeholders in the template are replaced
 * with the percent-encoded query; if the template has no placeholder,
 * the query is appended as the configured query parameter. Extra static
 * params from WEB_PARAMS are merged in first. The final URL is then
 * validated (HTTPS / SSRF checks) and the one-time audit warning fires.
 *
 * Throws when the template is empty/invalid, the URL fails validation,
 * a WEB_HEADERS header is outside the allowlist, or a POST body exceeds
 * the size cap.
 */
function buildRequest(query: string) {
  const config = resolveConfig()
  const method = config.method.toUpperCase()

  // --- URL ---
  const rawTemplate = config.urlTemplate
  // Percent-encode for URL placement (the POST body path below
  // deliberately uses the raw query — see NOTE there).
  const templateWithQuery = rawTemplate.replace(/\{query\}/g, encodeURIComponent(query))
  const url = new URL(templateWithQuery)

  // Merge extra static params (WEB_PARAMS) — template params with the
  // same name are overwritten by searchParams.set.
  for (const [k, v] of Object.entries(parseExtraParams())) {
    url.searchParams.set(k, v)
  }

  // If {query} wasn't in template, add as param (URL class handles encoding)
  if (!rawTemplate.includes('{query}')) {
    url.searchParams.set(config.queryParam, query)
  }

  const urlString = url.toString()

  // --- Security validation (must happen before returning anything fetchable) ---
  validateUrl(urlString)
  auditLogCustomSearch(urlString)

  // --- Headers ---
  // Auth headers first so WEB_HEADERS entries can override them.
  const headers: Record<string, string> = {
    ...buildAuthHeadersForPreset(config.preset),
  }

  // Merge WEB_HEADERS ("Name: value; Name2: value2") with allowlist enforcement
  const rawExtra = process.env.WEB_HEADERS
  if (rawExtra) {
    for (const pair of rawExtra.split(';')) {
      // Split on the FIRST colon only, so values may contain colons.
      const i = pair.indexOf(':')
      if (i > 0) {
        const k = pair.slice(0, i).trim()
        const v = pair.slice(i + 1).trim()
        if (k) {
          if (!validateHeaderName(k)) {
            throw new Error(
              `Header "${k}" is not in the safe allowlist. ` +
              `Allowed: ${[...SAFE_HEADER_NAMES].join(', ')}. ` +
              `Set WEB_CUSTOM_ALLOW_ARBITRARY_HEADERS=true to override.`,
            )
          }
          headers[k] = v
        }
      }
    }
  }

  const init: RequestInit = { method, headers }

  if (method === 'POST') {
    headers['Content-Type'] = 'application/json'
    const bodyTemplate = process.env.WEB_BODY_TEMPLATE
    if (bodyTemplate) {
      // NOTE(review): the raw query is substituted into the body
      // template with no JSON escaping — a query containing a double
      // quote produces invalid JSON when {query} sits inside a JSON
      // string literal. Confirm whether templates are expected to
      // handle escaping themselves, or escape here.
      const body = bodyTemplate.replace(/\{query\}/g, query)
      // NaN or 0 fall back to the default cap via `||`.
      const maxBodyBytes = (Number(process.env.WEB_CUSTOM_MAX_BODY_KB) || DEFAULT_MAX_BODY_KB) * 1024
      if (Buffer.byteLength(body) > maxBodyBytes) {
        throw new Error(
          `POST body exceeds ${maxBodyBytes} bytes. ` +
          `Increase WEB_CUSTOM_MAX_BODY_KB if needed.`,
        )
      }
      init.body = body
    } else {
      // No template: send a minimal {queryParam: query} JSON object.
      init.body = JSON.stringify({ [config.queryParam]: query })
    }
  }

  return { url: urlString, init, config }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Response parsing — flexible, handles many shapes
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function walkJsonPath(obj: any, path: string): any {
|
||||
let current = obj
|
||||
for (const seg of path.split('.')) {
|
||||
if (current == null || typeof current !== 'object') return undefined
|
||||
current = current[seg]
|
||||
}
|
||||
return current
|
||||
}
|
||||
|
||||
function extractFromNode(node: any): SearchHit[] {
|
||||
if (!node) return []
|
||||
if (Array.isArray(node)) return node.map(normalizeHit).filter(Boolean) as SearchHit[]
|
||||
if (typeof node === 'object') {
|
||||
const all: SearchHit[] = []
|
||||
for (const sub of Object.values(node)) all.push(...extractFromNode(sub))
|
||||
return all
|
||||
}
|
||||
// node is a primitive (string/number) — not a valid hit structure
|
||||
return []
|
||||
}
|
||||
|
||||
export function extractHits(raw: any, jsonPath?: string): SearchHit[] {
|
||||
if (jsonPath) return extractFromNode(walkJsonPath(raw, jsonPath))
|
||||
if (Array.isArray(raw)) return raw.map(normalizeHit).filter(Boolean) as SearchHit[]
|
||||
if (!raw || typeof raw !== 'object') return []
|
||||
|
||||
const arrayKeys = ['results', 'items', 'data', 'web', 'organic_results', 'hits', 'entries']
|
||||
for (const key of arrayKeys) {
|
||||
const val = raw[key]
|
||||
if (Array.isArray(val)) return val.map(normalizeHit).filter(Boolean) as SearchHit[]
|
||||
if (val && typeof val === 'object' && !Array.isArray(val)) {
|
||||
const all: SearchHit[] = []
|
||||
for (const sub of Object.values(val)) {
|
||||
if (Array.isArray(sub)) all.push(...(sub.map(normalizeHit).filter(Boolean) as SearchHit[]))
|
||||
}
|
||||
if (all.length > 0) return all
|
||||
}
|
||||
}
|
||||
|
||||
return []
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Fetch with one retry + timeout
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Fetch JSON with a per-attempt timeout and at most one retry.
 *
 * Retry policy: retries once on network errors and HTTP 5xx responses
 * (after a 500 ms pause); 4xx responses and timeouts are NOT retried.
 * A timeout that was not caused by the caller's signal is surfaced as
 * a descriptive Error instead of a bare AbortError.
 *
 * @param url     target URL (already validated by buildRequest)
 * @param init    RequestInit produced by buildRequest
 * @param signal  optional external cancellation signal
 * @returns parsed JSON body of the first successful response
 * @throws on timeout, non-OK status (after any retry), or network failure
 */
async function fetchWithRetry(url: string, init: RequestInit, signal?: AbortSignal): Promise<any> {
  // NaN or 0 fall back to the default via `||`.
  const timeoutSec = Number(process.env.WEB_CUSTOM_TIMEOUT_SEC) || DEFAULT_TIMEOUT_SECONDS
  const timeoutMs = timeoutSec * 1000
  let lastErr: Error | undefined
  // Set only when an HTTP response arrived; undefined means network-level failure.
  let lastStatus: number | undefined

  for (let attempt = 0; attempt < 2; attempt++) {
    // Create a timeout that races with the external signal
    const controller = new AbortController()
    const timer = setTimeout(() => controller.abort(), timeoutMs)

    // If the external signal is already aborted, forward it
    if (signal?.aborted) {
      controller.abort()
    } else {
      // NOTE(review): this listener is never removed, so a retry adds a
      // second listener on the same external signal. Harmless today
      // (abort() on an already-used controller is a no-op), but an
      // AbortSignal.any()-style cleanup would be tidier — confirm.
      signal?.addEventListener('abort', () => controller.abort(), { once: true })
    }

    try {
      const res = await fetch(url, { ...init, signal: controller.signal })
      clearTimeout(timer)

      if (!res.ok) {
        lastStatus = res.status
        throw new Error(`Custom search API returned ${res.status}: ${res.statusText}`)
      }
      return await res.json()
    } catch (err) {
      clearTimeout(timer)
      lastErr = err instanceof Error ? err : new Error(String(err))

      // AbortError from timeout — external aborts propagate unchanged.
      if (lastErr.name === 'AbortError' && !signal?.aborted) {
        throw new Error(`Custom search timed out after ${timeoutSec}s`)
      }

      // Retry on 5xx or network errors only
      if (attempt === 0) {
        if (lastStatus !== undefined && lastStatus >= 500) {
          await new Promise(r => setTimeout(r, 500))
          continue
        }
        if (lastStatus === undefined) {
          // Network error — retry
          await new Promise(r => setTimeout(r, 500))
          continue
        }
        // 4xx — don't retry
      }
      throw lastErr
    }
  }
  // Unreachable in practice (the second attempt always returns or
  // throws above); satisfies the compiler's return-path analysis.
  throw lastErr!
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Provider export
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Generic outbound ("custom") search provider driven entirely by
 * WEB_* environment variables. Deliberately excluded from the auto
 * fallback chain — only used when WEB_SEARCH_PROVIDER=custom is set.
 */
export const customProvider: SearchProvider = {
  name: 'custom',

  // Any of the three entry-point env vars marks the provider as usable.
  isConfigured() {
    return Boolean(process.env.WEB_SEARCH_API || process.env.WEB_PROVIDER || process.env.WEB_URL_TEMPLATE)
  },

  /**
   * Build the request, call the endpoint (with retry/timeout), then map
   * the response via the preset's adapter or the generic extractor.
   */
  async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
    const start = performance.now()
    const { url, init, config } = buildRequest(input.query)
    const raw = await fetchWithRetry(url, init, signal)

    // Preset adapters know their exact response shape; otherwise fall
    // back to the flexible extractor (jsonPath / well-known keys).
    const hits = config.responseAdapter
      ? config.responseAdapter(raw)
      : extractHits(raw, config.jsonPath)

    return {
      hits: applyDomainFilters(hits, input),
      providerName: 'custom',
      durationSeconds: (performance.now() - start) / 1000,
    }
  },
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import { describe, expect, test } from 'bun:test'
|
||||
|
||||
// Pins the duck-duck-scrape SafeSearchType enum values this package
// relies on (the DuckDuckGo provider passes SafeSearchType.STRICT).
// If upstream ever renumbers the enum, these tests fail loudly instead
// of silently changing the safe-search level sent to DuckDuckGo.
describe('DuckDuckGo SafeSearchType', () => {
  test('SafeSearchType.STRICT === 0 (matches previous raw value)', async () => {
    const { SafeSearchType } = await import('duck-duck-scrape')
    expect(SafeSearchType.STRICT).toBe(0)
  })

  test('SafeSearchType enum values are sane', async () => {
    const { SafeSearchType } = await import('duck-duck-scrape')
    expect(SafeSearchType.STRICT).toBe(0)
    expect(SafeSearchType.MODERATE).toBe(-1)
    expect(SafeSearchType.OFF).toBe(-2)
  })
})
|
||||
@@ -1,40 +0,0 @@
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, type ProviderOutput } from './types.js'
|
||||
|
||||
export const duckduckgoProvider: SearchProvider = {
|
||||
name: 'duckduckgo',
|
||||
|
||||
isConfigured() {
|
||||
// DDG is the default fallback — always available (duck-duck-scrape is a runtime dep)
|
||||
return true
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
let search: typeof import('duck-duck-scrape').search
|
||||
let SafeSearchType: typeof import('duck-duck-scrape').SafeSearchType
|
||||
try {
|
||||
;({ search, SafeSearchType } = await import('duck-duck-scrape'))
|
||||
} catch {
|
||||
throw new Error('duck-duck-scrape package not installed. Run: npm install duck-duck-scrape')
|
||||
}
|
||||
if (signal?.aborted) throw new DOMException('Aborted', 'AbortError')
|
||||
// TODO: duck-duck-scrape doesn't accept AbortSignal — can't cancel in-flight searches
|
||||
const response = await search(input.query, { safeSearch: SafeSearchType.STRICT })
|
||||
|
||||
const hits = applyDomainFilters(
|
||||
response.results.map(r => ({
|
||||
title: r.title || r.url,
|
||||
url: r.url,
|
||||
description: r.description ?? undefined,
|
||||
})),
|
||||
input,
|
||||
)
|
||||
|
||||
return {
|
||||
hits,
|
||||
providerName: 'duckduckgo',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
/**
|
||||
* Exa Search API adapter.
|
||||
* POST https://api.exa.ai/search
|
||||
* Auth: x-api-key: <key>
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, safeHostname, type ProviderOutput } from './types.js'
|
||||
|
||||
export const exaProvider: SearchProvider = {
|
||||
name: 'exa',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.EXA_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const body: Record<string, any> = {
|
||||
query: input.query,
|
||||
numResults: 10,
|
||||
type: 'auto',
|
||||
}
|
||||
|
||||
if (input.allowed_domains?.length) body.includeDomains = input.allowed_domains
|
||||
if (input.blocked_domains?.length) body.excludeDomains = input.blocked_domains
|
||||
|
||||
const res = await fetch('https://api.exa.ai/search', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'x-api-key': process.env.EXA_API_KEY!,
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Exa search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
const hits = (data.results ?? []).map((r: any) => ({
|
||||
title: r.title ?? '',
|
||||
url: r.url ?? '',
|
||||
description: r.snippet ?? r.text,
|
||||
source: r.url ? safeHostname(r.url) : undefined,
|
||||
}))
|
||||
|
||||
return {
|
||||
// Exa handles domain filtering server-side via includeDomains/excludeDomains
|
||||
hits,
|
||||
providerName: 'exa',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, type ProviderOutput } from './types.js'
|
||||
|
||||
export const firecrawlProvider: SearchProvider = {
|
||||
name: 'firecrawl',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.FIRECRAWL_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
if (signal?.aborted) throw new DOMException('Aborted', 'AbortError')
|
||||
// TODO: @mendable/firecrawl-js SDK doesn't accept AbortSignal — can't cancel in-flight searches
|
||||
const { FirecrawlClient } = await import('@mendable/firecrawl-js')
|
||||
const app = new FirecrawlClient({ apiKey: process.env.FIRECRAWL_API_KEY! })
|
||||
|
||||
let query = input.query
|
||||
if (input.blocked_domains?.length) {
|
||||
const exclusions = input.blocked_domains.map(d => `-site:${d}`).join(' ')
|
||||
query = `${query} ${exclusions}`
|
||||
}
|
||||
|
||||
const data = await app.search(query, { limit: 10 })
|
||||
|
||||
const hits = applyDomainFilters(
|
||||
(data.web ?? []).map((r: { url: string; title?: string; description?: string }) => ({
|
||||
title: r.title ?? r.url,
|
||||
url: r.url,
|
||||
description: r.description,
|
||||
})),
|
||||
input,
|
||||
)
|
||||
|
||||
return {
|
||||
hits,
|
||||
providerName: 'firecrawl',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,160 +0,0 @@
|
||||
import { describe, expect, test, beforeEach, afterEach } from 'bun:test'
|
||||
import { getProviderMode, getProviderChain, getAvailableProviders } from './index.js'
|
||||
import type { ProviderMode } from './index.js'
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// getProviderMode
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('getProviderMode', () => {
  // Snapshot taken once at module load; afterEach restores it so every
  // test starts from the same WEB_SEARCH_PROVIDER state.
  const savedEnv = process.env.WEB_SEARCH_PROVIDER

  afterEach(() => {
    if (savedEnv === undefined) {
      delete process.env.WEB_SEARCH_PROVIDER
    } else {
      process.env.WEB_SEARCH_PROVIDER = savedEnv
    }
  })

  test('returns auto by default', () => {
    delete process.env.WEB_SEARCH_PROVIDER
    expect(getProviderMode()).toBe('auto')
  })

  test('returns configured mode', () => {
    process.env.WEB_SEARCH_PROVIDER = 'tavily'
    expect(getProviderMode()).toBe('tavily')
  })

  test('returns ddg mode', () => {
    process.env.WEB_SEARCH_PROVIDER = 'ddg'
    expect(getProviderMode()).toBe('ddg')
  })

  test('returns native mode', () => {
    process.env.WEB_SEARCH_PROVIDER = 'native'
    expect(getProviderMode()).toBe('native')
  })

  // Unknown values must degrade to 'auto' rather than throw.
  test('falls back to auto for invalid mode', () => {
    process.env.WEB_SEARCH_PROVIDER = 'nonexistent_provider'
    expect(getProviderMode()).toBe('auto')
  })
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// getProviderChain
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('getProviderChain', () => {
  test('auto mode returns at least one configured provider', () => {
    // DDG is always configured (no API key needed)
    const chain = getProviderChain('auto')
    expect(chain.length).toBeGreaterThan(0)
    expect(chain.some(p => p.name === 'duckduckgo')).toBe(true)
  })

  // Security invariant: the generic outbound provider must never be
  // picked up implicitly — only via WEB_SEARCH_PROVIDER=custom.
  test('auto mode does NOT include custom provider', () => {
    const chain = getProviderChain('auto')
    expect(chain.some(p => p.name === 'custom')).toBe(false)
  })

  test('custom mode explicitly returns custom provider', () => {
    const chain = getProviderChain('custom' as ProviderMode)
    expect(chain).toHaveLength(1)
    expect(chain[0].name).toBe('custom')
  })

  test('specific mode returns exactly one provider', () => {
    const chain = getProviderChain('tavily' as ProviderMode)
    expect(chain).toHaveLength(1)
    expect(chain[0].name).toBe('tavily')
  })

  test('ddg mode returns duckduckgo provider', () => {
    const chain = getProviderChain('ddg' as ProviderMode)
    expect(chain).toHaveLength(1)
    expect(chain[0].name).toBe('duckduckgo')
  })

  // 'native' is handled by the model provider, not this registry.
  test('native mode returns empty chain', () => {
    expect(getProviderChain('native')).toHaveLength(0)
  })

  test('unknown mode returns empty chain', () => {
    expect(getProviderChain('nonexistent' as ProviderMode)).toHaveLength(0)
  })
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// AbortError stops the chain
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('runSearch', () => {
  // Cancellation must reject immediately rather than falling through
  // the provider chain.
  test('AbortError stops the chain immediately in auto mode', async () => {
    // Use AbortController to cancel
    const controller = new AbortController()
    controller.abort() // cancel immediately

    await expect(
      // Dynamic import to avoid circular issues
      import('./index.js').then(m =>
        m.runSearch({ query: 'test' }, controller.signal),
      ),
    ).rejects.toThrow()
  })

  // Env mutation is wrapped in try/finally so a failing assertion
  // cannot leak TAVILY_API_KEY / WEB_SEARCH_PROVIDER into other tests.
  test('explicit mode fails fast when provider is not configured', async () => {
    // Save and clear tavily key
    const saved = process.env.TAVILY_API_KEY
    delete process.env.TAVILY_API_KEY
    const savedProvider = process.env.WEB_SEARCH_PROVIDER
    process.env.WEB_SEARCH_PROVIDER = 'tavily'

    try {
      const { runSearch } = await import('./index.js')
      await expect(runSearch({ query: 'test' })).rejects.toThrow(
        /not configured/i,
      )
    } finally {
      if (saved !== undefined) process.env.TAVILY_API_KEY = saved
      else delete process.env.TAVILY_API_KEY
      if (savedProvider !== undefined) process.env.WEB_SEARCH_PROVIDER = savedProvider
      else delete process.env.WEB_SEARCH_PROVIDER
    }
  })
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// getAvailableProviders
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('getAvailableProviders', () => {
|
||||
test('always includes duckduckgo (no API key required)', () => {
|
||||
const providers = getAvailableProviders()
|
||||
expect(providers.some(p => p.name === 'duckduckgo')).toBe(true)
|
||||
})
|
||||
|
||||
test('does NOT include custom in available providers (auto chain)', () => {
|
||||
const providers = getAvailableProviders()
|
||||
expect(providers.some(p => p.name === 'custom')).toBe(false)
|
||||
})
|
||||
|
||||
test('includes providers when API keys are set', () => {
|
||||
const saved = process.env.TAVILY_API_KEY
|
||||
process.env.TAVILY_API_KEY = 'test-key'
|
||||
const providers = getAvailableProviders()
|
||||
expect(providers.some(p => p.name === 'tavily')).toBe(true)
|
||||
if (saved === undefined) delete process.env.TAVILY_API_KEY
|
||||
else process.env.TAVILY_API_KEY = saved
|
||||
})
|
||||
|
||||
test('excludes providers when API keys are missing', () => {
|
||||
const saved = process.env.TAVILY_API_KEY
|
||||
delete process.env.TAVILY_API_KEY
|
||||
const providers = getAvailableProviders()
|
||||
expect(providers.some(p => p.name === 'tavily')).toBe(false)
|
||||
if (saved !== undefined) process.env.TAVILY_API_KEY = saved
|
||||
})
|
||||
})
|
||||
@@ -1,192 +0,0 @@
|
||||
/**
|
||||
* Provider registry and selection logic.
|
||||
*
|
||||
* WEB_SEARCH_PROVIDER controls which backend to use:
|
||||
*
|
||||
* "auto" (default) — try providers in priority order, fall through on failure
|
||||
* "custom" — use WEB_SEARCH_API / WEB_PROVIDER preset only (fail loudly)
|
||||
* "firecrawl" — use Firecrawl only (fail loudly)
|
||||
* "tavily" — use Tavily only (fail loudly)
|
||||
* "exa" — use Exa only (fail loudly)
|
||||
* "you" — use You.com only (fail loudly)
|
||||
* "jina" — use Jina only (fail loudly)
|
||||
* "bing" — use Bing only (fail loudly)
|
||||
* "mojeek" — use Mojeek only (fail loudly)
|
||||
* "linkup" — use Linkup only (fail loudly)
|
||||
* "ddg" — use DuckDuckGo only (fail loudly)
|
||||
* "native" — use Anthropic native / Codex only (fail loudly)
|
||||
*
|
||||
* "auto" mode is the only mode that silently falls through to the next provider.
|
||||
* All other modes throw on failure — no silent backend switching.
|
||||
*
|
||||
* NOTE: "custom" is NOT included in the "auto" fallback chain.
|
||||
* It is only used when WEB_SEARCH_PROVIDER=custom is explicitly selected.
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import type { ProviderOutput } from './types.js'
|
||||
|
||||
import { customProvider } from './custom.js'
|
||||
import { duckduckgoProvider } from './duckduckgo.js'
|
||||
import { firecrawlProvider } from './firecrawl.js'
|
||||
import { tavilyProvider } from './tavily.js'
|
||||
import { exaProvider } from './exa.js'
|
||||
import { youProvider } from './you.js'
|
||||
import { jinaProvider } from './jina.js'
|
||||
import { bingProvider } from './bing.js'
|
||||
import { mojeekProvider } from './mojeek.js'
|
||||
import { linkupProvider } from './linkup.js'
|
||||
|
||||
export { type SearchInput, type SearchProvider, type ProviderOutput, type SearchHit } from './types.js'
|
||||
export { applyDomainFilters, safeHostname, hostMatchesDomain } from './types.js'
|
||||
export { extractHits } from './custom.js'
|
||||
|
||||
// ---------------------------------------------------------------------------
// All registered providers — order matters for auto mode
// ---------------------------------------------------------------------------
// Priority: firecrawl → tavily → exa → you → jina → bing → mojeek → linkup → ddg
// DDG is last because it's free but rate-limited.
// NOTE: customProvider is intentionally excluded from the auto chain.
// It is only available when WEB_SEARCH_PROVIDER=custom is explicitly set.
// This prevents the generic outbound provider from silently becoming the default backend.

const ALL_PROVIDERS: SearchProvider[] = [
  firecrawlProvider,
  tavilyProvider,
  exaProvider,
  youProvider,
  jinaProvider,
  bingProvider,
  mojeekProvider,
  linkupProvider,
  duckduckgoProvider, // keep last — free fallback, but rate-limited
]
|
||||
|
||||
export function getAvailableProviders(): SearchProvider[] {
|
||||
return ALL_PROVIDERS.filter(p => p.isConfigured())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Selection
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Every selectable WEB_SEARCH_PROVIDER value. 'auto' and 'native' are
 * pseudo-modes; the remaining entries map 1:1 to provider objects.
 */
export type ProviderMode =
  | 'auto'
  | 'custom'
  | 'firecrawl'
  | 'ddg'
  | 'tavily'
  | 'exa'
  | 'you'
  | 'jina'
  | 'bing'
  | 'mojeek'
  | 'linkup'
  | 'native'

/** Mode string → provider implementation ('auto' and 'native' excluded). */
const PROVIDER_BY_NAME: Record<string, SearchProvider> = {
  custom: customProvider,
  firecrawl: firecrawlProvider,
  ddg: duckduckgoProvider,
  tavily: tavilyProvider,
  exa: exaProvider,
  you: youProvider,
  jina: jinaProvider,
  bing: bingProvider,
  mojeek: mojeekProvider,
  linkup: linkupProvider,
}

/** Accepted WEB_SEARCH_PROVIDER values: all provider names plus the two pseudo-modes. */
const VALID_MODES = new Set<string>(Object.keys(PROVIDER_BY_NAME).concat(['auto', 'native']))
|
||||
|
||||
export function getProviderMode(): ProviderMode {
|
||||
const raw = process.env.WEB_SEARCH_PROVIDER ?? 'auto'
|
||||
if (VALID_MODES.has(raw)) return raw as ProviderMode
|
||||
return 'auto'
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of providers to try, in order.
|
||||
* - Specific mode → single provider
|
||||
* - Auto → priority order (ALL_PROVIDERS, filtered by isConfigured)
|
||||
*/
|
||||
export function getProviderChain(mode: ProviderMode): SearchProvider[] {
|
||||
if (mode === 'auto') {
|
||||
return ALL_PROVIDERS.filter(p => p.isConfigured())
|
||||
}
|
||||
if (mode === 'native') {
|
||||
return []
|
||||
}
|
||||
const provider = PROVIDER_BY_NAME[mode]
|
||||
if (!provider) return []
|
||||
return [provider]
|
||||
}
|
||||
|
||||
/**
 * Run a search using the configured provider chain.
 *
 * - Auto mode: tries each provider in order, falls through on failure.
 *   If ALL providers fail, throws the last error (or a combined summary).
 * - Specific mode: runs the single provider, throws immediately on failure.
 * - Cancellation (AbortError / aborted signal) always stops the chain.
 *
 * @param input   query plus optional allowed/blocked domain filters
 * @param signal  optional cancellation signal, forwarded to providers
 * @returns the first successful provider's output
 * @throws when no provider is available for the mode, the explicitly
 *         selected provider is unconfigured, or every attempt fails
 */
export async function runSearch(
  input: SearchInput,
  signal?: AbortSignal,
): Promise<ProviderOutput> {
  const mode = getProviderMode()
  const chain = getProviderChain(mode)

  if (chain.length === 0) {
    throw new Error(
      mode === 'native'
        ? 'Native web search requires firstParty/vertex/foundry provider.'
        : `No search providers available for mode "${mode}". Check your env vars.`,
    )
  }

  const errors: Error[] = []

  // Explicit provider mode: fail fast if the provider isn't configured
  if (mode !== 'auto' && mode !== 'native') {
    const provider = chain[0]
    if (provider && !provider.isConfigured()) {
      // NOTE(review): the `${mode.toUpperCase()}_API_KEY` hint is wrong
      // for mode "custom" (it is configured via WEB_* vars, not
      // CUSTOM_API_KEY) — confirm whether this message should
      // special-case custom.
      throw new Error(
        `Search provider "${mode}" is not configured. ` +
        `Set the required environment variable (e.g. ${mode.toUpperCase()}_API_KEY) ` +
        `or switch to WEB_SEARCH_PROVIDER=auto.`,
      )
    }
  }

  for (const provider of chain) {
    try {
      return await provider.search(input, signal)
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err))

      // Cancellation must stop immediately — don't fall through to other providers
      if (error.name === 'AbortError' || signal?.aborted) {
        throw error
      }

      errors.push(error)

      // Specific mode: fail loudly, no fallback
      if (mode !== 'auto') {
        throw error
      }

      // Auto mode: log and try next
      console.error(`[web-search] ${provider.name} failed: ${error.message}`)
    }
  }

  // All providers failed in auto mode — surface every failure.
  const lastErr = errors[errors.length - 1]
  if (!lastErr) throw new Error('All search providers failed with no error details.')
  if (errors.length === 1) throw lastErr
  throw new Error(
    `All ${errors.length} search providers failed:\n` +
    errors.map((e, i) => `  ${i + 1}. ${e.message}`).join('\n'),
  )
}
|
||||
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* Jina Search API adapter.
|
||||
* GET https://s.jina.ai/?q=...
|
||||
* Auth: Authorization: Bearer <key>
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, safeHostname, type ProviderOutput } from './types.js'
|
||||
|
||||
export const jinaProvider: SearchProvider = {
|
||||
name: 'jina',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.JINA_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const url = new URL('https://s.jina.ai/')
|
||||
url.searchParams.set('q', input.query)
|
||||
|
||||
const res = await fetch(url.toString(), {
|
||||
headers: {
|
||||
Authorization: `Bearer ${process.env.JINA_API_KEY}`,
|
||||
Accept: 'application/json',
|
||||
},
|
||||
signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Jina search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
const hits = (data.data ?? data.results ?? []).map((r: any) => ({
|
||||
title: r.title ?? '',
|
||||
url: r.url ?? '',
|
||||
description: r.description ?? r.snippet ?? r.content,
|
||||
source: r.url ? safeHostname(r.url) : undefined,
|
||||
}))
|
||||
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'jina',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
/**
|
||||
* Linkup Search API adapter.
|
||||
* POST https://api.linkup.so/v1/search
|
||||
* Auth: Authorization: Bearer <key>
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, safeHostname, type ProviderOutput } from './types.js'
|
||||
|
||||
export const linkupProvider: SearchProvider = {
|
||||
name: 'linkup',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.LINKUP_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const res = await fetch('https://api.linkup.so/v1/search', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${process.env.LINKUP_API_KEY}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
q: input.query,
|
||||
search_type: 'standard',
|
||||
}),
|
||||
signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Linkup search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
const hits = (data.results ?? []).map((r: any) => ({
|
||||
title: r.name ?? r.title ?? '',
|
||||
url: r.url ?? '',
|
||||
description: r.snippet ?? r.description ?? r.content,
|
||||
source: r.url ? safeHostname(r.url) : undefined,
|
||||
}))
|
||||
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'linkup',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
/**
|
||||
* Mojeek Search API adapter.
|
||||
* GET https://www.mojeek.com/search?q=...&fmt=json
|
||||
* Auth: optional Bearer for API tier
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, safeHostname, type ProviderOutput } from './types.js'
|
||||
|
||||
export const mojeekProvider: SearchProvider = {
|
||||
name: 'mojeek',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.MOJEEK_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const url = new URL('https://www.mojeek.com/search')
|
||||
url.searchParams.set('q', input.query)
|
||||
url.searchParams.set('fmt', 'json')
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
'Accept': 'application/json',
|
||||
}
|
||||
if (process.env.MOJEEK_API_KEY) {
|
||||
headers['Authorization'] = `Bearer ${process.env.MOJEEK_API_KEY}`
|
||||
}
|
||||
|
||||
const res = await fetch(url.toString(), { headers, signal })
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Mojeek search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
const rawResults = data?.response?.results ?? data?.results ?? []
|
||||
|
||||
const hits = rawResults.map((r: any) => ({
|
||||
title: r.title ?? '',
|
||||
url: r.url ?? '',
|
||||
description: r.snippet ?? r.desc,
|
||||
source: r.url ? safeHostname(r.url) : undefined,
|
||||
}))
|
||||
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'mojeek',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
/**
|
||||
* Tavily Search API adapter.
|
||||
* POST https://api.tavily.com/search
|
||||
* Auth: Authorization: Bearer tvly-xxxx
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, safeHostname, type ProviderOutput } from './types.js'
|
||||
|
||||
export const tavilyProvider: SearchProvider = {
|
||||
name: 'tavily',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.TAVILY_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const res = await fetch('https://api.tavily.com/search', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${process.env.TAVILY_API_KEY}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
query: input.query,
|
||||
max_results: 10,
|
||||
include_answer: false,
|
||||
}),
|
||||
signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Tavily search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
|
||||
const hits = (data.results ?? []).map((r: any) => ({
|
||||
title: r.title ?? '',
|
||||
url: r.url ?? '',
|
||||
description: r.content ?? r.snippet,
|
||||
source: r.url ? safeHostname(r.url) : undefined,
|
||||
}))
|
||||
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'tavily',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,229 +0,0 @@
|
||||
import { describe, expect, test } from 'bun:test'
|
||||
import { applyDomainFilters, hostMatchesDomain, normalizeHit, safeHostname } from './types.js'
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// safeHostname
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('safeHostname', () => {
|
||||
test('returns hostname for valid URL', () => {
|
||||
expect(safeHostname('https://example.com/path')).toBe('example.com')
|
||||
})
|
||||
|
||||
test('returns hostname with subdomain', () => {
|
||||
expect(safeHostname('https://api.example.com/v1')).toBe('api.example.com')
|
||||
})
|
||||
|
||||
test('returns undefined for invalid URL', () => {
|
||||
expect(safeHostname('not-a-url')).toBeUndefined()
|
||||
})
|
||||
|
||||
test('returns undefined for empty string', () => {
|
||||
expect(safeHostname('')).toBeUndefined()
|
||||
})
|
||||
|
||||
test('returns undefined for undefined', () => {
|
||||
expect(safeHostname(undefined)).toBeUndefined()
|
||||
})
|
||||
|
||||
test('returns undefined for relative path', () => {
|
||||
expect(safeHostname('/path/only')).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// hostMatchesDomain
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('hostMatchesDomain', () => {
|
||||
test('exact match', () => {
|
||||
expect(hostMatchesDomain('example.com', 'example.com')).toBe(true)
|
||||
})
|
||||
|
||||
test('subdomain match', () => {
|
||||
expect(hostMatchesDomain('sub.example.com', 'example.com')).toBe(true)
|
||||
expect(hostMatchesDomain('deep.sub.example.com', 'example.com')).toBe(true)
|
||||
})
|
||||
|
||||
test('suffix collision is blocked (badexample.com ≠ example.com)', () => {
|
||||
expect(hostMatchesDomain('badexample.com', 'example.com')).toBe(false)
|
||||
})
|
||||
|
||||
test('different domain', () => {
|
||||
expect(hostMatchesDomain('other.com', 'example.com')).toBe(false)
|
||||
})
|
||||
|
||||
test('partial word collision is blocked', () => {
|
||||
expect(hostMatchesDomain('notexample.com', 'example.com')).toBe(false)
|
||||
expect(hostMatchesDomain('xample.com', 'example.com')).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// normalizeHit
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('normalizeHit', () => {
|
||||
test('extracts standard fields', () => {
|
||||
const hit = normalizeHit({ title: 'Test', url: 'https://example.com' })
|
||||
expect(hit).toEqual({ title: 'Test', url: 'https://example.com' })
|
||||
})
|
||||
|
||||
test('extracts alternative field names (headline, link, snippet)', () => {
|
||||
const hit = normalizeHit({
|
||||
headline: 'Test',
|
||||
link: 'https://ex.com',
|
||||
snippet: 'desc',
|
||||
})
|
||||
expect(hit?.title).toBe('Test')
|
||||
expect(hit?.url).toBe('https://ex.com')
|
||||
expect(hit?.description).toBe('desc')
|
||||
})
|
||||
|
||||
test('extracts source from various keys', () => {
|
||||
const hit = normalizeHit({
|
||||
title: 'T',
|
||||
url: 'https://example.com',
|
||||
displayLink: 'example.com',
|
||||
})
|
||||
expect(hit?.source).toBe('example.com')
|
||||
})
|
||||
|
||||
test('returns null for empty object', () => {
|
||||
expect(normalizeHit({})).toBeNull()
|
||||
})
|
||||
|
||||
test('returns null for null input', () => {
|
||||
expect(normalizeHit(null)).toBeNull()
|
||||
})
|
||||
|
||||
test('returns null for non-object input', () => {
|
||||
expect(normalizeHit('string')).toBeNull()
|
||||
expect(normalizeHit(42)).toBeNull()
|
||||
})
|
||||
|
||||
test('uses url as title when title missing', () => {
|
||||
const hit = normalizeHit({ url: 'https://example.com' })
|
||||
expect(hit?.title).toBe('https://example.com')
|
||||
expect(hit?.url).toBe('https://example.com')
|
||||
})
|
||||
})
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// applyDomainFilters
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe('applyDomainFilters', () => {
|
||||
test('filters blocked domains', () => {
|
||||
const hits = [
|
||||
{ title: 'good', url: 'https://example.com/page' },
|
||||
{ title: 'bad', url: 'https://badsite.com/page' },
|
||||
]
|
||||
const result = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
blocked_domains: ['badsite.com'],
|
||||
})
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].url).toBe('https://example.com/page')
|
||||
})
|
||||
|
||||
test('keeps malformed URLs when filtering blocked (security)', () => {
|
||||
const hits = [{ title: 'weird', url: 'not-a-valid-url' }]
|
||||
const result = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
blocked_domains: ['example.com'],
|
||||
})
|
||||
// Can't confirm it's blocked → keep it
|
||||
expect(result).toHaveLength(1)
|
||||
})
|
||||
|
||||
test('filters allowed domains only', () => {
|
||||
const hits = [
|
||||
{ title: 'good', url: 'https://example.com/page' },
|
||||
{ title: 'bad', url: 'https://other.com/page' },
|
||||
]
|
||||
const result = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
allowed_domains: ['example.com'],
|
||||
})
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].url).toBe('https://example.com/page')
|
||||
})
|
||||
|
||||
test('drops malformed URLs when filtering allowed (security)', () => {
|
||||
const hits = [{ title: 'weird', url: 'not-a-valid-url' }]
|
||||
const result = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
allowed_domains: ['example.com'],
|
||||
})
|
||||
// Can't confirm it's allowed → drop it
|
||||
expect(result).toHaveLength(0)
|
||||
})
|
||||
|
||||
test('handles subdomain matching', () => {
|
||||
const hits = [{ title: 't', url: 'https://sub.example.com/page' }]
|
||||
const blocked = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
blocked_domains: ['example.com'],
|
||||
})
|
||||
expect(blocked).toHaveLength(0)
|
||||
|
||||
const allowed = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
allowed_domains: ['example.com'],
|
||||
})
|
||||
expect(allowed).toHaveLength(1)
|
||||
})
|
||||
|
||||
test('returns all hits when no domain filters', () => {
|
||||
const hits = [
|
||||
{ title: 'a', url: 'https://a.com' },
|
||||
{ title: 'b', url: 'https://b.com' },
|
||||
]
|
||||
const result = applyDomainFilters(hits, { query: 'test' })
|
||||
expect(result).toHaveLength(2)
|
||||
})
|
||||
|
||||
test('combines blocked and allowed filters', () => {
|
||||
const hits = [
|
||||
{ title: 'good', url: 'https://example.com/page' },
|
||||
{ title: 'blocked', url: 'https://badsite.com/page' },
|
||||
{ title: 'other', url: 'https://other.com/page' },
|
||||
]
|
||||
const result = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
blocked_domains: ['badsite.com'],
|
||||
allowed_domains: ['example.com'],
|
||||
})
|
||||
expect(result).toHaveLength(1)
|
||||
expect(result[0].url).toBe('https://example.com/page')
|
||||
})
|
||||
|
||||
test('does NOT match suffix collision (badexample.com blocked does not affect example.com)', () => {
|
||||
const hits = [
|
||||
{ title: 'good', url: 'https://example.com/page' },
|
||||
{ title: 'collision', url: 'https://badexample.com/page' },
|
||||
]
|
||||
const blocked = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
blocked_domains: ['example.com'],
|
||||
})
|
||||
// Only exact/subdomain of example.com is blocked, not badexample.com
|
||||
expect(blocked).toHaveLength(1)
|
||||
expect(blocked[0].url).toBe('https://badexample.com/page')
|
||||
})
|
||||
|
||||
test('allowed_domains does NOT match suffix collision', () => {
|
||||
const hits = [
|
||||
{ title: 'good', url: 'https://example.com/page' },
|
||||
{ title: 'collision', url: 'https://badexample.com/page' },
|
||||
]
|
||||
const allowed = applyDomainFilters(hits, {
|
||||
query: 'test',
|
||||
allowed_domains: ['example.com'],
|
||||
})
|
||||
// Only exact/subdomain of example.com is allowed
|
||||
expect(allowed).toHaveLength(1)
|
||||
expect(allowed[0].url).toBe('https://example.com/page')
|
||||
})
|
||||
})
|
||||
@@ -1,119 +0,0 @@
|
||||
/**
|
||||
* Search provider adapter types.
|
||||
*
|
||||
* Every backend implements SearchProvider. WebSearchTool.selectProvider()
|
||||
* picks the right one; shared logic (domain filtering, snippet formatting,
|
||||
* result-block construction) lives in the tool layer, not in adapters.
|
||||
*/
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Shared types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export interface SearchHit {
|
||||
title: string
|
||||
url: string
|
||||
description?: string
|
||||
source?: string
|
||||
}
|
||||
|
||||
export interface SearchInput {
|
||||
query: string
|
||||
allowed_domains?: string[]
|
||||
blocked_domains?: string[]
|
||||
}
|
||||
|
||||
export interface ProviderOutput {
|
||||
hits: SearchHit[]
|
||||
/** Provider name for logging / tool_use_id */
|
||||
providerName: string
|
||||
/** Duration of the provider call in seconds */
|
||||
durationSeconds: number
|
||||
}
|
||||
|
||||
export interface SearchProvider {
|
||||
/** Human-readable label (used in tool_use_id, logs) */
|
||||
readonly name: string
|
||||
/** Returns true when the env vars / config needed for this provider are present */
|
||||
isConfigured(): boolean
|
||||
/** Perform the search. Throw on unrecoverable errors. */
|
||||
search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput>
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Flexible response parsing helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const TITLE_KEYS = ['title', 'headline', 'name', 'heading'] as const
|
||||
const URL_KEYS = ['url', 'link', 'href', 'uri', 'permalink'] as const
|
||||
const DESC_KEYS = [
|
||||
'description', 'snippet', 'content', 'preview', 'summary', 'text', 'body',
|
||||
] as const
|
||||
const SOURCE_KEYS = [
|
||||
'source', 'domain', 'displayLink', 'displayed_link', 'engine',
|
||||
] as const
|
||||
|
||||
function firstMatch(obj: any, keys: readonly string[]): string | undefined {
|
||||
for (const k of keys) {
|
||||
if (typeof obj?.[k] === 'string' && obj[k]) return obj[k]
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
/** Extract a SearchHit from any object shape using well-known field aliases. */
|
||||
export function normalizeHit(raw: any): SearchHit | null {
|
||||
if (!raw || typeof raw !== 'object') return null
|
||||
const title = firstMatch(raw, TITLE_KEYS)
|
||||
const url = firstMatch(raw, URL_KEYS)
|
||||
if (!title && !url) return null
|
||||
const hit: SearchHit = { title: title ?? url!, url: url ?? title! }
|
||||
const desc = firstMatch(raw, DESC_KEYS)
|
||||
const source = firstMatch(raw, SOURCE_KEYS)
|
||||
if (desc) hit.description = desc
|
||||
if (source) hit.source = source
|
||||
return hit
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Domain filtering — shared across ALL providers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Safely extract hostname from a URL string. Returns undefined on parse failure. */
|
||||
export function safeHostname(url: string | undefined): string | undefined {
|
||||
if (!url) return undefined
|
||||
try { return new URL(url).hostname } catch { return undefined }
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a hostname exactly matches a domain or is a subdomain of it.
|
||||
* Example: hostMatchesDomain('sub.example.com', 'example.com') → true
|
||||
* hostMatchesDomain('badexample.com', 'example.com') → false
|
||||
*/
|
||||
export function hostMatchesDomain(host: string, domain: string): boolean {
|
||||
if (host === domain) return true
|
||||
// Subdomain: must end with `.domain` (not just `domain`)
|
||||
return host.endsWith('.' + domain)
|
||||
}
|
||||
|
||||
export function applyDomainFilters(
|
||||
hits: SearchHit[],
|
||||
input: SearchInput,
|
||||
): SearchHit[] {
|
||||
let out = hits
|
||||
if (input.blocked_domains?.length) {
|
||||
out = out.filter(h => {
|
||||
const host = safeHostname(h.url)
|
||||
if (!host) return true // can't confirm blocked → keep
|
||||
return !input.blocked_domains!.some(d => hostMatchesDomain(host, d))
|
||||
})
|
||||
}
|
||||
if (input.allowed_domains?.length) {
|
||||
out = out.filter(h => {
|
||||
const host = safeHostname(h.url)
|
||||
if (!host) return false // can't confirm allowed → drop
|
||||
return input.allowed_domains!.some(d => hostMatchesDomain(host, d))
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
/**
|
||||
* You.com Search API adapter.
|
||||
* GET https://api.ydc-index.io/v1/search?query=...
|
||||
* Auth: X-API-Key: <key>
|
||||
*/
|
||||
|
||||
import type { SearchInput, SearchProvider } from './types.js'
|
||||
import { applyDomainFilters, safeHostname, type ProviderOutput } from './types.js'
|
||||
|
||||
export const youProvider: SearchProvider = {
|
||||
name: 'you',
|
||||
|
||||
isConfigured() {
|
||||
return Boolean(process.env.YOU_API_KEY)
|
||||
},
|
||||
|
||||
async search(input: SearchInput, signal?: AbortSignal): Promise<ProviderOutput> {
|
||||
const start = performance.now()
|
||||
|
||||
const url = new URL('https://api.ydc-index.io/v1/search')
|
||||
url.searchParams.set('query', input.query)
|
||||
|
||||
const res = await fetch(url.toString(), {
|
||||
headers: { 'X-API-Key': process.env.YOU_API_KEY! },
|
||||
signal,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`You.com search error ${res.status}: ${await res.text().catch(() => '')}`)
|
||||
}
|
||||
|
||||
const data = await res.json()
|
||||
const webResults = data?.results?.web ?? data?.results ?? []
|
||||
|
||||
const hits = webResults.map((r: any) => {
|
||||
const snippet = Array.isArray(r.snippets) ? r.snippets[0] : r.snippet
|
||||
return {
|
||||
title: r.title ?? '',
|
||||
url: r.url ?? '',
|
||||
description: snippet ?? r.description,
|
||||
source: r.url ? safeHostname(r.url) : undefined,
|
||||
}
|
||||
})
|
||||
|
||||
return {
|
||||
hits: applyDomainFilters(hits, input),
|
||||
providerName: 'you',
|
||||
durationSeconds: (performance.now() - start) / 1000,
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -226,10 +226,8 @@ export type VimMode = 'INSERT' | 'NORMAL'
|
||||
*/
|
||||
export type BaseInputState = {
|
||||
onInput: (input: string, key: Key) => void
|
||||
value: string
|
||||
renderedValue: string
|
||||
offset: number
|
||||
setValue: (value: string, offset?: number) => void
|
||||
setOffset: (offset: number) => void
|
||||
/** Cursor line (0-indexed) within the rendered text, accounting for wrapping. */
|
||||
cursorLine: number
|
||||
|
||||
@@ -118,7 +118,6 @@ export function isAnthropicAuthEnabled(): boolean {
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
|
||||
// Check if user has configured an external API key source
|
||||
@@ -1742,7 +1741,6 @@ export function isUsing3PServices(): boolean {
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -78,8 +78,7 @@ export function getContextWindowForModel(
|
||||
const isOpenAIProvider =
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
if (isOpenAIProvider) {
|
||||
const openaiWindow = getOpenAIContextWindow(model)
|
||||
if (openaiWindow !== undefined) {
|
||||
@@ -187,8 +186,7 @@ export function getModelMaxOutputTokens(model: string): {
|
||||
if (
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
|
||||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
|
||||
) {
|
||||
const openaiMax = getOpenAIMaxOutputTokens(model)
|
||||
if (openaiMax !== undefined) {
|
||||
|
||||
@@ -11,11 +11,10 @@ import {
|
||||
type InstallMethod,
|
||||
} from './config.js'
|
||||
import { getCwd } from './cwd.js'
|
||||
import { getClaudeConfigHomeDir, isEnvTruthy } from './envUtils.js'
|
||||
import { isEnvTruthy } from './envUtils.js'
|
||||
import { execFileNoThrow } from './execFileNoThrow.js'
|
||||
import { getFsImplementation } from './fsOperations.js'
|
||||
import {
|
||||
getDetectedLocalInstallDir,
|
||||
getShellType,
|
||||
isRunningFromLocalInstallation,
|
||||
localInstallationExists,
|
||||
@@ -44,16 +43,6 @@ import {
|
||||
import { jsonParse } from './slowOperations.js'
|
||||
import { which } from './which.js'
|
||||
|
||||
function getCliBinaryName(): string {
|
||||
return MACRO.PACKAGE_URL === '@anthropic-ai/claude-code'
|
||||
? 'claude'
|
||||
: 'openclaude'
|
||||
}
|
||||
|
||||
function getNativeDataDirName(): string {
|
||||
return getCliBinaryName()
|
||||
}
|
||||
|
||||
export type InstallationType =
|
||||
| 'npm-global'
|
||||
| 'npm-local'
|
||||
@@ -173,7 +162,7 @@ async function getInstallationPath(): Promise<string> {
|
||||
}
|
||||
|
||||
try {
|
||||
const path = await which(getCliBinaryName())
|
||||
const path = await which('claude')
|
||||
if (path) {
|
||||
return path
|
||||
}
|
||||
@@ -183,14 +172,8 @@ async function getInstallationPath(): Promise<string> {
|
||||
|
||||
// If we can't find it, check common locations
|
||||
try {
|
||||
const nativeBinaryPath = join(
|
||||
homedir(),
|
||||
'.local',
|
||||
'bin',
|
||||
getCliBinaryName(),
|
||||
)
|
||||
await getFsImplementation().stat(nativeBinaryPath)
|
||||
return nativeBinaryPath
|
||||
await getFsImplementation().stat(join(homedir(), '.local/bin/claude'))
|
||||
return join(homedir(), '.local/bin/claude')
|
||||
} catch {
|
||||
// Not found
|
||||
}
|
||||
@@ -226,8 +209,8 @@ async function detectMultipleInstallations(): Promise<
|
||||
const installations: Array<{ type: string; path: string }> = []
|
||||
|
||||
// Check for local installation
|
||||
const localPath = await getDetectedLocalInstallDir()
|
||||
if (localPath) {
|
||||
const localPath = join(homedir(), '.claude', 'local')
|
||||
if (await localInstallationExists()) {
|
||||
installations.push({ type: 'npm-local', path: localPath })
|
||||
}
|
||||
|
||||
@@ -250,8 +233,8 @@ async function detectMultipleInstallations(): Promise<
|
||||
// Linux / macOS have prefix/bin/claude and prefix/lib/node_modules
|
||||
// Windows has prefix/claude and prefix/node_modules
|
||||
const globalBinPath = isWindows
|
||||
? join(npmPrefix, getCliBinaryName())
|
||||
: join(npmPrefix, 'bin', getCliBinaryName())
|
||||
? join(npmPrefix, 'claude')
|
||||
: join(npmPrefix, 'bin', 'claude')
|
||||
|
||||
let globalBinExists = false
|
||||
try {
|
||||
@@ -306,7 +289,7 @@ async function detectMultipleInstallations(): Promise<
|
||||
// Check for native installation
|
||||
|
||||
// Check common native installation paths
|
||||
const nativeBinPath = join(homedir(), '.local', 'bin', getCliBinaryName())
|
||||
const nativeBinPath = join(homedir(), '.local', 'bin', 'claude')
|
||||
try {
|
||||
await fs.stat(nativeBinPath)
|
||||
installations.push({ type: 'native', path: nativeBinPath })
|
||||
@@ -317,12 +300,7 @@ async function detectMultipleInstallations(): Promise<
|
||||
// Also check if config indicates native installation
|
||||
const config = getGlobalConfig()
|
||||
if (config.installMethod === 'native') {
|
||||
const nativeDataPath = join(
|
||||
homedir(),
|
||||
'.local',
|
||||
'share',
|
||||
getNativeDataDirName(),
|
||||
)
|
||||
const nativeDataPath = join(homedir(), '.local', 'share', 'claude')
|
||||
try {
|
||||
await fs.stat(nativeDataPath)
|
||||
if (!installations.some(i => i.type === 'native')) {
|
||||
@@ -457,14 +435,14 @@ async function detectConfigurationIssues(
|
||||
if (type === 'npm-local' && config.installMethod !== 'local') {
|
||||
warnings.push({
|
||||
issue: `Running from local installation but config install method is '${config.installMethod}'`,
|
||||
fix: `Consider using native installation: ${getCliBinaryName()} install`,
|
||||
fix: 'Consider using native installation: claude install',
|
||||
})
|
||||
}
|
||||
|
||||
if (type === 'native' && config.installMethod !== 'native') {
|
||||
warnings.push({
|
||||
issue: `Running native installation but config install method is '${config.installMethod}'`,
|
||||
fix: `Run ${getCliBinaryName()} install to update configuration`,
|
||||
fix: 'Run claude install to update configuration',
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -472,7 +450,7 @@ async function detectConfigurationIssues(
|
||||
if (type === 'npm-global' && (await localInstallationExists())) {
|
||||
warnings.push({
|
||||
issue: 'Local installation exists but not being used',
|
||||
fix: `Consider using native installation: ${getCliBinaryName()} install`,
|
||||
fix: 'Consider using native installation: claude install',
|
||||
})
|
||||
}
|
||||
|
||||
@@ -482,7 +460,7 @@ async function detectConfigurationIssues(
|
||||
// Check if running local installation but it's not in PATH
|
||||
if (type === 'npm-local') {
|
||||
// Check if claude is already accessible via PATH
|
||||
const whichResult = await which(getCliBinaryName())
|
||||
const whichResult = await which('claude')
|
||||
const claudeInPath = !!whichResult
|
||||
|
||||
// Only show warning if claude is NOT in PATH AND no valid alias exists
|
||||
@@ -491,13 +469,13 @@ async function detectConfigurationIssues(
|
||||
// Alias exists but points to invalid target
|
||||
warnings.push({
|
||||
issue: 'Local installation not accessible',
|
||||
fix: `Alias exists but points to invalid target: ${existingAlias}. Update alias: alias ${getCliBinaryName()}="~/.openclaude/local/${getCliBinaryName()}"`,
|
||||
fix: `Alias exists but points to invalid target: ${existingAlias}. Update alias: alias claude="~/.claude/local/claude"`,
|
||||
})
|
||||
} else {
|
||||
// No alias exists and not in PATH
|
||||
warnings.push({
|
||||
issue: 'Local installation not accessible',
|
||||
fix: `Create alias: alias ${getCliBinaryName()}="~/.openclaude/local/${getCliBinaryName()}"`,
|
||||
fix: 'Create alias: alias claude="~/.claude/local/claude"',
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -602,7 +580,7 @@ export async function getDoctorDiagnostic(): Promise<DiagnosticInfo> {
|
||||
if (!hasUpdatePermissions && !getAutoUpdaterDisabledReason()) {
|
||||
warnings.push({
|
||||
issue: 'Insufficient permissions for auto-updates',
|
||||
fix: `Do one of: (1) Re-install node without sudo, or (2) Use \`${getCliBinaryName()} install\` for native installation`,
|
||||
fix: 'Do one of: (1) Re-install node without sudo, or (2) Use `claude install` for native installation',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,39 +3,23 @@ import { existsSync } from 'fs'
|
||||
import { homedir } from 'os'
|
||||
import { join } from 'path'
|
||||
|
||||
export function resolveClaudeConfigHomeDir(options?: {
|
||||
configDirEnv?: string
|
||||
homeDir?: string
|
||||
openClaudeExists?: boolean
|
||||
legacyClaudeExists?: boolean
|
||||
}): string {
|
||||
if (options?.configDirEnv) {
|
||||
return options.configDirEnv.normalize('NFC')
|
||||
}
|
||||
|
||||
const homeDir = options?.homeDir ?? homedir()
|
||||
const openClaudeDir = join(homeDir, '.openclaude')
|
||||
const legacyClaudeDir = join(homeDir, '.claude')
|
||||
const openClaudeExists =
|
||||
options?.openClaudeExists ?? existsSync(openClaudeDir)
|
||||
const legacyClaudeExists =
|
||||
options?.legacyClaudeExists ?? existsSync(legacyClaudeDir)
|
||||
|
||||
// Preserve existing user config/install state until we ship an explicit
|
||||
// migration. New installs (neither path exists) use ~/.openclaude.
|
||||
if (!openClaudeExists && legacyClaudeExists) {
|
||||
return legacyClaudeDir.normalize('NFC')
|
||||
}
|
||||
|
||||
return openClaudeDir.normalize('NFC')
|
||||
}
|
||||
|
||||
// Memoized: 150+ callers, many on hot paths. Keyed off CLAUDE_CONFIG_DIR so
|
||||
// tests that change the env var get a fresh value without explicit cache.clear.
|
||||
export const getClaudeConfigHomeDir = memoize(
|
||||
(): string => resolveClaudeConfigHomeDir({
|
||||
configDirEnv: process.env.CLAUDE_CONFIG_DIR,
|
||||
}),
|
||||
(): string => {
|
||||
if (process.env.CLAUDE_CONFIG_DIR) {
|
||||
return process.env.CLAUDE_CONFIG_DIR.normalize('NFC')
|
||||
}
|
||||
const newDefault = join(homedir(), '.openclaude')
|
||||
// Migration compatibility: if ~/.openclaude doesn't exist yet but ~/.claude
|
||||
// does, keep using ~/.claude so existing users don't lose their data on
|
||||
// upgrade. New installs (neither dir exists) go straight to ~/.openclaude.
|
||||
const legacyPath = join(homedir(), '.claude')
|
||||
if (!existsSync(newDefault) && existsSync(legacyPath)) {
|
||||
return legacyPath.normalize('NFC')
|
||||
}
|
||||
return newDefault.normalize('NFC')
|
||||
},
|
||||
() => process.env.CLAUDE_CONFIG_DIR,
|
||||
)
|
||||
|
||||
|
||||
@@ -2,13 +2,9 @@ import { expect, test } from 'bun:test'
|
||||
import { mkdtempSync, writeFileSync } from 'node:fs'
|
||||
import { tmpdir } from 'node:os'
|
||||
import { join } from 'node:path'
|
||||
|
||||
async function importFreshExecFileNoThrowModule() {
|
||||
return import(`./execFileNoThrow.ts?ts=${Date.now()}-${Math.random()}`)
|
||||
}
|
||||
import { execFileNoThrowWithCwd } from './execFileNoThrow.js'
|
||||
|
||||
test('execFileNoThrowWithCwd rejects shell-like executable names', async () => {
|
||||
const { execFileNoThrowWithCwd } = await importFreshExecFileNoThrowModule()
|
||||
const result = await execFileNoThrowWithCwd('openclaude && whoami', [])
|
||||
|
||||
expect(result.code).toBe(1)
|
||||
@@ -16,7 +12,6 @@ test('execFileNoThrowWithCwd rejects shell-like executable names', async () => {
|
||||
})
|
||||
|
||||
test('execFileNoThrowWithCwd rejects cwd values with control characters', async () => {
|
||||
const { execFileNoThrowWithCwd } = await importFreshExecFileNoThrowModule()
|
||||
const result = await execFileNoThrowWithCwd(process.execPath, ['--version'], {
|
||||
cwd: 'C:\\repo\nmalicious',
|
||||
})
|
||||
@@ -26,7 +21,6 @@ test('execFileNoThrowWithCwd rejects cwd values with control characters', async
|
||||
})
|
||||
|
||||
test('execFileNoThrowWithCwd rejects arguments with control characters', async () => {
|
||||
const { execFileNoThrowWithCwd } = await importFreshExecFileNoThrowModule()
|
||||
const result = await execFileNoThrowWithCwd(process.execPath, [
|
||||
'--version\nmalicious',
|
||||
])
|
||||
@@ -36,7 +30,6 @@ test('execFileNoThrowWithCwd rejects arguments with control characters', async (
|
||||
})
|
||||
|
||||
test('execFileNoThrowWithCwd rejects environment entries with control characters', async () => {
|
||||
const { execFileNoThrowWithCwd } = await importFreshExecFileNoThrowModule()
|
||||
const result = await execFileNoThrowWithCwd(process.execPath, ['--version'], {
|
||||
env: {
|
||||
...process.env,
|
||||
@@ -52,7 +45,6 @@ test('execFileNoThrowWithCwd preserves Windows .cmd compatibility', async () =>
|
||||
if (process.platform !== 'win32') {
|
||||
return
|
||||
}
|
||||
const { execFileNoThrowWithCwd } = await importFreshExecFileNoThrowModule()
|
||||
|
||||
const dir = mkdtempSync(join(tmpdir(), 'openclaude-execfile-'))
|
||||
const file = join(dir, 'hello.cmd')
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user