// @ts-nocheck
// Runtime doctor: checks Node/Bun versions, build artifacts, provider env vars,
// provider reachability, and local Ollama state. Supports --json and --out <file>.
import { existsSync, mkdirSync, writeFileSync } from 'node:fs'
import { dirname, resolve } from 'node:path'
import { spawnSync } from 'node:child_process'
import {
  resolveCodexApiCredentials,
  resolveProviderRequest,
  isLocalProviderUrl as isProviderLocalUrl,
} from '../src/services/api/providerConfig.js'

type CheckResult = {
  ok: boolean
  label: string
  detail?: string
}

type CliOptions = {
  json: boolean
  outFile: string | null
}

function pass(label: string, detail?: string): CheckResult {
  return { ok: true, label, detail }
}

function fail(label: string, detail?: string): CheckResult {
  return { ok: false, label, detail }
}

// Treats '', '0', 'false', and 'no' (case-insensitive) as off; any other value as on.
function isTruthy(value: string | undefined): boolean {
  if (!value) return false
  const normalized = value.trim().toLowerCase()
  return normalized !== '' && normalized !== '0' && normalized !== 'false' && normalized !== 'no'
}

// Minimal flag parser: recognizes --json and --out <file>.
function parseOptions(argv: string[]): CliOptions {
  const options: CliOptions = {
    json: false,
    outFile: null,
  }
  for (let i = 0; i < argv.length; i++) {
    const arg = argv[i]
    if (arg === '--json') {
      options.json = true
      continue
    }
    if (arg === '--out') {
      const next = argv[i + 1]
      if (next && !next.startsWith('--')) {
        options.outFile = next
        i++
      }
    }
  }
  return options
}

function checkNodeVersion(): CheckResult {
  const raw = process.versions.node
  const major = Number(raw.split('.')[0] ?? '0')
  if (Number.isNaN(major)) {
    return fail('Node.js version', `Could not parse version: ${raw}`)
  }
  if (major < 20) {
    return fail('Node.js version', `Detected ${raw}. Require >= 20.`)
  }
  return pass('Node.js version', raw)
}

function checkBunRuntime(): CheckResult {
  const bunVersion = (globalThis as { Bun?: { version?: string } }).Bun?.version
  if (!bunVersion) {
    return pass('Bun runtime', 'Not running inside Bun (this is acceptable for Node startup).')
  }
  return pass('Bun runtime', bunVersion)
}

function checkBuildArtifacts(): CheckResult {
  const distCli = resolve(process.cwd(), 'dist', 'cli.mjs')
  if (!existsSync(distCli)) {
    return fail('Build artifacts', `Missing ${distCli}. Run: bun run build`)
  }
  return pass('Build artifacts', distCli)
}

function isLocalBaseUrl(baseUrl: string): boolean {
  return isProviderLocalUrl(baseUrl)
}

function currentBaseUrl(): string {
  return process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1'
}

function checkOpenAIEnv(): CheckResult[] {
  const results: CheckResult[] = []
  const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
  if (!useOpenAI) {
    results.push(pass('Provider mode', 'Anthropic login flow enabled (CLAUDE_CODE_USE_OPENAI is off).'))
    return results
  }
  const request = resolveProviderRequest({
    model: process.env.OPENAI_MODEL,
    baseUrl: process.env.OPENAI_BASE_URL,
  })
  results.push(
    pass(
      'Provider mode',
      request.transport === 'codex_responses'
        ? 'Codex responses backend enabled.'
        : 'OpenAI-compatible provider enabled.',
    ),
  )
  if (!process.env.OPENAI_MODEL) {
    results.push(pass('OPENAI_MODEL', 'Not set. Runtime fallback model will be used.'))
  } else {
    results.push(pass('OPENAI_MODEL', process.env.OPENAI_MODEL))
  }
  results.push(pass('OPENAI_BASE_URL', request.baseUrl))
  if (request.transport === 'codex_responses') {
    const credentials = resolveCodexApiCredentials(process.env)
    if (!credentials.apiKey) {
      const authHint = credentials.authPath
        ? `Missing CODEX_API_KEY and no usable auth.json at ${credentials.authPath}.`
        : 'Missing CODEX_API_KEY and auth.json fallback.'
      results.push(fail('CODEX auth', authHint))
    } else if (!credentials.accountId) {
      results.push(fail('CHATGPT_ACCOUNT_ID', 'Missing chatgpt_account_id in Codex auth.'))
    } else {
      const detail = credentials.source === 'env' ? 'Using CODEX_API_KEY.' : `Using ${credentials.authPath}.`
      results.push(pass('CODEX auth', detail))
    }
    return results
  }
  const key = process.env.OPENAI_API_KEY
  if (key === 'SUA_CHAVE') {
    results.push(fail('OPENAI_API_KEY', 'Placeholder value detected: SUA_CHAVE.'))
  } else if (!key && !isLocalBaseUrl(request.baseUrl)) {
    results.push(fail('OPENAI_API_KEY', 'Missing key for non-local provider URL.'))
  } else if (!key) {
    results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Ollama/LM Studio).'))
  } else {
    results.push(pass('OPENAI_API_KEY', 'Configured.'))
  }
  return results
}

// 401/403 still prove the endpoint is reachable; only network errors and
// unexpected statuses fail this check.
async function checkBaseUrlReachability(): Promise<CheckResult> {
  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
    return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
  }
  const request = resolveProviderRequest({
    model: process.env.OPENAI_MODEL,
    baseUrl: process.env.OPENAI_BASE_URL,
  })
  const endpoint =
    request.transport === 'codex_responses'
      ? `${request.baseUrl}/responses`
      : `${request.baseUrl}/models`
  // Abort the probe after 4 seconds.
  const controller = new AbortController()
  const timeout = setTimeout(() => controller.abort(), 4000)
  try {
    const headers: Record<string, string> = {}
    let method = 'GET'
    let body: string | undefined
    if (request.transport === 'codex_responses') {
      // Codex responses probe: POST a minimal streaming request instead of GET /models.
      const credentials = resolveCodexApiCredentials(process.env)
      if (credentials.apiKey) {
        headers.Authorization = `Bearer ${credentials.apiKey}`
      }
      if (credentials.accountId) {
        headers['chatgpt-account-id'] = credentials.accountId
      }
      headers['Content-Type'] = 'application/json'
      method = 'POST'
      body = JSON.stringify({
        model: request.resolvedModel,
        instructions: 'Runtime doctor probe.',
        input: [
          {
            type: 'message',
            role: 'user',
            content: [{ type: 'input_text', text: 'ping' }],
          },
        ],
        store: false,
        stream: true,
      })
    } else if (process.env.OPENAI_API_KEY) {
      headers.Authorization = `Bearer ${process.env.OPENAI_API_KEY}`
    }
    const response = await fetch(endpoint, {
      method,
      headers,
      body,
      signal: controller.signal,
    })
    if (response.status === 200 || response.status === 401 || response.status === 403) {
      return pass('Provider reachability', `Reached ${endpoint} (status ${response.status}).`)
    }
    return fail('Provider reachability', `Unexpected status ${response.status} from ${endpoint}.`)
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    return fail('Provider reachability', `Failed to reach ${endpoint}: ${message}`)
  } finally {
    clearTimeout(timeout)
  }
}

// Inspects `ollama ps` output to report whether the loaded model runs on CPU.
function checkOllamaProcessorMode(): CheckResult {
  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
    return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
  }
  const baseUrl = currentBaseUrl()
  if (!isLocalBaseUrl(baseUrl)) {
    return pass('Ollama processor mode', 'Skipped (provider URL is not local).')
  }
  const result = spawnSync('ollama', ['ps'], {
    cwd: process.cwd(),
    encoding: 'utf8',
    shell: true,
  })
  if (result.status !== 0) {
    const detail = (result.stderr || result.stdout || 'Unable to run ollama ps').trim()
    return fail('Ollama processor mode', detail)
  }
  const output = (result.stdout || '').trim()
  if (!output) {
    return fail('Ollama processor mode', 'ollama ps returned empty output.')
  }
  const lines = output.split(/\r?\n/).map(line => line.trim()).filter(Boolean)
  const modelLine = lines.find(line => line.includes(':') && !line.startsWith('NAME'))
  if (!modelLine) {
    return pass('Ollama processor mode', 'No loaded model found (run a prompt first).')
  }
  if (modelLine.includes('CPU')) {
    return pass('Ollama processor mode', 'Detected CPU mode. This is valid but can be slow for larger models.')
  }
  return pass('Ollama processor mode', `Detected non-CPU mode: ${modelLine}`)
}

// Summarizes the environment without leaking secrets (only whether keys are set).
function serializeSafeEnvSummary(): Record<string, string | boolean> {
  const request = resolveProviderRequest({
    model: process.env.OPENAI_MODEL,
    baseUrl: process.env.OPENAI_BASE_URL,
  })
  return {
    CLAUDE_CODE_USE_OPENAI: isTruthy(process.env.CLAUDE_CODE_USE_OPENAI),
    OPENAI_MODEL: process.env.OPENAI_MODEL ?? '(unset)',
    OPENAI_BASE_URL: request.baseUrl,
    OPENAI_API_KEY_SET: Boolean(process.env.OPENAI_API_KEY),
    CODEX_API_KEY_SET: Boolean(resolveCodexApiCredentials(process.env).apiKey),
  }
}

function printResults(results: CheckResult[]): void {
  for (const result of results) {
    const icon = result.ok ? 'PASS' : 'FAIL'
    const suffix = result.detail ? ` - ${result.detail}` : ''
    console.log(`[${icon}] ${result.label}${suffix}`)
  }
}

function writeJsonReport(options: CliOptions, results: CheckResult[]): void {
  const payload = {
    timestamp: new Date().toISOString(),
    cwd: process.cwd(),
    summary: {
      total: results.length,
      passed: results.filter(result => result.ok).length,
      failed: results.filter(result => !result.ok).length,
    },
    env: serializeSafeEnvSummary(),
    results,
  }
  if (options.json) {
    console.log(JSON.stringify(payload, null, 2))
  }
  if (options.outFile) {
    const outputPath = resolve(process.cwd(), options.outFile)
    mkdirSync(dirname(outputPath), { recursive: true })
    writeFileSync(outputPath, JSON.stringify(payload, null, 2), 'utf8')
    if (!options.json) {
      console.log(`Report written to ${outputPath}`)
    }
  }
}

async function main(): Promise<void> {
  const options = parseOptions(process.argv.slice(2))
  const results: CheckResult[] = []
  results.push(checkNodeVersion())
  results.push(checkBunRuntime())
  results.push(checkBuildArtifacts())
  results.push(...checkOpenAIEnv())
  results.push(await checkBaseUrlReachability())
  results.push(checkOllamaProcessorMode())
  if (!options.json) {
    printResults(results)
  }
  writeJsonReport(options, results)
  const hasFailure = results.some(result => !result.ok)
  if (hasFailure) {
    process.exitCode = 1
    return
  }
  if (!options.json) {
    console.log('\nRuntime checks completed successfully.')
  }
}

await main()

export {}
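
// Usage sketch (the script path below is an assumption; adjust to where this
// file actually lives in the repo):
//   bun scripts/doctor.ts                           # human-readable PASS/FAIL lines
//   bun scripts/doctor.ts --json                    # print the JSON report to stdout
//   bun scripts/doctor.ts --json --out doctor.json  # also persist the report to a file
// The process exits with code 1 when any check fails, so it can gate CI steps.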