refactor: update import paths for react/compiler-runtime to react-compiler-runtime
feat: add OpenClaude local agent playbook for setup and usage instructions
chore: implement provider bootstrap script for profile initialization
chore: create provider launch script to manage provider execution
chore: add system check script for runtime diagnostics and validation
feat: implement useEffectEventCompat hook for React 18 compatibility
This commit is contained in:
99
scripts/provider-bootstrap.ts
Normal file
99
scripts/provider-bootstrap.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
// @ts-nocheck
|
||||
import { writeFileSync } from 'node:fs'
|
||||
import { resolve } from 'node:path'
|
||||
|
||||
// Supported provider backends for the local agent runtime.
type ProviderProfile = 'openai' | 'ollama'

// Shape of the persisted profile file (.openclaude-profile.json).
type ProfileFile = {
  // Which provider this profile targets.
  profile: ProviderProfile
  // Environment overrides applied when launching with this profile.
  env: {
    OPENAI_BASE_URL?: string
    OPENAI_MODEL?: string
    // Omitted for local providers (e.g. Ollama) that need no key.
    OPENAI_API_KEY?: string
  }
  // ISO-8601 timestamp of when the profile was generated.
  createdAt: string
}
|
||||
|
||||
function parseArg(name: string): string | null {
|
||||
const args = process.argv.slice(2)
|
||||
const idx = args.indexOf(name)
|
||||
if (idx === -1) return null
|
||||
return args[idx + 1] ?? null
|
||||
}
|
||||
|
||||
function parseProviderArg(): ProviderProfile | 'auto' {
|
||||
const p = parseArg('--provider')?.toLowerCase()
|
||||
if (p === 'openai' || p === 'ollama') return p
|
||||
return 'auto'
|
||||
}
|
||||
|
||||
async function hasLocalOllama(): Promise<boolean> {
|
||||
const endpoint = 'http://localhost:11434/api/tags'
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(() => controller.abort(), 1200)
|
||||
|
||||
try {
|
||||
const response = await fetch(endpoint, {
|
||||
method: 'GET',
|
||||
signal: controller.signal,
|
||||
})
|
||||
return response.ok
|
||||
} catch {
|
||||
return false
|
||||
} finally {
|
||||
clearTimeout(timeout)
|
||||
}
|
||||
}
|
||||
|
||||
function sanitizeApiKey(key: string | null): string | undefined {
|
||||
if (!key || key === 'SUA_CHAVE') return undefined
|
||||
return key
|
||||
}
|
||||
|
||||
// Entry point: resolves the target provider (explicit flag or local Ollama
// auto-detection), builds the env overrides for it, and persists the result
// to .openclaude-profile.json. Exits with code 1 when the openai profile
// lacks a usable API key.
async function main(): Promise<void> {
  const provider = parseProviderArg()
  const argModel = parseArg('--model')
  const argBaseUrl = parseArg('--base-url')
  const argApiKey = parseArg('--api-key')

  // 'auto' prefers a locally running Ollama daemon; otherwise openai.
  let selected: ProviderProfile
  if (provider === 'auto') {
    selected = (await hasLocalOllama()) ? 'ollama' : 'openai'
  } else {
    selected = provider
  }

  // Precedence for each value: CLI flag, then current env, then default.
  const env: ProfileFile['env'] = {}
  if (selected === 'ollama') {
    env.OPENAI_BASE_URL = argBaseUrl || 'http://localhost:11434/v1'
    env.OPENAI_MODEL = argModel || process.env.OPENAI_MODEL || 'llama3.1:8b'
    // Key is optional for local Ollama; store it only when usable.
    const key = sanitizeApiKey(argApiKey || process.env.OPENAI_API_KEY || null)
    if (key) env.OPENAI_API_KEY = key
  } else {
    env.OPENAI_BASE_URL = argBaseUrl || process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1'
    env.OPENAI_MODEL = argModel || process.env.OPENAI_MODEL || 'gpt-4o'
    const key = sanitizeApiKey(argApiKey || process.env.OPENAI_API_KEY || null)
    // A remote OpenAI endpoint requires a real (non-placeholder) key.
    if (!key) {
      console.error('OpenAI profile requires a real API key. Use --api-key or set OPENAI_API_KEY.')
      process.exit(1)
    }
    env.OPENAI_API_KEY = key
  }

  const profile: ProfileFile = {
    profile: selected,
    env,
    createdAt: new Date().toISOString(),
  }

  // Persist next to the project root so provider-launch can pick it up.
  const outputPath = resolve(process.cwd(), '.openclaude-profile.json')
  writeFileSync(outputPath, JSON.stringify(profile, null, 2), 'utf8')

  console.log(`Saved profile: ${selected}`)
  console.log(`Path: ${outputPath}`)
  console.log('Next: bun run dev:profile')
}
|
||||
|
||||
// Run immediately on load; top-level await is valid because this is a module.
await main()

// Ensures the file is treated as an ES module (required for top-level await).
export {}
|
||||
138
scripts/provider-launch.ts
Normal file
138
scripts/provider-launch.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
// @ts-nocheck
|
||||
import { spawn } from 'node:child_process'
|
||||
import { existsSync, readFileSync } from 'node:fs'
|
||||
import { resolve } from 'node:path'
|
||||
|
||||
// Supported provider backends (mirrors provider-bootstrap.ts).
type ProviderProfile = 'openai' | 'ollama'

// Shape of .openclaude-profile.json as read back at launch time.
// `env` is optional here because the file may come from an older run.
type ProfileFile = {
  profile: ProviderProfile
  env?: {
    OPENAI_BASE_URL?: string
    OPENAI_MODEL?: string
    OPENAI_API_KEY?: string
  }
}
|
||||
|
||||
function parseProfile(argv: string[]): ProviderProfile | 'auto' | null {
|
||||
const profile = argv[0]?.toLowerCase()
|
||||
if (!profile) return 'auto'
|
||||
if (profile === 'auto') return 'auto'
|
||||
if (profile === 'openai' || profile === 'ollama') return profile
|
||||
return null
|
||||
}
|
||||
|
||||
function loadPersistedProfile(): ProfileFile | null {
|
||||
const path = resolve(process.cwd(), '.openclaude-profile.json')
|
||||
if (!existsSync(path)) return null
|
||||
try {
|
||||
const parsed = JSON.parse(readFileSync(path, 'utf8')) as ProfileFile
|
||||
if (parsed.profile === 'openai' || parsed.profile === 'ollama') {
|
||||
return parsed
|
||||
}
|
||||
return null
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async function hasLocalOllama(): Promise<boolean> {
|
||||
const endpoint = 'http://localhost:11434/api/tags'
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(() => controller.abort(), 1200)
|
||||
try {
|
||||
const response = await fetch(endpoint, { signal: controller.signal })
|
||||
return response.ok
|
||||
} catch {
|
||||
return false
|
||||
} finally {
|
||||
clearTimeout(timeout)
|
||||
}
|
||||
}
|
||||
|
||||
function runCommand(command: string, env: NodeJS.ProcessEnv): Promise<number> {
|
||||
return new Promise(resolve => {
|
||||
const child = spawn(command, {
|
||||
cwd: process.cwd(),
|
||||
env,
|
||||
stdio: 'inherit',
|
||||
shell: true,
|
||||
})
|
||||
|
||||
child.on('close', code => resolve(code ?? 1))
|
||||
child.on('error', () => resolve(1))
|
||||
})
|
||||
}
|
||||
|
||||
function buildEnv(profile: ProviderProfile, persisted: ProfileFile | null): NodeJS.ProcessEnv {
|
||||
const persistedEnv = persisted?.env ?? {}
|
||||
const env: NodeJS.ProcessEnv = {
|
||||
...process.env,
|
||||
CLAUDE_CODE_USE_OPENAI: '1',
|
||||
}
|
||||
|
||||
if (profile === 'ollama') {
|
||||
env.OPENAI_BASE_URL = persistedEnv.OPENAI_BASE_URL || process.env.OPENAI_BASE_URL || 'http://localhost:11434/v1'
|
||||
env.OPENAI_MODEL = persistedEnv.OPENAI_MODEL || process.env.OPENAI_MODEL || 'llama3.1:8b'
|
||||
if (!process.env.OPENAI_API_KEY || process.env.OPENAI_API_KEY === 'SUA_CHAVE') {
|
||||
delete env.OPENAI_API_KEY
|
||||
}
|
||||
return env
|
||||
}
|
||||
|
||||
env.OPENAI_BASE_URL = process.env.OPENAI_BASE_URL || persistedEnv.OPENAI_BASE_URL || 'https://api.openai.com/v1'
|
||||
env.OPENAI_MODEL = process.env.OPENAI_MODEL || persistedEnv.OPENAI_MODEL || 'gpt-4o'
|
||||
env.OPENAI_API_KEY = process.env.OPENAI_API_KEY || persistedEnv.OPENAI_API_KEY
|
||||
return env
|
||||
}
|
||||
|
||||
function printSummary(profile: ProviderProfile, env: NodeJS.ProcessEnv): void {
|
||||
const keySet = Boolean(env.OPENAI_API_KEY)
|
||||
console.log(`Launching profile: ${profile}`)
|
||||
console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
|
||||
console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
|
||||
console.log(`OPENAI_API_KEY_SET=${keySet}`)
|
||||
}
|
||||
|
||||
// Entry point: resolves the profile (CLI arg, persisted file, or Ollama
// auto-detection, in that order), validates the key for remote OpenAI,
// runs the system-check "doctor" script, and finally launches dev mode.
// The process exits with the doctor's / dev command's exit code.
async function main(): Promise<void> {
  const requestedProfile = parseProfile(process.argv.slice(2))
  // null means an unrecognized positional argument was given.
  if (!requestedProfile) {
    console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|auto]')
    process.exit(1)
  }

  const persisted = loadPersistedProfile()
  let profile: ProviderProfile

  // 'auto' prefers the saved profile; failing that, probe for local Ollama.
  if (requestedProfile === 'auto') {
    if (persisted) {
      profile = persisted.profile
    } else {
      profile = (await hasLocalOllama()) ? 'ollama' : 'openai'
    }
  } else {
    profile = requestedProfile
  }

  const env = buildEnv(profile, persisted)

  // Remote OpenAI requires a real key; 'SUA_CHAVE' is a known placeholder.
  if (profile === 'openai' && (!env.OPENAI_API_KEY || env.OPENAI_API_KEY === 'SUA_CHAVE')) {
    console.error('OPENAI_API_KEY is required for openai profile and cannot be SUA_CHAVE. Run: bun run profile:init -- --provider openai --api-key <key>')
    process.exit(1)
  }

  printSummary(profile, env)

  // Gate the launch on the diagnostics script passing with this env.
  const doctorCode = await runCommand('bun run scripts/system-check.ts', env)
  if (doctorCode !== 0) {
    console.error('Runtime doctor failed. Fix configuration before launching.')
    process.exit(doctorCode)
  }

  const devCode = await runCommand('bun run dev', env)
  process.exit(devCode)
}
|
||||
|
||||
// Run immediately on load; top-level await is valid because this is a module.
await main()

// Ensures the file is treated as an ES module (required for top-level await).
export {}
|
||||
288
scripts/system-check.ts
Normal file
288
scripts/system-check.ts
Normal file
@@ -0,0 +1,288 @@
|
||||
// @ts-nocheck
|
||||
import { existsSync, mkdirSync, writeFileSync } from 'node:fs'
|
||||
import { dirname, join, resolve } from 'node:path'
|
||||
import { spawnSync } from 'node:child_process'
|
||||
|
||||
// Outcome of a single diagnostic check.
type CheckResult = {
  // true = PASS, false = FAIL.
  ok: boolean
  // Short human-readable check name.
  label: string
  // Optional explanation or remediation hint.
  detail?: string
}

// Parsed command-line options for this script.
type CliOptions = {
  // --json: print the full report as JSON to stdout instead of text lines.
  json: boolean
  // --out <path>: also write the JSON report to a file (null = disabled).
  outFile: string | null
}
|
||||
|
||||
function pass(label: string, detail?: string): CheckResult {
|
||||
return { ok: true, label, detail }
|
||||
}
|
||||
|
||||
function fail(label: string, detail?: string): CheckResult {
|
||||
return { ok: false, label, detail }
|
||||
}
|
||||
|
||||
function isTruthy(value: string | undefined): boolean {
|
||||
if (!value) return false
|
||||
const normalized = value.trim().toLowerCase()
|
||||
return normalized !== '' && normalized !== '0' && normalized !== 'false' && normalized !== 'no'
|
||||
}
|
||||
|
||||
function parseOptions(argv: string[]): CliOptions {
|
||||
const options: CliOptions = {
|
||||
json: false,
|
||||
outFile: null,
|
||||
}
|
||||
|
||||
for (let i = 0; i < argv.length; i++) {
|
||||
const arg = argv[i]
|
||||
if (arg === '--json') {
|
||||
options.json = true
|
||||
continue
|
||||
}
|
||||
|
||||
if (arg === '--out') {
|
||||
const next = argv[i + 1]
|
||||
if (next && !next.startsWith('--')) {
|
||||
options.outFile = next
|
||||
i++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return options
|
||||
}
|
||||
|
||||
function checkNodeVersion(): CheckResult {
|
||||
const raw = process.versions.node
|
||||
const major = Number(raw.split('.')[0] ?? '0')
|
||||
if (Number.isNaN(major)) {
|
||||
return fail('Node.js version', `Could not parse version: ${raw}`)
|
||||
}
|
||||
|
||||
if (major < 20) {
|
||||
return fail('Node.js version', `Detected ${raw}. Require >= 20.`)
|
||||
}
|
||||
|
||||
return pass('Node.js version', raw)
|
||||
}
|
||||
|
||||
function checkBunRuntime(): CheckResult {
|
||||
const bunVersion = (globalThis as { Bun?: { version?: string } }).Bun?.version
|
||||
if (!bunVersion) {
|
||||
return pass('Bun runtime', 'Not running inside Bun (this is acceptable for Node startup).')
|
||||
}
|
||||
return pass('Bun runtime', bunVersion)
|
||||
}
|
||||
|
||||
function checkBuildArtifacts(): CheckResult {
|
||||
const distCli = resolve(process.cwd(), 'dist', 'cli.mjs')
|
||||
if (!existsSync(distCli)) {
|
||||
return fail('Build artifacts', `Missing ${distCli}. Run: bun run build`)
|
||||
}
|
||||
return pass('Build artifacts', distCli)
|
||||
}
|
||||
|
||||
function isLocalBaseUrl(baseUrl: string): boolean {
|
||||
try {
|
||||
const url = new URL(baseUrl)
|
||||
return url.hostname === 'localhost' || url.hostname === '127.0.0.1' || url.hostname === '::1'
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function currentBaseUrl(): string {
|
||||
return process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1'
|
||||
}
|
||||
|
||||
function checkOpenAIEnv(): CheckResult[] {
|
||||
const results: CheckResult[] = []
|
||||
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
|
||||
|
||||
if (!useOpenAI) {
|
||||
results.push(pass('Provider mode', 'Anthropic login flow enabled (CLAUDE_CODE_USE_OPENAI is off).'))
|
||||
return results
|
||||
}
|
||||
|
||||
const baseUrl = process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1'
|
||||
const model = process.env.OPENAI_MODEL
|
||||
const key = process.env.OPENAI_API_KEY
|
||||
|
||||
results.push(pass('Provider mode', 'OpenAI-compatible provider enabled.'))
|
||||
|
||||
if (!model) {
|
||||
results.push(pass('OPENAI_MODEL', 'Not set. Runtime fallback model will be used.'))
|
||||
} else {
|
||||
results.push(pass('OPENAI_MODEL', model))
|
||||
}
|
||||
|
||||
results.push(pass('OPENAI_BASE_URL', baseUrl))
|
||||
|
||||
if (key === 'SUA_CHAVE') {
|
||||
results.push(fail('OPENAI_API_KEY', 'Placeholder value detected: SUA_CHAVE.'))
|
||||
} else if (!key && !isLocalBaseUrl(baseUrl)) {
|
||||
results.push(fail('OPENAI_API_KEY', 'Missing key for non-local provider URL.'))
|
||||
} else if (!key) {
|
||||
results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Ollama/LM Studio).'))
|
||||
} else {
|
||||
results.push(pass('OPENAI_API_KEY', 'Configured.'))
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// Probes the provider's /models endpoint to confirm the base URL is
// reachable at all. Skipped entirely when OpenAI-compatible mode is off.
async function checkBaseUrlReachability(): Promise<CheckResult> {
  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
    return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
  }

  const baseUrl = currentBaseUrl()
  const key = process.env.OPENAI_API_KEY
  // Strip a single trailing slash so we never request "//models".
  const endpoint = `${baseUrl.replace(/\/$/, '')}/models`

  // Hard 4-second cap so diagnostics never hang on a dead endpoint.
  const controller = new AbortController()
  const timeout = setTimeout(() => controller.abort(), 4000)

  try {
    const headers: Record<string, string> = {}
    if (key) {
      headers.Authorization = `Bearer ${key}`
    }

    const response = await fetch(endpoint, {
      method: 'GET',
      headers,
      signal: controller.signal,
    })

    // 401/403 still prove the endpoint is alive — only reachability is
    // being checked here, not credential validity.
    if (response.status === 200 || response.status === 401 || response.status === 403) {
      return pass('Provider reachability', `Reached ${endpoint} (status ${response.status}).`)
    }

    return fail('Provider reachability', `Unexpected status ${response.status} from ${endpoint}.`)
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    return fail('Provider reachability', `Failed to reach ${endpoint}: ${message}`)
  } finally {
    clearTimeout(timeout)
  }
}
|
||||
|
||||
// When the provider URL is local, inspects `ollama ps` output to report
// whether the loaded model runs on CPU (valid but potentially slow).
// Skipped when OpenAI-compatible mode is off or the URL is remote.
function checkOllamaProcessorMode(): CheckResult {
  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
    return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
  }

  const baseUrl = currentBaseUrl()
  if (!isLocalBaseUrl(baseUrl)) {
    return pass('Ollama processor mode', 'Skipped (provider URL is not local).')
  }

  // shell: true lets the lookup go through PATH on all platforms.
  const result = spawnSync('ollama', ['ps'], {
    cwd: process.cwd(),
    encoding: 'utf8',
    shell: true,
  })

  if (result.status !== 0) {
    const detail = (result.stderr || result.stdout || 'Unable to run ollama ps').trim()
    return fail('Ollama processor mode', detail)
  }

  const output = (result.stdout || '').trim()
  if (!output) {
    return fail('Ollama processor mode', 'ollama ps returned empty output.')
  }

  // Model rows look like "name:tag ..."; skip the NAME header row.
  const lines = output.split(/\r?\n/).map(line => line.trim()).filter(Boolean)
  const modelLine = lines.find(line => line.includes(':') && !line.startsWith('NAME'))
  if (!modelLine) {
    return pass('Ollama processor mode', 'No loaded model found (run a prompt first).')
  }

  // NOTE(review): assumes `ollama ps` prints a PROCESSOR column containing
  // the literal "CPU" for CPU-resident models — confirm against the
  // installed Ollama CLI version.
  if (modelLine.includes('CPU')) {
    return pass('Ollama processor mode', 'Detected CPU mode. This is valid but can be slow for larger models.')
  }

  return pass('Ollama processor mode', `Detected non-CPU mode: ${modelLine}`)
}
|
||||
|
||||
function serializeSafeEnvSummary(): Record<string, string | boolean> {
|
||||
return {
|
||||
CLAUDE_CODE_USE_OPENAI: isTruthy(process.env.CLAUDE_CODE_USE_OPENAI),
|
||||
OPENAI_MODEL: process.env.OPENAI_MODEL ?? '(unset)',
|
||||
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1',
|
||||
OPENAI_API_KEY_SET: Boolean(process.env.OPENAI_API_KEY),
|
||||
}
|
||||
}
|
||||
|
||||
function printResults(results: CheckResult[]): void {
|
||||
for (const result of results) {
|
||||
const icon = result.ok ? 'PASS' : 'FAIL'
|
||||
const suffix = result.detail ? ` - ${result.detail}` : ''
|
||||
console.log(`[${icon}] ${result.label}${suffix}`)
|
||||
}
|
||||
}
|
||||
|
||||
// Assembles the JSON report payload and emits it according to the CLI
// options: to stdout with --json, and/or to a file with --out <path>
// (parent directories are created as needed). With neither flag this
// builds the payload but produces no output.
function writeJsonReport(
  options: CliOptions,
  results: CheckResult[],
): void {
  const payload = {
    timestamp: new Date().toISOString(),
    cwd: process.cwd(),
    summary: {
      total: results.length,
      passed: results.filter(result => result.ok).length,
      failed: results.filter(result => !result.ok).length,
    },
    // Env snapshot excludes the key's value (see serializeSafeEnvSummary).
    env: serializeSafeEnvSummary(),
    results,
  }

  if (options.json) {
    console.log(JSON.stringify(payload, null, 2))
  }

  if (options.outFile) {
    const outputPath = resolve(process.cwd(), options.outFile)
    mkdirSync(dirname(outputPath), { recursive: true })
    writeFileSync(outputPath, JSON.stringify(payload, null, 2), 'utf8')
    // Keep stdout clean when the caller asked for pure JSON output.
    if (!options.json) {
      console.log(`Report written to ${outputPath}`)
    }
  }
}
|
||||
|
||||
// Entry point: runs every diagnostic in a fixed order, prints and/or writes
// the report per CLI options, and sets exit code 1 when any check failed
// (without aborting the remaining output).
async function main(): Promise<void> {
  const options = parseOptions(process.argv.slice(2))
  const results: CheckResult[] = []

  results.push(checkNodeVersion())
  results.push(checkBunRuntime())
  results.push(checkBuildArtifacts())
  // checkOpenAIEnv returns several results; spread them into the list.
  results.push(...checkOpenAIEnv())
  results.push(await checkBaseUrlReachability())
  results.push(checkOllamaProcessorMode())

  // Human-readable lines are suppressed in --json mode.
  if (!options.json) {
    printResults(results)
  }

  writeJsonReport(options, results)

  // Use exitCode (not process.exit) so pending stdout flushes complete.
  const hasFailure = results.some(result => !result.ok)
  if (hasFailure) {
    process.exitCode = 1
    return
  }

  if (!options.json) {
    console.log('\nRuntime checks completed successfully.')
  }
}
|
||||
|
||||
// Run immediately on load; top-level await is valid because this is a module.
await main()

// Ensures the file is treated as an ES module (required for top-level await).
export {}
|
||||
Reference in New Issue
Block a user