feat: add guided /provider setup

Vasanthdev2004
2026-04-02 13:13:50 +05:30
parent 1059915c84
commit 08f0b6030e
15 changed files with 2111 additions and 233 deletions


@@ -1,6 +1,4 @@
 // @ts-nocheck
-import { writeFileSync } from 'node:fs'
-import { resolve } from 'node:path'
 import {
   resolveCodexApiCredentials,
 } from '../src/services/api/providerConfig.js'
@@ -15,6 +13,7 @@ import {
   buildOllamaProfileEnv,
   buildOpenAIProfileEnv,
   createProfileFile,
+  saveProfileFile,
   selectAutoProfile,
   type ProfileFile,
   type ProviderProfile,
@@ -147,8 +146,7 @@ async function main(): Promise<void> {
   const profile = createProfileFile(selected, env)
-  const outputPath = resolve(process.cwd(), '.openclaude-profile.json')
-  writeFileSync(outputPath, JSON.stringify(profile, null, 2), { encoding: 'utf8', mode: 0o600 })
+  const outputPath = saveProfileFile(profile)
   console.log(`Saved profile: ${selected}`)
   console.log(`Goal: ${goal}`)
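
The `saveProfileFile` helper this hunk switches to is added elsewhere in the commit and is not shown on this page. A minimal sketch of what it plausibly does, reconstructed from the inline code it replaces: the fixed filename, pretty-printed JSON, and 0o600 mode come from the removed lines, and the returned path is implied by the new `const outputPath =` assignment. `ProfileFile` is the shared type the script already imports.

import { writeFileSync } from 'node:fs'
import { resolve } from 'node:path'

// Hypothetical reconstruction of the shared helper, mirroring the
// removed inline call: same fixed filename, pretty-printed JSON, and
// owner-only 0o600 mode (the profile can contain API keys).
export function saveProfileFile(profile: ProfileFile): string {
  const outputPath = resolve(process.cwd(), '.openclaude-profile.json')
  writeFileSync(outputPath, JSON.stringify(profile, null, 2), {
    encoding: 'utf8',
    mode: 0o600,
  })
  return outputPath // callers log or reuse the path
}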


@@ -1,129 +1,8 @@
-import type { OllamaModelDescriptor } from '../src/utils/providerRecommendation.ts'
-
-export const DEFAULT_OLLAMA_BASE_URL = 'http://localhost:11434'
-
-function withTimeoutSignal(timeoutMs: number): {
-  signal: AbortSignal
-  clear: () => void
-} {
-  const controller = new AbortController()
-  const timeout = setTimeout(() => controller.abort(), timeoutMs)
-  return {
-    signal: controller.signal,
-    clear: () => clearTimeout(timeout),
-  }
-}
-
-function trimTrailingSlash(value: string): string {
-  return value.replace(/\/+$/, '')
-}
-
-export function getOllamaApiBaseUrl(baseUrl?: string): string {
-  const parsed = new URL(
-    baseUrl || process.env.OLLAMA_BASE_URL || DEFAULT_OLLAMA_BASE_URL,
-  )
-  const pathname = trimTrailingSlash(parsed.pathname)
-  parsed.pathname = pathname.endsWith('/v1')
-    ? pathname.slice(0, -3) || '/'
-    : pathname || '/'
-  parsed.search = ''
-  parsed.hash = ''
-  return trimTrailingSlash(parsed.toString())
-}
-
-export function getOllamaChatBaseUrl(baseUrl?: string): string {
-  return `${getOllamaApiBaseUrl(baseUrl)}/v1`
-}
-
-export async function hasLocalOllama(baseUrl?: string): Promise<boolean> {
-  const { signal, clear } = withTimeoutSignal(1200)
-  try {
-    const response = await fetch(`${getOllamaApiBaseUrl(baseUrl)}/api/tags`, {
-      method: 'GET',
-      signal,
-    })
-    return response.ok
-  } catch {
-    return false
-  } finally {
-    clear()
-  }
-}
-
-export async function listOllamaModels(
-  baseUrl?: string,
-): Promise<OllamaModelDescriptor[]> {
-  const { signal, clear } = withTimeoutSignal(5000)
-  try {
-    const response = await fetch(`${getOllamaApiBaseUrl(baseUrl)}/api/tags`, {
-      method: 'GET',
-      signal,
-    })
-    if (!response.ok) {
-      return []
-    }
-    const data = await response.json() as {
-      models?: Array<{
-        name?: string
-        size?: number
-        details?: {
-          family?: string
-          families?: string[]
-          parameter_size?: string
-          quantization_level?: string
-        }
-      }>
-    }
-    return (data.models ?? [])
-      .filter(model => Boolean(model.name))
-      .map(model => ({
-        name: model.name!,
-        sizeBytes: typeof model.size === 'number' ? model.size : null,
-        family: model.details?.family ?? null,
-        families: model.details?.families ?? [],
-        parameterSize: model.details?.parameter_size ?? null,
-        quantizationLevel: model.details?.quantization_level ?? null,
-      }))
-  } catch {
-    return []
-  } finally {
-    clear()
-  }
-}
-
-export async function benchmarkOllamaModel(
-  modelName: string,
-  baseUrl?: string,
-): Promise<number | null> {
-  const start = Date.now()
-  const { signal, clear } = withTimeoutSignal(20000)
-  try {
-    const response = await fetch(`${getOllamaApiBaseUrl(baseUrl)}/api/chat`, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-      },
-      signal,
-      body: JSON.stringify({
-        model: modelName,
-        stream: false,
-        messages: [{ role: 'user', content: 'Reply with OK.' }],
-        options: {
-          temperature: 0,
-          num_predict: 8,
-        },
-      }),
-    })
-    if (!response.ok) {
-      return null
-    }
-    await response.json()
-    return Date.now() - start
-  } catch {
-    return null
-  } finally {
-    clear()
-  }
-}
+export {
+  benchmarkOllamaModel,
+  DEFAULT_OLLAMA_BASE_URL,
+  getOllamaApiBaseUrl,
+  getOllamaChatBaseUrl,
+  hasLocalOllama,
+  listOllamaModels,
+} from '../src/utils/providerDiscovery.ts'
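
The script-local helpers above are deleted wholesale; the module now re-exports the same names from `src/utils/providerDiscovery.ts`, so the scripts and the CLI share one implementation. A usage sketch based on the signatures visible in the removed code (the flow is illustrative, not taken from the repo):

import {
  benchmarkOllamaModel,
  hasLocalOllama,
  listOllamaModels,
} from '../src/utils/providerDiscovery.ts'

// Probe the local daemon (short 1.2 s timeout), list installed models,
// then time one tiny non-streaming chat completion against the first.
if (await hasLocalOllama()) {
  const models = await listOllamaModels()
  if (models.length > 0) {
    const latencyMs = await benchmarkOllamaModel(models[0].name)
    console.log(`${models[0].name}: ${latencyMs ?? 'no response'} ms`)
  }
}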


@@ -1,7 +1,5 @@
 // @ts-nocheck
 import { spawn } from 'node:child_process'
-import { existsSync, readFileSync } from 'node:fs'
-import { resolve } from 'node:path'
 import {
   resolveCodexApiCredentials,
 } from '../src/services/api/providerConfig.js'
@@ -11,6 +9,7 @@ import {
 } from '../src/utils/providerRecommendation.ts'
 import {
   buildLaunchEnv,
+  loadProfileFile,
   selectAutoProfile,
   type ProfileFile,
   type ProviderProfile,
@@ -75,17 +74,7 @@ function parseLaunchOptions(argv: string[]): LaunchOptions {
 }
 function loadPersistedProfile(): ProfileFile | null {
-  const path = resolve(process.cwd(), '.openclaude-profile.json')
-  if (!existsSync(path)) return null
-  try {
-    const parsed = JSON.parse(readFileSync(path, 'utf8')) as ProfileFile
-    if (parsed.profile === 'openai' || parsed.profile === 'ollama' || parsed.profile === 'codex' || parsed.profile === 'gemini') {
-      return parsed
-    }
-    return null
-  } catch {
-    return null
-  }
+  return loadProfileFile()
 }
 async function resolveOllamaDefaultModel(

@@ -1,6 +1,4 @@
 // @ts-nocheck
-import { writeFileSync } from 'node:fs'
-import { resolve } from 'node:path'
 import {
   applyBenchmarkLatency,
@@ -16,6 +14,7 @@ import {
   buildOllamaProfileEnv,
   buildOpenAIProfileEnv,
   createProfileFile,
+  saveProfileFile,
   sanitizeApiKey,
   type ProfileFile,
   type ProviderProfile,
@@ -153,11 +152,7 @@ async function maybeApplyProfile(
   const profileFile = createProfileFile(profile, env)
-  writeFileSync(
-    resolve(process.cwd(), '.openclaude-profile.json'),
-    JSON.stringify(profileFile, null, 2),
-    'utf8',
-  )
+  saveProfileFile(profileFile)
   return true
 }
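
Net effect of the rewritten call sites: profile persistence goes through one save/load pair, so the file path, JSON layout, permissions, and validation can only change in one place. The round trip, under the same assumptions as the sketches above:

const file = createProfileFile(profile, env) // assemble the ProfileFile
const savedPath = saveProfileFile(file)      // persist with mode 0o600
const restored = loadProfileFile()           // null if absent or invalid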