Stub internal-only recording and model capability helpers (#377)
This follow-up Phase C-lite slice replaces purely internal helper modules with stable external no-op surfaces and collapses internal elevated error logging to a no-op. The change removes additional USER_TYPE-gated helper behavior without touching product-facing runtime flows.

Constraint: Keep this PR limited to isolated helper modules that are already external no-ops in practice.
Rejected: Pulling in broader speculation or logging-sink changes — less isolated and easier to debate during review.
Confidence: high
Scope-risk: narrow
Reversibility: clean
Directive: Continue Phase C with similarly isolated helpers before moving into mixed-behavior files.
Tested: bun run build
Tested: bun run smoke
Tested: bun run verify:privacy
Tested: bun run test:provider
Tested: bun run test:provider-recommendation
Not-tested: Full repo typecheck (upstream baseline remains noisy)

Co-authored-by: anandh8x <test@example.com>
This commit is contained in:
@@ -1,239 +1,18 @@
|
|||||||
import { appendFile, rename } from 'fs/promises'
|
// External build: terminal recording is not available.
|
||||||
import { basename, dirname, join } from 'path'
|
// Keep this module as a stable no-op surface so runtime imports stay valid.
|
||||||
import { getOriginalCwd, getSessionId } from '../bootstrap/state.js'
|
|
||||||
import { createBufferedWriter } from './bufferedWriter.js'
|
|
||||||
import { registerCleanup } from './cleanupRegistry.js'
|
|
||||||
import { logForDebugging } from './debug.js'
|
|
||||||
import { getClaudeConfigHomeDir, isEnvTruthy } from './envUtils.js'
|
|
||||||
import { getFsImplementation } from './fsOperations.js'
|
|
||||||
import { sanitizePath } from './path.js'
|
|
||||||
import { jsonStringify } from './slowOperations.js'
|
|
||||||
|
|
||||||
// Mutable recording state — filePath is updated when session ID changes (e.g., --resume)
|
|
||||||
const recordingState: { filePath: string | null; timestamp: number } = {
|
|
||||||
filePath: null,
|
|
||||||
timestamp: 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the asciicast recording file path.
|
|
||||||
* For ants with CLAUDE_CODE_TERMINAL_RECORDING=1: returns a path.
|
|
||||||
* Otherwise: returns null.
|
|
||||||
* The path is computed once and cached in recordingState.
|
|
||||||
*/
|
|
||||||
export function getRecordFilePath(): string | null {
|
export function getRecordFilePath(): string | null {
|
||||||
if (recordingState.filePath !== null) {
|
return null
|
||||||
return recordingState.filePath
|
|
||||||
}
|
|
||||||
if (process.env.USER_TYPE !== 'ant') {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
if (!isEnvTruthy(process.env.CLAUDE_CODE_TERMINAL_RECORDING)) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
// Record alongside the transcript.
|
|
||||||
// Each launch gets its own file so --continue produces multiple recordings.
|
|
||||||
const projectsDir = join(getClaudeConfigHomeDir(), 'projects')
|
|
||||||
const projectDir = join(projectsDir, sanitizePath(getOriginalCwd()))
|
|
||||||
recordingState.timestamp = Date.now()
|
|
||||||
recordingState.filePath = join(
|
|
||||||
projectDir,
|
|
||||||
`${getSessionId()}-${recordingState.timestamp}.cast`,
|
|
||||||
)
|
|
||||||
return recordingState.filePath
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function _resetRecordingStateForTesting(): void {
|
export function _resetRecordingStateForTesting(): void {}
|
||||||
recordingState.filePath = null
|
|
||||||
recordingState.timestamp = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Find all .cast files for the current session.
|
|
||||||
* Returns paths sorted by filename (chronological by timestamp suffix).
|
|
||||||
*/
|
|
||||||
export function getSessionRecordingPaths(): string[] {
|
export function getSessionRecordingPaths(): string[] {
|
||||||
const sessionId = getSessionId()
|
return []
|
||||||
const projectsDir = join(getClaudeConfigHomeDir(), 'projects')
|
|
||||||
const projectDir = join(projectsDir, sanitizePath(getOriginalCwd()))
|
|
||||||
try {
|
|
||||||
// eslint-disable-next-line custom-rules/no-sync-fs -- called during /share before upload, not in hot path
|
|
||||||
const entries = getFsImplementation().readdirSync(projectDir)
|
|
||||||
const names = (
|
|
||||||
typeof entries[0] === 'string'
|
|
||||||
? entries
|
|
||||||
: (entries as { name: string }[]).map(e => e.name)
|
|
||||||
) as string[]
|
|
||||||
const files = names
|
|
||||||
.filter(f => f.startsWith(sessionId) && f.endsWith('.cast'))
|
|
||||||
.sort()
|
|
||||||
return files.map(f => join(projectDir, f))
|
|
||||||
} catch {
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
export async function renameRecordingForSession(): Promise<void> {}
|
||||||
* Rename the recording file to match the current session ID.
|
|
||||||
* Called after --resume/--continue changes the session ID via switchSession().
|
|
||||||
* The recorder was installed with the initial (random) session ID; this renames
|
|
||||||
* the file so getSessionRecordingPaths() can find it by the resumed session ID.
|
|
||||||
*/
|
|
||||||
export async function renameRecordingForSession(): Promise<void> {
|
|
||||||
const oldPath = recordingState.filePath
|
|
||||||
if (!oldPath || recordingState.timestamp === 0) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const projectsDir = join(getClaudeConfigHomeDir(), 'projects')
|
|
||||||
const projectDir = join(projectsDir, sanitizePath(getOriginalCwd()))
|
|
||||||
const newPath = join(
|
|
||||||
projectDir,
|
|
||||||
`${getSessionId()}-${recordingState.timestamp}.cast`,
|
|
||||||
)
|
|
||||||
if (oldPath === newPath) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
// Flush pending writes before renaming
|
|
||||||
await recorder?.flush()
|
|
||||||
const oldName = basename(oldPath)
|
|
||||||
const newName = basename(newPath)
|
|
||||||
try {
|
|
||||||
await rename(oldPath, newPath)
|
|
||||||
recordingState.filePath = newPath
|
|
||||||
logForDebugging(`[asciicast] Renamed recording: ${oldName} → ${newName}`)
|
|
||||||
} catch {
|
|
||||||
logForDebugging(
|
|
||||||
`[asciicast] Failed to rename recording from ${oldName} to ${newName}`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type AsciicastRecorder = {
|
export async function flushAsciicastRecorder(): Promise<void> {}
|
||||||
flush(): Promise<void>
|
|
||||||
dispose(): Promise<void>
|
|
||||||
}
|
|
||||||
|
|
||||||
let recorder: AsciicastRecorder | null = null
|
export function installAsciicastRecorder(): void {}
|
||||||
|
|
||||||
function getTerminalSize(): { cols: number; rows: number } {
|
|
||||||
// Direct access to stdout dimensions — not in a React component
|
|
||||||
// eslint-disable-next-line custom-rules/prefer-use-terminal-size
|
|
||||||
const cols = process.stdout.columns || 80
|
|
||||||
// eslint-disable-next-line custom-rules/prefer-use-terminal-size
|
|
||||||
const rows = process.stdout.rows || 24
|
|
||||||
return { cols, rows }
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Flush pending recording data to disk.
|
|
||||||
* Call before reading the .cast file (e.g., during /share).
|
|
||||||
*/
|
|
||||||
export async function flushAsciicastRecorder(): Promise<void> {
|
|
||||||
await recorder?.flush()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Install the asciicast recorder.
|
|
||||||
* Wraps process.stdout.write to capture all terminal output with timestamps.
|
|
||||||
* Must be called before Ink mounts.
|
|
||||||
*/
|
|
||||||
export function installAsciicastRecorder(): void {
|
|
||||||
const filePath = getRecordFilePath()
|
|
||||||
if (!filePath) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const { cols, rows } = getTerminalSize()
|
|
||||||
const startTime = performance.now()
|
|
||||||
|
|
||||||
// Write the asciicast v2 header
|
|
||||||
const header = jsonStringify({
|
|
||||||
version: 2,
|
|
||||||
width: cols,
|
|
||||||
height: rows,
|
|
||||||
timestamp: Math.floor(Date.now() / 1000),
|
|
||||||
env: {
|
|
||||||
SHELL: process.env.SHELL || '',
|
|
||||||
TERM: process.env.TERM || '',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
try {
|
|
||||||
// eslint-disable-next-line custom-rules/no-sync-fs -- one-time init before Ink mounts
|
|
||||||
getFsImplementation().mkdirSync(dirname(filePath))
|
|
||||||
} catch {
|
|
||||||
// Directory may already exist
|
|
||||||
}
|
|
||||||
// eslint-disable-next-line custom-rules/no-sync-fs -- one-time init before Ink mounts
|
|
||||||
getFsImplementation().appendFileSync(filePath, header + '\n', { mode: 0o600 })
|
|
||||||
|
|
||||||
let pendingWrite: Promise<void> = Promise.resolve()
|
|
||||||
|
|
||||||
const writer = createBufferedWriter({
|
|
||||||
writeFn(content: string) {
|
|
||||||
// Use recordingState.filePath (mutable) so writes follow renames from --resume
|
|
||||||
const currentPath = recordingState.filePath
|
|
||||||
if (!currentPath) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
pendingWrite = pendingWrite
|
|
||||||
.then(() => appendFile(currentPath, content))
|
|
||||||
.catch(() => {
|
|
||||||
// Silently ignore write errors — don't break the session
|
|
||||||
})
|
|
||||||
},
|
|
||||||
flushIntervalMs: 500,
|
|
||||||
maxBufferSize: 50,
|
|
||||||
maxBufferBytes: 10 * 1024 * 1024, // 10MB
|
|
||||||
})
|
|
||||||
|
|
||||||
// Wrap process.stdout.write to capture output
|
|
||||||
const originalWrite = process.stdout.write.bind(
|
|
||||||
process.stdout,
|
|
||||||
) as typeof process.stdout.write
|
|
||||||
process.stdout.write = function (
|
|
||||||
chunk: string | Uint8Array,
|
|
||||||
encodingOrCb?: BufferEncoding | ((err?: Error) => void),
|
|
||||||
cb?: (err?: Error) => void,
|
|
||||||
): boolean {
|
|
||||||
// Record the output event
|
|
||||||
const elapsed = (performance.now() - startTime) / 1000
|
|
||||||
const text =
|
|
||||||
typeof chunk === 'string' ? chunk : Buffer.from(chunk).toString('utf-8')
|
|
||||||
writer.write(jsonStringify([elapsed, 'o', text]) + '\n')
|
|
||||||
|
|
||||||
// Pass through to the real stdout
|
|
||||||
if (typeof encodingOrCb === 'function') {
|
|
||||||
return originalWrite(chunk, encodingOrCb)
|
|
||||||
}
|
|
||||||
return originalWrite(chunk, encodingOrCb, cb)
|
|
||||||
} as typeof process.stdout.write
|
|
||||||
|
|
||||||
// Handle terminal resize events
|
|
||||||
function onResize(): void {
|
|
||||||
const elapsed = (performance.now() - startTime) / 1000
|
|
||||||
const { cols: newCols, rows: newRows } = getTerminalSize()
|
|
||||||
writer.write(jsonStringify([elapsed, 'r', `${newCols}x${newRows}`]) + '\n')
|
|
||||||
}
|
|
||||||
process.stdout.on('resize', onResize)
|
|
||||||
|
|
||||||
recorder = {
|
|
||||||
async flush(): Promise<void> {
|
|
||||||
writer.flush()
|
|
||||||
await pendingWrite
|
|
||||||
},
|
|
||||||
async dispose(): Promise<void> {
|
|
||||||
writer.dispose()
|
|
||||||
await pendingWrite
|
|
||||||
process.stdout.removeListener('resize', onResize)
|
|
||||||
process.stdout.write = originalWrite
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
registerCleanup(async () => {
|
|
||||||
await recorder?.dispose()
|
|
||||||
recorder = null
|
|
||||||
})
|
|
||||||
|
|
||||||
logForDebugging(`[asciicast] Recording to ${filePath}`)
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -255,14 +255,6 @@ const updateLatestDebugLogSymlink = memoize(async (): Promise<void> => {
|
|||||||
/**
|
/**
|
||||||
* Logs errors for Ants only, always visible in production.
|
* Logs errors for Ants only, always visible in production.
|
||||||
*/
|
*/
|
||||||
export function logAntError(context: string, error: unknown): void {
|
export function logAntError(_context: string, _error: unknown): void {
|
||||||
if (process.env.USER_TYPE !== 'ant') {
|
// External build: internal-only elevated error surfacing is disabled.
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (error instanceof Error && error.stack) {
|
|
||||||
logForDebugging(`[internal] ${context} stack trace:\n${error.stack}`, {
|
|
||||||
level: 'error',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,118 +1,16 @@
|
|||||||
import { readFileSync } from 'fs'
|
// External build: internal model-capabilities fetch/cache path is disabled.
|
||||||
import { mkdir, writeFile } from 'fs/promises'
|
// Preserve a stable public surface so callers can continue to import it.
|
||||||
import isEqual from 'lodash-es/isEqual.js'
|
|
||||||
import memoize from 'lodash-es/memoize.js'
|
|
||||||
import { join } from 'path'
|
|
||||||
import { z } from 'zod/v4'
|
|
||||||
import { OAUTH_BETA_HEADER } from '../../constants/oauth.js'
|
|
||||||
import { getAnthropicClient } from '../../services/api/client.js'
|
|
||||||
import { isClaudeAISubscriber } from '../auth.js'
|
|
||||||
import { logForDebugging } from '../debug.js'
|
|
||||||
import { getClaudeConfigHomeDir } from '../envUtils.js'
|
|
||||||
import { safeParseJSON } from '../json.js'
|
|
||||||
import { lazySchema } from '../lazySchema.js'
|
|
||||||
import { isEssentialTrafficOnly } from '../privacyLevel.js'
|
|
||||||
import { jsonStringify } from '../slowOperations.js'
|
|
||||||
import { getAPIProvider, isFirstPartyAnthropicBaseUrl } from './providers.js'
|
|
||||||
|
|
||||||
// .strip() — don't persist internal-only fields (mycro_deployments etc.) to disk
|
export type ModelCapability = {
|
||||||
const ModelCapabilitySchema = lazySchema(() =>
|
id: string
|
||||||
z
|
max_input_tokens?: number
|
||||||
.object({
|
max_tokens?: number
|
||||||
id: z.string(),
|
|
||||||
max_input_tokens: z.number().optional(),
|
|
||||||
max_tokens: z.number().optional(),
|
|
||||||
})
|
|
||||||
.strip(),
|
|
||||||
)
|
|
||||||
|
|
||||||
const CacheFileSchema = lazySchema(() =>
|
|
||||||
z.object({
|
|
||||||
models: z.array(ModelCapabilitySchema()),
|
|
||||||
timestamp: z.number(),
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
|
|
||||||
export type ModelCapability = z.infer<ReturnType<typeof ModelCapabilitySchema>>
|
|
||||||
|
|
||||||
function getCacheDir(): string {
|
|
||||||
return join(getClaudeConfigHomeDir(), 'cache')
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function getCachePath(): string {
|
export function getModelCapability(
|
||||||
return join(getCacheDir(), 'model-capabilities.json')
|
_model: string,
|
||||||
|
): ModelCapability | undefined {
|
||||||
|
return undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
function isModelCapabilitiesEligible(): boolean {
|
export async function refreshModelCapabilities(): Promise<void> {}
|
||||||
if (process.env.USER_TYPE !== 'ant') return false
|
|
||||||
if (getAPIProvider() !== 'firstParty') return false
|
|
||||||
if (!isFirstPartyAnthropicBaseUrl()) return false
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Longest-id-first so substring match prefers most specific; secondary key for stable isEqual
|
|
||||||
function sortForMatching(models: ModelCapability[]): ModelCapability[] {
|
|
||||||
return [...models].sort(
|
|
||||||
(a, b) => b.id.length - a.id.length || a.id.localeCompare(b.id),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Keyed on cache path so tests that set CLAUDE_CONFIG_DIR get a fresh read
|
|
||||||
const loadCache = memoize(
|
|
||||||
(path: string): ModelCapability[] | null => {
|
|
||||||
try {
|
|
||||||
// eslint-disable-next-line custom-rules/no-sync-fs -- memoized; called from sync getContextWindowForModel
|
|
||||||
const raw = readFileSync(path, 'utf-8')
|
|
||||||
const parsed = CacheFileSchema().safeParse(safeParseJSON(raw, false))
|
|
||||||
return parsed.success ? parsed.data.models : null
|
|
||||||
} catch {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
},
|
|
||||||
path => path,
|
|
||||||
)
|
|
||||||
|
|
||||||
export function getModelCapability(model: string): ModelCapability | undefined {
|
|
||||||
if (!isModelCapabilitiesEligible()) return undefined
|
|
||||||
const cached = loadCache(getCachePath())
|
|
||||||
if (!cached || cached.length === 0) return undefined
|
|
||||||
const m = model.toLowerCase()
|
|
||||||
const exact = cached.find(c => c.id.toLowerCase() === m)
|
|
||||||
if (exact) return exact
|
|
||||||
return cached.find(c => m.includes(c.id.toLowerCase()))
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function refreshModelCapabilities(): Promise<void> {
|
|
||||||
if (!isModelCapabilitiesEligible()) return
|
|
||||||
if (isEssentialTrafficOnly()) return
|
|
||||||
|
|
||||||
try {
|
|
||||||
const anthropic = await getAnthropicClient({ maxRetries: 1 })
|
|
||||||
const betas = isClaudeAISubscriber() ? [OAUTH_BETA_HEADER] : undefined
|
|
||||||
const parsed: ModelCapability[] = []
|
|
||||||
for await (const entry of anthropic.models.list({ betas })) {
|
|
||||||
const result = ModelCapabilitySchema().safeParse(entry)
|
|
||||||
if (result.success) parsed.push(result.data)
|
|
||||||
}
|
|
||||||
if (parsed.length === 0) return
|
|
||||||
|
|
||||||
const path = getCachePath()
|
|
||||||
const models = sortForMatching(parsed)
|
|
||||||
if (isEqual(loadCache(path), models)) {
|
|
||||||
logForDebugging('[modelCapabilities] cache unchanged, skipping write')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
await mkdir(getCacheDir(), { recursive: true })
|
|
||||||
await writeFile(path, jsonStringify({ models, timestamp: Date.now() }), {
|
|
||||||
encoding: 'utf-8',
|
|
||||||
mode: 0o600,
|
|
||||||
})
|
|
||||||
loadCache.cache.delete(path)
|
|
||||||
logForDebugging(`[modelCapabilities] cached ${models.length} models`)
|
|
||||||
} catch (error) {
|
|
||||||
logForDebugging(
|
|
||||||
`[modelCapabilities] fetch failed: ${error instanceof Error ? error.message : 'unknown'}`,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
Reference in New Issue
Block a user