Stub internal-only recording and model capability helpers (#377)

This follow-up Phase C-lite slice replaces purely internal helper modules
with stable external no-op surfaces and collapses internal elevated error
logging to a no-op. The change removes additional USER_TYPE-gated helper
behavior without touching product-facing runtime flows.

Constraint: Keep this PR limited to isolated helper modules that are already external no-ops in practice
Rejected: pulling in broader speculation or logging-sink changes — that approach is less isolated and harder to defend during review.
Confidence: high
Scope-risk: narrow
Reversibility: clean
Directive: Continue Phase C with similarly isolated helpers before moving into mixed behavior files
Tested: bun run build
Tested: bun run smoke
Tested: bun run verify:privacy
Tested: bun run test:provider
Tested: bun run test:provider-recommendation
Not-tested: Full repo typecheck (upstream baseline remains noisy)

Co-authored-by: anandh8x <test@example.com>
This commit is contained in:
Anandan
2026-04-05 10:14:03 +05:30
committed by GitHub
parent d1a2df2f69
commit 5ff34283c4
3 changed files with 21 additions and 352 deletions

View File

@@ -1,239 +1,18 @@
import { appendFile, rename } from 'fs/promises'
import { basename, dirname, join } from 'path'
import { getOriginalCwd, getSessionId } from '../bootstrap/state.js'
import { createBufferedWriter } from './bufferedWriter.js'
import { registerCleanup } from './cleanupRegistry.js'
import { logForDebugging } from './debug.js'
import { getClaudeConfigHomeDir, isEnvTruthy } from './envUtils.js'
import { getFsImplementation } from './fsOperations.js'
import { sanitizePath } from './path.js'
import { jsonStringify } from './slowOperations.js'
// External build: terminal recording is not available.
// Keep this module as a stable no-op surface so runtime imports stay valid.
// Mutable recording state — filePath is updated when session ID changes (e.g., --resume)
// Mutable recording state — filePath is updated when session ID changes (e.g., --resume)
const recordingState: { filePath: string | null; timestamp: number } = {
  // Absolute path of the active .cast file; null until getRecordFilePath() computes it.
  filePath: null,
  // Epoch millis captured when the path was first computed; used as the filename suffix.
  timestamp: 0,
}
/**
* Get the asciicast recording file path.
* For ants with CLAUDE_CODE_TERMINAL_RECORDING=1: returns a path.
* Otherwise: returns null.
* The path is computed once and cached in recordingState.
*/
/**
 * Resolve the asciicast recording file path, computing and caching it in
 * recordingState on first use.
 *
 * Returns null unless USER_TYPE is 'ant' and CLAUDE_CODE_TERMINAL_RECORDING
 * is truthy. The file lives alongside the transcript; each launch gets its
 * own timestamped file so --continue produces multiple recordings.
 */
export function getRecordFilePath(): string | null {
  const cached = recordingState.filePath
  if (cached !== null) {
    return cached
  }
  const enabled =
    process.env.USER_TYPE === 'ant' &&
    isEnvTruthy(process.env.CLAUDE_CODE_TERMINAL_RECORDING)
  if (!enabled) {
    return null
  }
  // Record alongside the transcript, one file per launch.
  const projectDir = join(
    getClaudeConfigHomeDir(),
    'projects',
    sanitizePath(getOriginalCwd()),
  )
  recordingState.timestamp = Date.now()
  recordingState.filePath = join(
    projectDir,
    `${getSessionId()}-${recordingState.timestamp}.cast`,
  )
  return recordingState.filePath
}
/** Test-only hook: clear the cached recording path and timestamp. */
export function _resetRecordingStateForTesting(): void {
  Object.assign(recordingState, { filePath: null, timestamp: 0 })
}
// External build: recording is disabled, so there is no cached state to reset.
export function _resetRecordingStateForTesting(): void {}
/**
* Find all .cast files for the current session.
* Returns paths sorted by filename (chronological by timestamp suffix).
*/
/**
 * Find all .cast files recorded for the current session.
 * Returns absolute paths sorted by filename, which is chronological because
 * each name carries a millisecond timestamp suffix.
 */
export function getSessionRecordingPaths(): string[] {
  const sessionId = getSessionId()
  const projectDir = join(
    getClaudeConfigHomeDir(),
    'projects',
    sanitizePath(getOriginalCwd()),
  )
  try {
    // eslint-disable-next-line custom-rules/no-sync-fs -- called during /share before upload, not in hot path
    const entries = getFsImplementation().readdirSync(projectDir)
    // readdirSync may yield plain names or Dirent-like objects depending on the fs implementation.
    const names: string[] =
      typeof entries[0] === 'string'
        ? (entries as string[])
        : (entries as { name: string }[]).map(entry => entry.name)
    return names
      .filter(name => name.startsWith(sessionId) && name.endsWith('.cast'))
      .sort()
      .map(name => join(projectDir, name))
  } catch {
    // Missing directory (or any read failure) simply means no recordings.
    return []
  }
}
/**
* Rename the recording file to match the current session ID.
* Called after --resume/--continue changes the session ID via switchSession().
* The recorder was installed with the initial (random) session ID; this renames
* the file so getSessionRecordingPaths() can find it by the resumed session ID.
*/
/**
 * Rename the recording file to match the current session ID.
 *
 * Called after --resume/--continue changes the session ID via switchSession().
 * The recorder was installed with the initial (random) session ID; renaming
 * the file lets getSessionRecordingPaths() find it by the resumed session ID.
 *
 * No-op when nothing has been recorded yet or the name is already current.
 * Rename failures are logged and swallowed so they never break the session.
 */
export async function renameRecordingForSession(): Promise<void> {
  const oldPath = recordingState.filePath
  if (!oldPath || recordingState.timestamp === 0) {
    return
  }
  const projectsDir = join(getClaudeConfigHomeDir(), 'projects')
  const projectDir = join(projectsDir, sanitizePath(getOriginalCwd()))
  const newPath = join(
    projectDir,
    `${getSessionId()}-${recordingState.timestamp}.cast`,
  )
  if (oldPath === newPath) {
    return
  }
  // Flush pending writes before renaming so no output lands on the old path.
  await recorder?.flush()
  const oldName = basename(oldPath)
  const newName = basename(newPath)
  try {
    await rename(oldPath, newPath)
    recordingState.filePath = newPath
    // Fix: the message previously concatenated both names with no separator
    // ("oldnew"); restore the separator so the debug log is readable.
    logForDebugging(`[asciicast] Renamed recording: ${oldName} → ${newName}`)
  } catch {
    logForDebugging(
      `[asciicast] Failed to rename recording from ${oldName} to ${newName}`,
    )
  }
}
// External build: recording is disabled, so there is never a file to rename.
export async function renameRecordingForSession(): Promise<void> {}
// Minimal surface the installed recorder exposes to the rest of this module.
type AsciicastRecorder = {
  // Flush buffered output events and await pending file appends.
  flush(): Promise<void>
  // Flush, detach the stdout wrapper and resize listener, and restore stdout.
  dispose(): Promise<void>
}
// External build stub: nothing to flush when recording is unavailable.
// NOTE(review): duplicate of the implementation below — diff-collapse artifact.
export async function flushAsciicastRecorder(): Promise<void> {}
// Active recorder, set by installAsciicastRecorder(); null when not recording.
let recorder: AsciicastRecorder | null = null
/**
 * Read the current terminal dimensions, falling back to 80x24 when stdout
 * reports none (e.g. output is not a TTY).
 */
function getTerminalSize(): { cols: number; rows: number } {
  // Direct access to stdout dimensions — not in a React component
  /* eslint-disable custom-rules/prefer-use-terminal-size */
  const size = {
    cols: process.stdout.columns || 80,
    rows: process.stdout.rows || 24,
  }
  /* eslint-enable custom-rules/prefer-use-terminal-size */
  return size
}
/**
* Flush pending recording data to disk.
* Call before reading the .cast file (e.g., during /share).
*/
/**
 * Flush pending recording data to disk.
 * Call before reading the .cast file (e.g., during /share).
 * No-op when no recorder is installed.
 */
export async function flushAsciicastRecorder(): Promise<void> {
  if (recorder !== null) {
    await recorder.flush()
  }
}
/**
* Install the asciicast recorder.
* Wraps process.stdout.write to capture all terminal output with timestamps.
* Must be called before Ink mounts.
*/
/**
 * Install the asciicast recorder.
 *
 * Wraps process.stdout.write to capture all terminal output with timestamps
 * in asciicast v2 format, records terminal resizes, and registers a cleanup
 * that restores stdout. Must be called before Ink mounts.
 * No-op when recording is not enabled (see getRecordFilePath()).
 */
export function installAsciicastRecorder(): void {
  const filePath = getRecordFilePath()
  if (!filePath) {
    return
  }
  const { cols, rows } = getTerminalSize()
  const startTime = performance.now()
  // Write the asciicast v2 header
  const header = jsonStringify({
    version: 2,
    width: cols,
    height: rows,
    timestamp: Math.floor(Date.now() / 1000),
    env: {
      SHELL: process.env.SHELL || '',
      TERM: process.env.TERM || '',
    },
  })
  try {
    // Fix: create missing parent directories too. The previous non-recursive
    // mkdir failed whenever the projects dir did not exist yet; the catch
    // below masked that, and the appendFileSync then threw uncaught.
    // eslint-disable-next-line custom-rules/no-sync-fs -- one-time init before Ink mounts
    getFsImplementation().mkdirSync(dirname(filePath), { recursive: true })
  } catch {
    // Directory may already exist
  }
  // eslint-disable-next-line custom-rules/no-sync-fs -- one-time init before Ink mounts
  getFsImplementation().appendFileSync(filePath, header + '\n', { mode: 0o600 })
  // Serialize file appends so events land in order; errors are swallowed.
  let pendingWrite: Promise<void> = Promise.resolve()
  const writer = createBufferedWriter({
    writeFn(content: string) {
      // Use recordingState.filePath (mutable) so writes follow renames from --resume
      const currentPath = recordingState.filePath
      if (!currentPath) {
        return
      }
      pendingWrite = pendingWrite
        .then(() => appendFile(currentPath, content))
        .catch(() => {
          // Silently ignore write errors — don't break the session
        })
    },
    flushIntervalMs: 500,
    maxBufferSize: 50,
    maxBufferBytes: 10 * 1024 * 1024, // 10MB
  })
  // Wrap process.stdout.write to capture output
  const originalWrite = process.stdout.write.bind(
    process.stdout,
  ) as typeof process.stdout.write
  process.stdout.write = function (
    chunk: string | Uint8Array,
    encodingOrCb?: BufferEncoding | ((err?: Error) => void),
    cb?: (err?: Error) => void,
  ): boolean {
    // Record the output event ('o') with seconds elapsed since install
    const elapsed = (performance.now() - startTime) / 1000
    const text =
      typeof chunk === 'string' ? chunk : Buffer.from(chunk).toString('utf-8')
    writer.write(jsonStringify([elapsed, 'o', text]) + '\n')
    // Pass through to the real stdout
    if (typeof encodingOrCb === 'function') {
      return originalWrite(chunk, encodingOrCb)
    }
    return originalWrite(chunk, encodingOrCb, cb)
  } as typeof process.stdout.write
  // Handle terminal resize events ('r' records the new cols x rows)
  function onResize(): void {
    const elapsed = (performance.now() - startTime) / 1000
    const { cols: newCols, rows: newRows } = getTerminalSize()
    writer.write(jsonStringify([elapsed, 'r', `${newCols}x${newRows}`]) + '\n')
  }
  process.stdout.on('resize', onResize)
  recorder = {
    async flush(): Promise<void> {
      writer.flush()
      await pendingWrite
    },
    async dispose(): Promise<void> {
      writer.dispose()
      await pendingWrite
      process.stdout.removeListener('resize', onResize)
      process.stdout.write = originalWrite
    },
  }
  registerCleanup(async () => {
    await recorder?.dispose()
    recorder = null
  })
  logForDebugging(`[asciicast] Recording to ${filePath}`)
}
// External build: terminal recording is unavailable, so installation is a no-op.
export function installAsciicastRecorder(): void {}

View File

@@ -255,14 +255,6 @@ const updateLatestDebugLogSymlink = memoize(async (): Promise<void> => {
/**
* Logs errors for Ants only, always visible in production.
*/
export function logAntError(context: string, error: unknown): void {
if (process.env.USER_TYPE !== 'ant') {
return
}
if (error instanceof Error && error.stack) {
logForDebugging(`[internal] ${context} stack trace:\n${error.stack}`, {
level: 'error',
})
}
/**
 * External build stub: accepts and ignores its arguments so call sites keep
 * compiling while internal-only error surfacing stays disabled.
 */
export function logAntError(_context: string, _error: unknown): void {
  // External build: internal-only elevated error surfacing is disabled.
}

View File

@@ -1,118 +1,16 @@
import { readFileSync } from 'fs'
import { mkdir, writeFile } from 'fs/promises'
import isEqual from 'lodash-es/isEqual.js'
import memoize from 'lodash-es/memoize.js'
import { join } from 'path'
import { z } from 'zod/v4'
import { OAUTH_BETA_HEADER } from '../../constants/oauth.js'
import { getAnthropicClient } from '../../services/api/client.js'
import { isClaudeAISubscriber } from '../auth.js'
import { logForDebugging } from '../debug.js'
import { getClaudeConfigHomeDir } from '../envUtils.js'
import { safeParseJSON } from '../json.js'
import { lazySchema } from '../lazySchema.js'
import { isEssentialTrafficOnly } from '../privacyLevel.js'
import { jsonStringify } from '../slowOperations.js'
import { getAPIProvider, isFirstPartyAnthropicBaseUrl } from './providers.js'
// External build: internal model-capabilities fetch/cache path is disabled.
// Preserve a stable public surface so callers can continue to import it.
// .strip() — don't persist internal-only fields (mycro_deployments etc.) to disk
const ModelCapabilitySchema = lazySchema(() =>
  z
    .object({
      id: z.string(),
      // Token limits are optional; an absent field means "unknown".
      max_input_tokens: z.number().optional(),
      max_tokens: z.number().optional(),
    })
    .strip(),
)
// On-disk shape of the cache file: the model list plus a write timestamp.
const CacheFileSchema = lazySchema(() =>
  z.object({
    models: z.array(ModelCapabilitySchema()),
    timestamp: z.number(),
  }),
)
// Capability record as validated by ModelCapabilitySchema.
export type ModelCapability = z.infer<ReturnType<typeof ModelCapabilitySchema>>
function getCacheDir(): string {
return join(getClaudeConfigHomeDir(), 'cache')
// External build: plain structural replacement for the zod-inferred capability type.
export type ModelCapability = {
  // Model identifier used for capability lookups.
  id: string
  // Optional token limits; absent means "unknown".
  max_input_tokens?: number
  max_tokens?: number
}
function getCachePath(): string {
return join(getCacheDir(), 'model-capabilities.json')
/**
 * External build stub: the capability cache is disabled, so no capability is
 * ever known for any model.
 */
export function getModelCapability(
  _model: string,
): ModelCapability | undefined {
  return undefined
}
/**
 * Internal-only gate for the capability cache: requires USER_TYPE=ant, the
 * first-party provider, and a first-party Anthropic base URL.
 */
function isModelCapabilitiesEligible(): boolean {
  return (
    process.env.USER_TYPE === 'ant' &&
    getAPIProvider() === 'firstParty' &&
    isFirstPartyAnthropicBaseUrl()
  )
}
// Longest-id-first so substring matching prefers the most specific id; ties
// break lexicographically so equal model sets stay isEqual-stable across runs.
function sortForMatching(models: ModelCapability[]): ModelCapability[] {
  const byPriority = (a: ModelCapability, b: ModelCapability): number => {
    const lengthDelta = b.id.length - a.id.length
    return lengthDelta !== 0 ? lengthDelta : a.id.localeCompare(b.id)
  }
  return [...models].sort(byPriority)
}
// Keyed on cache path so tests that set CLAUDE_CONFIG_DIR get a fresh read
const loadCache = memoize(
  (path: string): ModelCapability[] | null => {
    try {
      // eslint-disable-next-line custom-rules/no-sync-fs -- memoized; called from sync getContextWindowForModel
      const raw = readFileSync(path, 'utf-8')
      // Validate the parsed JSON against the cache schema; null on any mismatch.
      const parsed = CacheFileSchema().safeParse(safeParseJSON(raw, false))
      return parsed.success ? parsed.data.models : null
    } catch {
      // Missing or unreadable cache file — treated the same as "no cache".
      return null
    }
  },
  // memoize resolver: the path itself is the cache key.
  path => path,
)
/**
 * Look up the cached capability record for a model name.
 * Prefers an exact (case-insensitive) id match, then falls back to the first
 * cached id that is a substring of the model name; the cache is sorted
 * longest-id-first, so the fallback picks the most specific id.
 * Returns undefined when ineligible or when nothing is cached.
 */
export function getModelCapability(model: string): ModelCapability | undefined {
  if (!isModelCapabilitiesEligible()) {
    return undefined
  }
  const models = loadCache(getCachePath())
  if (!models?.length) {
    return undefined
  }
  const needle = model.toLowerCase()
  return (
    models.find(entry => entry.id.toLowerCase() === needle) ??
    models.find(entry => needle.includes(entry.id.toLowerCase()))
  )
}
/**
 * Fetch model capabilities from the Anthropic models API and persist them to
 * the on-disk cache, skipping the write when nothing changed.
 * No-op when ineligible or when only essential traffic is allowed.
 * Fetch/parse/write failures are logged and swallowed.
 */
export async function refreshModelCapabilities(): Promise<void> {
  if (!isModelCapabilitiesEligible()) return
  if (isEssentialTrafficOnly()) return
  try {
    const anthropic = await getAnthropicClient({ maxRetries: 1 })
    const betas = isClaudeAISubscriber() ? [OAUTH_BETA_HEADER] : undefined
    // Keep only entries that match the capability schema; skip the rest.
    const capabilities: ModelCapability[] = []
    for await (const entry of anthropic.models.list({ betas })) {
      const result = ModelCapabilitySchema().safeParse(entry)
      if (result.success) {
        capabilities.push(result.data)
      }
    }
    if (capabilities.length === 0) return
    const path = getCachePath()
    const models = sortForMatching(capabilities)
    // Avoid rewriting (and later re-reading) an identical cache.
    if (isEqual(loadCache(path), models)) {
      logForDebugging('[modelCapabilities] cache unchanged, skipping write')
      return
    }
    await mkdir(getCacheDir(), { recursive: true })
    await writeFile(path, jsonStringify({ models, timestamp: Date.now() }), {
      encoding: 'utf-8',
      mode: 0o600,
    })
    // Invalidate the memoized read so the next lookup sees the fresh file.
    loadCache.cache.delete(path)
    logForDebugging(`[modelCapabilities] cached ${models.length} models`)
  } catch (error) {
    logForDebugging(
      `[modelCapabilities] fetch failed: ${error instanceof Error ? error.message : 'unknown'}`,
    )
  }
}
// External build: capability refresh is disabled; keep the async signature so callers still compile.
export async function refreshModelCapabilities(): Promise<void> {}