fix(repl): queue prompt guidance for next turn (#333)
Keep normal prompt submissions during generation queued instead of interrupting the current turn. Add a visible next-turn banner in the prompt area so users can tell their follow-up guidance was accepted, and cover the new behavior with focused tests.

Fixes #328

Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,35 @@
|
||||
import React from 'react'
import { afterEach, beforeEach, describe, expect, it, mock } from 'bun:test'
import { renderToString } from '../../utils/staticRender.js'

describe('PromptInputQueuedCommands', () => {
  beforeEach(() => {
    // Stub the command queue with a single queued prompt-mode entry so the
    // component has something to render a next-turn banner for.
    mock.module('../../hooks/useCommandQueue.js', () => ({
      useCommandQueue: () => [
        {
          value: 'Use another library',
          mode: 'prompt',
        },
      ],
    }))

    // Stub app state: not viewing a teammate agent, not in brief-only layout,
    // so the component takes its normal render path.
    mock.module('src/state/AppState.js', () => ({
      useAppState: (
        selector: (state: { viewingAgentTaskId?: string; isBriefOnly: boolean }) => unknown,
      ) => selector({ viewingAgentTaskId: undefined, isBriefOnly: false }),
    }))
  })

  afterEach(() => {
    // Undo the module mocks so other test files see the real implementations.
    mock.restore()
  })

  it('shows a next-turn guidance banner for queued prompt messages', async () => {
    // Import after the mocks are registered so the component picks them up.
    const { PromptInputQueuedCommands } = await import('./PromptInputQueuedCommands.js')

    const rendered = await renderToString(<PromptInputQueuedCommands />, 100)

    // The singular banner copy plus the queued message text must both appear.
    expect(rendered).toContain('1 message queued for next turn')
    expect(rendered).toContain('Use another library')
  })
})
|
||||
@@ -1,13 +1,14 @@
|
||||
import { feature } from 'bun:bundle';
|
||||
import * as React from 'react';
|
||||
import { useMemo } from 'react';
|
||||
import { Box } from 'src/ink.js';
|
||||
import { Box, Text } from 'src/ink.js';
|
||||
import { useAppState } from 'src/state/AppState.js';
|
||||
import type { AppState } from 'src/state/AppState.js';
|
||||
import { STATUS_TAG, SUMMARY_TAG, TASK_NOTIFICATION_TAG } from '../../constants/xml.js';
|
||||
import { QueuedMessageProvider } from '../../context/QueuedMessageContext.js';
|
||||
import { useCommandQueue } from '../../hooks/useCommandQueue.js';
|
||||
import type { QueuedCommand } from '../../types/textInputTypes.js';
|
||||
import { isQueuedCommandVisible } from '../../utils/messageQueueManager.js';
|
||||
import { isQueuedCommandEditable, isQueuedCommandVisible } from '../../utils/messageQueueManager.js';
|
||||
import { createUserMessage, EMPTY_LOOKUPS, normalizeMessages } from '../../utils/messages.js';
|
||||
import { jsonParse } from '../../utils/slowOperations.js';
|
||||
import { Message } from '../Message.js';
|
||||
@@ -70,17 +71,25 @@ function processQueuedCommands(queuedCommands: QueuedCommand[]): QueuedCommand[]
|
||||
}
|
||||
function PromptInputQueuedCommandsImpl(): React.ReactNode {
|
||||
const queuedCommands = useCommandQueue();
|
||||
const viewingAgent = useAppState(s => !!s.viewingAgentTaskId);
|
||||
const viewingAgent = useAppState((s: AppState) => !!s.viewingAgentTaskId);
|
||||
// Brief layout: dim queue items + skip the paddingX (brief messages
|
||||
// already indent themselves). Gate mirrors the brief-spinner/message
|
||||
// check elsewhere — no teammate-view override needed since this
|
||||
// component early-returns when viewing a teammate.
|
||||
const useBriefLayout = feature('KAIROS') || feature('KAIROS_BRIEF') ?
|
||||
// biome-ignore lint/correctness/useHookAtTopLevel: feature() is a compile-time constant
|
||||
useAppState(s_0 => s_0.isBriefOnly) : false;
|
||||
useAppState((s_0: AppState) => s_0.isBriefOnly) : false;
|
||||
|
||||
// createUserMessage mints a fresh UUID per call; without memoization, streaming
|
||||
// re-renders defeat Message's areMessagePropsEqual (compares uuid) → flicker.
|
||||
const queuedPromptCount = useMemo(
|
||||
() =>
|
||||
queuedCommands.filter(
|
||||
cmd => isQueuedCommandEditable(cmd) && cmd.mode === 'prompt',
|
||||
).length,
|
||||
[queuedCommands],
|
||||
);
|
||||
|
||||
const messages = useMemo(() => {
|
||||
if (queuedCommands.length === 0) return null;
|
||||
// task-notification is shown via useInboxNotification; most isMeta commands
|
||||
@@ -108,6 +117,11 @@ function PromptInputQueuedCommandsImpl(): React.ReactNode {
|
||||
return null;
|
||||
}
|
||||
return <Box marginTop={1} flexDirection="column">
|
||||
{queuedPromptCount > 0 && <Box marginLeft={2} marginBottom={1}>
|
||||
<Text dimColor>
|
||||
{queuedPromptCount === 1 ? '1 message queued for next turn' : `${queuedPromptCount} messages queued for next turn`}
|
||||
</Text>
|
||||
</Box>}
|
||||
{messages.map((message, i) => <QueuedMessageProvider key={i} isFirst={i === 0} useBriefLayout={useBriefLayout}>
|
||||
<Message message={message} lookups={EMPTY_LOOKUPS} addMargin={false} tools={[]} commands={[]} verbose={false} inProgressToolUseIDs={EMPTY_SET} progressMessagesForMessage={[]} shouldAnimate={false} shouldShowDot={false} isTranscriptMode={false} isStatic={true} />
|
||||
</QueuedMessageProvider>)}
|
||||
|
||||
src/utils/handlePromptSubmit.test.ts — new file, 89 lines added
@@ -0,0 +1,89 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, mock } from 'bun:test'
|
||||
import { getCommandQueue, resetCommandQueue } from './messageQueueManager.js'
|
||||
|
||||
describe('handlePromptSubmit', () => {
|
||||
beforeEach(() => {
|
||||
resetCommandQueue()
|
||||
mock.module('src/services/analytics/index.js', () => ({
|
||||
logEvent: () => {},
|
||||
}))
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
resetCommandQueue()
|
||||
mock.restore()
|
||||
})
|
||||
|
||||
it('queues prompt submissions during generation without interrupting the current turn', async () => {
|
||||
const { handlePromptSubmit } = await import('./handlePromptSubmit.js')
|
||||
|
||||
const abortCalls: unknown[] = []
|
||||
const inputChanges: string[] = []
|
||||
let cursorOffset = 123
|
||||
let bufferCleared = false
|
||||
let pastedContentsCleared = false
|
||||
let historyReset = false
|
||||
|
||||
await handlePromptSubmit({
|
||||
input: ' use another library ',
|
||||
mode: 'prompt',
|
||||
pastedContents: {},
|
||||
helpers: {
|
||||
setCursorOffset: offset => {
|
||||
cursorOffset = offset
|
||||
},
|
||||
clearBuffer: () => {
|
||||
bufferCleared = true
|
||||
},
|
||||
resetHistory: () => {
|
||||
historyReset = true
|
||||
},
|
||||
},
|
||||
onInputChange: value => {
|
||||
inputChanges.push(value)
|
||||
},
|
||||
setPastedContents: updater => {
|
||||
const nextValue =
|
||||
typeof updater === 'function'
|
||||
? updater({ 1: { id: 1, type: 'text', content: 'x' } })
|
||||
: updater
|
||||
pastedContentsCleared = Object.keys(nextValue).length === 0
|
||||
},
|
||||
abortController: {
|
||||
abort: (reason: unknown) => {
|
||||
abortCalls.push(reason)
|
||||
},
|
||||
} as never,
|
||||
hasInterruptibleToolInProgress: true,
|
||||
queryGuard: {
|
||||
isActive: true,
|
||||
} as never,
|
||||
isExternalLoading: false,
|
||||
commands: [],
|
||||
messages: [],
|
||||
mainLoopModel: 'sonnet',
|
||||
ideSelection: undefined,
|
||||
querySource: 'repl' as never,
|
||||
setToolJSX: () => {},
|
||||
getToolUseContext: () => ({}) as never,
|
||||
setUserInputOnProcessing: () => {},
|
||||
setAbortController: () => {},
|
||||
onQuery: async () => {},
|
||||
setAppState: () => ({}) as never,
|
||||
})
|
||||
|
||||
expect(abortCalls).toEqual([])
|
||||
expect(inputChanges).toEqual([''])
|
||||
expect(cursorOffset).toBe(0)
|
||||
expect(bufferCleared).toBe(true)
|
||||
expect(pastedContentsCleared).toBe(true)
|
||||
expect(historyReset).toBe(true)
|
||||
expect(getCommandQueue()).toMatchObject([
|
||||
{
|
||||
value: 'use another library',
|
||||
preExpansionValue: 'use another library',
|
||||
mode: 'prompt',
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
@@ -316,9 +316,10 @@ export async function handlePromptSubmit(
|
||||
return
|
||||
}
|
||||
|
||||
// Interrupt the current turn when all executing tools have
|
||||
// interruptBehavior 'cancel' (e.g. SleepTool).
|
||||
if (params.hasInterruptibleToolInProgress) {
|
||||
// Prompt submissions during generation should guide the next turn without
|
||||
// interrupting the current one. Keep the explicit interrupt path only for
|
||||
// non-prompt inputs that opt into that behavior.
|
||||
if (mode !== 'prompt' && params.hasInterruptibleToolInProgress) {
|
||||
logForDebugging(
|
||||
`[interrupt] Aborting current turn: streamMode=${params.streamMode}`,
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user