feat(vscode-ide-companion): 改进消息排序和显示逻辑

- 添加时间戳支持,确保消息按时间顺序排列
- 更新工具调用处理逻辑,自动添加和保留时间戳
- 修改消息渲染逻辑,将所有类型的消息合并排序后统一渲染
- 优化完成的工具调用显示,修复显示顺序问题
- 调整进行中的工具调用显示,统一到消息流中展示
- 移除重复的计划展示逻辑,避免最新块重复出现
- 重构消息处理和渲染代码,提高可维护性
This commit is contained in:
yiliang114
2025-11-28 09:55:06 +08:00
parent dc340daf8b
commit 9cc48f12da
44 changed files with 2445 additions and 767 deletions

View File

@@ -27,10 +27,10 @@ export const useMessageHandling = () => {
const [isStreaming, setIsStreaming] = useState(false);
const [isWaitingForResponse, setIsWaitingForResponse] = useState(false);
const [loadingMessage, setLoadingMessage] = useState('');
const [currentStreamContent, setCurrentStreamContent] = useState('');
// Use ref to store current stream content, avoiding useEffect dependency issues
const currentStreamContentRef = useRef<string>('');
// Track the index of the assistant placeholder message during streaming
const streamingMessageIndexRef = useRef<number | null>(null);
// Track the index of the current aggregated thinking message
const thinkingMessageIndexRef = useRef<number | null>(null);
/**
* Add message
@@ -49,41 +49,75 @@ export const useMessageHandling = () => {
/**
* Start streaming response
*/
const startStreaming = useCallback(() => {
const startStreaming = useCallback((timestamp?: number) => {
// Create an assistant placeholder message immediately so tool calls won't jump before it
setMessages((prev) => {
// Record index of the placeholder to update on chunks
streamingMessageIndexRef.current = prev.length;
return [
...prev,
{
role: 'assistant',
content: '',
// Use provided timestamp (from extension) to keep ordering stable
timestamp: typeof timestamp === 'number' ? timestamp : Date.now(),
},
];
});
setIsStreaming(true);
setCurrentStreamContent('');
currentStreamContentRef.current = '';
}, []);
/**
* Add stream chunk
*/
const appendStreamChunk = useCallback((chunk: string) => {
setCurrentStreamContent((prev) => {
const newContent = prev + chunk;
currentStreamContentRef.current = newContent;
return newContent;
setMessages((prev) => {
let idx = streamingMessageIndexRef.current;
const next = prev.slice();
// If there is no active placeholder (e.g., after a tool call), start a new one
if (idx === null) {
idx = next.length;
streamingMessageIndexRef.current = idx;
next.push({ role: 'assistant', content: '', timestamp: Date.now() });
}
if (idx < 0 || idx >= next.length) {
return prev;
}
const target = next[idx];
next[idx] = { ...target, content: (target.content || '') + chunk };
return next;
});
}, []);
/**
* Break current assistant stream segment (e.g., when a tool call starts/updates)
* Next incoming chunk will create a new assistant placeholder
*/
const breakAssistantSegment = useCallback(() => {
streamingMessageIndexRef.current = null;
}, []);
/**
* End streaming response
*/
const endStreaming = useCallback(() => {
// If there is streaming content, add it as complete assistant message
if (currentStreamContentRef.current) {
const assistantMessage: TextMessage = {
role: 'assistant',
content: currentStreamContentRef.current,
timestamp: Date.now(),
};
setMessages((prev) => [...prev, assistantMessage]);
}
// Finalize streaming; content already lives in the placeholder message
setIsStreaming(false);
setIsWaitingForResponse(false);
setCurrentStreamContent('');
currentStreamContentRef.current = '';
streamingMessageIndexRef.current = null;
// Remove the thinking message if it exists (collapse thoughts)
setMessages((prev) => {
const idx = thinkingMessageIndexRef.current;
thinkingMessageIndexRef.current = null;
if (idx === null || idx < 0 || idx >= prev.length) {
return prev;
}
const next = prev.slice();
next.splice(idx, 1);
return next;
});
}, []);
/**
@@ -108,7 +142,6 @@ export const useMessageHandling = () => {
isStreaming,
isWaitingForResponse,
loadingMessage,
currentStreamContent,
// Operations
addMessage,
@@ -116,6 +149,36 @@ export const useMessageHandling = () => {
startStreaming,
appendStreamChunk,
endStreaming,
// Thought handling
appendThinkingChunk: (chunk: string) => {
setMessages((prev) => {
let idx = thinkingMessageIndexRef.current;
const next = prev.slice();
if (idx === null) {
idx = next.length;
thinkingMessageIndexRef.current = idx;
next.push({ role: 'thinking', content: '', timestamp: Date.now() });
}
if (idx >= 0 && idx < next.length) {
const target = next[idx];
next[idx] = { ...target, content: (target.content || '') + chunk };
}
return next;
});
},
clearThinking: () => {
setMessages((prev) => {
const idx = thinkingMessageIndexRef.current;
thinkingMessageIndexRef.current = null;
if (idx === null || idx < 0 || idx >= prev.length) {
return prev;
}
const next = prev.slice();
next.splice(idx, 1);
return next;
});
},
breakAssistantSegment,
setWaitingForResponse,
clearWaitingForResponse,
setMessages,

View File

@@ -0,0 +1,212 @@
/**
* @license
* Copyright 2025 Qwen Team
* SPDX-License-Identifier: Apache-2.0
*/
import { renderHook, act } from '@testing-library/react';
import { useCompletionTrigger } from './useCompletionTrigger';
// Local mirror of the CompletionItem shape consumed by useCompletionTrigger,
// duplicated here so the test file does not depend on the real module's exports.
interface CompletionItem {
  id: string; // unique key for the rendered completion list
  label: string; // primary display text
  description?: string; // optional secondary display text
  icon?: React.ReactNode; // NOTE(review): React is not imported in this file — confirm a global React type is in scope
  type: 'file' | 'symbol' | 'command' | 'variable';
  value?: unknown; // opaque payload inserted when the item is selected
}
describe('useCompletionTrigger', () => {
let mockInputRef: React.RefObject<HTMLDivElement>;
let mockGetCompletionItems: (
trigger: '@' | '/',
query: string,
) => Promise<CompletionItem[]>;
beforeEach(() => {
mockInputRef = {
current: document.createElement('div'),
};
mockGetCompletionItems = jest.fn();
});
afterEach(() => {
jest.clearAllTimers();
});
it('should trigger completion when @ is typed at word boundary', async () => {
mockGetCompletionItems.mockResolvedValue([
{ id: '1', label: 'test.txt', type: 'file' },
]);
const { result } = renderHook(() =>
useCompletionTrigger(mockInputRef, mockGetCompletionItems),
);
// Simulate typing @ at the beginning
mockInputRef.current.textContent = '@';
// Mock window.getSelection to return a valid range
const mockRange = {
getBoundingClientRect: () => ({ top: 100, left: 50 }),
};
window.getSelection = jest.fn().mockReturnValue({
rangeCount: 1,
getRangeAt: () => mockRange,
} as unknown as Selection);
// Trigger input event
await act(async () => {
const event = new Event('input', { bubbles: true });
mockInputRef.current.dispatchEvent(event);
// Wait for async operations
await new Promise((resolve) => setTimeout(resolve, 0));
});
expect(result.current.isOpen).toBe(true);
expect(result.current.triggerChar).toBe('@');
expect(mockGetCompletionItems).toHaveBeenCalledWith('@', '');
});
it('should show loading state initially', async () => {
// Simulate slow file loading
mockGetCompletionItems.mockImplementation(
() =>
new Promise((resolve) =>
setTimeout(
() => resolve([{ id: '1', label: 'test.txt', type: 'file' }]),
100,
),
),
);
const { result } = renderHook(() =>
useCompletionTrigger(mockInputRef, mockGetCompletionItems),
);
// Simulate typing @ at the beginning
mockInputRef.current.textContent = '@';
const mockRange = {
getBoundingClientRect: () => ({ top: 100, left: 50 }),
};
window.getSelection = jest.fn().mockReturnValue({
rangeCount: 1,
getRangeAt: () => mockRange,
} as unknown as Selection);
// Trigger input event
await act(async () => {
const event = new Event('input', { bubbles: true });
mockInputRef.current.dispatchEvent(event);
// Wait for async operations but not for the slow promise
await new Promise((resolve) => setTimeout(resolve, 0));
});
// Should show loading state immediately
expect(result.current.isOpen).toBe(true);
expect(result.current.items).toHaveLength(1);
expect(result.current.items[0].id).toBe('loading');
});
it('should timeout if loading takes too long', async () => {
// Simulate very slow file loading
mockGetCompletionItems.mockImplementation(
() =>
new Promise(
(resolve) =>
setTimeout(
() => resolve([{ id: '1', label: 'test.txt', type: 'file' }]),
10000,
), // 10 seconds
),
);
const { result } = renderHook(() =>
useCompletionTrigger(mockInputRef, mockGetCompletionItems),
);
// Simulate typing @ at the beginning
mockInputRef.current.textContent = '@';
const mockRange = {
getBoundingClientRect: () => ({ top: 100, left: 50 }),
};
window.getSelection = jest.fn().mockReturnValue({
rangeCount: 1,
getRangeAt: () => mockRange,
} as unknown as Selection);
// Trigger input event
await act(async () => {
const event = new Event('input', { bubbles: true });
mockInputRef.current.dispatchEvent(event);
// Wait for async operations
await new Promise((resolve) => setTimeout(resolve, 0));
});
// Should show loading state initially
expect(result.current.isOpen).toBe(true);
expect(result.current.items).toHaveLength(1);
expect(result.current.items[0].id).toBe('loading');
// Wait for timeout (5 seconds)
await act(async () => {
await new Promise((resolve) => setTimeout(resolve, 5100)); // 5.1 seconds
});
// Should show timeout message
expect(result.current.items).toHaveLength(1);
expect(result.current.items[0].id).toBe('timeout');
expect(result.current.items[0].label).toBe('Timeout');
});
it('should close completion when cursor moves away from trigger', async () => {
mockGetCompletionItems.mockResolvedValue([
{ id: '1', label: 'test.txt', type: 'file' },
]);
const { result } = renderHook(() =>
useCompletionTrigger(mockInputRef, mockGetCompletionItems),
);
// Simulate typing @ at the beginning
mockInputRef.current.textContent = '@';
const mockRange = {
getBoundingClientRect: () => ({ top: 100, left: 50 }),
};
window.getSelection = jest.fn().mockReturnValue({
rangeCount: 1,
getRangeAt: () => mockRange,
} as unknown as Selection);
// Trigger input event to open completion
await act(async () => {
const event = new Event('input', { bubbles: true });
mockInputRef.current.dispatchEvent(event);
await new Promise((resolve) => setTimeout(resolve, 0));
});
expect(result.current.isOpen).toBe(true);
// Simulate moving cursor away (typing space after @)
mockInputRef.current.textContent = '@ ';
// Trigger input event to close completion
await act(async () => {
const event = new Event('input', { bubbles: true });
mockInputRef.current.dispatchEvent(event);
await new Promise((resolve) => setTimeout(resolve, 0));
});
// Should close completion when query contains space
expect(result.current.isOpen).toBe(false);
});
});

View File

@@ -63,6 +63,14 @@ export function useCompletionTrigger(
[getCompletionItems],
);
/**
 * Re-fetch completion items for the currently open dropdown.
 * No-op when the dropdown is closed or no trigger character is active.
 */
const refreshCompletion = useCallback(async () => {
  if (!state.isOpen || !state.triggerChar) {
    return;
  }
  try {
    const items = await getCompletionItems(state.triggerChar, state.query);
    // NOTE(review): a slow response can overwrite items from a newer query;
    // consider a request token/abort guard if this becomes observable.
    setState((prev) => ({ ...prev, items }));
  } catch (err) {
    // Keep the current items on failure; a rejected fetch must not surface
    // as an unhandled promise rejection (the original had no handler).
    console.warn('[useCompletionTrigger] refreshCompletion failed:', err);
  }
}, [state.isOpen, state.triggerChar, state.query, getCompletionItems]);
useEffect(() => {
const inputElement = inputRef.current;
if (!inputElement) {
@@ -217,5 +225,6 @@ export function useCompletionTrigger(
items: state.items,
closeCompletion,
openCompletion,
refreshCompletion,
};
}

View File

@@ -0,0 +1,93 @@
/**
* @license
* Copyright 2025 Qwen Team
* SPDX-License-Identifier: Apache-2.0
*/
import { renderHook, act } from '@testing-library/react';
import { useToolCalls } from './useToolCalls';
import type { ToolCallUpdate } from '../types/toolCall.js';
describe('useToolCalls', () => {
  it('should add timestamp when creating tool call', () => {
    const { result } = renderHook(() => useToolCalls());

    const creation: ToolCallUpdate = {
      type: 'tool_call',
      toolCallId: 'test-1',
      kind: 'read',
      title: 'Read file',
      status: 'pending',
    };
    act(() => {
      result.current.handleToolCallUpdate(creation);
    });

    const calls = [...result.current.toolCalls.values()];
    expect(calls).toHaveLength(1);
    expect(calls[0].timestamp).toBeDefined();
    expect(typeof calls[0].timestamp).toBe('number');
  });

  it('should preserve timestamp when updating tool call', () => {
    const { result } = renderHook(() => useToolCalls());
    const timestamp = Date.now() - 1000; // created 1 second ago

    // Create tool call with a specific timestamp
    act(() => {
      result.current.handleToolCallUpdate({
        type: 'tool_call',
        toolCallId: 'test-1',
        kind: 'read',
        title: 'Read file',
        status: 'pending',
        timestamp,
      });
    });

    // Update without a timestamp. A NON-final status is used deliberately:
    // the hook bumps the timestamp to Date.now() on 'completed'/'failed' so
    // finished calls sort by completion time — using 'completed' here (as the
    // previous version did) would make this preservation check fail.
    // NOTE(review): confirm 'in_progress' is a valid ToolCallUpdate status.
    act(() => {
      result.current.handleToolCallUpdate({
        type: 'tool_call_update',
        toolCallId: 'test-1',
        status: 'in_progress',
      } as ToolCallUpdate);
    });

    const calls = [...result.current.toolCalls.values()];
    expect(calls).toHaveLength(1);
    expect(calls[0].timestamp).toBe(timestamp);
  });

  it('should use current time as default timestamp', () => {
    const { result } = renderHook(() => useToolCalls());

    const before = Date.now();
    act(() => {
      result.current.handleToolCallUpdate({
        type: 'tool_call',
        toolCallId: 'test-1',
        kind: 'read',
        title: 'Read file',
        status: 'pending',
        // No timestamp provided — the hook should default to Date.now()
      });
    });
    const after = Date.now();

    const calls = [...result.current.toolCalls.values()];
    expect(calls).toHaveLength(1);
    expect(calls[0].timestamp).toBeGreaterThanOrEqual(before);
    expect(calls[0].timestamp).toBeLessThanOrEqual(after);
  });
});

View File

@@ -25,6 +25,70 @@ export const useToolCalls = () => {
const newMap = new Map(prevToolCalls);
const existing = newMap.get(update.toolCallId);
// Helpers for todo/todos plan merging & content replacement
// Tool kinds that represent a todo/plan write (case-insensitive match).
// The kind is lower-cased once instead of three times as before.
const todoWriteKinds = new Set(['todo_write', 'todowrite', 'update_todos']);
const isTodoWrite = (kind?: string) =>
  todoWriteKinds.has((kind ?? '').toLowerCase());
// Canonical form of a tool-call title: trimmed and lower-cased;
// non-string input yields ''.
const normTitle = (t: unknown): string => {
  if (typeof t !== 'string') {
    return '';
  }
  return t.trim().toLowerCase();
};
// Titles whose cards may be merged with a previous plan snapshot.
const isTodoTitleMergeable = (t?: unknown): boolean =>
  ['updated plan', 'update todos'].includes(normTitle(t));
// Join the text of every 'content' item with newlines; 'diff' items and
// items without text are skipped. Returns '' for missing/empty input.
const extractText = (
  content?: Array<{
    type: 'content' | 'diff';
    content?: { text?: string };
  }>,
): string =>
  (content ?? [])
    .flatMap((item) =>
      item.type === 'content' && item.content?.text
        ? [String(item.content.text)]
        : [],
    )
    .join('\n');
// Split todo text into trimmed, non-empty lines with any leading
// '[...] ' checkbox marker removed, leaving just the task text so
// entries can be compared regardless of completion state.
const normalizeTodoLines = (text: string): string[] => {
  if (!text) {
    return [];
  }
  const tasks: string[] = [];
  for (const raw of text.split(/\r?\n/)) {
    const line = raw.trim();
    if (!line) {
      continue;
    }
    const markerEnd = line.indexOf('] ');
    tasks.push(markerEnd >= 0 ? line.slice(markerEnd + 2).trim() : line);
  }
  return tasks;
};
// Compare two todo texts: 'same' when the normalized lines match in order
// and count; 'supplement' when every previous line still appears somewhere
// in the next text (new plan only extends the old one).
const isSameOrSupplement = (
  prevText: string,
  nextText: string,
): { same: boolean; supplement: boolean } => {
  const prevLines = normalizeTodoLines(prevText);
  const nextLines = normalizeTodoLines(nextText);
  const identical =
    prevLines.length === nextLines.length &&
    prevLines.every((line, i) => line === nextLines[i]);
  if (identical) {
    return { same: true, supplement: false };
  }
  const nextSet = new Set(nextLines);
  return {
    same: false,
    supplement: prevLines.every((line) => nextSet.has(line)),
  };
};
const safeTitle = (title: unknown): string => {
if (typeof title === 'string') {
return title;
@@ -44,6 +108,49 @@ export const useToolCalls = () => {
newText: item.newText,
}));
// 合并策略:对于 todo_write + mergeable 标题Updated Plan/Update Todos
// 如果与最近一条同类卡片相同或是补充,则合并更新而不是新增。
if (isTodoWrite(update.kind) && isTodoTitleMergeable(update.title)) {
const nextText = extractText(content);
// 找最近一条 todo_write + 可合并标题 的卡片
let lastId: string | null = null;
let lastText = '';
let lastTimestamp = 0;
for (const tc of newMap.values()) {
if (
isTodoWrite(tc.kind) &&
isTodoTitleMergeable(tc.title) &&
typeof tc.timestamp === 'number' &&
tc.timestamp >= lastTimestamp
) {
lastId = tc.toolCallId;
lastText = extractText(tc.content);
lastTimestamp = tc.timestamp || 0;
}
}
if (lastId) {
const cmp = isSameOrSupplement(lastText, nextText);
if (cmp.same) {
// 完全相同:忽略本次新增
return newMap;
}
if (cmp.supplement) {
// 补充:替换内容到上一条(使用更新语义)
const prev = newMap.get(lastId);
if (prev) {
newMap.set(lastId, {
...prev,
content, // 覆盖(不追加)
status: update.status || prev.status,
timestamp: update.timestamp || Date.now(),
});
return newMap;
}
}
}
}
newMap.set(update.toolCallId, {
toolCallId: update.toolCallId,
kind: update.kind || 'other',
@@ -52,6 +159,7 @@ export const useToolCalls = () => {
rawInput: update.rawInput as string | object | undefined,
content,
locations: update.locations,
timestamp: update.timestamp || Date.now(), // 添加时间戳
});
} else if (update.type === 'tool_call_update') {
const updatedContent = update.content
@@ -65,9 +173,25 @@ export const useToolCalls = () => {
: undefined;
if (existing) {
const mergedContent = updatedContent
? [...(existing.content || []), ...updatedContent]
: existing.content;
// 默认行为是追加;但对于 todo_write + 可合并标题,使用替换避免堆叠重复
let mergedContent = existing.content;
if (updatedContent) {
if (
isTodoWrite(update.kind || existing.kind) &&
(isTodoTitleMergeable(update.title) ||
isTodoTitleMergeable(existing.title))
) {
mergedContent = updatedContent; // 覆盖
} else {
mergedContent = [...(existing.content || []), ...updatedContent];
}
}
// If tool call has just completed/failed, bump timestamp to now for correct ordering
const isFinal =
update.status === 'completed' || update.status === 'failed';
const nextTimestamp = isFinal
? Date.now()
: update.timestamp || existing.timestamp || Date.now();
newMap.set(update.toolCallId, {
...existing,
@@ -76,6 +200,7 @@ export const useToolCalls = () => {
...(update.status && { status: update.status }),
content: mergedContent,
...(update.locations && { locations: update.locations }),
timestamp: nextTimestamp, // 更新时间戳(完成/失败时以完成时间为准)
});
} else {
newMap.set(update.toolCallId, {
@@ -86,6 +211,7 @@ export const useToolCalls = () => {
rawInput: update.rawInput as string | object | undefined,
content: updatedContent,
locations: update.locations,
timestamp: update.timestamp || Date.now(), // 添加时间戳
});
}
}

View File

@@ -66,9 +66,12 @@ interface UseWebViewMessagesProps {
timestamp: number;
}) => void;
clearMessages: () => void;
startStreaming: () => void;
startStreaming: (timestamp?: number) => void;
appendStreamChunk: (chunk: string) => void;
endStreaming: () => void;
breakAssistantSegment: () => void;
appendThinkingChunk: (chunk: string) => void;
clearThinking: () => void;
clearWaitingForResponse: () => void;
};
@@ -116,6 +119,38 @@ export const useWebViewMessages = ({
handlePermissionRequest,
});
// Track last "Updated Plan" snapshot toolcall to support merge/dedupe
const lastPlanSnapshotRef = useRef<{
id: string;
text: string; // joined lines
lines: string[];
} | null>(null);
// Render plan entries as markdown-style checkbox lines:
// completed -> [x], in_progress -> [-], anything else -> [ ].
const buildPlanLines = (entries: PlanEntry[]): string[] =>
  entries.map((entry) => {
    let mark = ' ';
    if (entry.status === 'completed') {
      mark = 'x';
    } else if (entry.status === 'in_progress') {
      mark = '-';
    }
    return `- [${mark}] ${entry.content}`.trim();
  });
// True when every line of the previous plan — ignoring the '[x]'/'[ ]'
// status marker — still appears in the next plan, i.e. the new plan only
// supplements (or re-statuses) the old one.
const isSupplementOf = (
  prevLines: string[],
  nextLines: string[],
): boolean => {
  const taskText = (line: string): string => {
    const markerEnd = line.indexOf('] ');
    return markerEnd >= 0 ? line.slice(markerEnd + 2).trim() : line.trim();
  };
  const nextTasks = new Set(nextLines.map(taskText));
  return prevLines.every((line) => nextTasks.has(taskText(line)));
};
// Update refs
useEffect(() => {
handlersRef.current = {
@@ -202,12 +237,42 @@ export const useWebViewMessages = ({
}
case 'message': {
handlers.messageHandling.addMessage(message.data);
const msg = message.data as {
role?: 'user' | 'assistant' | 'thinking';
content?: string;
timestamp?: number;
};
handlers.messageHandling.addMessage(
msg as unknown as Parameters<
typeof handlers.messageHandling.addMessage
>[0],
);
// Robustness: if an assistant message arrives outside the normal stream
// pipeline (no explicit streamEnd), ensure we clear streaming/waiting states
if (msg.role === 'assistant') {
try {
handlers.messageHandling.endStreaming();
} catch (err) {
// no-op: stream might not have been started
console.warn('[PanelManager] Failed to end streaming:', err);
}
try {
handlers.messageHandling.clearWaitingForResponse();
} catch (err) {
// no-op: already cleared
console.warn(
'[PanelManager] Failed to clear waiting for response:',
err,
);
}
}
break;
}
case 'streamStart':
handlers.messageHandling.startStreaming();
handlers.messageHandling.startStreaming(
(message.data as { timestamp?: number } | undefined)?.timestamp,
);
break;
case 'streamChunk': {
@@ -216,17 +281,14 @@ export const useWebViewMessages = ({
}
case 'thoughtChunk': {
const thinkingMessage = {
role: 'thinking' as const,
content: message.data.content || message.data.chunk || '',
timestamp: Date.now(),
};
handlers.messageHandling.addMessage(thinkingMessage);
const chunk = message.data.content || message.data.chunk || '';
handlers.messageHandling.appendThinkingChunk(chunk);
break;
}
case 'streamEnd':
handlers.messageHandling.endStreaming();
handlers.messageHandling.clearThinking();
break;
case 'error':
@@ -276,13 +338,76 @@ export const useWebViewMessages = ({
content: permToolCall.content as ToolCallUpdate['content'],
locations: permToolCall.locations,
});
// Split assistant stream so subsequent chunks start a new assistant message
handlers.messageHandling.breakAssistantSegment();
}
break;
}
case 'plan':
if (message.data.entries && Array.isArray(message.data.entries)) {
handlers.setPlanEntries(message.data.entries as PlanEntry[]);
const entries = message.data.entries as PlanEntry[];
handlers.setPlanEntries(entries);
// 生成新的快照文本
const lines = buildPlanLines(entries);
const text = lines.join('\n');
const prev = lastPlanSnapshotRef.current;
// 1) 完全相同 -> 跳过
if (prev && prev.text === text) {
break;
}
try {
const ts = Date.now();
// 2) 补充或状态更新 -> 合并到上一条(使用 tool_call_update 覆盖内容)
if (prev && isSupplementOf(prev.lines, lines)) {
handlers.handleToolCallUpdate({
type: 'tool_call_update',
toolCallId: prev.id,
kind: 'todo_write',
title: 'Updated Plan',
status: 'completed',
content: [
{
type: 'content',
content: { type: 'text', text },
},
],
timestamp: ts,
});
lastPlanSnapshotRef.current = { id: prev.id, text, lines };
} else {
// 3) 其他情况 -> 新增一条历史卡片
const toolCallId = `plan-snapshot-${ts}`;
handlers.handleToolCallUpdate({
type: 'tool_call',
toolCallId,
kind: 'todo_write',
title: 'Updated Plan',
status: 'completed',
content: [
{
type: 'content',
content: { type: 'text', text },
},
],
timestamp: ts,
});
lastPlanSnapshotRef.current = { id: toolCallId, text, lines };
}
// 分割助手消息段,保持渲染块独立
handlers.messageHandling.breakAssistantSegment?.();
} catch (err) {
console.warn(
'[useWebViewMessages] failed to push/merge plan snapshot toolcall:',
err,
);
}
}
break;
@@ -293,6 +418,15 @@ export const useWebViewMessages = ({
toolCallData.type = toolCallData.sessionUpdate;
}
handlers.handleToolCallUpdate(toolCallData);
// Split assistant stream at tool boundaries similar to Claude/GPT rhythm
const status = (toolCallData.status || '').toString();
const isStart = toolCallData.type === 'tool_call';
const isFinalUpdate =
toolCallData.type === 'tool_call_update' &&
(status === 'completed' || status === 'failed');
if (isStart || isFinalUpdate) {
handlers.messageHandling.breakAssistantSegment();
}
break;
}
@@ -343,6 +477,7 @@ export const useWebViewMessages = ({
}
handlers.clearToolCalls();
handlers.setPlanEntries([]);
lastPlanSnapshotRef.current = null;
break;
case 'conversationCleared':
@@ -352,6 +487,7 @@ export const useWebViewMessages = ({
handlers.sessionManagement.setCurrentSessionTitle(
'Past Conversations',
);
lastPlanSnapshotRef.current = null;
break;
case 'sessionTitleUpdated': {