feat: Show model thoughts while loading (#992)

commit 123ad20e9b (parent b3d89a1075)
Author: Asad Memon
Date: 2025-06-15 11:19:05 -07:00
Committed by: GitHub
7 changed files with 153 additions and 24 deletions

@@ -21,6 +21,7 @@ import {
   logUserPrompt,
   GitService,
   EditorType,
+  ThoughtSummary,
 } from '@gemini-cli/core';
 import { type Part, type PartListUnion } from '@google/genai';
 import {
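
The ThoughtSummary type imported above is defined in @gemini-cli/core and its definition is not part of this diff. A minimal sketch of a plausible shape, assuming a short subject plus a longer description (field names are assumptions, not taken from this commit):

// Hypothetical sketch of ThoughtSummary; the real definition lives in @gemini-cli/core.
export interface ThoughtSummary {
  subject: string; // short label for the model's current reasoning step
  description: string; // longer free-form text of the thought
}
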
@@ -90,6 +91,7 @@ export const useGeminiStream = (
   const [initError, setInitError] = useState<string | null>(null);
   const abortControllerRef = useRef<AbortController | null>(null);
   const [isResponding, setIsResponding] = useState<boolean>(false);
+  const [thought, setThought] = useState<ThoughtSummary | null>(null);
   const [pendingHistoryItemRef, setPendingHistoryItem] =
     useStateAndRef<HistoryItemWithoutId | null>(null);
   const logger = useLogger();
@@ -393,6 +395,9 @@ export const useGeminiStream = (
       const toolCallRequests: ToolCallRequestInfo[] = [];
       for await (const event of stream) {
         switch (event.type) {
+          case ServerGeminiEventType.Thought:
+            setThought(event.value);
+            break;
           case ServerGeminiEventType.Content:
             geminiMessageBuffer = handleContentEvent(
               event.value,
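
For the switch above to type-check, the stream's event union has to include a Thought variant carrying a ThoughtSummary. A hedged sketch of what that discriminated union might look like; the actual ServerGeminiEventType enum and event types are defined in @gemini-cli/core and are not shown in this diff:

// Sketch only: assumed shape of the server stream events consumed by the loop above.
export enum ServerGeminiEventType {
  Content = 'content',
  Thought = 'thought',
  ToolCallRequest = 'tool_call_request',
  // ...other variants omitted
}

export type ServerGeminiStreamEvent =
  | { type: ServerGeminiEventType.Thought; value: ThoughtSummary }
  | { type: ServerGeminiEventType.Content; value: string }
  | { type: ServerGeminiEventType.ToolCallRequest; value: ToolCallRequestInfo };
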
@@ -730,5 +735,6 @@ export const useGeminiStream = (
     submitQuery,
     initError,
     pendingHistoryItems,
+    thought,
   };
 };
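
With thought now returned from useGeminiStream, a loading indicator can surface the model's current reasoning step while a response streams. A minimal consumer sketch, assuming an Ink-based CLI UI; the component and prop names are hypothetical and not taken from this commit:

import React from 'react';
import { Box, Text } from 'ink';
import { ThoughtSummary } from '@gemini-cli/core';

interface LoadingIndicatorProps {
  isResponding: boolean;
  thought: ThoughtSummary | null; // latest value forwarded from useGeminiStream
}

// Hypothetical consumer: renders the most recent thought while the model is responding.
export const LoadingIndicator: React.FC<LoadingIndicatorProps> = ({
  isResponding,
  thought,
}) => {
  if (!isResponding) {
    return null;
  }
  return (
    <Box>
      <Text color="gray">
        {thought ? thought.subject : 'Waiting for model…'}
      </Text>
    </Box>
  );
};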