Refactor(chat): Introduce custom Chat class for future modifications

- Copied the `Chat` class from `@google/genai` into `packages/server/src/core/geminiChat.ts`.
- This change is in preparation for future modifications to the chat handling logic.
- Updated relevant files to use the new `GeminiChat` class.

Part of https://github.com/google-gemini/gemini-cli/issues/551
This commit is contained in:
Taylor Mullen
2025-05-26 14:17:56 -07:00
committed by N. Taylor Mullen
parent 02503a3248
commit 480549e02e
7 changed files with 339 additions and 19 deletions

View File

@@ -5,10 +5,11 @@
*/
import { describe, it, expect, vi, beforeEach, Mock, afterEach } from 'vitest';
-import { Chat, Content } from '@google/genai';
+import { Content } from '@google/genai';
import { GeminiClient } from '../core/client.js';
import { Config } from '../config/config.js'; // Added Config import
import { checkNextSpeaker, NextSpeakerResponse } from './nextSpeakerChecker.js';
+import { GeminiChat } from '../core/geminiChat.js';
// Mock GeminiClient and Config constructor
vi.mock('../core/client.js');
@@ -39,7 +40,7 @@ vi.mock('@google/genai', async () => {
});
describe('checkNextSpeaker', () => {
-  let mockChat: Chat;
+  let mockChat: GeminiChat;
let mockGeminiClient: GeminiClient;
let MockConfig: Mock;
@@ -64,7 +65,7 @@ describe('checkNextSpeaker', () => {
mockGeminiClient = new GeminiClient(mockConfigInstance);
// Simulate chat creation as done in GeminiClient
-    mockChat = { getHistory: mockGetHistory } as unknown as Chat;
+    mockChat = { getHistory: mockGetHistory } as unknown as GeminiChat;
});
afterEach(() => {

View File

@@ -4,8 +4,9 @@
* SPDX-License-Identifier: Apache-2.0
*/
-import { Chat, Content, SchemaUnion, Type } from '@google/genai';
+import { Content, SchemaUnion, Type } from '@google/genai';
import { GeminiClient } from '../core/client.js';
+import { GeminiChat } from '../core/geminiChat.js';
const CHECK_PROMPT = `Analyze *only* the content and structure of your immediately preceding response (your last turn in the conversation history). Based *strictly* on that response, determine who should logically speak next: the 'user' or the 'model' (you).
**Decision Rules (apply in order):**
@@ -57,7 +58,7 @@ export interface NextSpeakerResponse {
}
export async function checkNextSpeaker(
-  chat: Chat,
+  chat: GeminiChat,
geminiClient: GeminiClient,
): Promise<NextSpeakerResponse | null> {
// We need to capture the curated history because there are many moments when the model will return invalid turns