Add TurnId to tool calls, API responses, and error logs. (#3039)

Co-authored-by: Scott Densmore <scottdensmore@mac.com>
This commit is contained in:
uttamkanodia14
2025-07-10 00:19:30 +05:30
committed by GitHub
parent 6c12f9e0d9
commit 063481faa4
26 changed files with 289 additions and 72 deletions

View File

@@ -450,7 +450,7 @@ describe('Gemini Client (client.ts)', () => {
});
const initialChat = client.getChat();
const result = await client.tryCompressChat();
const result = await client.tryCompressChat('prompt-id-2');
const newChat = client.getChat();
expect(tokenLimit).toHaveBeenCalled();
@@ -476,7 +476,7 @@ describe('Gemini Client (client.ts)', () => {
});
const initialChat = client.getChat();
const result = await client.tryCompressChat();
const result = await client.tryCompressChat('prompt-id-3');
const newChat = client.getChat();
expect(tokenLimit).toHaveBeenCalled();
@@ -507,7 +507,7 @@ describe('Gemini Client (client.ts)', () => {
});
const initialChat = client.getChat();
const result = await client.tryCompressChat(true); // force = true
const result = await client.tryCompressChat('prompt-id-1', true); // force = true
const newChat = client.getChat();
expect(mockSendMessage).toHaveBeenCalled();
@@ -545,6 +545,7 @@ describe('Gemini Client (client.ts)', () => {
const stream = client.sendMessageStream(
[{ text: 'Hi' }],
new AbortController().signal,
'prompt-id-1',
);
// Consume the stream manually to get the final return value.
@@ -597,6 +598,7 @@ describe('Gemini Client (client.ts)', () => {
const stream = client.sendMessageStream(
[{ text: 'Start conversation' }],
signal,
'prompt-id-2',
);
// Count how many stream events we get
@@ -697,6 +699,7 @@ describe('Gemini Client (client.ts)', () => {
const stream = client.sendMessageStream(
[{ text: 'Start conversation' }],
signal,
'prompt-id-3',
Number.MAX_SAFE_INTEGER, // Bypass the MAX_TURNS protection
);
@@ -806,7 +809,7 @@ describe('Gemini Client (client.ts)', () => {
client['contentGenerator'] = mockGenerator as ContentGenerator;
client['startChat'] = vi.fn().mockResolvedValue(mockChat);
const result = await client.tryCompressChat(true);
const result = await client.tryCompressChat('prompt-id-4', true);
expect(mockCountTokens).toHaveBeenCalledTimes(2);
expect(mockCountTokens).toHaveBeenNthCalledWith(1, {

View File

@@ -261,23 +261,25 @@ export class GeminiClient {
async *sendMessageStream(
request: PartListUnion,
signal: AbortSignal,
prompt_id: string,
turns: number = this.MAX_TURNS,
originalModel?: string,
): AsyncGenerator<ServerGeminiStreamEvent, Turn> {
// Ensure turns never exceeds MAX_TURNS to prevent infinite loops
const boundedTurns = Math.min(turns, this.MAX_TURNS);
if (!boundedTurns) {
return new Turn(this.getChat());
return new Turn(this.getChat(), prompt_id);
}
// Track the original model from the first call to detect model switching
const initialModel = originalModel || this.config.getModel();
const compressed = await this.tryCompressChat();
const compressed = await this.tryCompressChat(prompt_id);
if (compressed) {
yield { type: GeminiEventType.ChatCompressed, value: compressed };
}
const turn = new Turn(this.getChat());
const turn = new Turn(this.getChat(), prompt_id);
const resultStream = turn.run(request, signal);
for await (const event of resultStream) {
yield event;
@@ -303,6 +305,7 @@ export class GeminiClient {
yield* this.sendMessageStream(
nextRequest,
signal,
prompt_id,
boundedTurns - 1,
initialModel,
);
@@ -492,6 +495,7 @@ export class GeminiClient {
}
async tryCompressChat(
prompt_id: string,
force: boolean = false,
): Promise<ChatCompressionInfo | null> {
const curatedHistory = this.getChat().getHistory(true);
@@ -538,14 +542,17 @@ export class GeminiClient {
this.getChat().setHistory(historyToCompress);
const { text: summary } = await this.getChat().sendMessage({
message: {
text: 'First, reason in your scratchpad. Then, generate the <state_snapshot>.',
const { text: summary } = await this.getChat().sendMessage(
{
message: {
text: 'First, reason in your scratchpad. Then, generate the <state_snapshot>.',
},
config: {
systemInstruction: { text: getCompressionPrompt() },
},
},
config: {
systemInstruction: { text: getCompressionPrompt() },
},
});
prompt_id,
);
this.chat = await this.startChat([
{
role: 'user',

View File

@@ -139,6 +139,7 @@ describe('CoreToolScheduler', () => {
name: 'mockTool',
args: {},
isClientInitiated: false,
prompt_id: 'prompt-id-1',
};
abortController.abort();
@@ -206,6 +207,7 @@ describe('CoreToolScheduler with payload', () => {
name: 'mockModifiableTool',
args: {},
isClientInitiated: false,
prompt_id: 'prompt-id-2',
};
await scheduler.schedule([request], abortController.signal);

View File

@@ -77,7 +77,7 @@ describe('GeminiChat', () => {
} as unknown as GenerateContentResponse;
vi.mocked(mockModelsModule.generateContent).mockResolvedValue(response);
await chat.sendMessage({ message: 'hello' });
await chat.sendMessage({ message: 'hello' }, 'prompt-id-1');
expect(mockModelsModule.generateContent).toHaveBeenCalledWith({
model: 'gemini-pro',
@@ -109,7 +109,7 @@ describe('GeminiChat', () => {
response,
);
await chat.sendMessageStream({ message: 'hello' });
await chat.sendMessageStream({ message: 'hello' }, 'prompt-id-1');
expect(mockModelsModule.generateContentStream).toHaveBeenCalledWith({
model: 'gemini-pro',

View File

@@ -151,13 +151,18 @@ export class GeminiChat {
private async _logApiRequest(
contents: Content[],
model: string,
prompt_id: string,
): Promise<void> {
const requestText = this._getRequestTextFromContents(contents);
logApiRequest(this.config, new ApiRequestEvent(model, requestText));
logApiRequest(
this.config,
new ApiRequestEvent(model, prompt_id, requestText),
);
}
private async _logApiResponse(
durationMs: number,
prompt_id: string,
usageMetadata?: GenerateContentResponseUsageMetadata,
responseText?: string,
): Promise<void> {
@@ -166,13 +171,18 @@ export class GeminiChat {
new ApiResponseEvent(
this.config.getModel(),
durationMs,
prompt_id,
usageMetadata,
responseText,
),
);
}
private _logApiError(durationMs: number, error: unknown): void {
private _logApiError(
durationMs: number,
error: unknown,
prompt_id: string,
): void {
const errorMessage = error instanceof Error ? error.message : String(error);
const errorType = error instanceof Error ? error.name : 'unknown';
@@ -182,6 +192,7 @@ export class GeminiChat {
this.config.getModel(),
errorMessage,
durationMs,
prompt_id,
errorType,
),
);
@@ -255,12 +266,13 @@ export class GeminiChat {
*/
async sendMessage(
params: SendMessageParameters,
prompt_id: string,
): Promise<GenerateContentResponse> {
await this.sendPromise;
const userContent = createUserContent(params.message);
const requestContents = this.getHistory(true).concat(userContent);
this._logApiRequest(requestContents, this.config.getModel());
this._logApiRequest(requestContents, this.config.getModel(), prompt_id);
const startTime = Date.now();
let response: GenerateContentResponse;
@@ -301,6 +313,7 @@ export class GeminiChat {
const durationMs = Date.now() - startTime;
await this._logApiResponse(
durationMs,
prompt_id,
response.usageMetadata,
getStructuredResponse(response),
);
@@ -332,7 +345,7 @@ export class GeminiChat {
return response;
} catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error);
this._logApiError(durationMs, error, prompt_id);
this.sendPromise = Promise.resolve();
throw error;
}
@@ -362,11 +375,12 @@ export class GeminiChat {
*/
async sendMessageStream(
params: SendMessageParameters,
prompt_id: string,
): Promise<AsyncGenerator<GenerateContentResponse>> {
await this.sendPromise;
const userContent = createUserContent(params.message);
const requestContents = this.getHistory(true).concat(userContent);
this._logApiRequest(requestContents, this.config.getModel());
this._logApiRequest(requestContents, this.config.getModel(), prompt_id);
const startTime = Date.now();
@@ -420,11 +434,12 @@ export class GeminiChat {
streamResponse,
userContent,
startTime,
prompt_id,
);
return result;
} catch (error) {
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error);
this._logApiError(durationMs, error, prompt_id);
this.sendPromise = Promise.resolve();
throw error;
}
@@ -496,6 +511,7 @@ export class GeminiChat {
streamResponse: AsyncGenerator<GenerateContentResponse>,
inputContent: Content,
startTime: number,
prompt_id: string,
) {
const outputContent: Content[] = [];
const chunks: GenerateContentResponse[] = [];
@@ -519,7 +535,7 @@ export class GeminiChat {
} catch (error) {
errorOccurred = true;
const durationMs = Date.now() - startTime;
this._logApiError(durationMs, error);
this._logApiError(durationMs, error, prompt_id);
throw error;
}
@@ -534,6 +550,7 @@ export class GeminiChat {
const fullText = getStructuredResponseFromParts(allParts);
await this._logApiResponse(
durationMs,
prompt_id,
this.getFinalUsageMetadata(chunks),
fullText,
);

View File

@@ -67,6 +67,7 @@ describe('executeToolCall', () => {
name: 'testTool',
args: { param1: 'value1' },
isClientInitiated: false,
prompt_id: 'prompt-id-1',
};
const toolResult: ToolResult = {
llmContent: 'Tool executed successfully',
@@ -105,6 +106,7 @@ describe('executeToolCall', () => {
name: 'nonExistentTool',
args: {},
isClientInitiated: false,
prompt_id: 'prompt-id-2',
};
vi.mocked(mockToolRegistry.getTool).mockReturnValue(undefined);
@@ -140,6 +142,7 @@ describe('executeToolCall', () => {
name: 'testTool',
args: { param1: 'value1' },
isClientInitiated: false,
prompt_id: 'prompt-id-3',
};
const executionError = new Error('Tool execution failed');
vi.mocked(mockToolRegistry.getTool).mockReturnValue(mockTool);
@@ -172,6 +175,7 @@ describe('executeToolCall', () => {
name: 'testTool',
args: { param1: 'value1' },
isClientInitiated: false,
prompt_id: 'prompt-id-4',
};
const cancellationError = new Error('Operation cancelled');
vi.mocked(mockToolRegistry.getTool).mockReturnValue(mockTool);
@@ -215,6 +219,7 @@ describe('executeToolCall', () => {
name: 'testTool',
args: {},
isClientInitiated: false,
prompt_id: 'prompt-id-5',
};
const imageDataPart: Part = {
inlineData: { mimeType: 'image/png', data: 'base64data' },

View File

@@ -40,6 +40,7 @@ export async function executeToolCall(
duration_ms: durationMs,
success: false,
error: error.message,
prompt_id: toolCallRequest.prompt_id,
});
// Ensure the response structure matches what the API expects for an error
return {
@@ -75,6 +76,7 @@ export async function executeToolCall(
function_args: toolCallRequest.args,
duration_ms: durationMs,
success: true,
prompt_id: toolCallRequest.prompt_id,
});
const response = convertToFunctionResponse(
@@ -100,6 +102,7 @@ export async function executeToolCall(
duration_ms: durationMs,
success: false,
error: error.message,
prompt_id: toolCallRequest.prompt_id,
});
return {
callId: toolCallRequest.callId,

View File

@@ -55,7 +55,7 @@ describe('Turn', () => {
sendMessageStream: mockSendMessageStream,
getHistory: mockGetHistory,
};
turn = new Turn(mockChatInstance as unknown as GeminiChat);
turn = new Turn(mockChatInstance as unknown as GeminiChat, 'prompt-id-1');
mockGetHistory.mockReturnValue([]);
mockSendMessageStream.mockResolvedValue((async function* () {})());
});
@@ -92,10 +92,13 @@ describe('Turn', () => {
events.push(event);
}
expect(mockSendMessageStream).toHaveBeenCalledWith({
message: reqParts,
config: { abortSignal: expect.any(AbortSignal) },
});
expect(mockSendMessageStream).toHaveBeenCalledWith(
{
message: reqParts,
config: { abortSignal: expect.any(AbortSignal) },
},
'prompt-id-1',
);
expect(events).toEqual([
{ type: GeminiEventType.Content, value: 'Hello' },

View File

@@ -64,6 +64,7 @@ export interface ToolCallRequestInfo {
name: string;
args: Record<string, unknown>;
isClientInitiated: boolean;
prompt_id: string;
}
export interface ToolCallResponseInfo {
@@ -143,7 +144,10 @@ export class Turn {
readonly pendingToolCalls: ToolCallRequestInfo[];
private debugResponses: GenerateContentResponse[];
constructor(private readonly chat: GeminiChat) {
constructor(
private readonly chat: GeminiChat,
private readonly prompt_id: string,
) {
this.pendingToolCalls = [];
this.debugResponses = [];
}
@@ -153,12 +157,15 @@ export class Turn {
signal: AbortSignal,
): AsyncGenerator<ServerGeminiStreamEvent> {
try {
const responseStream = await this.chat.sendMessageStream({
message: req,
config: {
abortSignal: signal,
const responseStream = await this.chat.sendMessageStream(
{
message: req,
config: {
abortSignal: signal,
},
},
});
this.prompt_id,
);
for await (const resp of responseStream) {
if (signal?.aborted) {
@@ -252,6 +259,7 @@ export class Turn {
name,
args,
isClientInitiated: false,
prompt_id: this.prompt_id,
};
this.pendingToolCalls.push(toolCallRequest);

View File

@@ -265,6 +265,10 @@ export class ClearcutLogger {
gemini_cli_key: EventMetadataKey.GEMINI_CLI_USER_PROMPT_LENGTH,
value: JSON.stringify(event.prompt_length),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_PROMPT_ID,
value: JSON.stringify(event.prompt_id),
},
];
this.enqueueLogEvent(this.createLogEvent(new_prompt_event_name, data));
@@ -279,6 +283,10 @@ export class ClearcutLogger {
gemini_cli_key: EventMetadataKey.GEMINI_CLI_TOOL_CALL_NAME,
value: JSON.stringify(event.function_name),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_PROMPT_ID,
value: JSON.stringify(event.prompt_id),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_TOOL_CALL_DECISION,
value: JSON.stringify(event.decision),
@@ -313,6 +321,10 @@ export class ClearcutLogger {
gemini_cli_key: EventMetadataKey.GEMINI_CLI_API_REQUEST_MODEL,
value: JSON.stringify(event.model),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_PROMPT_ID,
value: JSON.stringify(event.prompt_id),
},
];
this.enqueueLogEvent(this.createLogEvent(api_request_event_name, data));
@@ -327,6 +339,10 @@ export class ClearcutLogger {
gemini_cli_key: EventMetadataKey.GEMINI_CLI_API_RESPONSE_MODEL,
value: JSON.stringify(event.model),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_PROMPT_ID,
value: JSON.stringify(event.prompt_id),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_API_RESPONSE_STATUS_CODE,
value: JSON.stringify(event.status_code),
@@ -378,6 +394,10 @@ export class ClearcutLogger {
gemini_cli_key: EventMetadataKey.GEMINI_CLI_API_ERROR_MODEL,
value: JSON.stringify(event.model),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_PROMPT_ID,
value: JSON.stringify(event.prompt_id),
},
{
gemini_cli_key: EventMetadataKey.GEMINI_CLI_API_ERROR_TYPE,
value: JSON.stringify(event.error_type),

View File

@@ -137,6 +137,13 @@ export enum EventMetadataKey {
// Logs the end of a session.
GEMINI_CLI_END_SESSION_ID = 34,
// ==========================================================================
// Shared Keys
// ===========================================================================
// Logs the Prompt Id
GEMINI_CLI_PROMPT_ID = 35,
}
export function getEventMetadataKey(

View File

@@ -127,7 +127,7 @@ describe('loggers', () => {
} as unknown as Config;
it('should log a user prompt', () => {
const event = new UserPromptEvent(11, 'test-prompt');
const event = new UserPromptEvent(11, 'prompt-id-8', 'test-prompt');
logUserPrompt(mockConfig, event);
@@ -201,6 +201,7 @@ describe('loggers', () => {
const event = new ApiResponseEvent(
'test-model',
100,
'prompt-id-1',
usageData,
'test-response',
);
@@ -224,6 +225,7 @@ describe('loggers', () => {
tool_token_count: 2,
total_token_count: 0,
response_text: 'test-response',
prompt_id: 'prompt-id-1',
},
});
@@ -260,6 +262,7 @@ describe('loggers', () => {
const event = new ApiResponseEvent(
'test-model',
100,
'prompt-id-1',
usageData,
'test-response',
'test-error',
@@ -296,7 +299,11 @@ describe('loggers', () => {
} as Config;
it('should log an API request with request_text', () => {
const event = new ApiRequestEvent('test-model', 'This is a test request');
const event = new ApiRequestEvent(
'test-model',
'prompt-id-7',
'This is a test request',
);
logApiRequest(mockConfig, event);
@@ -308,12 +315,13 @@ describe('loggers', () => {
'event.timestamp': '2025-01-01T00:00:00.000Z',
model: 'test-model',
request_text: 'This is a test request',
prompt_id: 'prompt-id-7',
},
});
});
it('should log an API request without request_text', () => {
const event = new ApiRequestEvent('test-model');
const event = new ApiRequestEvent('test-model', 'prompt-id-6');
logApiRequest(mockConfig, event);
@@ -324,6 +332,7 @@ describe('loggers', () => {
'event.name': EVENT_API_REQUEST,
'event.timestamp': '2025-01-01T00:00:00.000Z',
model: 'test-model',
prompt_id: 'prompt-id-6',
},
});
});
@@ -394,6 +403,7 @@ describe('loggers', () => {
},
callId: 'test-call-id',
isClientInitiated: true,
prompt_id: 'prompt-id-1',
},
response: {
callId: 'test-call-id',
@@ -427,6 +437,7 @@ describe('loggers', () => {
duration_ms: 100,
success: true,
decision: ToolCallDecision.ACCEPT,
prompt_id: 'prompt-id-1',
},
});
@@ -455,6 +466,7 @@ describe('loggers', () => {
},
callId: 'test-call-id',
isClientInitiated: true,
prompt_id: 'prompt-id-2',
},
response: {
callId: 'test-call-id',
@@ -487,6 +499,7 @@ describe('loggers', () => {
duration_ms: 100,
success: false,
decision: ToolCallDecision.REJECT,
prompt_id: 'prompt-id-2',
},
});
@@ -516,6 +529,7 @@ describe('loggers', () => {
},
callId: 'test-call-id',
isClientInitiated: true,
prompt_id: 'prompt-id-3',
},
response: {
callId: 'test-call-id',
@@ -549,6 +563,7 @@ describe('loggers', () => {
duration_ms: 100,
success: true,
decision: ToolCallDecision.MODIFY,
prompt_id: 'prompt-id-3',
},
});
@@ -578,6 +593,7 @@ describe('loggers', () => {
},
callId: 'test-call-id',
isClientInitiated: true,
prompt_id: 'prompt-id-4',
},
response: {
callId: 'test-call-id',
@@ -609,6 +625,7 @@ describe('loggers', () => {
),
duration_ms: 100,
success: true,
prompt_id: 'prompt-id-4',
},
});
@@ -638,6 +655,7 @@ describe('loggers', () => {
},
callId: 'test-call-id',
isClientInitiated: true,
prompt_id: 'prompt-id-5',
},
response: {
callId: 'test-call-id',
@@ -675,6 +693,7 @@ describe('loggers', () => {
'error.message': 'test-error',
error_type: 'test-error-type',
'error.type': 'test-error-type',
prompt_id: 'prompt-id-5',
},
});

View File

@@ -95,12 +95,14 @@ export class UserPromptEvent {
'event.name': 'user_prompt';
'event.timestamp': string; // ISO 8601
prompt_length: number;
prompt_id: string;
prompt?: string;
constructor(prompt_length: number, prompt?: string) {
constructor(prompt_length: number, prompt_Id: string, prompt?: string) {
this['event.name'] = 'user_prompt';
this['event.timestamp'] = new Date().toISOString();
this.prompt_length = prompt_length;
this.prompt_id = prompt_Id;
this.prompt = prompt;
}
}
@@ -115,6 +117,7 @@ export class ToolCallEvent {
decision?: ToolCallDecision;
error?: string;
error_type?: string;
prompt_id: string;
constructor(call: CompletedToolCall) {
this['event.name'] = 'tool_call';
@@ -128,6 +131,7 @@ export class ToolCallEvent {
: undefined;
this.error = call.response.error?.message;
this.error_type = call.response.error?.name;
this.prompt_id = call.request.prompt_id;
}
}
@@ -135,12 +139,14 @@ export class ApiRequestEvent {
'event.name': 'api_request';
'event.timestamp': string; // ISO 8601
model: string;
prompt_id: string;
request_text?: string;
constructor(model: string, request_text?: string) {
constructor(model: string, prompt_id: string, request_text?: string) {
this['event.name'] = 'api_request';
this['event.timestamp'] = new Date().toISOString();
this.model = model;
this.prompt_id = prompt_id;
this.request_text = request_text;
}
}
@@ -153,11 +159,13 @@ export class ApiErrorEvent {
error_type?: string;
status_code?: number | string;
duration_ms: number;
prompt_id: string;
constructor(
model: string,
error: string,
duration_ms: number,
prompt_id: string,
error_type?: string,
status_code?: number | string,
) {
@@ -168,6 +176,7 @@ export class ApiErrorEvent {
this.error_type = error_type;
this.status_code = status_code;
this.duration_ms = duration_ms;
this.prompt_id = prompt_id;
}
}
@@ -185,10 +194,12 @@ export class ApiResponseEvent {
tool_token_count: number;
total_token_count: number;
response_text?: string;
prompt_id: string;
constructor(
model: string,
duration_ms: number,
prompt_id: string,
usage_data?: GenerateContentResponseUsageMetadata,
response_text?: string,
error?: string,
@@ -206,6 +217,7 @@ export class ApiResponseEvent {
this.total_token_count = usage_data?.totalTokenCount ?? 0;
this.response_text = response_text;
this.error = error;
this.prompt_id = prompt_id;
}
}

View File

@@ -36,6 +36,7 @@ const createFakeCompletedToolCall = (
name,
args: { foo: 'bar' },
isClientInitiated: false,
prompt_id: 'prompt-id-1',
};
if (success) {