Propagate user_prompt_id to GenerateContentRequest for logging (#4741)

owenofbrien
2025-07-27 16:34:39 -05:00
committed by GitHub
parent 36e1e57252
commit b497791c59
11 changed files with 245 additions and 102 deletions
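
The diff below reworks the converter tests: toGenerateContentRequest now takes the prompt id as its second argument and copies it onto the outgoing request as user_prompt_id, alongside the existing session_id plumbing. As a rough orientation, the shape implied by the updated assertions is sketched here; it is reconstructed from the tests only, and the parameter names (userPromptId, sessionId) and the Sketch* type names are assumptions, not the actual types in the converter module.

// Sketch of the call shape implied by the updated tests in this commit.
// Only toGenerateContentRequest, user_prompt_id, and session_id are taken
// from the diff; everything else here is an assumed placeholder.
interface SketchGenAiRequest {
  model: string;
  contents: unknown; // string, Part[], or Content[] in the real API
  config?: Record<string, unknown>; // systemInstruction, temperature, topK, ...
}

interface SketchCodeAssistRequest {
  model: string;
  project?: string;
  user_prompt_id: string; // propagated for logging (added by this commit)
  request: {
    contents: Array<{ role: string; parts: Array<{ text: string }> }>;
    session_id?: string;
    [key: string]: unknown; // labels, safetySettings, generationConfig, ...
  };
}

declare function toGenerateContentRequest(
  req: SketchGenAiRequest,
  userPromptId: string, // new second parameter
  project?: string,
  sessionId?: string,
): SketchCodeAssistRequest;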


@@ -24,7 +24,12 @@ describe('converter', () => {
       model: 'gemini-pro',
       contents: [{ role: 'user', parts: [{ text: 'Hello' }] }],
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq, 'my-project');
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      'my-project',
+      'my-session',
+    );
     expect(codeAssistReq).toEqual({
       model: 'gemini-pro',
       project: 'my-project',
@@ -37,8 +42,9 @@ describe('converter', () => {
         labels: undefined,
         safetySettings: undefined,
         generationConfig: undefined,
-        session_id: undefined,
+        session_id: 'my-session',
       },
+      user_prompt_id: 'my-prompt',
     });
   });
@@ -47,7 +53,12 @@ describe('converter', () => {
       model: 'gemini-pro',
       contents: [{ role: 'user', parts: [{ text: 'Hello' }] }],
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq);
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      undefined,
+      'my-session',
+    );
     expect(codeAssistReq).toEqual({
       model: 'gemini-pro',
       project: undefined,
@@ -60,8 +71,9 @@ describe('converter', () => {
         labels: undefined,
         safetySettings: undefined,
         generationConfig: undefined,
-        session_id: undefined,
+        session_id: 'my-session',
       },
+      user_prompt_id: 'my-prompt',
     });
   });
@@ -72,6 +84,7 @@ describe('converter', () => {
     };
     const codeAssistReq = toGenerateContentRequest(
       genaiReq,
+      'my-prompt',
       'my-project',
       'session-123',
     );
@@ -89,6 +102,7 @@ describe('converter', () => {
         generationConfig: undefined,
         session_id: 'session-123',
       },
+      user_prompt_id: 'my-prompt',
     });
   });
@@ -97,7 +111,12 @@ describe('converter', () => {
       model: 'gemini-pro',
       contents: 'Hello',
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq);
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      'my-project',
+      'my-session',
+    );
     expect(codeAssistReq.request.contents).toEqual([
       { role: 'user', parts: [{ text: 'Hello' }] },
     ]);
@@ -108,7 +127,12 @@ describe('converter', () => {
       model: 'gemini-pro',
       contents: [{ text: 'Hello' }, { text: 'World' }],
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq);
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      'my-project',
+      'my-session',
+    );
     expect(codeAssistReq.request.contents).toEqual([
       { role: 'user', parts: [{ text: 'Hello' }] },
       { role: 'user', parts: [{ text: 'World' }] },
@@ -123,7 +147,12 @@ describe('converter', () => {
         systemInstruction: 'You are a helpful assistant.',
       },
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq);
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      'my-project',
+      'my-session',
+    );
     expect(codeAssistReq.request.systemInstruction).toEqual({
       role: 'user',
       parts: [{ text: 'You are a helpful assistant.' }],
@@ -139,7 +168,12 @@ describe('converter', () => {
         topK: 40,
       },
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq);
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      'my-project',
+      'my-session',
+    );
     expect(codeAssistReq.request.generationConfig).toEqual({
       temperature: 0.8,
       topK: 40,
@@ -165,7 +199,12 @@ describe('converter', () => {
         responseMimeType: 'application/json',
       },
     };
-    const codeAssistReq = toGenerateContentRequest(genaiReq);
+    const codeAssistReq = toGenerateContentRequest(
+      genaiReq,
+      'my-prompt',
+      'my-project',
+      'my-session',
+    );
     expect(codeAssistReq.request.generationConfig).toEqual({
       temperature: 0.1,
       topP: 0.2,