Support merging ChatCompletionContentPart arrays and add filterEmptyMessages

This commit is contained in:
Weaxs
2025-12-18 00:46:48 +08:00
parent c0839dceac
commit 84eb5c562f
2 changed files with 285 additions and 9 deletions

View File

@@ -542,4 +542,206 @@ describe('OpenAIContentConverter', () => {
expect(original).toEqual(originalCopy);
});
});
describe('mergeConsecutiveAssistantMessages', () => {
it('should merge two consecutive assistant messages with string content', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'First part' }],
},
{
role: 'model',
parts: [{ text: 'Second part' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(1);
expect(messages[0].role).toBe('assistant');
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(content).toHaveLength(2);
expect(content[0]).toEqual({ type: 'text', text: 'First part' });
expect(content[1]).toEqual({ type: 'text', text: 'Second part' });
});
it('should merge multiple consecutive assistant messages', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'Part 1' }],
},
{
role: 'model',
parts: [{ text: 'Part 2' }],
},
{
role: 'model',
parts: [{ text: 'Part 3' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(1);
expect(messages[0].role).toBe('assistant');
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(content).toHaveLength(3);
});
it('should merge tool_calls from consecutive assistant messages', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [
{
functionCall: {
id: 'call_1',
name: 'tool_1',
args: {},
},
},
],
},
{
role: 'user',
parts: [
{
functionResponse: {
id: 'call_1',
name: 'tool_1',
response: { output: 'result_1' },
},
},
],
},
{
role: 'model',
parts: [
{
functionCall: {
id: 'call_2',
name: 'tool_2',
args: {},
},
},
],
},
{
role: 'user',
parts: [
{
functionResponse: {
id: 'call_2',
name: 'tool_2',
response: { output: 'result_2' },
},
},
],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
// Should have: assistant (tool_call_1), tool (result_1), assistant (tool_call_2), tool (result_2)
expect(messages).toHaveLength(4);
expect(messages[0].role).toBe('assistant');
expect(messages[1].role).toBe('tool');
expect(messages[2].role).toBe('assistant');
expect(messages[3].role).toBe('tool');
});
it('should not merge assistant messages separated by user messages', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'First assistant' }],
},
{
role: 'user',
parts: [{ text: 'User message' }],
},
{
role: 'model',
parts: [{ text: 'Second assistant' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(3);
expect(messages[0].role).toBe('assistant');
expect(messages[1].role).toBe('user');
expect(messages[2].role).toBe('assistant');
});
it('should handle merging when one message has array content and another has string', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'Text part' }],
},
{
role: 'model',
parts: [{ text: 'Another text' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
expect(messages).toHaveLength(1);
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(Array.isArray(content)).toBe(true);
expect(content).toHaveLength(2);
});
it('should merge empty content correctly', () => {
const request: GenerateContentParameters = {
model: 'models/test',
contents: [
{
role: 'model',
parts: [{ text: 'First' }],
},
{
role: 'model',
parts: [],
},
{
role: 'model',
parts: [{ text: 'Second' }],
},
],
};
const messages = converter.convertGeminiRequestToOpenAI(request);
// Empty messages should be filtered out
expect(messages).toHaveLength(1);
const content = messages[0]
.content as OpenAI.Chat.ChatCompletionContentPart[];
expect(content).toHaveLength(2);
expect(content[0]).toEqual({ type: 'text', text: 'First' });
expect(content[1]).toEqual({ type: 'text', text: 'Second' });
});
});
});

View File

@@ -250,7 +250,7 @@ export class OpenAIContentConverter {
const mergedMessages =
this.mergeConsecutiveAssistantMessages(cleanedMessages);
return mergedMessages;
return this.filterEmptyMessages(mergedMessages);
}
/**
@@ -1124,12 +1124,44 @@ export class OpenAIContentConverter {
// If the last message is also an assistant message, merge them
if (lastMessage.role === 'assistant') {
// Combine content
const combinedContent = [
typeof lastMessage.content === 'string' ? lastMessage.content : '',
typeof message.content === 'string' ? message.content : '',
]
.filter(Boolean)
.join('');
const lastContent = lastMessage.content;
const currentContent = message.content;
// Determine if we should use array format (if either content is an array)
const useArrayFormat =
Array.isArray(lastContent) || Array.isArray(currentContent);
let combinedContent:
| string
| OpenAI.Chat.ChatCompletionContentPart[]
| null;
if (useArrayFormat) {
// Convert both to array format and merge
const lastParts = Array.isArray(lastContent)
? lastContent
: typeof lastContent === 'string' && lastContent
? [{ type: 'text' as const, text: lastContent }]
: [];
const currentParts = Array.isArray(currentContent)
? currentContent
: typeof currentContent === 'string' && currentContent
? [{ type: 'text' as const, text: currentContent }]
: [];
combinedContent = [
...lastParts,
...currentParts,
] as OpenAI.Chat.ChatCompletionContentPart[];
} else {
// Both are strings or null, merge as strings
const lastText = typeof lastContent === 'string' ? lastContent : '';
const currentText =
typeof currentContent === 'string' ? currentContent : '';
const mergedText = [lastText, currentText].filter(Boolean).join('');
combinedContent = mergedText || null;
}
// Combine tool calls
const lastToolCalls =
@@ -1141,14 +1173,17 @@ export class OpenAIContentConverter {
// Update the last message with combined data
(
lastMessage as OpenAI.Chat.ChatCompletionMessageParam & {
content: string | null;
content: string | OpenAI.Chat.ChatCompletionContentPart[] | null;
tool_calls?: OpenAI.Chat.ChatCompletionMessageToolCall[];
}
).content = combinedContent || null;
if (combinedToolCalls.length > 0) {
(
lastMessage as OpenAI.Chat.ChatCompletionMessageParam & {
content: string | null;
content:
| string
| OpenAI.Chat.ChatCompletionContentPart[]
| null;
tool_calls?: OpenAI.Chat.ChatCompletionMessageToolCall[];
}
).tool_calls = combinedToolCalls;
@@ -1164,4 +1199,43 @@ export class OpenAIContentConverter {
return merged;
}
/**
 * Filter out messages that have neither content nor tool_calls
 * to prevent API errors from providers that reject empty messages.
 *
 * A message counts as "empty" when its content is null/undefined, an
 * empty string, or an empty content-part array, and (for assistant
 * messages) it also carries no tool_calls.
 *
 * @param messages - Converted OpenAI chat messages, possibly containing
 *   empty entries left over from merging.
 * @returns The same messages with empty entries removed, order preserved.
 */
private filterEmptyMessages(
  messages: OpenAI.Chat.ChatCompletionMessageParam[],
): OpenAI.Chat.ChatCompletionMessageParam[] {
  // True when `content` holds renderable data: a non-empty string or a
  // non-empty content-part array. The previous `content !== ''` check let
  // empty arrays slip through, so an assistant message whose merged
  // content was `[]` survived the filter.
  const hasContent = (content: unknown): boolean => {
    if (content === null || content === undefined) return false;
    if (typeof content === 'string') return content !== '';
    if (Array.isArray(content)) return content.length > 0;
    // Unexpected content shape: keep rather than silently drop.
    return true;
  };

  return messages.filter((message) => {
    // System, user, and tool messages never carry tool_calls; keep them
    // only when they have content.
    // NOTE(review): dropping an empty tool message orphans the matching
    // assistant tool_call — confirm upstream always supplies tool output.
    if (
      message.role === 'system' ||
      message.role === 'user' ||
      message.role === 'tool'
    ) {
      return hasContent(message.content);
    }
    // Assistant messages are also kept when they carry tool calls, since
    // a tool-call-only turn legitimately has no textual content.
    if (message.role === 'assistant') {
      const hasToolCalls =
        'tool_calls' in message &&
        message.tool_calls !== undefined &&
        message.tool_calls.length > 0;
      return hasContent(message.content) || hasToolCalls;
    }
    // Keep other message types (e.g. function) by default.
    return true;
  });
}
}