fix: revert trimEnd on LLM response content

commit 64ce8c1d1e
parent a7c8c4c2fa
Author: tanzhenxin
Date:   2025-08-20 20:23:02 +08:00


@@ -563,7 +563,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
     // Add combined text if any
     if (combinedText) {
-      combinedParts.push({ text: combinedText.trimEnd() });
+      combinedParts.push({ text: combinedText });
     }
     // Add function calls
@@ -1164,11 +1164,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
     // Handle text content
     if (choice.message.content) {
-      if (typeof choice.message.content === 'string') {
-        parts.push({ text: choice.message.content.trimEnd() });
-      } else {
-        parts.push({ text: choice.message.content });
-      }
+      parts.push({ text: choice.message.content });
     }
     // Handle tool calls
@@ -1253,11 +1249,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
     // Handle text content
     if (choice.delta?.content) {
-      if (typeof choice.delta.content === 'string') {
-        parts.push({ text: choice.delta.content.trimEnd() });
-      } else {
-        parts.push({ text: choice.delta.content });
-      }
+      parts.push({ text: choice.delta.content });
     }
     // Handle tool calls - only accumulate during streaming, emit when complete
@@ -1776,7 +1768,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
         }
       }
-      messageContent = textParts.join('').trimEnd();
+      messageContent = textParts.join('');
     }
     const choice: OpenAIChoice = {
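
For context (not part of this commit): String.prototype.trimEnd strips trailing whitespace, so applying it to every chunk removes the spaces and newlines that separate chunks once they are concatenated, and it also drops any trailing newline from a finished message. The sketch below is an illustration only, using hypothetical chunk values rather than code from this repository.

// Hypothetical streamed deltas; a chunk boundary falls right after whitespace.
const deltas = ['The answer ', 'is:\n', '42'];

// Trimming each chunk before concatenation loses that whitespace.
deltas.map((d) => d.trimEnd()).join(''); // => 'The answeris:42'

// Passing chunks through unchanged preserves the original formatting.
deltas.join(''); // => 'The answer is:\n42'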