From 64ce8c1d1e3996c95af35a5dcf6bec6b57ca5da8 Mon Sep 17 00:00:00 2001
From: tanzhenxin
Date: Wed, 20 Aug 2025 20:23:02 +0800
Subject: [PATCH] fix: revert trimEnd on LLM response content

---
 packages/core/src/core/openaiContentGenerator.ts | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)

diff --git a/packages/core/src/core/openaiContentGenerator.ts b/packages/core/src/core/openaiContentGenerator.ts
index eeba5db7..e24bd0c3 100644
--- a/packages/core/src/core/openaiContentGenerator.ts
+++ b/packages/core/src/core/openaiContentGenerator.ts
@@ -563,7 +563,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
 
     // Add combined text if any
     if (combinedText) {
-      combinedParts.push({ text: combinedText.trimEnd() });
+      combinedParts.push({ text: combinedText });
     }
 
     // Add function calls
@@ -1164,11 +1164,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
 
     // Handle text content
     if (choice.message.content) {
-      if (typeof choice.message.content === 'string') {
-        parts.push({ text: choice.message.content.trimEnd() });
-      } else {
-        parts.push({ text: choice.message.content });
-      }
+      parts.push({ text: choice.message.content });
     }
 
     // Handle tool calls
@@ -1253,11 +1249,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
 
     // Handle text content
     if (choice.delta?.content) {
-      if (typeof choice.delta.content === 'string') {
-        parts.push({ text: choice.delta.content.trimEnd() });
-      } else {
-        parts.push({ text: choice.delta.content });
-      }
+      parts.push({ text: choice.delta.content });
     }
 
     // Handle tool calls - only accumulate during streaming, emit when complete
@@ -1776,7 +1768,7 @@ export class OpenAIContentGenerator implements ContentGenerator {
         }
       }
 
-      messageContent = textParts.join('').trimEnd();
+      messageContent = textParts.join('');
     }
 
     const choice: OpenAIChoice = {
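
Reviewer note, not part of the patch: a minimal TypeScript sketch of the assumed motivation for the revert. In a streamed response, the trailing whitespace of one delta is typically the separator before the next delta, so calling trimEnd() on each chunk silently glues words together once the chunks are concatenated; a final trimEnd() on the joined text can likewise drop meaningful trailing newlines, for example at the end of a generated code block. The deltas array below is illustrative and not taken from the repository.

  // Illustrative only: simulated streaming deltas whose trailing spaces
  // act as the separators between chunks.
  const deltas: string[] = ['Hello ', 'world, ', 'streamed in chunks.\n'];

  // Behavior before this revert: trim each chunk as it arrives.
  const trimmedPerChunk = deltas.map((d) => d.trimEnd()).join('');
  // -> "Helloworld,streamed in chunks."  (separators and trailing newline lost)

  // Behavior restored by this patch: pass chunks through untouched.
  const passedThrough = deltas.join('');
  // -> "Hello world, streamed in chunks.\n"

  console.log(JSON.stringify(trimmedPerChunk));
  console.log(JSON.stringify(passedThrough));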