diff --git a/packages/core/src/core/geminiChat.ts b/packages/core/src/core/geminiChat.ts
index 6b4e9bde..5db2a286 100644
--- a/packages/core/src/core/geminiChat.ts
+++ b/packages/core/src/core/geminiChat.ts
@@ -50,9 +50,13 @@ const INVALID_CONTENT_RETRY_OPTIONS: ContentRetryOptions = {
 };
 /**
  * Returns true if the response is valid, false otherwise.
+ *
+ * The DashScope provider may return the last 2 chunks as:
+ * 1. A choice(candidate) with finishReason and empty content
+ * 2. Empty choices with usage metadata
+ * We'll check separately for both of these cases.
  */
 function isValidResponse(response: GenerateContentResponse): boolean {
-  // The Dashscope provider returns empty content with usage metadata at the end of the stream
   if (response.usageMetadata) {
     return true;
   }
@@ -61,6 +65,10 @@ function isValidResponse(response: GenerateContentResponse): boolean {
     return false;
   }
 
+  if (response.candidates.some((candidate) => candidate.finishReason)) {
+    return true;
+  }
+
   const content = response.candidates[0]?.content;
   return content !== undefined && isValidContent(content);
 }
diff --git a/packages/core/src/core/openaiContentGenerator/converter.ts b/packages/core/src/core/openaiContentGenerator/converter.ts
index e4b9c220..b70f65f6 100644
--- a/packages/core/src/core/openaiContentGenerator/converter.ts
+++ b/packages/core/src/core/openaiContentGenerator/converter.ts
@@ -18,6 +18,7 @@ import {
   ContentListUnion,
   ContentUnion,
   PartUnion,
+  Candidate,
 } from '@google/genai';
 import OpenAI from 'openai';
 import { safeJsonParse } from '../../utils/safeJsonParse.js';
@@ -652,19 +653,21 @@
         this.streamingToolCallParser.reset();
       }
 
-      response.candidates = [
-        {
-          content: {
-            parts,
-            role: 'model' as const,
-          },
-          finishReason: choice.finish_reason
-            ? this.mapOpenAIFinishReasonToGemini(choice.finish_reason)
-            : FinishReason.FINISH_REASON_UNSPECIFIED,
-          index: 0,
-          safetyRatings: [],
+      // Only include finishReason key if finish_reason is present
+      const candidate: Candidate = {
+        content: {
+          parts,
+          role: 'model' as const,
         },
-      ];
+        index: 0,
+        safetyRatings: [],
+      };
+      if (choice.finish_reason) {
+        candidate.finishReason = this.mapOpenAIFinishReasonToGemini(
+          choice.finish_reason,
+        );
+      }
+      response.candidates = [candidate];
     } else {
       response.candidates = [];
     }
diff --git a/packages/core/src/core/openaiContentGenerator/pipeline.ts b/packages/core/src/core/openaiContentGenerator/pipeline.ts
index 306344a3..6d86811a 100644
--- a/packages/core/src/core/openaiContentGenerator/pipeline.ts
+++ b/packages/core/src/core/openaiContentGenerator/pipeline.ts
@@ -119,6 +119,7 @@ export class ContentGenerationPipeline {
         // Stage 2b: Filter empty responses to avoid downstream issues
         if (
           response.candidates?.[0]?.content?.parts?.length === 0 &&
+          !response.candidates?.[0]?.finishReason &&
           !response.usageMetadata
         ) {
           continue;
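
For context, here is a minimal sketch (not part of the patch) of how the two DashScope tail chunks flow through the updated validity check. The helper below mirrors the patched `isValidResponse` logic from `geminiChat.ts`, with `isValidContent` simplified to a non-empty-parts check since the real function is module-private; the sample chunk objects and token counts are made up for illustration.

```ts
import { FinishReason, type GenerateContentResponse } from '@google/genai';

// Mirrors the patched isValidResponse from geminiChat.ts; isValidContent is
// simplified here to "has at least one part" for the sake of the sketch.
function isValidResponseSketch(response: GenerateContentResponse): boolean {
  if (response.usageMetadata) {
    return true; // tail chunk 2: empty choices, usage metadata only
  }
  if (!response.candidates || response.candidates.length === 0) {
    return false;
  }
  if (response.candidates.some((candidate) => candidate.finishReason)) {
    return true; // tail chunk 1: finishReason with empty content
  }
  const content = response.candidates[0]?.content;
  return content !== undefined && (content.parts?.length ?? 0) > 0;
}

// Tail chunk 1: a single candidate carrying only a finishReason.
const finishChunk = {
  candidates: [
    {
      content: { parts: [], role: 'model' },
      finishReason: FinishReason.STOP,
      index: 0,
    },
  ],
} as unknown as GenerateContentResponse;

// Tail chunk 2: no candidates, only usage metadata (token counts are made up).
const usageChunk = {
  candidates: [],
  usageMetadata: {
    promptTokenCount: 10,
    candidatesTokenCount: 42,
    totalTokenCount: 52,
  },
} as unknown as GenerateContentResponse;

console.log(isValidResponseSketch(finishChunk)); // true (previously false: empty parts, no usageMetadata)
console.log(isValidResponseSketch(usageChunk)); // true (unchanged behavior)
```

The converter and pipeline changes keep this consistent end to end: the converter now sets `finishReason` only on the chunk where the upstream API actually sent `finish_reason`, and the pipeline's empty-response filter lets that finish chunk through instead of dropping it as an empty delta, so `isValidResponse` gets to see it.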