fix: relax chunk validation to avoid unnecessary retry

mingholy.lmh
2025-09-11 22:43:18 +08:00
parent 03eb1faf0a
commit 636b1044b8
3 changed files with 25 additions and 13 deletions

View File

@@ -50,9 +50,13 @@ const INVALID_CONTENT_RETRY_OPTIONS: ContentRetryOptions = {
 };
 /**
  * Returns true if the response is valid, false otherwise.
+ *
+ * The DashScope provider may return the last 2 chunks as:
+ * 1. A choice(candidate) with finishReason and empty content
+ * 2. Empty choices with usage metadata
+ * We'll check separately for both of these cases.
  */
 function isValidResponse(response: GenerateContentResponse): boolean {
-  // The Dashscope provider returns empty content with usage metadata at the end of the stream
   if (response.usageMetadata) {
     return true;
   }
@@ -61,6 +65,10 @@ function isValidResponse(response: GenerateContentResponse): boolean {
     return false;
   }
+  if (response.candidates.some((candidate) => candidate.finishReason)) {
+    return true;
+  }
   const content = response.candidates[0]?.content;
   return content !== undefined && isValidContent(content);
 }
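
Taken together, the two hunks above relax `isValidResponse` so that DashScope's two terminal chunk shapes no longer look like invalid responses that trigger a retry. A minimal sketch of the resulting logic follows; it is not the literal patched file, and `isValidContent` is an assumed stand-in for the existing helper (not shown in this diff) that treats a content block as valid when it carries at least one part.

```ts
import type { Candidate, Content, GenerateContentResponse } from '@google/genai';

// Assumed stand-in for the existing isValidContent helper (not part of this diff).
function isValidContent(content: Content): boolean {
  return (content.parts ?? []).length > 0;
}

function isValidResponse(response: GenerateContentResponse): boolean {
  // Case 2: a trailing chunk with empty choices but usage metadata.
  if (response.usageMetadata) {
    return true;
  }
  if (!response.candidates || response.candidates.length === 0) {
    return false;
  }
  // Case 1: a trailing chunk whose candidate has a finishReason but empty content.
  if (response.candidates.some((candidate: Candidate) => candidate.finishReason)) {
    return true;
  }
  // Otherwise the first candidate must carry real content.
  const content = response.candidates[0]?.content;
  return content !== undefined && isValidContent(content);
}
```

In effect, only chunks that have candidates but carry neither content, a finishReason, nor usage metadata are treated as invalid and retried.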

View File

@@ -18,6 +18,7 @@ import {
   ContentListUnion,
   ContentUnion,
   PartUnion,
+  Candidate,
 } from '@google/genai';
 import OpenAI from 'openai';
 import { safeJsonParse } from '../../utils/safeJsonParse.js';
@@ -652,19 +653,21 @@ export class OpenAIContentConverter {
       this.streamingToolCallParser.reset();
     }
-      response.candidates = [
-        {
+      // Only include finishReason key if finish_reason is present
+      const candidate: Candidate = {
         content: {
           parts,
           role: 'model' as const,
         },
-        finishReason: choice.finish_reason
-          ? this.mapOpenAIFinishReasonToGemini(choice.finish_reason)
-          : FinishReason.FINISH_REASON_UNSPECIFIED,
         index: 0,
         safetyRatings: [],
-        },
-      ];
+      };
+      if (choice.finish_reason) {
+        candidate.finishReason = this.mapOpenAIFinishReasonToGemini(
+          choice.finish_reason,
+        );
+      }
+      response.candidates = [candidate];
     } else {
       response.candidates = [];
     }
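
With this change, interim streaming deltas no longer carry a placeholder finishReason; the key is set only when the OpenAI chunk actually reports `finish_reason`, leaving it undefined otherwise. The sketch below illustrates that construction; `mapFinishReason` and `toCandidate` are hypothetical stand-ins introduced here for the converter's private `mapOpenAIFinishReasonToGemini` and the surrounding method, and the mapping table is illustrative only.

```ts
import { FinishReason } from '@google/genai';
import type { Candidate, Part } from '@google/genai';

// Hypothetical stand-in for mapOpenAIFinishReasonToGemini; the real mapping may differ.
function mapFinishReason(reason: string): FinishReason {
  switch (reason) {
    case 'stop':
      return FinishReason.STOP;
    case 'length':
      return FinishReason.MAX_TOKENS;
    default:
      return FinishReason.OTHER;
  }
}

// Build a Gemini-style candidate from a streamed OpenAI delta. The finishReason
// key is attached only when the chunk carries finish_reason, so interim deltas
// leave candidate.finishReason undefined instead of holding a placeholder value.
function toCandidate(parts: Part[], finishReason: string | null | undefined): Candidate {
  const candidate: Candidate = {
    content: { parts, role: 'model' },
    index: 0,
    safetyRatings: [],
  };
  if (finishReason) {
    candidate.finishReason = mapFinishReason(finishReason);
  }
  return candidate;
}
```

Omitting the key (rather than filling in FINISH_REASON_UNSPECIFIED) is what lets the validation and filtering code below treat "has a finishReason" as a reliable end-of-stream signal.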

View File

@@ -119,6 +119,7 @@ export class ContentGenerationPipeline {
       // Stage 2b: Filter empty responses to avoid downstream issues
       if (
         response.candidates?.[0]?.content?.parts?.length === 0 &&
+        !response.candidates?.[0]?.finishReason &&
         !response.usageMetadata
       ) {
         continue;
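
With the converter no longer stamping a finishReason on every delta, the pipeline's empty-chunk filter can tell interim empty deltas apart from terminal chunks. A hedged sketch of the filter condition as a standalone predicate is below; the pipeline inlines this condition, and `shouldDropChunk` is a name introduced here only for illustration.

```ts
import type { GenerateContentResponse } from '@google/genai';

// Drop a streamed chunk only when it is truly empty: an empty parts array on
// the first candidate, no finishReason, and no usage metadata. Terminal chunks
// (finishReason with empty content, or usage-only chunks) pass through.
function shouldDropChunk(response: GenerateContentResponse): boolean {
  const first = response.candidates?.[0];
  return (
    first?.content?.parts?.length === 0 &&
    !first?.finishReason &&
    !response.usageMetadata
  );
}
```

This is the same three-part rule the hunk above creates by adding `!response.candidates?.[0]?.finishReason &&`: only chunks with nothing useful are skipped, so the terminal chunks reach the relaxed validity check and no unnecessary retry is triggered.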