fix(core): add modelscope provider to handle stream_options (#848)

* fix(core): add modelscope provider to handle stream_options

---------

Co-authored-by: Qwen Code <qwen-code@alibaba-inc.com>
Co-authored-by: mingholy.lmh <mingholy.lmh@alibaba-inc.com>
Author: DS-Controller2
Date: 2025-11-18 00:47:20 -05:00
Committed by: GitHub
Parent: efca0bc795
Commit: f0bbeac04a
4 changed files with 138 additions and 0 deletions
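Background, as a minimal sketch (request shape taken from the new tests; the endpoint behavior is stated in the provider's doc comment below): ModelScope's OpenAI-compatible API does not accept `stream_options` on non-streaming requests, so the new provider strips the field whenever `stream` is not true.

import type OpenAI from 'openai';

// A non-streaming request that still carries stream_options; this is the
// combination ModelScope rejects and that the new provider removes.
const request: OpenAI.Chat.ChatCompletionCreateParams = {
  model: 'qwen-max',
  messages: [{ role: 'user', content: 'Hello!' }],
  stream: false,
  stream_options: { include_usage: true }, // deleted by the provider when stream is false
};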


@@ -13,6 +13,7 @@ import { OpenAIContentGenerator } from './openaiContentGenerator.js';
 import {
   DashScopeOpenAICompatibleProvider,
   DeepSeekOpenAICompatibleProvider,
+  ModelScopeOpenAICompatibleProvider,
   OpenRouterOpenAICompatibleProvider,
   type OpenAICompatibleProvider,
   DefaultOpenAICompatibleProvider,
@@ -78,6 +79,14 @@ export function determineProvider(
     );
   }
 
+  // Check for ModelScope provider
+  if (ModelScopeOpenAICompatibleProvider.isModelScopeProvider(config)) {
+    return new ModelScopeOpenAICompatibleProvider(
+      contentGeneratorConfig,
+      cliConfig,
+    );
+  }
+
   // Default provider for standard OpenAI-compatible APIs
   return new DefaultOpenAICompatibleProvider(contentGeneratorConfig, cliConfig);
 }
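For reference, a minimal sketch of how the detection behaves (baseUrl values and import paths taken from the test file below):

import { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
import type { ContentGeneratorConfig } from '../../contentGenerator.js';

// Any baseUrl containing "modelscope" routes to the new provider.
ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
  { baseUrl: 'https://api.modelscope.cn/v1' } as ContentGeneratorConfig,
); // true
ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
  { baseUrl: 'https://api.openai.com/v1' } as ContentGeneratorConfig,
); // false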


@@ -1,3 +1,4 @@
+export { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
 export { DashScopeOpenAICompatibleProvider } from './dashscope.js';
 export { DeepSeekOpenAICompatibleProvider } from './deepseek.js';
 export { OpenRouterOpenAICompatibleProvider } from './openrouter.js';


@@ -0,0 +1,96 @@
+/**
+ * @license
+ * Copyright 2025 Qwen
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import type OpenAI from 'openai';
+import { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
+import type { Config } from '../../../config/config.js';
+import type { ContentGeneratorConfig } from '../../contentGenerator.js';
+
+vi.mock('openai');
+
+describe('ModelScopeOpenAICompatibleProvider', () => {
+  let provider: ModelScopeOpenAICompatibleProvider;
+  let mockContentGeneratorConfig: ContentGeneratorConfig;
+  let mockCliConfig: Config;
+
+  beforeEach(() => {
+    mockContentGeneratorConfig = {
+      apiKey: 'test-api-key',
+      baseUrl: 'https://api.modelscope.cn/v1',
+      model: 'qwen-max',
+    } as ContentGeneratorConfig;
+    mockCliConfig = {
+      getCliVersion: vi.fn().mockReturnValue('1.0.0'),
+    } as unknown as Config;
+    provider = new ModelScopeOpenAICompatibleProvider(
+      mockContentGeneratorConfig,
+      mockCliConfig,
+    );
+  });
+
+  describe('isModelScopeProvider', () => {
+    it('should return true if baseUrl includes "modelscope"', () => {
+      const config = { baseUrl: 'https://api.modelscope.cn/v1' };
+      expect(
+        ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
+          config as ContentGeneratorConfig,
+        ),
+      ).toBe(true);
+    });
+
+    it('should return false if baseUrl does not include "modelscope"', () => {
+      const config = { baseUrl: 'https://api.openai.com/v1' };
+      expect(
+        ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
+          config as ContentGeneratorConfig,
+        ),
+      ).toBe(false);
+    });
+  });
+
+  describe('buildRequest', () => {
+    it('should remove stream_options when stream is false', () => {
+      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
+        model: 'qwen-max',
+        messages: [{ role: 'user', content: 'Hello!' }],
+        stream: false,
+        stream_options: { include_usage: true },
+      };
+      const result = provider.buildRequest(originalRequest, 'prompt-id');
+      expect(result).not.toHaveProperty('stream_options');
+    });
+
+    it('should keep stream_options when stream is true', () => {
+      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
+        model: 'qwen-max',
+        messages: [{ role: 'user', content: 'Hello!' }],
+        stream: true,
+        stream_options: { include_usage: true },
+      };
+      const result = provider.buildRequest(originalRequest, 'prompt-id');
+      expect(result).toHaveProperty('stream_options');
+    });
+
+    it('should handle requests without stream_options', () => {
+      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
+        model: 'qwen-max',
+        messages: [{ role: 'user', content: 'Hello!' }],
+        stream: false,
+      };
+      const result = provider.buildRequest(originalRequest, 'prompt-id');
+      expect(result).not.toHaveProperty('stream_options');
+    });
+  });
+});


@@ -0,0 +1,32 @@
+import type OpenAI from 'openai';
+import { DefaultOpenAICompatibleProvider } from './default.js';
+import type { ContentGeneratorConfig } from '../../contentGenerator.js';
+
+/**
+ * Provider for ModelScope API
+ */
+export class ModelScopeOpenAICompatibleProvider extends DefaultOpenAICompatibleProvider {
+  /**
+   * Checks if the configuration is for ModelScope.
+   */
+  static isModelScopeProvider(config: ContentGeneratorConfig): boolean {
+    return !!config.baseUrl?.includes('modelscope');
+  }
+
+  /**
+   * ModelScope does not support `stream_options` when `stream` is false.
+   * This method removes `stream_options` if `stream` is not true.
+   */
+  override buildRequest(
+    request: OpenAI.Chat.ChatCompletionCreateParams,
+    userPromptId: string,
+  ): OpenAI.Chat.ChatCompletionCreateParams {
+    const newRequest = super.buildRequest(request, userPromptId);
+    if (!newRequest.stream) {
+      delete (newRequest as OpenAI.Chat.ChatCompletionCreateParamsNonStreaming)
+        .stream_options;
+    }
+    return newRequest;
+  }
+}
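Usage, as a sketch mirroring the tests above (the two config values are declared rather than constructed; their shapes follow the test setup):

import { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
import type { Config } from '../../../config/config.js';
import type { ContentGeneratorConfig } from '../../contentGenerator.js';

declare const contentGeneratorConfig: ContentGeneratorConfig; // e.g. baseUrl 'https://api.modelscope.cn/v1'
declare const cliConfig: Config;

const provider = new ModelScopeOpenAICompatibleProvider(
  contentGeneratorConfig,
  cliConfig,
);

// Non-streaming: stream_options is stripped before the request is sent.
const sanitized = provider.buildRequest(
  {
    model: 'qwen-max',
    messages: [{ role: 'user', content: 'Hello!' }],
    stream: false,
    stream_options: { include_usage: true },
  },
  'prompt-id',
);
// 'stream_options' in sanitized === false; streaming requests keep the field.

Since the override delegates to super.buildRequest and only deletes one field, the rest of the request-building behavior is inherited unchanged from DefaultOpenAICompatibleProvider, keeping the workaround scoped to ModelScope.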