diff --git a/packages/core/src/core/openaiContentGenerator/index.ts b/packages/core/src/core/openaiContentGenerator/index.ts
index 192bf096..8559258c 100644
--- a/packages/core/src/core/openaiContentGenerator/index.ts
+++ b/packages/core/src/core/openaiContentGenerator/index.ts
@@ -13,6 +13,7 @@ import { OpenAIContentGenerator } from './openaiContentGenerator.js';
 import {
   DashScopeOpenAICompatibleProvider,
   DeepSeekOpenAICompatibleProvider,
+  ModelScopeOpenAICompatibleProvider,
   OpenRouterOpenAICompatibleProvider,
   type OpenAICompatibleProvider,
   DefaultOpenAICompatibleProvider,
@@ -78,6 +79,14 @@ export function determineProvider(
     );
   }
 
+  // Check for ModelScope provider
+  if (ModelScopeOpenAICompatibleProvider.isModelScopeProvider(config)) {
+    return new ModelScopeOpenAICompatibleProvider(
+      contentGeneratorConfig,
+      cliConfig,
+    );
+  }
+
   // Default provider for standard OpenAI-compatible APIs
   return new DefaultOpenAICompatibleProvider(contentGeneratorConfig, cliConfig);
 }
diff --git a/packages/core/src/core/openaiContentGenerator/provider/index.ts b/packages/core/src/core/openaiContentGenerator/provider/index.ts
index 9886b70f..cb33834d 100644
--- a/packages/core/src/core/openaiContentGenerator/provider/index.ts
+++ b/packages/core/src/core/openaiContentGenerator/provider/index.ts
@@ -1,3 +1,4 @@
+export { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
 export { DashScopeOpenAICompatibleProvider } from './dashscope.js';
 export { DeepSeekOpenAICompatibleProvider } from './deepseek.js';
 export { OpenRouterOpenAICompatibleProvider } from './openrouter.js';
diff --git a/packages/core/src/core/openaiContentGenerator/provider/modelscope.test.ts b/packages/core/src/core/openaiContentGenerator/provider/modelscope.test.ts
new file mode 100644
index 00000000..da5a71a8
--- /dev/null
+++ b/packages/core/src/core/openaiContentGenerator/provider/modelscope.test.ts
@@ -0,0 +1,96 @@
+/**
+ * @license
+ * Copyright 2025 Qwen
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import type OpenAI from 'openai';
+import { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
+import type { Config } from '../../../config/config.js';
+import type { ContentGeneratorConfig } from '../../contentGenerator.js';
+
+vi.mock('openai');
+
+describe('ModelScopeOpenAICompatibleProvider', () => {
+  let provider: ModelScopeOpenAICompatibleProvider;
+  let mockContentGeneratorConfig: ContentGeneratorConfig;
+  let mockCliConfig: Config;
+
+  beforeEach(() => {
+    mockContentGeneratorConfig = {
+      apiKey: 'test-api-key',
+      baseUrl: 'https://api.modelscope.cn/v1',
+      model: 'qwen-max',
+    } as ContentGeneratorConfig;
+
+    mockCliConfig = {
+      getCliVersion: vi.fn().mockReturnValue('1.0.0'),
+    } as unknown as Config;
+
+    provider = new ModelScopeOpenAICompatibleProvider(
+      mockContentGeneratorConfig,
+      mockCliConfig,
+    );
+  });
+
+  describe('isModelScopeProvider', () => {
+    it('should return true if baseUrl includes "modelscope"', () => {
+      const config = { baseUrl: 'https://api.modelscope.cn/v1' };
+      expect(
+        ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
+          config as ContentGeneratorConfig,
+        ),
+      ).toBe(true);
+    });
+
+    it('should return false if baseUrl does not include "modelscope"', () => {
+      const config = { baseUrl: 'https://api.openai.com/v1' };
+      expect(
+        ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
+          config as ContentGeneratorConfig,
+        ),
+      ).toBe(false);
+    });
+  });
+
+  describe('buildRequest', () => {
+    it('should remove stream_options when stream is false', () => {
+      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
+        model: 'qwen-max',
+        messages: [{ role: 'user', content: 'Hello!' }],
+        stream: false,
+        stream_options: { include_usage: true },
+      };
+
+      const result = provider.buildRequest(originalRequest, 'prompt-id');
+
+      expect(result).not.toHaveProperty('stream_options');
+    });
+
+    it('should keep stream_options when stream is true', () => {
+      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
+        model: 'qwen-max',
+        messages: [{ role: 'user', content: 'Hello!' }],
+        stream: true,
+        stream_options: { include_usage: true },
+      };
+
+      const result = provider.buildRequest(originalRequest, 'prompt-id');
+
+      expect(result).toHaveProperty('stream_options');
+    });
+
+    it('should handle requests without stream_options', () => {
+      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
+        model: 'qwen-max',
+        messages: [{ role: 'user', content: 'Hello!' }],
+        stream: false,
+      };
+
+      const result = provider.buildRequest(originalRequest, 'prompt-id');
+
+      expect(result).not.toHaveProperty('stream_options');
+    });
+  });
+});
diff --git a/packages/core/src/core/openaiContentGenerator/provider/modelscope.ts b/packages/core/src/core/openaiContentGenerator/provider/modelscope.ts
new file mode 100644
index 00000000..1afb2d03
--- /dev/null
+++ b/packages/core/src/core/openaiContentGenerator/provider/modelscope.ts
@@ -0,0 +1,32 @@
+import type OpenAI from 'openai';
+import { DefaultOpenAICompatibleProvider } from './default.js';
+import type { ContentGeneratorConfig } from '../../contentGenerator.js';
+
+/**
+ * Provider for ModelScope API
+ */
+export class ModelScopeOpenAICompatibleProvider extends DefaultOpenAICompatibleProvider {
+  /**
+   * Checks if the configuration is for ModelScope.
+   */
+  static isModelScopeProvider(config: ContentGeneratorConfig): boolean {
+    return !!config.baseUrl?.includes('modelscope');
+  }
+
+  /**
+   * ModelScope does not support `stream_options` when `stream` is false.
+   * This method removes `stream_options` if `stream` is not true.
+   */
+  override buildRequest(
+    request: OpenAI.Chat.ChatCompletionCreateParams,
+    userPromptId: string,
+  ): OpenAI.Chat.ChatCompletionCreateParams {
+    const newRequest = super.buildRequest(request, userPromptId);
+    if (!newRequest.stream) {
+      delete (newRequest as OpenAI.Chat.ChatCompletionCreateParamsNonStreaming)
+        .stream_options;
+    }
+
+    return newRequest;
+  }
+}
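--

Reviewer note (not part of the patch): the behavioral core of this change is the `stream_options` strip in `buildRequest`. The standalone TypeScript sketch below mirrors that transform with hand-rolled types so it can be run outside the repo; `ChatRequest` and `stripStreamOptionsIfNotStreaming` are illustrative names, not identifiers from this codebase.

// Standalone sketch of the request transform this patch adds. In the real
// provider, the base request is produced by DefaultOpenAICompatibleProvider's
// buildRequest; here a plain object copy stands in for it.
type ChatRequest = {
  model: string;
  messages: Array<{ role: string; content: string }>;
  stream?: boolean;
  stream_options?: { include_usage: boolean };
};

function stripStreamOptionsIfNotStreaming(request: ChatRequest): ChatRequest {
  const newRequest = { ...request };
  // ModelScope rejects `stream_options` on non-streaming requests,
  // so drop the field whenever `stream` is not true.
  if (!newRequest.stream) {
    delete newRequest.stream_options;
  }
  return newRequest;
}

// Non-streaming request: `stream_options` is removed before sending.
console.log(
  stripStreamOptionsIfNotStreaming({
    model: 'qwen-max',
    messages: [{ role: 'user', content: 'Hello!' }],
    stream: false,
    stream_options: { include_usage: true },
  }),
);

Subclassing DefaultOpenAICompatibleProvider keeps client construction and headers shared across providers; the override touches only the one field ModelScope rejects, which is also exactly what the three `buildRequest` test cases pin down.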