feat: add default systemPromptMappings for Qwen models

- Add default systemPromptMappings configuration for qwen3-coder-plus model
- Support DashScope compatible mode API endpoints
- Include Qwen coder system prompt template with git repository and sandbox placeholders
- Add comprehensive test coverage for default and custom systemPromptMappings
- Update documentation to reflect the new default configuration behavior
- Ensure backward compatibility with existing user configurations

Author: pomelo-nwu
Date: 2025-07-25 12:23:37 +08:00
Parent: 09bafda05b
Commit: 782e9d2314
7 changed files with 225 additions and 17 deletions

View File

@@ -215,6 +215,38 @@ In addition to a project settings file, a project's `.gemini` directory can cont
   "enableOpenAILogging": true
 ```
+
+- **`systemPromptMappings`** (array):
+  - **Description:** Configures custom system prompt templates for specific model names and base URLs, so that different AI models or API endpoints can receive different system prompts.
+  - **Default:** a built-in mapping that applies the Qwen coder template to `qwen3-coder-plus` on the DashScope compatible-mode endpoints.
+  - **Properties:**
+    - **`baseUrls`** (array of strings, optional): Base URLs matched exactly against the `OPENAI_BASE_URL` environment variable. If not specified, any base URL matches.
+    - **`modelNames`** (array of strings, optional): Model names matched exactly against the `OPENAI_MODEL` environment variable. If not specified, any model matches.
+    - **`template`** (string): The system prompt template to use when the mapping matches. Supports these placeholders:
+      - `{RUNTIME_VARS_IS_GIT_REPO}`: Replaced with `true` or `false`, depending on whether the current directory is a git repository
+      - `{RUNTIME_VARS_SANDBOX}`: Replaced with the sandbox type (e.g., `"sandbox-exec"`, `"docker"`, or an empty string)
+  - **Example:**
+    ```json
+    "systemPromptMappings": [
+      {
+        "baseUrls": [
+          "https://dashscope.aliyuncs.com/compatible-mode/v1/",
+          "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/"
+        ],
+        "modelNames": ["qwen3-coder-plus"],
+        "template": "SYSTEM_TEMPLATE:{\"name\":\"qwen3_coder\",\"params\":{\"is_git_repository\":{RUNTIME_VARS_IS_GIT_REPO},\"sandbox\":\"{RUNTIME_VARS_SANDBOX}\"}}"
+      },
+      {
+        "modelNames": ["gpt-4"],
+        "template": "You are a helpful AI assistant specialized in coding tasks. Current sandbox: {RUNTIME_VARS_SANDBOX}"
+      },
+      {
+        "baseUrls": ["https://api.openai.com/v1"],
+        "template": "You are an AI coding assistant. Working in git repository: {RUNTIME_VARS_IS_GIT_REPO}"
+      }
+    ]
+    ```
+
 ### Example `settings.json`:
 
 ```json
@@ -242,7 +274,22 @@ In addition to a project settings file, a project's `.gemini` directory can cont
   "hideTips": false,
   "hideBanner": false,
   "maxSessionTurns": 10,
-  "enableOpenAILogging": true
+  "enableOpenAILogging": true,
+  "systemPromptMappings": [
+    {
+      "baseUrls": [
+        "https://dashscope.aliyuncs.com/compatible-mode/v1/",
+        "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/"
+      ],
+      "modelNames": ["qwen3-coder-plus"],
+      "template": "SYSTEM_TEMPLATE:{\"name\":\"qwen3_coder\",\"params\":{\"is_git_repository\":{RUNTIME_VARS_IS_GIT_REPO},\"sandbox\":\"{RUNTIME_VARS_SANDBOX}\"}}"
+    },
+    {
+      "modelNames": ["gpt-4"],
+      "template": "You are a helpful AI assistant specialized in coding tasks. Current sandbox: {RUNTIME_VARS_SANDBOX}"
+    },
+    {
+      "baseUrls": ["https://api.openai.com/v1"],
+      "template": "You are an AI coding assistant. Working in git repository: {RUNTIME_VARS_IS_GIT_REPO}"
+    }
+  ]
 }
 ```
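
For reference, here is a minimal standalone sketch (not part of the commit; the function name `resolvePrompt` and the argument values are illustrative) of the matching and substitution rules described above: `OPENAI_BASE_URL` and `OPENAI_MODEL` are compared exactly against `baseUrls` and `modelNames`, the first matching entry wins, and the two `RUNTIME_VARS_*` placeholders are then substituted.

```ts
// Sketch of the documented matching + substitution semantics.
interface ModelTemplateMapping {
  baseUrls?: string[];
  modelNames?: string[];
  template?: string;
}

function resolvePrompt(
  mappings: ModelTemplateMapping[],
  baseUrl: string, // value of OPENAI_BASE_URL
  model: string, // value of OPENAI_MODEL
  isGitRepo: boolean,
  sandbox: string,
): string | undefined {
  const match = mappings.find(({ baseUrls, modelNames }) => {
    if (baseUrls && modelNames)
      return baseUrls.includes(baseUrl) && modelNames.includes(model);
    if (baseUrls) return baseUrls.includes(baseUrl);
    if (modelNames) return modelNames.includes(model);
    return false; // an entry with neither field specified never matches
  });
  return match?.template
    ?.replace('{RUNTIME_VARS_IS_GIT_REPO}', String(isGitRepo))
    .replace('{RUNTIME_VARS_SANDBOX}', sandbox);
}

// Inside a git repository, no sandbox, pointed at DashScope:
resolvePrompt(
  [
    {
      baseUrls: ['https://dashscope.aliyuncs.com/compatible-mode/v1/'],
      modelNames: ['qwen3-coder-plus'],
      template:
        'SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":{RUNTIME_VARS_IS_GIT_REPO},"sandbox":"{RUNTIME_VARS_SANDBOX}"}}',
    },
  ],
  'https://dashscope.aliyuncs.com/compatible-mode/v1/',
  'qwen3-coder-plus',
  true,
  '',
);
// -> 'SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":true,"sandbox":""}}'
```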

View File

@@ -6,7 +6,7 @@
 import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
 import * as os from 'os';
-import { loadCliConfig, parseArguments } from './config.js';
+import { loadCliConfig, parseArguments, CliArgs } from './config.js';
 import { Settings } from './settings.js';
 import { Extension } from './extension.js';
 import * as ServerConfig from '@qwen-code/qwen-code-core';
@@ -1001,9 +1001,73 @@ describe('loadCliConfig ideMode', () => {
     const config = await loadCliConfig(settings, [], 'test-session', argv);
     expect(config.getIdeMode()).toBe(true);
     const mcpServers = config.getMcpServers();
-    expect(mcpServers['_ide_server']).toBeDefined();
-    expect(mcpServers['_ide_server'].httpUrl).toBe('http://localhost:3000/mcp');
-    expect(mcpServers['_ide_server'].description).toBe('IDE connection');
-    expect(mcpServers['_ide_server'].trust).toBe(false);
+    expect(mcpServers?.['_ide_server']).toBeDefined();
+    expect(mcpServers?.['_ide_server']?.httpUrl).toBe(
+      'http://localhost:3000/mcp',
+    );
+    expect(mcpServers?.['_ide_server']?.description).toBe('IDE connection');
+    expect(mcpServers?.['_ide_server']?.trust).toBe(false);
   });
 });
+
+describe('loadCliConfig systemPromptMappings', () => {
+  it('should use default systemPromptMappings when not provided in settings', async () => {
+    const mockSettings: Settings = {
+      theme: 'dark',
+    };
+    const mockExtensions: Extension[] = [];
+    const mockSessionId = 'test-session';
+    const mockArgv: CliArgs = {
+      model: 'test-model',
+    } as CliArgs;
+
+    const config = await loadCliConfig(
+      mockSettings,
+      mockExtensions,
+      mockSessionId,
+      mockArgv,
+    );
+
+    expect(config.getSystemPromptMappings()).toEqual([
+      {
+        baseUrls: [
+          'https://dashscope.aliyuncs.com/compatible-mode/v1/',
+          'https://dashscope-intl.aliyuncs.com/compatible-mode/v1/',
+        ],
+        modelNames: ['qwen3-coder-plus'],
+        template:
+          'SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":{RUNTIME_VARS_IS_GIT_REPO},"sandbox":"{RUNTIME_VARS_SANDBOX}"}}',
+      },
+    ]);
+  });
+
+  it('should use custom systemPromptMappings when provided in settings', async () => {
+    const customSystemPromptMappings = [
+      {
+        baseUrls: ['https://custom-api.com'],
+        modelNames: ['custom-model'],
+        template: 'Custom template',
+      },
+    ];
+    const mockSettings: Settings = {
+      theme: 'dark',
+      systemPromptMappings: customSystemPromptMappings,
+    };
+    const mockExtensions: Extension[] = [];
+    const mockSessionId = 'test-session';
+    const mockArgv: CliArgs = {
+      model: 'test-model',
+    } as CliArgs;
+
+    const config = await loadCliConfig(
+      mockSettings,
+      mockExtensions,
+      mockSessionId,
+      mockArgv,
+    );
+
+    expect(config.getSystemPromptMappings()).toEqual(
+      customSystemPromptMappings,
+    );
+  });
+});

View File

@@ -394,6 +394,17 @@ export async function loadCliConfig(
       ? settings.enableOpenAILogging
       : argv.openaiLogging) ?? false,
     sampling_params: settings.sampling_params,
+    systemPromptMappings: settings.systemPromptMappings || [
+      {
+        baseUrls: [
+          'https://dashscope.aliyuncs.com/compatible-mode/v1/',
+          'https://dashscope-intl.aliyuncs.com/compatible-mode/v1/',
+        ],
+        modelNames: ['qwen3-coder-plus'],
+        template:
+          'SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":{RUNTIME_VARS_IS_GIT_REPO},"sandbox":"{RUNTIME_VARS_SANDBOX}"}}',
+      },
+    ],
   });
 }
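
A side note on the `||` fallback above (my observation, not stated in the diff): only a missing or `undefined` setting falls back to the built-in default. An explicitly configured empty array is truthy in JavaScript, so it effectively disables the default mapping.

```ts
// Illustrative consequence of `settings.systemPromptMappings || [defaultMapping]`:
const defaultMapping = { modelNames: ['qwen3-coder-plus'] };

console.log((undefined || [defaultMapping]).length); // 1 — default applies
console.log(([] || [defaultMapping]).length); // 0 — empty array wins, default disabled
```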

View File

@@ -96,6 +96,13 @@ export interface Settings {
     max_tokens?: number;
   };
 
+  // System prompt mappings for different base URLs and model names
+  systemPromptMappings?: Array<{
+    baseUrls?: string[];
+    modelNames?: string[];
+    template?: string;
+  }>;
+
   // Add other settings here.
   ideMode?: boolean;
 }

View File

@@ -154,6 +154,11 @@ export interface ConfigParameters {
     temperature?: number;
     max_tokens?: number;
   };
+  systemPromptMappings?: Array<{
+    baseUrls?: string[];
+    modelNames?: string[];
+    template?: string;
+  }>;
 }
 
 export class Config {
@@ -204,6 +209,11 @@
     temperature?: number;
     max_tokens?: number;
   };
+  private readonly systemPromptMappings?: Array<{
+    baseUrls?: string[];
+    modelNames?: string[];
+    template?: string;
+  }>;
   private modelSwitchedDuringSession: boolean = false;
   private readonly maxSessionTurns: number;
   private readonly listExtensions: boolean;
@@ -258,6 +268,7 @@
     this.ideMode = params.ideMode ?? false;
     this.enableOpenAILogging = params.enableOpenAILogging ?? false;
     this.sampling_params = params.sampling_params;
+    this.systemPromptMappings = params.systemPromptMappings;
 
     if (params.contextFileName) {
       setGeminiMdFilename(params.contextFileName);
@@ -540,6 +551,16 @@
     return this.enableOpenAILogging;
   }
 
+  getSystemPromptMappings():
+    | Array<{
+        baseUrls?: string[];
+        modelNames?: string[];
+        template?: string;
+      }>
+    | undefined {
+    return this.systemPromptMappings;
+  }
+
   async refreshMemory(): Promise<{ memoryContent: string; fileCount: number }> {
     const { memoryContent, fileCount } = await loadServerHierarchicalMemory(
       this.getWorkingDir(),

View File

@@ -238,7 +238,10 @@ export class GeminiClient {
     ];
     try {
       const userMemory = this.config.getUserMemory();
-      const systemInstruction = getCoreSystemPrompt(userMemory);
+      const systemPromptMappings = this.config.getSystemPromptMappings();
+      const systemInstruction = getCoreSystemPrompt(userMemory, {
+        systemPromptMappings,
+      });
       const generateContentConfigWithThinking = isThinkingSupported(
         this.config.getModel(),
       )
@@ -354,7 +357,10 @@
       model || this.config.getModel() || DEFAULT_GEMINI_FLASH_MODEL;
     try {
       const userMemory = this.config.getUserMemory();
-      const systemInstruction = getCoreSystemPrompt(userMemory);
+      const systemPromptMappings = this.config.getSystemPromptMappings();
+      const systemInstruction = getCoreSystemPrompt(userMemory, {
+        systemPromptMappings,
+      });
       const requestConfig = {
         abortSignal,
         ...this.generateContentConfig,
@@ -470,7 +476,10 @@
     try {
       const userMemory = this.config.getUserMemory();
-      const systemInstruction = getCoreSystemPrompt(userMemory);
+      const systemPromptMappings = this.config.getSystemPromptMappings();
+      const systemInstruction = getCoreSystemPrompt(userMemory, {
+        systemPromptMappings,
+      });
       const requestConfig = {
         abortSignal,

View File

@@ -18,7 +18,20 @@ import process from 'node:process';
 import { isGitRepository } from '../utils/gitUtils.js';
 import { MemoryTool, GEMINI_CONFIG_DIR } from '../tools/memoryTool.js';
 
-export function getCoreSystemPrompt(userMemory?: string): string {
+export interface ModelTemplateMapping {
+  baseUrls?: string[];
+  modelNames?: string[];
+  template?: string;
+}
+
+export interface SystemPromptConfig {
+  systemPromptMappings?: ModelTemplateMapping[];
+}
+
+export function getCoreSystemPrompt(
+  userMemory?: string,
+  config?: SystemPromptConfig,
+): string {
   // if GEMINI_SYSTEM_MD is set (and not 0|false), override system prompt from file
   // default path is .qwen/system.md but can be modified via custom path in GEMINI_SYSTEM_MD
   let systemMdEnabled = false;
@@ -34,13 +47,49 @@ export function getCoreSystemPrompt(userMemory?: string): string {
       throw new Error(`missing system prompt file '${systemMdPath}'`);
     }
   }
-  if (
-    process.env.OPENAI_MODEL?.startsWith('qwen3') &&
-    process.env.OPENAI_BASE_URL?.includes('dashscope')
-  ) {
-    const sandbox =
-      process.env.SANDBOX === 'sandbox-exec' ? 'sandbox-exec' : '';
-    return `SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":${isGitRepository(process.cwd())},"sandbox":"${sandbox}"}}`;
-  }
+
+  // Check for system prompt mappings from global config
+  if (config?.systemPromptMappings) {
+    const currentModel = process.env.OPENAI_MODEL || '';
+    const currentBaseUrl = process.env.OPENAI_BASE_URL || '';
+
+    const matchedMapping = config.systemPromptMappings.find((mapping) => {
+      const { baseUrls, modelNames } = mapping;
+
+      // Check if baseUrl matches (when specified)
+      if (
+        baseUrls &&
+        modelNames &&
+        baseUrls.includes(currentBaseUrl) &&
+        modelNames.includes(currentModel)
+      ) {
+        return true;
+      }
+
+      if (baseUrls && baseUrls.includes(currentBaseUrl) && !modelNames) {
+        return true;
+      }
+
+      if (modelNames && modelNames.includes(currentModel) && !baseUrls) {
+        return true;
+      }
+
+      return false;
+    });
+
+    if (matchedMapping?.template) {
+      const sandbox =
+        process.env.SANDBOX === 'sandbox-exec' ? 'sandbox-exec' : '';
+      const isGitRepo = isGitRepository(process.cwd());
+
+      // Replace placeholders in template
+      let template = matchedMapping.template;
+      template = template.replace(
+        '{RUNTIME_VARS_IS_GIT_REPO}',
+        String(isGitRepo),
+      );
+      template = template.replace('{RUNTIME_VARS_SANDBOX}', sandbox);
+
+      return template;
+    }
+  }
 
   const basePrompt = systemMdEnabled
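
Because the lookup uses `Array.prototype.find`, the first matching entry in `systemPromptMappings` wins when several could apply. A hypothetical call (values illustrative; `GEMINI_SYSTEM_MD` assumed unset):

```ts
process.env.OPENAI_BASE_URL =
  'https://dashscope.aliyuncs.com/compatible-mode/v1/';
process.env.OPENAI_MODEL = 'qwen3-coder-plus';

const prompt = getCoreSystemPrompt(undefined, {
  systemPromptMappings: [
    // First match wins: the model-specific entry shadows the broader one below.
    { modelNames: ['qwen3-coder-plus'], template: 'qwen-specific prompt' },
    {
      baseUrls: ['https://dashscope.aliyuncs.com/compatible-mode/v1/'],
      template: 'generic DashScope prompt',
    },
  ],
});
// prompt === 'qwen-specific prompt'
```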