Mirror of https://github.com/QwenLM/qwen-code.git (synced 2025-12-21 01:07:46 +00:00)
Merge tag 'v0.1.15' into feature/yiheng/sync-gemini-cli-0.1.15
@@ -6,15 +6,10 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as os from 'os';
import { loadCliConfig, parseArguments, CliArgs } from './config.js';
import { loadCliConfig, parseArguments } from './config.js';
import { Settings } from './settings.js';
import { Extension } from './extension.js';
import * as ServerConfig from '@qwen-code/qwen-code-core';
import {
  TelemetryTarget,
  ConfigParameters,
  DEFAULT_TELEMETRY_TARGET,
} from '@qwen-code/qwen-code-core';

vi.mock('os', async (importOriginal) => {
  const actualOs = await importOriginal<typeof os>();
@@ -42,63 +37,19 @@ vi.mock('@qwen-code/qwen-code-core', async () => {
    ...actualServer,
    loadEnvironment: vi.fn(),
    loadServerHierarchicalMemory: vi.fn(
      (cwd, debug, fileService, extensionPaths) =>
      (cwd, debug, fileService, extensionPaths, _maxDirs) =>
        Promise.resolve({
          memoryContent: extensionPaths?.join(',') || '',
          fileCount: extensionPaths?.length || 0,
        }),
    ),
    Config: class MockConfig extends actualServer.Config {
      private enableOpenAILogging: boolean;

      constructor(params: ConfigParameters) {
        super(params);
        this.enableOpenAILogging = params.enableOpenAILogging ?? false;
      }

      getEnableOpenAILogging(): boolean {
        return this.enableOpenAILogging;
      }

      // Override other methods to ensure they work correctly
      getShowMemoryUsage(): boolean {
        return (
          (this as unknown as { showMemoryUsage?: boolean }).showMemoryUsage ??
          false
        );
      }

      getTelemetryEnabled(): boolean {
        return (
          (this as unknown as { telemetrySettings?: { enabled?: boolean } })
            .telemetrySettings?.enabled ?? false
        );
      }

      getTelemetryLogPromptsEnabled(): boolean {
        return (
          (this as unknown as { telemetrySettings?: { logPrompts?: boolean } })
            .telemetrySettings?.logPrompts ?? false
        );
      }

      getTelemetryOtlpEndpoint(): string {
        return (
          (this as unknown as { telemetrySettings?: { otlpEndpoint?: string } })
            .telemetrySettings?.otlpEndpoint ??
          'http://tracing-analysis-dc-hz.aliyuncs.com:8090'
        );
      }

      getTelemetryTarget(): TelemetryTarget {
        return (
          (
            this as unknown as {
              telemetrySettings?: { target?: TelemetryTarget };
            }
          ).telemetrySettings?.target ?? DEFAULT_TELEMETRY_TARGET
        );
      }
    DEFAULT_MEMORY_FILE_FILTERING_OPTIONS: {
      respectGitIgnore: false,
      respectGeminiIgnore: true,
    },
    DEFAULT_FILE_FILTERING_OPTIONS: {
      respectGitIgnore: true,
      respectGeminiIgnore: true,
    },
  };
});
@@ -244,6 +195,85 @@ describe('loadCliConfig', () => {
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getShowMemoryUsage()).toBe(true);
  });

  it(`should leave proxy to empty by default`, async () => {
    // Clear all proxy environment variables to ensure clean test
    delete process.env.https_proxy;
    delete process.env.http_proxy;
    delete process.env.HTTPS_PROXY;
    delete process.env.HTTP_PROXY;

    process.argv = ['node', 'script.js'];
    const argv = await parseArguments();
    const settings: Settings = {};
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getProxy()).toBeFalsy();
  });

  const proxy_url = 'http://localhost:7890';
  const testCases = [
    {
      input: {
        env_name: 'https_proxy',
        proxy_url,
      },
      expected: proxy_url,
    },
    {
      input: {
        env_name: 'http_proxy',
        proxy_url,
      },
      expected: proxy_url,
    },
    {
      input: {
        env_name: 'HTTPS_PROXY',
        proxy_url,
      },
      expected: proxy_url,
    },
    {
      input: {
        env_name: 'HTTP_PROXY',
        proxy_url,
      },
      expected: proxy_url,
    },
  ];
  testCases.forEach(({ input, expected }) => {
    it(`should set proxy to ${expected} according to environment variable [${input.env_name}]`, async () => {
      // Clear all proxy environment variables first
      delete process.env.https_proxy;
      delete process.env.http_proxy;
      delete process.env.HTTPS_PROXY;
      delete process.env.HTTP_PROXY;

      process.env[input.env_name] = input.proxy_url;
      process.argv = ['node', 'script.js'];
      const argv = await parseArguments();
      const settings: Settings = {};
      const config = await loadCliConfig(settings, [], 'test-session', argv);
      expect(config.getProxy()).toBe(expected);
    });
  });

  it('should set proxy when --proxy flag is present', async () => {
    process.argv = ['node', 'script.js', '--proxy', 'http://localhost:7890'];
    const argv = await parseArguments();
    const settings: Settings = {};
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getProxy()).toBe('http://localhost:7890');
  });

  it('should prioritize CLI flag over environment variable for proxy (CLI http://localhost:7890, environment variable http://localhost:7891)', async () => {
    process.env['http_proxy'] = 'http://localhost:7891';
    process.argv = ['node', 'script.js', '--proxy', 'http://localhost:7890'];
    const argv = await parseArguments();
    const settings: Settings = {};
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getProxy()).toBe('http://localhost:7890');
  });
});

describe('loadCliConfig telemetry', () => {
@@ -350,9 +380,7 @@ describe('loadCliConfig telemetry', () => {
    const argv = await parseArguments();
    const settings: Settings = { telemetry: { enabled: true } };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getTelemetryOtlpEndpoint()).toBe(
      'http://tracing-analysis-dc-hz.aliyuncs.com:8090',
    );
    expect(config.getTelemetryOtlpEndpoint()).toBe('http://localhost:4317');
  });

  it('should use telemetry target from settings if CLI flag is not present', async () => {
@@ -411,81 +439,12 @@ describe('loadCliConfig telemetry', () => {
    expect(config.getTelemetryLogPromptsEnabled()).toBe(false);
  });

  it('should use default log prompts (false) if no value is provided via CLI or settings', async () => {
  it('should use default log prompts (true) if no value is provided via CLI or settings', async () => {
    process.argv = ['node', 'script.js'];
    const argv = await parseArguments();
    const settings: Settings = { telemetry: { enabled: true } };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getTelemetryLogPromptsEnabled()).toBe(false);
  });

  it('should set enableOpenAILogging to true when --openai-logging flag is present', async () => {
    const settings: Settings = {};
    const argv = await parseArguments();
    argv.openaiLogging = true;
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(
      (
        config as unknown as { getEnableOpenAILogging(): boolean }
      ).getEnableOpenAILogging(),
    ).toBe(true);
  });

  it('should set enableOpenAILogging to false when --openai-logging flag is not present', async () => {
    const settings: Settings = {};
    const argv = await parseArguments();
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(
      (
        config as unknown as { getEnableOpenAILogging(): boolean }
      ).getEnableOpenAILogging(),
    ).toBe(false);
  });

  it('should use enableOpenAILogging value from settings if CLI flag is not present (settings true)', async () => {
    const settings: Settings = { enableOpenAILogging: true };
    const argv = await parseArguments();
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(
      (
        config as unknown as { getEnableOpenAILogging(): boolean }
      ).getEnableOpenAILogging(),
    ).toBe(true);
  });

  it('should use enableOpenAILogging value from settings if CLI flag is not present (settings false)', async () => {
    const settings: Settings = { enableOpenAILogging: false };
    const argv = await parseArguments();
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(
      (
        config as unknown as { getEnableOpenAILogging(): boolean }
      ).getEnableOpenAILogging(),
    ).toBe(false);
  });

  it('should prioritize --openai-logging CLI flag (true) over settings (false)', async () => {
    const settings: Settings = { enableOpenAILogging: false };
    const argv = await parseArguments();
    argv.openaiLogging = true;
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(
      (
        config as unknown as { getEnableOpenAILogging(): boolean }
      ).getEnableOpenAILogging(),
    ).toBe(true);
  });

  it('should prioritize --openai-logging CLI flag (false) over settings (true)', async () => {
    const settings: Settings = { enableOpenAILogging: true };
    const argv = await parseArguments();
    argv.openaiLogging = false;
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(
      (
        config as unknown as { getEnableOpenAILogging(): boolean }
      ).getEnableOpenAILogging(),
    ).toBe(false);
    expect(config.getTelemetryLogPromptsEnabled()).toBe(true);
  });
});
@@ -540,6 +499,11 @@ describe('Hierarchical Memory Loading (config.ts) - Placeholder Suite', () => {
        '/path/to/ext3/context1.md',
        '/path/to/ext3/context2.md',
      ],
      {
        respectGitIgnore: false,
        respectGeminiIgnore: true,
      },
      undefined, // maxDirs
    );
  });
@@ -853,6 +817,66 @@ describe('loadCliConfig with allowed-mcp-server-names', () => {
    const config = await loadCliConfig(baseSettings, [], 'test-session', argv);
    expect(config.getMcpServers()).toEqual({});
  });

  it('should read allowMCPServers from settings', async () => {
    process.argv = ['node', 'script.js'];
    const argv = await parseArguments();
    const settings: Settings = {
      ...baseSettings,
      allowMCPServers: ['server1', 'server2'],
    };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getMcpServers()).toEqual({
      server1: { url: 'http://localhost:8080' },
      server2: { url: 'http://localhost:8081' },
    });
  });

  it('should read excludeMCPServers from settings', async () => {
    process.argv = ['node', 'script.js'];
    const argv = await parseArguments();
    const settings: Settings = {
      ...baseSettings,
      excludeMCPServers: ['server1', 'server2'],
    };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getMcpServers()).toEqual({
      server3: { url: 'http://localhost:8082' },
    });
  });

  it('should override allowMCPServers with excludeMCPServers if overlapping ', async () => {
    process.argv = ['node', 'script.js'];
    const argv = await parseArguments();
    const settings: Settings = {
      ...baseSettings,
      excludeMCPServers: ['server1'],
      allowMCPServers: ['server1', 'server2'],
    };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getMcpServers()).toEqual({
      server2: { url: 'http://localhost:8081' },
    });
  });

  it('should prioritize mcp server flag if set ', async () => {
    process.argv = [
      'node',
      'script.js',
      '--allowed-mcp-server-names',
      'server1',
    ];
    const argv = await parseArguments();
    const settings: Settings = {
      ...baseSettings,
      excludeMCPServers: ['server1'],
      allowMCPServers: ['server2'],
    };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getMcpServers()).toEqual({
      server1: { url: 'http://localhost:8080' },
    });
  });
});

describe('loadCliConfig extensions', () => {
@@ -908,6 +932,7 @@ describe('loadCliConfig ideMode', () => {
    // Explicitly delete TERM_PROGRAM and SANDBOX before each test
    delete process.env.TERM_PROGRAM;
    delete process.env.SANDBOX;
    delete process.env.GEMINI_CLI_IDE_SERVER_PORT;
  });

  afterEach(() => {
@@ -944,6 +969,7 @@ describe('loadCliConfig ideMode', () => {
    process.argv = ['node', 'script.js', '--ide-mode'];
    const argv = await parseArguments();
    process.env.TERM_PROGRAM = 'vscode';
    process.env.GEMINI_CLI_IDE_SERVER_PORT = '3000';
    const settings: Settings = {};
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getIdeMode()).toBe(true);
@@ -953,6 +979,7 @@ describe('loadCliConfig ideMode', () => {
    process.argv = ['node', 'script.js'];
    const argv = await parseArguments();
    process.env.TERM_PROGRAM = 'vscode';
    process.env.GEMINI_CLI_IDE_SERVER_PORT = '3000';
    const settings: Settings = { ideMode: true };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getIdeMode()).toBe(true);
@@ -962,6 +989,7 @@ describe('loadCliConfig ideMode', () => {
    process.argv = ['node', 'script.js', '--ide-mode'];
    const argv = await parseArguments();
    process.env.TERM_PROGRAM = 'vscode';
    process.env.GEMINI_CLI_IDE_SERVER_PORT = '3000';
    const settings: Settings = { ideMode: false };
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getIdeMode()).toBe(true);
@@ -995,82 +1023,4 @@ describe('loadCliConfig ideMode', () => {
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getIdeMode()).toBe(false);
  });

  it('should add __ide_server when ideMode is true', async () => {
    process.argv = ['node', 'script.js', '--ide-mode'];
    const argv = await parseArguments();
    process.env.TERM_PROGRAM = 'vscode';
    const settings: Settings = {};
    const config = await loadCliConfig(settings, [], 'test-session', argv);
    expect(config.getIdeMode()).toBe(true);
    const mcpServers = config.getMcpServers();
    expect(mcpServers?.['_ide_server']).toBeDefined();
    expect(mcpServers?.['_ide_server']?.httpUrl).toBe(
      'http://localhost:3000/mcp',
    );
    expect(mcpServers?.['_ide_server']?.description).toBe('IDE connection');
    expect(mcpServers?.['_ide_server']?.trust).toBe(false);
  });
});

describe('loadCliConfig systemPromptMappings', () => {
  it('should use default systemPromptMappings when not provided in settings', async () => {
    const mockSettings: Settings = {
      theme: 'dark',
    };
    const mockExtensions: Extension[] = [];
    const mockSessionId = 'test-session';
    const mockArgv: CliArgs = {
      model: 'test-model',
    } as CliArgs;

    const config = await loadCliConfig(
      mockSettings,
      mockExtensions,
      mockSessionId,
      mockArgv,
    );

    expect(config.getSystemPromptMappings()).toEqual([
      {
        baseUrls: [
          'https://dashscope.aliyuncs.com/compatible-mode/v1/',
          'https://dashscope-intl.aliyuncs.com/compatible-mode/v1/',
        ],
        modelNames: ['qwen3-coder-plus'],
        template:
          'SYSTEM_TEMPLATE:{"name":"qwen3_coder","params":{"is_git_repository":{RUNTIME_VARS_IS_GIT_REPO},"sandbox":"{RUNTIME_VARS_SANDBOX}"}}',
      },
    ]);
  });

  it('should use custom systemPromptMappings when provided in settings', async () => {
    const customSystemPromptMappings = [
      {
        baseUrls: ['https://custom-api.com'],
        modelNames: ['custom-model'],
        template: 'Custom template',
      },
    ];
    const mockSettings: Settings = {
      theme: 'dark',
      systemPromptMappings: customSystemPromptMappings,
    };
    const mockExtensions: Extension[] = [];
    const mockSessionId = 'test-session';
    const mockArgv: CliArgs = {
      model: 'test-model',
    } as CliArgs;

    const config = await loadCliConfig(
      mockSettings,
      mockExtensions,
      mockSessionId,
      mockArgv,
    );

    expect(config.getSystemPromptMappings()).toEqual(
      customSystemPromptMappings,
    );
  });
});