Merge tag 'v0.1.15' into feature/yiheng/sync-gemini-cli-0.1.15

奕桁
2025-08-01 23:06:11 +08:00
340 changed files with 36528 additions and 22931 deletions


@@ -15,13 +15,15 @@ import {
ApprovalMode,
DEFAULT_GEMINI_MODEL,
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
FileDiscoveryService,
TelemetryTarget,
MCPServerConfig,
FileFilteringOptions,
IdeClient,
} from '@qwen-code/qwen-code-core';
import { Settings } from './settings.js';
import { Extension, filterActiveExtensions } from './extension.js';
import { Extension, annotateActiveExtensions } from './extension.js';
import { getCliVersion } from '../utils/version.js';
import { loadSandboxConfig } from './sandboxConfig.js';
@@ -52,13 +54,16 @@ export interface CliArgs {
telemetryTarget: string | undefined;
telemetryOtlpEndpoint: string | undefined;
telemetryLogPrompts: boolean | undefined;
telemetryOutfile: string | undefined;
allowedMcpServerNames: string[] | undefined;
experimentalAcp: boolean | undefined;
extensions: string[] | undefined;
listExtensions: boolean | undefined;
ideMode: boolean | undefined;
openaiLogging: boolean | undefined;
openaiApiKey: string | undefined;
openaiBaseUrl: string | undefined;
proxy: string | undefined;
}
export async function parseArguments(): Promise<CliArgs> {
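For reference, a hypothetical argv value illustrating several of the CliArgs fields listed in the hunk above; every value below is invented for illustration and does not come from this commit:

const exampleArgs = {
  telemetryOutfile: '/tmp/telemetry.log', // hypothetical path for redirected telemetry output
  experimentalAcp: false,
  openaiLogging: undefined,
  openaiApiKey: undefined, // usually supplied via the OPENAI_API_KEY env var instead
  openaiBaseUrl: 'https://example.invalid/v1', // hypothetical custom endpoint
  proxy: 'http://user:password@proxy.example:8080',
};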
@@ -157,12 +162,20 @@ export async function parseArguments(): Promise<CliArgs> {
description:
'Enable or disable logging of user prompts for telemetry. Overrides settings files.',
})
.option('telemetry-outfile', {
type: 'string',
description: 'Redirect all telemetry output to the specified file.',
})
.option('checkpointing', {
alias: 'c',
type: 'boolean',
description: 'Enables checkpointing of file edits',
default: false,
})
.option('experimental-acp', {
type: 'boolean',
description: 'Starts the agent in ACP mode',
})
.option('allowed-mcp-server-names', {
type: 'array',
string: true,
@@ -197,7 +210,11 @@ export async function parseArguments(): Promise<CliArgs> {
type: 'string',
description: 'OpenAI base URL (for custom endpoints)',
})
.option('proxy', {
type: 'string',
description:
'Proxy for gemini client, like schema://user:password@host:port',
})
.version(await getCliVersion()) // This will enable the --version flag based on package.json
.alias('v', 'version')
.help()
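The --proxy description above expects a value of the form schema://user:password@host:port. As a minimal sketch (not part of this change), such a string can be sanity-checked with the built-in WHATWG URL parser; parseProxyUrl is a hypothetical helper name:

// Hypothetical helper: validates a proxy string of the form
// schema://user:password@host:port using the global URL class.
function parseProxyUrl(proxy: string): URL {
  const url = new URL(proxy); // throws a TypeError on malformed input
  if (!url.hostname || !url.port) {
    throw new Error(`proxy must include host and port: ${proxy}`);
  }
  return url;
}

// Invented example value:
const proxyUrl = parseProxyUrl('http://user:password@proxy.example:8080');
console.log(proxyUrl.protocol, proxyUrl.hostname, proxyUrl.port); // "http:" "proxy.example" "8080"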
@@ -223,13 +240,16 @@ export async function loadHierarchicalGeminiMemory(
currentWorkingDirectory: string,
debugMode: boolean,
fileService: FileDiscoveryService,
settings: Settings,
extensionContextFilePaths: string[] = [],
fileFilteringOptions?: FileFilteringOptions,
): Promise<{ memoryContent: string; fileCount: number }> {
if (debugMode) {
logger.debug(
`CLI: Delegating hierarchical memory load to server for CWD: ${currentWorkingDirectory}`,
);
}
// Directly call the server function.
// The server function will use its own homedir() for the global path.
return loadServerHierarchicalMemory(
@@ -237,6 +257,8 @@ export async function loadHierarchicalGeminiMemory(
debugMode,
fileService,
extensionContextFilePaths,
fileFilteringOptions,
settings.memoryDiscoveryMaxDirs,
);
}
@@ -257,11 +279,19 @@ export async function loadCliConfig(
process.env.TERM_PROGRAM === 'vscode' &&
!process.env.SANDBOX;
const activeExtensions = filterActiveExtensions(
let ideClient: IdeClient | undefined;
if (ideMode) {
ideClient = new IdeClient();
}
const allExtensions = annotateActiveExtensions(
extensions,
argv.extensions || [],
);
const activeExtensions = extensions.filter(
(_, i) => allExtensions[i].isActive,
);
// Handle OpenAI API key from command line
if (argv.openaiApiKey) {
process.env.OPENAI_API_KEY = argv.openaiApiKey;
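Note the rename from filterActiveExtensions to annotateActiveExtensions: instead of returning only the active extensions, the new helper appears to return one entry per extension with an isActive flag, so the full list can be handed to Config (see extensions: allExtensions further down) while activeExtensions is derived by index as shown above. A minimal sketch of that shape, assuming an empty --extensions list means "all active"; this is an illustration, not the actual implementation in extension.ts:

interface ExtensionLike {
  config: { name: string; version: string };
}

// Sketch only: annotate every extension instead of filtering, so the result
// stays index-aligned with the input array (the caller relies on that above).
function annotateActiveExtensionsSketch(
  extensions: ExtensionLike[],
  requestedNames: string[],
): Array<{ name: string; version: string; isActive: boolean }> {
  const requested = new Set(requestedNames);
  return extensions.map((e) => ({
    name: e.config.name,
    version: e.config.version,
    // Assumption: no explicit request means every extension is active.
    isActive: requested.size === 0 || requested.has(e.config.name),
  }));
}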
@@ -288,46 +318,72 @@ export async function loadCliConfig(
);
const fileService = new FileDiscoveryService(process.cwd());
const fileFiltering = {
...DEFAULT_MEMORY_FILE_FILTERING_OPTIONS,
...settings.fileFiltering,
};
// Call the (now wrapper) loadHierarchicalGeminiMemory which calls the server's version
const { memoryContent, fileCount } = await loadHierarchicalGeminiMemory(
process.cwd(),
debugMode,
fileService,
settings,
extensionContextFilePaths,
fileFiltering,
);
let mcpServers = mergeMcpServers(settings, activeExtensions);
const excludeTools = mergeExcludeTools(settings, activeExtensions);
const blockedMcpServers: Array<{ name: string; extensionName: string }> = [];
if (!argv.allowedMcpServerNames) {
if (settings.allowMCPServers) {
const allowedNames = new Set(settings.allowMCPServers.filter(Boolean));
if (allowedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) => allowedNames.has(key)),
);
}
}
if (settings.excludeMCPServers) {
const excludedNames = new Set(settings.excludeMCPServers.filter(Boolean));
if (excludedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) => !excludedNames.has(key)),
);
}
}
}
if (argv.allowedMcpServerNames) {
const allowedNames = new Set(argv.allowedMcpServerNames.filter(Boolean));
if (allowedNames.size > 0) {
mcpServers = Object.fromEntries(
Object.entries(mcpServers).filter(([key]) => allowedNames.has(key)),
Object.entries(mcpServers).filter(([key, server]) => {
const isAllowed = allowedNames.has(key);
if (!isAllowed) {
blockedMcpServers.push({
name: key,
extensionName: server.extensionName || '',
});
}
return isAllowed;
}),
);
} else {
blockedMcpServers.push(
...Object.entries(mcpServers).map(([key, server]) => ({
name: key,
extensionName: server.extensionName || '',
})),
);
mcpServers = {};
}
}
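In other words, when --allowed-mcp-server-names is given, servers are now partitioned rather than silently dropped: allowed ones stay in mcpServers, everything else is recorded in blockedMcpServers along with the extension that contributed it. A simplified stand-alone illustration of that behaviour, using plain objects and invented names instead of MCPServerConfig:

type ServerEntry = { extensionName?: string };

function splitByAllowList(
  servers: Record<string, ServerEntry>,
  allowedMcpServerNames: string[],
): { kept: Record<string, ServerEntry>; blocked: Array<{ name: string; extensionName: string }> } {
  const allowedNames = new Set(allowedMcpServerNames.filter(Boolean));
  const kept: Record<string, ServerEntry> = {};
  const blocked: Array<{ name: string; extensionName: string }> = [];
  for (const [name, server] of Object.entries(servers)) {
    if (allowedNames.size > 0 && allowedNames.has(name)) {
      kept[name] = server;
    } else {
      blocked.push({ name, extensionName: server.extensionName || '' });
    }
  }
  return { kept, blocked };
}

// Invented example: only 'github' survives; 'jira' is reported as blocked.
const { kept, blocked } = splitByAllowList(
  { github: { extensionName: 'gh-ext' }, jira: {} },
  ['github'],
);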
if (ideMode) {
mcpServers['_ide_server'] = new MCPServerConfig(
undefined, // command
undefined, // args
undefined, // env
undefined, // cwd
undefined, // url
'http://localhost:3000/mcp', // httpUrl
undefined, // headers
undefined, // tcp
undefined, // timeout
false, // trust
'IDE connection', // description
undefined, // includeTools
undefined, // excludeTools
);
}
const sandboxConfig = await loadSandboxConfig(settings, argv);
return new Config({
@@ -362,16 +418,19 @@ export async function loadCliConfig(
process.env.OTEL_EXPORTER_OTLP_ENDPOINT ??
settings.telemetry?.otlpEndpoint,
logPrompts: argv.telemetryLogPrompts ?? settings.telemetry?.logPrompts,
outfile: argv.telemetryOutfile ?? settings.telemetry?.outfile,
},
usageStatisticsEnabled: settings.usageStatisticsEnabled ?? true,
// Git-aware file filtering settings
fileFiltering: {
respectGitIgnore: settings.fileFiltering?.respectGitIgnore,
respectGeminiIgnore: settings.fileFiltering?.respectGeminiIgnore,
enableRecursiveFileSearch:
settings.fileFiltering?.enableRecursiveFileSearch,
},
checkpointing: argv.checkpointing || settings.checkpointing?.enabled,
proxy:
argv.proxy ||
process.env.HTTPS_PROXY ||
process.env.https_proxy ||
process.env.HTTP_PROXY ||
@@ -382,15 +441,14 @@ export async function loadCliConfig(
model: argv.model!,
extensionContextFilePaths,
maxSessionTurns: settings.maxSessionTurns ?? -1,
sessionTokenLimit: settings.sessionTokenLimit ?? 32000,
maxFolderItems: settings.maxFolderItems ?? 20,
experimentalAcp: argv.experimentalAcp || false,
listExtensions: argv.listExtensions || false,
activeExtensions: activeExtensions.map((e) => ({
name: e.config.name,
version: e.config.version,
})),
extensions: allExtensions,
blockedMcpServers,
noBrowser: !!process.env.NO_BROWSER,
summarizeToolOutput: settings.summarizeToolOutput,
ideMode,
ideClient,
enableOpenAILogging:
(typeof argv.openaiLogging === 'undefined'
? settings.enableOpenAILogging
@@ -421,7 +479,10 @@ function mergeMcpServers(settings: Settings, extensions: Extension[]) {
);
return;
}
mcpServers[key] = server;
mcpServers[key] = {
...server,
extensionName: extension.config.name,
};
},
);
}
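The net effect of the mergeMcpServers change: servers contributed by an extension now carry that extension's name, which is what the blockedMcpServers bookkeeping in loadCliConfig reads back when reporting blocked servers. A simplified sketch of the merge with invented shapes; colliding keys are skipped, matching the early return shown above:

type McpServerLike = { httpUrl?: string; extensionName?: string };
type ExtensionConfigLike = { name: string; mcpServers?: Record<string, McpServerLike> };

function mergeMcpServersSketch(
  settingsServers: Record<string, McpServerLike>,
  extensions: Array<{ config: ExtensionConfigLike }>,
): Record<string, McpServerLike> {
  const merged: Record<string, McpServerLike> = { ...settingsServers };
  for (const extension of extensions) {
    for (const [key, server] of Object.entries(extension.config.mcpServers ?? {})) {
      if (key in merged) continue; // skip on key collision, like the early return above
      // Tag the server with the extension that provided it.
      merged[key] = { ...server, extensionName: extension.config.name };
    }
  }
  return merged;
}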